changeset 7040:f29b674cc221

merge with crew-stable
author Dirkjan Ochtman <dirkjan@ochtman.nl>
date Wed, 17 Sep 2008 11:34:37 +0200
parents aafe12bd7174 (diff) d1c82127573e (current diff)
children 277c91fe8384
files mercurial/changelog.py mercurial/commands.py tests/test-highlight tests/test-highlight.out tests/test-username-newline.out
diffstat 322 files changed, 11853 insertions(+), 4936 deletions(-)
--- a/.hgignore	Wed Sep 17 11:14:06 2008 +0200
+++ b/.hgignore	Wed Sep 17 11:34:37 2008 +0200
@@ -7,6 +7,7 @@
 *.mergebackup
 *.o
 *.so
+*.pyd
 *.pyc
 *.swp
 *.prof
--- a/contrib/darcs2hg.py	Wed Sep 17 11:14:06 2008 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,255 +0,0 @@
-#!/usr/bin/env python
-# Encoding: iso-8859-1
-# vim: tw=80 ts=4 sw=4 noet
-# -----------------------------------------------------------------------------
-# Project   : Basic Darcs to Mercurial conversion script
-#
-# *** DEPRECATED. Use the convert extension instead. This script will
-# *** be removed soon.
-#
-# -----------------------------------------------------------------------------
-# Authors   : Sebastien Pierre                           <sebastien@xprima.com>
-#             TK Soh                                      <teekaysoh@gmail.com>
-# -----------------------------------------------------------------------------
-# Creation  : 24-May-2006
-# -----------------------------------------------------------------------------
-
-import os, sys
-import tempfile
-import xml.dom.minidom as xml_dom
-from time import strptime, mktime
-import re
-
-DARCS_REPO = None
-HG_REPO    = None
-
-USAGE = """\
-%s DARCSREPO HGREPO [SKIP]
-
-    Converts the given Darcs repository to a new Mercurial repository. The given
-    HGREPO must not exist, as it will be created and filled up (this will avoid
-    overwriting valuable data.
-
-    In case an error occurs within the process, you can resume the process by
-    giving the last successfuly applied change number.
-""" % (os.path.basename(sys.argv[0]))
-
-# ------------------------------------------------------------------------------
-#
-# Utilities
-#
-# ------------------------------------------------------------------------------
-
-def cmd(text, path=None, silent=False):
-	"""Executes a command, in the given directory (if any), and returns the
-	command result as a string."""
-	cwd = None
-	if path:
-		path = os.path.abspath(path)
-		cwd  = os.getcwd()
-		os.chdir(path)
-	if not silent: print "> ", text
-	res = os.popen(text).read()
-	if path:
-		os.chdir(cwd)
-	return res
-
-def writefile(path, data):
-	"""Writes the given data into the given file."""
-	f = file(path, "w") ; f.write(data)  ; f.close()
-
-def error( *args ):
-	sys.stderr.write("ERROR: ")
-	for a in args: sys.stderr.write(str(a))
-	sys.stderr.write("\n")
-	sys.stderr.write("You can make manual fixes if necessary and then resume by"
-	" giving the last changeset number")
-	sys.exit(-1)
-
-# ------------------------------------------------------------------------------
-#
-# Darcs interface
-#
-# ------------------------------------------------------------------------------
-
-def darcs_changes(darcsRepo):
-	"""Gets the changes list from the given darcs repository. This returns the
-	chronological list of changes as (change name, change summary)."""
-	changes    = cmd("darcs changes --reverse --xml-output", darcsRepo)
-	doc        = xml_dom.parseString(changes)
-	for patch_node in doc.childNodes[0].childNodes:
-		name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
-		comm = filter(lambda n: n.nodeName == "comment", patch_node.childNodes)
-		if not name:continue
-		else: name = name[0].childNodes[0].data
-		if not comm: comm = ""
-		else: comm = comm[0].childNodes[0].data
-		author = patch_node.getAttribute("author")
-		date   = patch_node.getAttribute("date")
-		chash  = os.path.splitext(patch_node.getAttribute("hash"))[0]
-		yield author, date, name, chash, comm
-
-def darcs_tip(darcs_repo):
-	changes = cmd("darcs changes",darcs_repo,silent=True)
-	changes = filter(lambda l: l.strip().startswith("* "), changes.split("\n"))
-	return len(changes)
-
-def darcs_pull(hg_repo, darcs_repo, chash):
-	old_tip = darcs_tip(darcs_repo)
-	res     = cmd("darcs pull \"%s\" --all --match=\"hash %s\"" % (darcs_repo, chash), hg_repo)
-	if re.search('^We have conflicts in the following files:$', res, re.MULTILINE):
-		print "Trying to revert files to work around conflict..."
-		rev_res = cmd ("darcs revert --all", hg_repo)
-		print rev_res
-	print res
-	new_tip = darcs_tip(darcs_repo)
-	if not new_tip != old_tip + 1:
-		error("Darcs pull did not work as expected: " + res)
-
-def darcs_changes_summary(darcs_repo, chash):
-	"""Gets the changes from the darcs summary. This returns the chronological
-	list of changes as (change_type, args). Eg. ('add_file', 'foo.txt') or
-	('move', ['foo.txt','bar.txt'])."""
-	change = cmd("darcs changes --summary --xml-output --match=\"hash %s\"" % (chash), darcs_repo)
-	doc = xml_dom.parseString(change)
-	for patch_node in doc.childNodes[0].childNodes:
-		summary_nodes = filter(lambda n: n.nodeName == "summary" and n.nodeType == n.ELEMENT_NODE, patch_node.childNodes)
-		for summary_node in summary_nodes:
-			change_nodes = filter(lambda n: n.nodeType == n.ELEMENT_NODE, summary_node.childNodes)
-			if len(change_nodes) == 0:
-				name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
-				if not name:
-					error("Darcs patch has an empty summary node and no name: " + patch_node.toxml())
-				name = name[0].childNodes[0].data.strip()
-				(tag, sub_count) = re.subn('^TAG ', '', name, 1)
-				if sub_count != 1:
-					error("Darcs patch has an empty summary node but doesn't look like a tag: " + patch_node.toxml());
-			for change_node in change_nodes:
-				change = change_node.nodeName
-				if change == 'modify_file':
-					yield change, change_node.childNodes[0].data.strip()
-				elif change == 'add_file':
-					yield change, change_node.childNodes[0].data.strip()
-				elif change == 'remove_file':
-					yield change, change_node.childNodes[0].data.strip()
-				elif change == 'add_directory':
-					yield change, change_node.childNodes[0].data.strip()
-				elif change == 'remove_directory':
-					yield change, change_node.childNodes[0].data.strip()
-				elif change == 'move':
-					yield change, (change_node.getAttribute('from'), change_node.getAttribute('to'))
-				else:
-					error('Problem parsing summary xml: Unexpected element: ' + change_node.toxml())
-
-# ------------------------------------------------------------------------------
-#
-# Mercurial interface
-#
-# ------------------------------------------------------------------------------
-
-def hg_commit( hg_repo, text, author, date ):
-	fd, tmpfile = tempfile.mkstemp(prefix="darcs2hg_")
-	writefile(tmpfile, text)
-	old_tip = hg_tip(hg_repo)
-	cmd("hg add -X _darcs", hg_repo)
-	cmd("hg remove -X _darcs --after", hg_repo)
-	res = cmd("hg commit -l %s -u \"%s\" -d \"%s 0\""  % (tmpfile, author, date), hg_repo)
-	os.close(fd)
-	os.unlink(tmpfile)
-	new_tip = hg_tip(hg_repo)
-	if not new_tip == old_tip + 1:
-		# Sometimes we may have empty commits, we simply skip them
-		if res.strip().lower().find("nothing changed") != -1:
-			pass
-		else:
-			error("Mercurial commit did not work as expected: " + res)
-
-def hg_tip( hg_repo ):
-	"""Returns the latest local revision number in the given repository."""
-	tip = cmd("hg tip", hg_repo, silent=True)
-	tip = tip.split("\n")[0].split(":")[1].strip()
-	return int(tip)
-
-def hg_rename( hg_repo, from_file, to_file ):
-	cmd("hg rename --after \"%s\" \"%s\"" % (from_file, to_file), hg_repo);
-	
-def hg_tag ( hg_repo, text, author, date ):
-	old_tip = hg_tip(hg_repo)
-	res = cmd("hg tag -u \"%s\" -d \"%s 0\" \"%s\""	 % (author, date, text), hg_repo)
-	new_tip = hg_tip(hg_repo)
-	if not new_tip == old_tip + 1:
-		error("Mercurial tag did not work as expected: " + res)
-
-def hg_handle_change( hg_repo, author, date, change, arg ):
-	"""Processes a change event as output by darcs_changes_summary. These
-	consist of file move/rename/add/delete commands."""
-	if change == 'modify_file':
-		pass
-	elif change == 'add_file':
-		pass
-	elif change =='remove_file':
-		pass
-	elif change == 'add_directory':
-		pass
-	elif change == 'remove_directory':
-		pass
-	elif change == 'move':
-		hg_rename(hg_repo, arg[0], arg[1])
-	elif change == 'tag':
-		hg_tag(hg_repo, arg, author, date)
-	else:
-		error('Unknown change type ' + change + ': ' + arg)
-
-# ------------------------------------------------------------------------------
-#
-# Main
-#
-# ------------------------------------------------------------------------------
-
-if __name__ == "__main__":
-	args = sys.argv[1:]
-	# We parse the arguments
-	if len(args)   == 2:
-		darcs_repo = os.path.abspath(args[0])
-		hg_repo    = os.path.abspath(args[1])
-		skip       = None
-	elif len(args) == 3:
-		darcs_repo = os.path.abspath(args[0])
-		hg_repo    = os.path.abspath(args[1])
-		skip       = int(args[2])
-	else:
-		print USAGE
-		sys.exit(-1)
-	print 'This command is deprecated.  Use the convert extension instead.'
-	# Initializes the target repo
-	if not os.path.isdir(darcs_repo + "/_darcs"):
-		print "No darcs directory found at: " + darcs_repo
-		sys.exit(-1)
-	if not os.path.isdir(hg_repo):
-		os.mkdir(hg_repo)
-	elif skip == None:
-		print "Given HG repository must not exist when no SKIP is specified."
-		sys.exit(-1)
-	if skip == None:
-		cmd("hg init \"%s\"" % (hg_repo))
-		cmd("darcs initialize", hg_repo)
-	# Get the changes from the Darcs repository
-	change_number = 0
-	for author, date, summary, chash, description in darcs_changes(darcs_repo):
-		print "== changeset", change_number,
-		if skip != None and change_number <= skip:
-			print "(skipping)"
-		else:
-			text = summary + "\n" + description
-			# The commit hash has a date like 20021020201112
-			# --------------------------------YYYYMMDDHHMMSS
-			date = chash.split("-")[0]
-			epoch = int(mktime(strptime(date, '%Y%m%d%H%M%S')))
-			darcs_pull(hg_repo, darcs_repo, chash)
-			for change, arg in darcs_changes_summary(darcs_repo, chash):
-				hg_handle_change(hg_repo, author, epoch, change, arg)
-			hg_commit(hg_repo, text, author, epoch)
-		change_number += 1
-	print "Darcs repository (_darcs) was not deleted. You can keep or remove it."
-
-# EOF
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/dumprevlog	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# Dump revlogs as raw data stream
+# $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
+
+import sys
+from mercurial import revlog, node, util
+
+for fp in (sys.stdin, sys.stdout, sys.stderr):
+    util.set_binary(fp)
+
+for f in sys.argv[1:]:
+    binopen = lambda fn: open(fn, 'rb')
+    r = revlog.revlog(binopen, f)
+    print "file:", f
+    for i in r:
+        n = r.node(i)
+        p = r.parents(n)
+        d = r.revision(n)
+        print "node:", node.hex(n)
+        print "linkrev:", r.linkrev(n)
+        print "parents:", node.hex(p[0]), node.hex(p[1])
+        print "length:", len(d)
+        print "-start-"
+        print d
+        print "-end-"
--- a/contrib/hgk	Wed Sep 17 11:14:06 2008 +0200
+++ b/contrib/hgk	Wed Sep 17 11:34:37 2008 +0200
@@ -370,6 +370,25 @@
 	lappend tagids($direct) $tag
 	lappend idtags($tag) $direct
     }
+
+    set status [catch {exec $env(HG) --config ui.report_untrusted=false heads} heads]
+    if { $status != 0 } {
+        puts $::errorInfo
+        if { ![string equal $::errorCode NONE] } {
+            exit 2
+        }
+    }
+    regsub -all "\r\n" $heads "\n" heads
+
+    set lines [split $heads "\n"]
+    foreach f $lines {
+        set match ""
+        regexp {changeset:\s+(\S+):(\S+)$} $f match id sha
+        if {$match != ""} {
+        lappend idheads($sha) $id
+        }
+    }
+
 }
 
 proc readotherrefs {base dname excl} {
@@ -1045,7 +1064,7 @@
 }
 
 proc drawtags {id x xt y1} {
-    global idtags idheads idotherrefs
+    global idtags idheads idotherrefs commitinfo
     global linespc lthickness
     global canv mainfont idline rowtextx
 
@@ -1057,8 +1076,11 @@
 	set ntags [llength $marks]
     }
     if {[info exists idheads($id)]} {
-	set marks [concat $marks $idheads($id)]
-	set nheads [llength $idheads($id)]
+	set headmark [lindex $commitinfo($id) 7]
+	if {$headmark ne "default"} {
+	    lappend marks $headmark
+	    set nheads 1
+	}
     }
     if {[info exists idotherrefs($id)]} {
 	set marks [concat $marks $idotherrefs($id)]
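
For reference, the new block in hgk parses the output of "hg --config ui.report_untrusted=false heads"; the regexp {changeset:\s+(\S+):(\S+)$} picks the local revision number and short hash out of the first line of each heads entry and records them in idheads. Based on the metadata at the top of this page, one such entry would look roughly as follows (the tag line is an assumption):

    changeset:   7040:f29b674cc221
    tag:         tip
    user:        Dirkjan Ochtman <dirkjan@ochtman.nl>
    date:        Wed Sep 17 11:34:37 2008 +0200
    summary:     merge with crew-stable
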
--- a/contrib/mercurial.el	Wed Sep 17 11:14:06 2008 +0200
+++ b/contrib/mercurial.el	Wed Sep 17 11:34:37 2008 +0200
@@ -35,8 +35,10 @@
 ;; This code has been developed under XEmacs 21.5, and may not work as
 ;; well under GNU Emacs (albeit tested under 21.4).  Patches to
 ;; enhance the portability of this code, fix bugs, and add features
-;; are most welcome.  You can clone a Mercurial repository for this
-;; package from http://www.serpentine.com/hg/hg-emacs
+;; are most welcome.
+
+;; As of version 22.3, GNU Emacs's VC mode has direct support for
+;; Mercurial, so this package may not prove as useful there.
 
 ;; Please send problem reports and suggestions to bos@serpentine.com.
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/undumprevlog	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# Undump a dump from dumprevlog
+# $ hg init
+# $ undumprevlog < repo.dump
+
+import sys
+from mercurial import revlog, node, util, transaction
+
+for fp in (sys.stdin, sys.stdout, sys.stderr):
+    util.set_binary(fp)
+
+opener = util.opener('.', False)
+tr = transaction.transaction(sys.stderr.write, opener, "undump.journal")
+while 1:
+    l = sys.stdin.readline()
+    if not l:
+        break
+    if l.startswith("file:"):
+        f = l[6:-1]
+        r = revlog.revlog(opener, f)
+        print f
+    elif l.startswith("node:"):
+        n = node.bin(l[6:-1])
+    elif l.startswith("linkrev:"):
+        lr = int(l[9:-1])
+    elif l.startswith("parents:"):
+        p = l[9:-1].split()
+        p1 = node.bin(p[0])
+        p2 = node.bin(p[1])
+    elif l.startswith("length:"):
+        length = int(l[8:-1])
+        sys.stdin.readline() # start marker
+        d = sys.stdin.read(length)
+        sys.stdin.readline() # end marker
+        r.addrevision(d, tr, lr, p1, p2)
+
+tr.close()
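
Taken together, dumprevlog and undumprevlog define a small line-oriented interchange format: one "file:" header per revlog, then one record per revision, with the raw revision data bracketed by -start-/-end- markers. A single record in the stream looks roughly like this (the filename and values are placeholders for illustration):

    file: data/foo.txt.i
    node: <40-digit hex node id>
    linkrev: 0
    parents: <40-digit hex parent 1> <40-digit hex parent 2>
    length: 12
    -start-
    <12 bytes of raw revision data>
    -end-
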
--- a/contrib/vim/hgcommand.vim	Wed Sep 17 11:14:06 2008 +0200
+++ b/contrib/vim/hgcommand.vim	Wed Sep 17 11:34:37 2008 +0200
@@ -10,6 +10,12 @@
 "                Bob Hiestand <bob.hiestand@gmail.com> for the fabulous
 "                cvscommand.vim from which this script was directly created by
 "                means of sed commands and minor tweaks.
+" Note:          
+"                For Vim7 the use of Bob Hiestand's vcscommand.vim
+"                <http://www.vim.org/scripts/script.php?script_id=90>
+"                in conjunction with Vladmir Marek's Hg backend
+"                <http://www.vim.org/scripts/script.php?script_id=1898>
+"                is recommended.
 
 """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
 "
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/win32/hg.bat	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,12 @@
+@echo off
+rem Windows Driver script for Mercurial
+
+setlocal
+set HG=%~f0
+
+rem Use a full path to Python (relative to this script) as the standard Python
+rem install does not put python.exe on the PATH...
+rem %~dp0 is the directory of this script
+
+%~dp0..\python "%~dp0hg" %*
+endlocal
--- a/contrib/zsh_completion	Wed Sep 17 11:14:06 2008 +0200
+++ b/contrib/zsh_completion	Wed Sep 17 11:34:37 2008 +0200
@@ -205,8 +205,7 @@
 
 _hg_config() {
     typeset -a items
-    local line
-    items=(${${(%f)"$(_hg_cmd showconfig)"}%%\=*})
+    items=(${${(%f)"$(_call_program hg hg showconfig)"}%%\=*})
     (( $#items )) && _describe -t config 'config item' items
 }
 
@@ -291,10 +290,14 @@
     '--cwd[change working directory]:new working directory:_files -/'
     '(--noninteractive -y)'{-y,--noninteractive}'[do not prompt, assume yes for any required answers]'
     '(--verbose -v)'{-v,--verbose}'[enable additional output]'
+    '*--config[set/override config option]:defined config items:_hg_config'
     '(--quiet -q)'{-q,--quiet}'[suppress output]'
     '(--help -h)'{-h,--help}'[display help and exit]'
     '--debug[debug mode]'
     '--debugger[start debugger]'
+    '--encoding[set the charset encoding (default: UTF8)]'
+    '--encodingmode[set the charset encoding mode (default: strict)]'
+    '--lsprof[print improved command execution profile]'
     '--traceback[print traceback on exception]'
     '--time[time how long the command takes]'
     '--profile[profile]'
--- a/doc/Makefile	Wed Sep 17 11:14:06 2008 +0200
+++ b/doc/Makefile	Wed Sep 17 11:34:37 2008 +0200
@@ -30,7 +30,7 @@
 	asciidoc -b html4 $*.txt || asciidoc -b html $*.txt
 
 MANIFEST: man html
-	# versionned files are already in the main MANIFEST
+	# tracked files are already in the main MANIFEST
 	$(RM) $@
 	for i in $(MAN) $(HTML) hg.1.gendoc.txt; do \
 	  echo "doc/$$i" >> $@ ; \
--- a/doc/gendoc.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/doc/gendoc.py	Wed Sep 17 11:34:37 2008 +0200
@@ -3,7 +3,7 @@
 sys.path.insert(0, "..")
 from mercurial import demandimport; demandimport.enable()
 from mercurial.commands import table, globalopts
-from mercurial.i18n import gettext as _
+from mercurial.i18n import gettext, _
 from mercurial.help import helptable
 
 def get_desc(docstr):
@@ -69,6 +69,7 @@
         if f.startswith("debug"): continue
         d = get_cmd(h[f])
         # synopsis
+        ui.write("[[%s]]\n" % d['cmd'])
         ui.write("%s::\n" % d['synopsis'].replace("hg ","", 1))
         # description
         ui.write("%s\n\n" % d['desc'][1])
@@ -91,14 +92,11 @@
             ui.write(_("    aliases: %s\n\n") % " ".join(d['aliases']))
 
     # print topics
-    for t in helptable:
-        l = t.split("|")
-        section = l[-1]
-        underlined(_(section).upper())
-        doc = helptable[t]
+    for names, section, doc in helptable:
+        underlined(gettext(section).upper())
         if callable(doc):
             doc = doc()
-        ui.write(_(doc))
+        ui.write(gettext(doc))
         ui.write("\n")
 
 if __name__ == "__main__":
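
The gendoc.py hunk above tracks a change in mercurial.help: helptable is no longer a mapping keyed by "name1|name2|Section" strings but a sequence of (names, section, doc) entries, where doc may be a plain string or a callable that produces one. A minimal sketch of the shape the new loop expects (the entries are illustrative, not the real help topics):

    # illustrative stand-in for mercurial.help.helptable
    helptable = (
        (["dates"], "Date Formats",
         "Some commands allow the user to specify a date...\n"),
        (["environment", "env"], "Environment Variables",
         lambda: "HG::\n    Path to the 'hg' executable...\n"),
    )

    for names, section, doc in helptable:
        print section.upper()   # gendoc underlines this heading
        if callable(doc):
            doc = doc()         # some topics build their text lazily
        print doc
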
--- a/doc/hg.1.txt	Wed Sep 17 11:14:06 2008 +0200
+++ b/doc/hg.1.txt	Wed Sep 17 11:34:37 2008 +0200
@@ -30,65 +30,13 @@
 
 repository path::
     either the pathname of a local repository or the URI of a remote
-    repository.  There are two available URI protocols, http:// which is
+    repository. There are two available URI protocols, http:// which is
     fast and the static-http:// protocol which is much slower but does not
     require a special server on the web host.
 
 
 include::hg.1.gendoc.txt[]
 
-SPECIFYING SINGLE REVISIONS
----------------------------
-
-    Mercurial accepts several notations for identifying individual
-    revisions.
-
-    A plain integer is treated as a revision number.  Negative
-    integers are treated as offsets from the tip, with -1 denoting the
-    tip.
-
-    A 40-digit hexadecimal string is treated as a unique revision
-    identifier.
-
-    A hexadecimal string less than 40 characters long is treated as a
-    unique revision identifier, and referred to as a short-form
-    identifier.  A short-form identifier is only valid if it is the
-    prefix of one full-length identifier.
-
-    Any other string is treated as a tag name, which is a symbolic
-    name associated with a revision identifier.  Tag names may not
-    contain the ":" character.
-
-    The reserved name "tip" is a special tag that always identifies
-    the most recent revision.
-
-    The reserved name "null" indicates the null revision. This is the
-    revision of an empty repository, and the parent of revision 0.
-
-    The reserved name "." indicates the working directory parent. If
-    no working directory is checked out, it is equivalent to null.
-    If an uncommitted merge is in progress, "." is the revision of
-    the first parent.
-
-SPECIFYING MULTIPLE REVISIONS
------------------------------
-
-    When Mercurial accepts more than one revision, they may be
-    specified individually, or provided as a continuous range,
-    separated by the ":" character.
-
-    The syntax of range notation is [BEGIN]:[END], where BEGIN and END
-    are revision identifiers.  Both BEGIN and END are optional.  If
-    BEGIN is not specified, it defaults to revision number 0.  If END
-    is not specified, it defaults to the tip.  The range ":" thus
-    means "all revisions".
-
-    If BEGIN is greater than END, revisions are treated in reverse
-    order.
-
-    A range acts as a closed interval.  This means that a range of 3:5
-    gives 3, 4 and 5.  Similarly, a range of 4:2 gives 4, 3, and 2.
-
 FILES
 -----
  .hgignore::
@@ -103,7 +51,7 @@
  /etc/mercurial/hgrc, $HOME/.hgrc, .hg/hgrc::
     This file contains defaults and configuration. Values in .hg/hgrc
     override those in $HOME/.hgrc, and these override settings made in the
-    global /etc/mercurial/hgrc configuration.  See hgrc(5) for details of
+    global /etc/mercurial/hgrc configuration. See hgrc(5) for details of
     the contents and format of these files.
 
 Some commands (e.g. revert) produce backup files ending in .orig, if
--- a/doc/hgignore.5.txt	Wed Sep 17 11:14:06 2008 +0200
+++ b/doc/hgignore.5.txt	Wed Sep 17 11:34:37 2008 +0200
@@ -17,25 +17,25 @@
 -----------
 
 Mercurial ignores every unmanaged file that matches any pattern in an
-ignore file.  The patterns in an ignore file do not apply to files
-managed by Mercurial.  To control Mercurial's handling of files that
-it manages, see the hg(1) man page.  Look for the "-I" and "-X"
+ignore file. The patterns in an ignore file do not apply to files
+managed by Mercurial. To control Mercurial's handling of files that
+it manages, see the hg(1) man page. Look for the "-I" and "-X"
 options.
 
 In addition, a Mercurial configuration file can point to a set of
-per-user or global ignore files.  See the hgrc(5) man page for details
-of how to configure these files.  Look for the "ignore" entry in the
+per-user or global ignore files. See the hgrc(5) man page for details
+of how to configure these files. Look for the "ignore" entry in the
 "ui" section.
 
 SYNTAX
 ------
 
 An ignore file is a plain text file consisting of a list of patterns,
-with one pattern per line.  Empty lines are skipped.  The "#"
+with one pattern per line. Empty lines are skipped. The "#"
 character is treated as a comment character, and the "\" character is
 treated as an escape character.
 
-Mercurial supports several pattern syntaxes.  The default syntax used
+Mercurial supports several pattern syntaxes. The default syntax used
 is Python/Perl-style regular expressions.
 
 To change the syntax used, use a line of the following form:
@@ -52,9 +52,9 @@
 The chosen syntax stays in effect when parsing all patterns that
 follow, until another syntax is selected.
 
-Neither glob nor regexp patterns are rooted.  A glob-syntax pattern of
+Neither glob nor regexp patterns are rooted. A glob-syntax pattern of
 the form "*.c" will match a file ending in ".c" in any directory, and
-a regexp pattern of the form "\.c$" will do the same.  To root a
+a regexp pattern of the form "\.c$" will do the same. To root a
 regexp pattern, start it with "^".
 
 EXAMPLE
--- a/doc/hgrc.5.txt	Wed Sep 17 11:14:06 2008 +0200
+++ b/doc/hgrc.5.txt	Wed Sep 17 11:34:37 2008 +0200
@@ -17,26 +17,26 @@
 
 Mercurial reads configuration data from several files, if they exist.
 The names of these files depend on the system on which Mercurial is
-installed.  *.rc files from a single directory are read in
-alphabetical order, later ones overriding earlier ones.  Where
+installed. *.rc files from a single directory are read in
+alphabetical order, later ones overriding earlier ones. Where
 multiple paths are given below, settings from later paths override
 earlier ones.
 
 (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
 (Unix) <install-root>/etc/mercurial/hgrc::
     Per-installation configuration files, searched for in the
-    directory where Mercurial is installed.  <install-root> is the
+    directory where Mercurial is installed. <install-root> is the
     parent directory of the hg executable (or symlink) being run.
     For example, if installed in /shared/tools/bin/hg, Mercurial will
-    look in /shared/tools/etc/mercurial/hgrc.  Options in these files
+    look in /shared/tools/etc/mercurial/hgrc. Options in these files
     apply to all Mercurial commands executed by any user in any
     directory.
 
 (Unix) /etc/mercurial/hgrc.d/*.rc::
 (Unix) /etc/mercurial/hgrc::
     Per-system configuration files, for the system on which Mercurial
-    is running.  Options in these files apply to all Mercurial
-    commands executed by any user in any directory.  Options in these
+    is running. Options in these files apply to all Mercurial
+    commands executed by any user in any directory. Options in these
     files override per-installation options.
 
 (Windows) <install-dir>\Mercurial.ini::
@@ -45,7 +45,7 @@
   or else::
 (Windows) C:\Mercurial\Mercurial.ini::
     Per-installation/system configuration files, for the system on
-    which Mercurial is running.  Options in these files apply to all
+    which Mercurial is running. Options in these files apply to all
     Mercurial commands executed by any user in any directory.
     Registry keys contain PATH-like strings, every part of which must
     reference a Mercurial.ini file or be a directory where *.rc files
@@ -59,16 +59,16 @@
     Per-user configuration file(s), for the user running Mercurial.
     On Windows 9x, %HOME% is replaced by %APPDATA%.
     Options in these files apply to all Mercurial commands executed
-    by this user in any directory.  Options in thes files override
+    by this user in any directory. Options in these files override
     per-installation and per-system options.
 
 (Unix, Windows) <repo>/.hg/hgrc::
     Per-repository configuration options that only apply in a
-    particular repository.  This file is not version-controlled, and
-    will not get transferred during a "clone" operation.  Options in
+    particular repository. This file is not version-controlled, and
+    will not get transferred during a "clone" operation. Options in
     this file override options in all other configuration files.
     On Unix, most of this file will be ignored if it doesn't belong
-    to a trusted user or to a trusted group.  See the documentation
+    to a trusted user or to a trusted group. See the documentation
     for the trusted section below for more details.
 
 SYNTAX
@@ -82,10 +82,10 @@
     green=
        eggs
 
-Each line contains one entry.  If the lines that follow are indented,
+Each line contains one entry. If the lines that follow are indented,
 they are treated as continuations of that entry.
 
-Leading whitespace is removed from values.  Empty lines are skipped.
+Leading whitespace is removed from values. Empty lines are skipped.
 
 The optional values can contain format strings which refer to other
 values in the same section, or values in a special DEFAULT section.
@@ -100,6 +100,7 @@
 Mercurial "hgrc" file, the purpose of each section, its possible
 keys, and their possible values.
 
+[[decode]]
 decode/encode::
   Filters for transforming files on checkout/checkin. This would
   typically be used for newline processing or other
@@ -107,12 +108,12 @@
 
   Filters consist of a filter pattern followed by a filter command.
   Filter patterns are globs by default, rooted at the repository
-  root.  For example, to match any file ending in ".txt" in the root
-  directory only, use the pattern "*.txt".  To match any file ending
+  root. For example, to match any file ending in ".txt" in the root
+  directory only, use the pattern "*.txt". To match any file ending
   in ".c" anywhere in the repository, use the pattern "**.c".
 
   The filter command can start with a specifier, either "pipe:" or
-  "tempfile:".  If no specifier is given, "pipe:" is used by default.
+  "tempfile:". If no specifier is given, "pipe:" is used by default.
 
   A "pipe:" command must accept data on stdin and return the
   transformed data on stdout.
@@ -129,9 +130,9 @@
     # can safely omit "pipe:", because it's the default)
     *.gz = gzip
 
-  A "tempfile:" command is a template.  The string INFILE is replaced
+  A "tempfile:" command is a template. The string INFILE is replaced
   with the name of a temporary file that contains the data to be
-  filtered by the command.  The string OUTFILE is replaced with the
+  filtered by the command. The string OUTFILE is replaced with the
   name of an empty temporary file, where the filtered data must be
   written by the command.
 
@@ -158,6 +159,7 @@
     [decode]
     **.txt = dumbdecode:
 
+[[defaults]]
 defaults::
   Use the [defaults] section to define command defaults, i.e. the
   default options/arguments to pass to the specified commands.
@@ -173,6 +175,7 @@
   defining command defaults. The command defaults will also be
   applied to the aliases of the commands defined.
 
+[[diff]]
 diff::
   Settings used when displaying diffs. They are all boolean and
   defaults to False.
@@ -189,25 +192,26 @@
   ignoreblanklines;;
     Ignore changes whose lines are all blank.
 
+[[email]]
 email::
   Settings for extensions that send email messages.
   from;;
-    Optional.  Email address to use in "From" header and SMTP envelope
+    Optional. Email address to use in "From" header and SMTP envelope
     of outgoing messages.
   to;;
-    Optional.  Comma-separated list of recipients' email addresses.
+    Optional. Comma-separated list of recipients' email addresses.
   cc;;
-    Optional.  Comma-separated list of carbon copy recipients'
+    Optional. Comma-separated list of carbon copy recipients'
     email addresses.
   bcc;;
-    Optional.  Comma-separated list of blind carbon copy
-    recipients' email addresses.  Cannot be set interactively.
+    Optional. Comma-separated list of blind carbon copy
+    recipients' email addresses. Cannot be set interactively.
   method;;
-    Optional.  Method to use to send email messages.  If value is
+    Optional. Method to use to send email messages. If value is
     "smtp" (default), use SMTP (see section "[smtp]" for
-    configuration).  Otherwise, use as name of program to run that
+    configuration). Otherwise, use as name of program to run that
     acts like sendmail (takes "-f" option for sender, list of
-    recipients on command line, message on stdin).  Normally, setting
+    recipients on command line, message on stdin). Normally, setting
     this to "sendmail" or "/usr/sbin/sendmail" is enough to use
     sendmail to send messages.
 
@@ -217,6 +221,7 @@
     from = Joseph User <joe.user@example.com>
     method = /usr/sbin/sendmail
 
+[[extensions]]
 extensions::
   Mercurial has an extension mechanism for adding new features. To
   enable an extension, create an entry for it in this section.
@@ -241,6 +246,7 @@
     # (this extension will get loaded from the file specified)
     myfeature = ~/.hgext/myfeature.py
 
+[[format]]
 format::
 
   usestore;;
@@ -250,6 +256,7 @@
     you to store longer filenames in some situations at the expense of
     compatibility.
 
+[[merge-patterns]]
 merge-patterns::
   This section specifies merge tools to associate with particular file
   patterns. Tools matched here will take precedence over the default
@@ -261,6 +268,7 @@
     **.c = kdiff3
     **.jpg = myimgmerge
 
+[[merge-tools]]
 merge-tools::
   This section configures external merge tools to use for file-level
   merges.
@@ -281,6 +289,7 @@
     myHtmlTool.priority = 1
 
   Supported arguments:
+
   priority;;
     The priority in which to evaluate this tool.
     Default: 0.
@@ -297,10 +306,10 @@
     launching external tool.
     Default: True
   binary;;
-    This tool can merge binary files.  Defaults to False, unless tool
+    This tool can merge binary files. Defaults to False, unless tool
     was selected by file pattern match.
   symlink;;
-    This tool can merge symlinks.  Defaults to False, even if tool was
+    This tool can merge symlinks. Defaults to False, even if tool was
     selected by file pattern match.
   checkconflicts;;
     Check whether there are conflicts even though the tool reported
@@ -313,19 +322,20 @@
   fixeol;;
     Attempt to fix up EOL changes caused by the merge tool.
     Default: False
-  gui:;
+  gui;;
     This tool requires a graphical interface to run. Default: False
   regkey;;
     Windows registry key which describes install location of this tool.
     Mercurial will search for this key first under HKEY_CURRENT_USER and
-    then under HKEY_LOCAL_MACHINE.  Default: None
+    then under HKEY_LOCAL_MACHINE. Default: None
   regname;;
-    Name of value to read from specified registry key.  Defaults to the
+    Name of value to read from specified registry key. Defaults to the
     unnamed (default) value.
   regappend;;
     String to append to the value read from the registry, typically the
-    executable name of the tool.  Default: None
+    executable name of the tool. Default: None
 
+[[hooks]]
 hooks::
   Commands or Python functions that get automatically executed by
   various actions such as starting or finishing a commit. Multiple
@@ -342,24 +352,24 @@
     incoming.autobuild = /my/build/hook
 
   Most hooks are run with environment variables set that give added
-  useful information.  For each hook below, the environment variables
+  useful information. For each hook below, the environment variables
   it is passed are listed with names of the form "$HG_foo".
 
   changegroup;;
     Run after a changegroup has been added via push, pull or
-    unbundle. ID of the first new changeset is in $HG_NODE.  URL from
+    unbundle. ID of the first new changeset is in $HG_NODE. URL from
     which changes came is in $HG_URL.
   commit;;
     Run after a changeset has been created in the local repository.
-    ID of the newly created changeset is in $HG_NODE.  Parent
+    ID of the newly created changeset is in $HG_NODE. Parent
     changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
   incoming;;
     Run after a changeset has been pulled, pushed, or unbundled into
-    the local repository.  The ID of the newly arrived changeset is in
-    $HG_NODE.  URL that was source of changes came is in $HG_URL.
+    the local repository. The ID of the newly arrived changeset is in
+    $HG_NODE. URL that was source of changes came is in $HG_URL.
   outgoing;;
-    Run after sending changes from local repository to another.  ID of
-    first changeset sent is in $HG_NODE.  Source of operation is in
+    Run after sending changes from local repository to another. ID of
+    first changeset sent is in $HG_NODE. Source of operation is in
     $HG_SOURCE; see "preoutgoing" hook for description.
   post-<command>;;
     Run after successful invocations of the associated command. The
@@ -371,56 +381,56 @@
     the command doesn't execute and Mercurial returns the failure code.
   prechangegroup;;
     Run before a changegroup is added via push, pull or unbundle.
-    Exit status 0 allows the changegroup to proceed.  Non-zero status
-    will cause the push, pull or unbundle to fail.  URL from which
+    Exit status 0 allows the changegroup to proceed. Non-zero status
+    will cause the push, pull or unbundle to fail. URL from which
     changes will come is in $HG_URL.
   precommit;;
-    Run before starting a local commit.  Exit status 0 allows the
-    commit to proceed.  Non-zero status will cause the commit to fail.
+    Run before starting a local commit. Exit status 0 allows the
+    commit to proceed. Non-zero status will cause the commit to fail.
     Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
   preoutgoing;;
     Run before collecting changes to send from the local repository to
-    another.  Non-zero status will cause failure.  This lets you
-    prevent pull over http or ssh.  Also prevents against local pull,
+    another. Non-zero status will cause failure. This lets you
+    prevent pull over http or ssh. Also prevents against local pull,
     push (outbound) or bundle commands, but not effective, since you
-    can just copy files instead then.  Source of operation is in
-    $HG_SOURCE.  If "serve", operation is happening on behalf of
-    remote ssh or http repository.  If "push", "pull" or "bundle",
+    can just copy files instead then. Source of operation is in
+    $HG_SOURCE. If "serve", operation is happening on behalf of
+    remote ssh or http repository. If "push", "pull" or "bundle",
     operation is happening on behalf of repository on same system.
   pretag;;
-    Run before creating a tag.  Exit status 0 allows the tag to be
-    created.  Non-zero status will cause the tag to fail.  ID of
-    changeset to tag is in $HG_NODE.  Name of tag is in $HG_TAG.  Tag
+    Run before creating a tag. Exit status 0 allows the tag to be
+    created. Non-zero status will cause the tag to fail. ID of
+    changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
     is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
   pretxnchangegroup;;
     Run after a changegroup has been added via push, pull or unbundle,
-    but before the transaction has been committed.  Changegroup is
-    visible to hook program.  This lets you validate incoming changes
-    before accepting them.  Passed the ID of the first new changeset
-    in $HG_NODE.  Exit status 0 allows the transaction to commit.
+    but before the transaction has been committed. Changegroup is
+    visible to hook program. This lets you validate incoming changes
+    before accepting them. Passed the ID of the first new changeset
+    in $HG_NODE. Exit status 0 allows the transaction to commit.
     Non-zero status will cause the transaction to be rolled back and
-    the push, pull or unbundle will fail.  URL that was source of
+    the push, pull or unbundle will fail. URL that was source of
     changes is in $HG_URL.
   pretxncommit;;
     Run after a changeset has been created but the transaction not yet
-    committed.  Changeset is visible to hook program.  This lets you
-    validate commit message and changes.  Exit status 0 allows the
-    commit to proceed.  Non-zero status will cause the transaction to
-    be rolled back.  ID of changeset is in $HG_NODE.  Parent changeset
+    committed. Changeset is visible to hook program. This lets you
+    validate commit message and changes. Exit status 0 allows the
+    commit to proceed. Non-zero status will cause the transaction to
+    be rolled back. ID of changeset is in $HG_NODE. Parent changeset
     IDs are in $HG_PARENT1 and $HG_PARENT2.
   preupdate;;
-    Run before updating the working directory.  Exit status 0 allows
-    the update to proceed.  Non-zero status will prevent the update.
-    Changeset ID of first new parent is in $HG_PARENT1.  If merge, ID
+    Run before updating the working directory. Exit status 0 allows
+    the update to proceed. Non-zero status will prevent the update.
+    Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
     of second new parent is in $HG_PARENT2.
   tag;;
-    Run after a tag is created.  ID of tagged changeset is in
-    $HG_NODE.  Name of tag is in $HG_TAG.  Tag is local if
+    Run after a tag is created. ID of tagged changeset is in
+    $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
     $HG_LOCAL=1, in repo if $HG_LOCAL=0.
   update;;
-    Run after updating the working directory.  Changeset ID of first
-    new parent is in $HG_PARENT1.  If merge, ID of second new parent
-    is in $HG_PARENT2.  If update succeeded, $HG_ERROR=0.  If update
+    Run after updating the working directory. Changeset ID of first
+    new parent is in $HG_PARENT1. If merge, ID of second new parent
+    is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
     failed (e.g. because conflicts not resolved), $HG_ERROR=1.
 
   Note: it is generally better to use standard hooks rather than the
@@ -438,16 +448,17 @@
 
     hookname = python:modulename.submodule.callable
 
-  Python hooks are run within the Mercurial process.  Each hook is
+  Python hooks are run within the Mercurial process. Each hook is
   called with at least three keyword arguments: a ui object (keyword
   "ui"), a repository object (keyword "repo"), and a "hooktype"
-  keyword that tells what kind of hook is used.  Arguments listed as
+  keyword that tells what kind of hook is used. Arguments listed as
   environment variables above are passed as keyword arguments, with no
   "HG_" prefix, and names in lower case.
 
   If a Python hook returns a "true" value or raises an exception, this
   is treated as failure of the hook.
 
+[[http_proxy]]
 http_proxy::
   Used to access web-based Mercurial repositories through a HTTP
   proxy.
@@ -455,68 +466,72 @@
     Host name and (optional) port of the proxy server, for example
     "myproxy:8000".
   no;;
-    Optional.  Comma-separated list of host names that should bypass
+    Optional. Comma-separated list of host names that should bypass
     the proxy.
   passwd;;
-    Optional.  Password to authenticate with at the proxy server.
+    Optional. Password to authenticate with at the proxy server.
   user;;
-    Optional.  User name to authenticate with at the proxy server.
+    Optional. User name to authenticate with at the proxy server.
 
+[[smtp]]
 smtp::
   Configuration for extensions that need to send email messages.
   host;;
     Host name of mail server, e.g. "mail.example.com".
   port;;
-    Optional.  Port to connect to on mail server.  Default: 25.
+    Optional. Port to connect to on mail server. Default: 25.
   tls;;
-    Optional.  Whether to connect to mail server using TLS.  True or
-    False.  Default: False.
+    Optional. Whether to connect to mail server using TLS. True or
+    False. Default: False.
   username;;
-    Optional.  User name to authenticate to SMTP server with.
+    Optional. User name to authenticate to SMTP server with.
     If username is specified, password must also be specified.
     Default: none.
   password;;
-    Optional.  Password to authenticate to SMTP server with.
+    Optional. Password to authenticate to SMTP server with.
     If username is specified, password must also be specified.
     Default: none.
   local_hostname;;
-    Optional.  It's the hostname that the sender can use to identify itself
+    Optional. It's the hostname that the sender can use to identify itself
     to the MTA.
 
+[[paths]]
 paths::
-  Assigns symbolic names to repositories.  The left side is the
+  Assigns symbolic names to repositories. The left side is the
   symbolic name, and the right gives the directory or URL that is the
-  location of the repository.  Default paths can be declared by
+  location of the repository. Default paths can be declared by
   setting the following entries.
   default;;
     Directory or URL to use when pulling if no source is specified.
     Default is set to repository from which the current repository
     was cloned.
   default-push;;
-    Optional.  Directory or URL to use when pushing if no destination
+    Optional. Directory or URL to use when pushing if no destination
     is specified.
 
+[[server]]
 server::
   Controls generic server settings.
   uncompressed;;
     Whether to allow clients to clone a repo using the uncompressed
-    streaming protocol.  This transfers about 40% more data than a
+    streaming protocol. This transfers about 40% more data than a
     regular clone, but uses less memory and CPU on both server and
-    client.  Over a LAN (100Mbps or better) or a very fast WAN, an
+    client. Over a LAN (100Mbps or better) or a very fast WAN, an
     uncompressed streaming clone is a lot faster (~10x) than a regular
-    clone.  Over most WAN connections (anything slower than about
+    clone. Over most WAN connections (anything slower than about
     6Mbps), uncompressed streaming is slower, because of the extra
-    data transfer overhead.  Default is False.
+    data transfer overhead. Default is False.
 
+[[trusted]]
 trusted::
   For security reasons, Mercurial will not use the settings in
   the .hg/hgrc file from a repository if it doesn't belong to a
-  trusted user or to a trusted group.  The main exception is the
+  trusted user or to a trusted group. The main exception is the
   web interface, which automatically uses some safe settings, since
   it's common to serve repositories from different users.
 
-  This section specifies what users and groups are trusted.  The
-  current user is always trusted.  To trust everybody, list a user
+  This section specifies what users and groups are trusted. The
+  current user is always trusted. To trust everybody, list a user
   or a group with name "*".
 
   users;;
@@ -524,6 +539,7 @@
   groups;;
     Comma-separated list of trusted groups.
 
+[[ui]]
 ui::
   User interface controls.
   archivemeta;;
@@ -531,13 +547,19 @@
     (hashes for the repository base and for tip) in archives created by
     the hg archive command or downloaded via hgweb.
     Default is true.
+  askusername;;
+    Whether to prompt for a username when committing. If True, and
+    neither $HGUSER nor $EMAIL has been specified, then the user will
+    be prompted to enter a username.  If no username is entered, the
+    default USER@HOST is used instead.
+    Default is False.
   debug;;
-    Print debugging information.  True or False.  Default is False.
+    Print debugging information. True or False. Default is False.
   editor;;
-    The editor to use during a commit.  Default is $EDITOR or "vi".
+    The editor to use during a commit. Default is $EDITOR or "vi".
   fallbackencoding;;
     Encoding to try if it's not possible to decode the changelog using
-    UTF-8.  Default is ISO-8859-1.
+    UTF-8. Default is ISO-8859-1.
   ignore;;
     A file to read per-user ignore patterns from. This file should be in
     the same format as a repository-wide .hgignore file. This option
@@ -546,7 +568,7 @@
     "ignore.other = ~/.hgignore2". For details of the ignore file
     format, see the hgignore(5) man page.
   interactive;;
-    Allow to prompt the user.  True or False.  Default is True.
+    Allow to prompt the user. True or False. Default is True.
   logtemplate;;
     Template string for commands that print changesets.
   merge;;
@@ -563,18 +585,19 @@
         fail to merge
 
     See the merge-tools section for more information on configuring tools.
+
   patch;;
     command to use to apply patches. Look for 'gpatch' or 'patch' in PATH if
     unset.
   quiet;;
-    Reduce the amount of output printed.  True or False.  Default is False.
+    Reduce the amount of output printed. True or False. Default is False.
   remotecmd;;
     remote command to use for clone/push/pull operations. Default is 'hg'.
   report_untrusted;;
     Warn if a .hg/hgrc file is ignored due to not being owned by a
-    trusted user or group.  True or False.  Default is True.
+    trusted user or group. True or False. Default is True.
   slash;;
-    Display paths using a slash ("/") as the path separator.  This only
+    Display paths using a slash ("/") as the path separator. This only
     makes a difference on systems where the default path separator is not
     the slash character (e.g. Windows uses the backslash character ("\")).
     Default is False.
@@ -582,7 +605,7 @@
     command to use for SSH connections. Default is 'ssh'.
   strict;;
     Require exact command names, instead of allowing unambiguous
-    abbreviations.  True or False.  Default is False.
+    abbreviations. True or False. Default is False.
   style;;
     Name of style to use for command output.
   timeout;;
@@ -591,14 +614,15 @@
   username;;
     The committer of a changeset created when running "commit".
     Typically a person's name and email address, e.g. "Fred Widget
-    <fred@example.com>".  Default is $EMAIL or username@hostname.
+    <fred@example.com>". Default is $EMAIL or username@hostname.
     If the username in hgrc is empty, it has to be specified manually or
     in a different hgrc file (e.g. $HOME/.hgrc, if the admin set "username ="
     in the system hgrc).
   verbose;;
-    Increase the amount of output printed.  True or False.  Default is False.
+    Increase the amount of output printed. True or False. Default is False.
 
 
+[[web]]
 web::
   Web interface configuration.
   accesslog;;
@@ -617,9 +641,9 @@
   allowpull;;
     Whether to allow pulling from the repository. Default is true.
   allow_push;;
-    Whether to allow pushing to the repository.  If empty or not set,
-    push is not allowed.  If the special value "*", any remote user
-    can push, including unauthenticated users.  Otherwise, the remote
+    Whether to allow pushing to the repository. If empty or not set,
+    push is not allowed. If the special value "*", any remote user
+    can push, including unauthenticated users. Otherwise, the remote
     user must have been authenticated, and the authenticated user name
     must be present in this list (separated by whitespace or ",").
     The contents of the allow_push list are examined after the
@@ -635,11 +659,11 @@
     Name or email address of the person in charge of the repository.
     Defaults to ui.username or $EMAIL or "unknown" if unset or empty.
   deny_push;;
-    Whether to deny pushing to the repository.  If empty or not set,
-    push is not denied.  If the special value "*", all remote users
-    are denied push.  Otherwise, unauthenticated users are all denied,
+    Whether to deny pushing to the repository. If empty or not set,
+    push is not denied. If the special value "*", all remote users
+    are denied push. Otherwise, unauthenticated users are all denied,
     and any authenticated user name present in this list (separated by
-    whitespace or ",") is also denied.  The contents of the deny_push
+    whitespace or ",") is also denied. The contents of the deny_push
     list are examined before the allow_push list.
   description;;
     Textual description of the repository's purpose or contents.
@@ -666,7 +690,7 @@
     Prefix path to serve from. Default is '' (server root).
   push_ssl;;
     Whether to require that inbound pushes be transported over SSL to
-    prevent password sniffing.  Default is true.
+    prevent password sniffing. Default is true.
   staticurl;;
     Base URL to use for static files. If unset, static files (e.g.
     the hgicon.png favicon) will be served by the CGI script itself.
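
As the hooks section of hgrc(5) above describes, a Python hook is declared as "hookname = python:modulename.submodule.callable"; it receives at least the ui, repo and hooktype keyword arguments, the relevant $HG_* environment variables arrive as lower-cased keywords, and a true return value (or an exception) counts as failure. A minimal pretxncommit hook along those lines might look like this sketch (the module name and the policy it enforces are hypothetical):

    # checkmsg.py -- hypothetical module somewhere on the Python path
    def hook(ui, repo, hooktype, node=None, parent1=None, parent2=None, **kwargs):
        """Refuse to commit a changeset with an empty description."""
        ctx = repo[node]                  # the changeset pending in the transaction
        if not ctx.description().strip():
            ui.warn('commit %s rejected: empty commit message\n' % ctx)
            return True                   # a true value makes the hook fail
        return False

    # enabled from an hgrc file:
    #   [hooks]
    #   pretxncommit.checkmsg = python:checkmsg.hook
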
--- a/hgext/acl.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/acl.py	Wed Sep 17 11:34:37 2008 +0200
@@ -46,79 +46,45 @@
 #   ** = user6
 
 from mercurial.i18n import _
-from mercurial.node import bin, short
 from mercurial import util
 import getpass
 
-class checker(object):
-    '''acl checker.'''
-
-    def buildmatch(self, key):
-        '''return tuple of (match function, list enabled).'''
-        if not self.ui.has_section(key):
-            self.ui.debug(_('acl: %s not enabled\n') % key)
-            return None, False
-
-        thisuser = self.getuser()
-        pats = [pat for pat, users in self.ui.configitems(key)
-                if thisuser in users.replace(',', ' ').split()]
-        self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
-                      (key, len(pats), thisuser))
-        if pats:
-            match = util.matcher(self.repo.root, names=pats)[1]
-        else:
-            match = util.never
-        return match, True
-
-    def getuser(self):
-        '''return name of authenticated user.'''
-        return self.user
+def buildmatch(ui, repo, user, key):
+    '''return tuple of (match function, list enabled).'''
+    if not ui.has_section(key):
+        ui.debug(_('acl: %s not enabled\n') % key)
+        return None
 
-    def __init__(self, ui, repo):
-        self.ui = ui
-        self.repo = repo
-        self.user = getpass.getuser()
-        cfg = self.ui.config('acl', 'config')
-        if cfg:
-            self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
-        self.allow, self.allowable = self.buildmatch('acl.allow')
-        self.deny, self.deniable = self.buildmatch('acl.deny')
-
-    def skipsource(self, source):
-        '''true if incoming changes from this source should be skipped.'''
-        ok_sources = self.ui.config('acl', 'sources', 'serve').split()
-        return source not in ok_sources
-
-    def check(self, node):
-        '''return if access allowed, raise exception if not.'''
-        files = self.repo.changectx(node).files()
-        if self.deniable:
-            for f in files:
-                if self.deny(f):
-                    self.ui.debug(_('acl: user %s denied on %s\n') %
-                                  (self.getuser(), f))
-                    raise util.Abort(_('acl: access denied for changeset %s') %
-                                     short(node))
-        if self.allowable:
-            for f in files:
-                if not self.allow(f):
-                    self.ui.debug(_('acl: user %s not allowed on %s\n') %
-                                  (self.getuser(), f))
-                    raise util.Abort(_('acl: access denied for changeset %s') %
-                                     short(node))
-        self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
+    pats = [pat for pat, users in ui.configitems(key)
+            if user in users.replace(',', ' ').split()]
+    ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
+             (key, len(pats), user))
+    if pats:
+        return util.matcher(repo.root, names=pats)[1]
+    return util.never
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     if hooktype != 'pretxnchangegroup':
         raise util.Abort(_('config error - hook type "%s" cannot stop '
                            'incoming changesets') % hooktype)
-
-    c = checker(ui, repo)
-    if c.skipsource(source):
+    if source not in ui.config('acl', 'sources', 'serve').split():
         ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
         return
 
-    start = repo.changelog.rev(bin(node))
-    end = repo.changelog.count()
-    for rev in xrange(start, end):
-        c.check(repo.changelog.node(rev))
+    user = getpass.getuser()
+    cfg = ui.config('acl', 'config')
+    if cfg:
+        ui.readsections(cfg, 'acl.allow', 'acl.deny')
+    allow = buildmatch(ui, repo, user, 'acl.allow')
+    deny = buildmatch(ui, repo, user, 'acl.deny')
+
+    for rev in xrange(repo[node], len(repo)):
+        ctx = repo[rev]
+        for f in ctx.files():
+            if deny and deny(f):
+                ui.debug(_('acl: user %s denied on %s\n') % (user, f))
+                raise util.Abort(_('acl: access denied for changeset %s') % ctx)
+            if allow and not allow(f):
+                ui.debug(_('acl: user %s not allowed on %s\n') % (user, f))
+                raise util.Abort(_('acl: access denied for changeset %s') % ctx)
+        ui.debug(_('acl: allowing changeset %s\n') % ctx)
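
The rewritten acl hook (and the bugzilla hook further down) leans on the newer repository indexing idiom used throughout this merge: repo[rev-or-node] returns a changectx, len(repo) is the number of changesets, and the changectx itself reports which files a revision touched. A standalone sketch of that idiom, assuming the current directory is a Mercurial repository:

    from mercurial import hg, ui

    repo = hg.repository(ui.ui(), '.')   # assumption: '.' is a repository root
    tip = repo['tip']                    # changectx for the tip revision
    for rev in xrange(tip.rev(), len(repo)):
        ctx = repo[rev]                  # changectx looked up by revision number
        print ctx, ctx.files()           # short hash and the files it modifies
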
--- a/hgext/alias.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/alias.py	Wed Sep 17 11:34:37 2008 +0200
@@ -11,6 +11,7 @@
 
 from mercurial.cmdutil import findcmd, UnknownCommand, AmbiguousCommand
 from mercurial import commands
+from mercurial.i18n import _
 
 cmdtable = {}
 
@@ -49,13 +50,13 @@
                 commands.norepo += ' %s' % self._name
             return
         except UnknownCommand:
-            msg = '*** [alias] %s: command %s is unknown' % \
+            msg = _('*** [alias] %s: command %s is unknown') % \
                   (self._name, self._target)
         except AmbiguousCommand:
-            msg = '*** [alias] %s: command %s is ambiguous' % \
+            msg = _('*** [alias] %s: command %s is ambiguous') % \
                   (self._name, self._target)
         except RecursiveCommand:
-            msg = '*** [alias] %s: circular dependency on %s' % \
+            msg = _('*** [alias] %s: circular dependency on %s') % \
                   (self._name, self._target)
         def nocmd(*args, **opts):
             self._ui.warn(msg + '\n')
@@ -67,7 +68,7 @@
 def uisetup(ui):
     for cmd, target in ui.configitems('alias'):
         if not target:
-            ui.warn('*** [alias] %s: no definition\n' % cmd)
+            ui.warn(_('*** [alias] %s: no definition\n') % cmd)
             continue
         args = target.split(' ')
         tcmd = args.pop(0)
--- a/hgext/bugzilla.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/bugzilla.py	Wed Sep 17 11:34:37 2008 +0200
@@ -55,7 +55,7 @@
 from mercurial.i18n import _
 from mercurial.node import short
 from mercurial import cmdutil, templater, util
-import os, re, time
+import re, time
 
 MySQLdb = None
 
@@ -80,11 +80,7 @@
         self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
                                     db=db, connect_timeout=timeout)
         self.cursor = self.conn.cursor()
-        self.run('select fieldid from fielddefs where name = "longdesc"')
-        ids = self.cursor.fetchall()
-        if len(ids) != 1:
-            raise util.Abort(_('unknown database schema'))
-        self.longdesc_id = ids[0][0]
+        self.longdesc_id = self.get_longdesc_id()
         self.user_ids = {}
 
     def run(self, *args, **kwargs):
@@ -96,12 +92,18 @@
             self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
             raise
 
+    def get_longdesc_id(self):
+        '''get identity of longdesc field'''
+        self.run('select fieldid from fielddefs where name = "longdesc"')
+        ids = self.cursor.fetchall()
+        if len(ids) != 1:
+            raise util.Abort(_('unknown database schema'))
+        return ids[0][0]
+
     def filter_real_bug_ids(self, ids):
         '''filter not-existing bug ids from list.'''
         self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
-        ids = [c[0] for c in self.cursor.fetchall()]
-        ids.sort()
-        return ids
+        return util.sort([c[0] for c in self.cursor.fetchall()])
 
     def filter_unknown_bug_ids(self, node, ids):
         '''filter bug ids from list that already refer to this changeset.'''
@@ -114,9 +116,7 @@
             self.ui.status(_('bug %d already knows about changeset %s\n') %
                            (id, short(node)))
             unknown.pop(id, None)
-        ids = unknown.keys()
-        ids.sort()
-        return ids
+        return util.sort(unknown.keys())
 
     def notify(self, ids):
         '''tell bugzilla to send mail.'''
@@ -127,7 +127,7 @@
             cmd = self.ui.config('bugzilla', 'notify',
                                'cd /var/www/html/bugzilla && '
                                './processmail %s nobody@nowhere.com') % id
-            fp = os.popen('(%s) 2>&1' % cmd)
+            fp = util.popen('(%s) 2>&1' % cmd)
             out = fp.read()
             ret = fp.close()
             if ret:
@@ -186,11 +186,26 @@
                     values (%s, %s, %s, %s)''',
                  (bugid, userid, now, self.longdesc_id))
 
+class bugzilla_3_0(bugzilla_2_16):
+    '''support for bugzilla 3.0 series.'''
+
+    def __init__(self, ui):
+        bugzilla_2_16.__init__(self, ui)
+
+    def get_longdesc_id(self):
+        '''get identity of longdesc field'''
+        self.run('select id from fielddefs where name = "longdesc"')
+        ids = self.cursor.fetchall()
+        if len(ids) != 1:
+            raise util.Abort(_('unknown database schema'))
+        return ids[0][0]
+
 class bugzilla(object):
     # supported versions of bugzilla. different versions have
     # different schemas.
     _versions = {
         '2.16': bugzilla_2_16,
+        '3.0':  bugzilla_3_0
         }
 
     _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
@@ -300,7 +315,7 @@
                          hooktype)
     try:
         bz = bugzilla(ui, repo)
-        ctx = repo.changectx(node)
+        ctx = repo[node]
         ids = bz.find_bug_ids(ctx)
         if ids:
             for id in ids:
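
Several hunks here (and in the churn and convert changes below) replace the copy-then-list.sort() idiom with util.sort(), which in this era of Mercurial returns a new sorted list. A rough sketch of the equivalent behaviour, for readers following the diff without the util module at hand:

    def sort(l):
        """return a sorted copy of an iterable (sketch of mercurial.util.sort)"""
        l = list(l)
        l.sort()
        return l

    # e.g. filter_unknown_bug_ids() above collapses three lines into:
    #     return util.sort(unknown.keys())
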
--- a/hgext/children.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/children.py	Wed Sep 17 11:34:37 2008 +0200
@@ -25,7 +25,7 @@
     if file_:
         ctx = repo.filectx(file_, changeid=rev)
     else:
-        ctx = repo.changectx(rev)
+        ctx = repo[rev]
 
     displayer = cmdutil.show_changeset(ui, repo, opts)
     for node in [cp.node() for cp in ctx.children()]:
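
children.py, bugzilla.py above, and the acl hook earlier all switch from repo.changectx(rev) to the newer indexing API: repo[rev] returns a changectx and len(repo) gives the revision count. A small illustrative sketch (the repository path is hypothetical):

    from mercurial import hg, ui

    u = ui.ui()
    repo = hg.repository(u, '/path/to/repo')   # hypothetical local repository
    print len(repo)                            # revision count, replaces changelog.count()
    ctx = repo[0]                              # changectx for revision 0
    for child in ctx.children():               # changectx objects, as used above
        print child.rev(), child.user()
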
--- a/hgext/churn.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/churn.py	Wed Sep 17 11:34:37 2008 +0200
@@ -4,15 +4,10 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
-#
-#
-# Aliases map file format is simple one alias per line in the following
-# format:
-#
-# <alias email> <actual email>
+'''allow graphing the number of lines changed per contributor'''
 
 from mercurial.i18n import gettext as _
-from mercurial import mdiff, cmdutil, util, node
+from mercurial import patch, cmdutil, util, node
 import os, sys
 
 def get_tty_width():
@@ -36,98 +31,41 @@
         pass
     return 80
 
-def __gather(ui, repo, node1, node2):
-    def dirtywork(f, mmap1, mmap2):
-        lines = 0
-
-        to = mmap1 and repo.file(f).read(mmap1[f]) or None
-        tn = mmap2 and repo.file(f).read(mmap2[f]) or None
-
-        diff = mdiff.unidiff(to, "", tn, "", f, f).split("\n")
-
-        for line in diff:
-            if not line:
-                continue # skip EOF
-            if line.startswith(" "):
-                continue # context line
-            if line.startswith("--- ") or line.startswith("+++ "):
-                continue # begining of diff
-            if line.startswith("@@ "):
-                continue # info line
-
-            # changed lines
-            lines += 1
-
-        return lines
-
-    ##
-
-    lines = 0
-
-    changes = repo.status(node1, node2, None, util.always)[:5]
-
-    modified, added, removed, deleted, unknown = changes
-
-    who = repo.changelog.read(node2)[1]
-    who = util.email(who) # get the email of the person
-
-    mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
-    mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
-    for f in modified:
-        lines += dirtywork(f, mmap1, mmap2)
-
-    for f in added:
-        lines += dirtywork(f, None, mmap2)
-
-    for f in removed:
-        lines += dirtywork(f, mmap1, None)
-
-    for f in deleted:
-        lines += dirtywork(f, mmap1, mmap2)
-
-    for f in unknown:
-        lines += dirtywork(f, mmap1, mmap2)
-
-    return (who, lines)
-
-def gather_stats(ui, repo, amap, revs=None, progress=False):
+def countrevs(ui, repo, amap, revs, progress=False):
     stats = {}
-
-    cl    = repo.changelog
-
+    count = pct = 0
     if not revs:
-        revs = range(0, cl.count())
-
-    nr_revs = len(revs)
-    cur_rev = 0
+        revs = range(len(repo))
 
     for rev in revs:
-        cur_rev += 1 # next revision
-
-        node2    = cl.node(rev)
-        node1    = cl.parents(node2)[0]
-
-        if cl.parents(node2)[1] != node.nullid:
+        ctx2 = repo[rev]
+        parents = ctx2.parents()
+        if len(parents) > 1:
             ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
             continue
 
-        who, lines = __gather(ui, repo, node1, node2)
+        ctx1 = parents[0]
+        lines = 0
+        ui.pushbuffer()
+        patch.diff(repo, ctx1.node(), ctx2.node())
+        diff = ui.popbuffer()
 
-        # remap the owner if possible
-        if who in amap:
-            ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
-            who = amap[who]
+        for l in diff.split('\n'):
+            if (l.startswith("+") and not l.startswith("+++ ") or
+                l.startswith("-") and not l.startswith("--- ")):
+                lines += 1
 
-        if not who in stats:
-            stats[who] = 0
-        stats[who] += lines
-
-        ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))
+        user = util.email(ctx2.user())
+        user = amap.get(user, user) # remap
+        stats[user] = stats.get(user, 0) + lines
+        ui.debug(_("rev %d: %d lines by %s\n") % (rev, lines, user))
 
         if progress:
-            nr_revs = max(nr_revs, 1)
-            if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
-                ui.write("\rGenerating stats: %d%%" % (int(100.0*cur_rev/nr_revs),))
+            count += 1
+            newpct = int(100.0 * count / max(len(revs), 1))
+            if pct < newpct:
+                pct = newpct
+                ui.write(_("\rGenerating stats: %d%%") % pct)
                 sys.stdout.flush()
 
     if progress:
@@ -137,64 +75,39 @@
     return stats
 
 def churn(ui, repo, **opts):
-    "Graphs the number of lines changed"
+    '''graphs the number of lines changed
+
+    The map file format used to specify aliases is fairly simple:
+
+    <alias email> <actual email>'''
 
     def pad(s, l):
-        if len(s) < l:
-            return s + " " * (l-len(s))
-        return s[0:l]
-
-    def graph(n, maximum, width, char):
-        maximum = max(1, maximum)
-        n = int(n * width / float(maximum))
-
-        return char * (n)
-
-    def get_aliases(f):
-        aliases = {}
-
-        for l in f.readlines():
-            l = l.strip()
-            alias, actual = l.split()
-            aliases[alias] = actual
-
-        return aliases
+        return (s + " " * l)[:l]
 
     amap = {}
     aliases = opts.get('aliases')
     if aliases:
-        try:
-            f = open(aliases,"r")
-        except OSError, e:
-            print "Error: " + e
-            return
+        for l in open(aliases, "r"):
+            l = l.strip()
+            alias, actual = l.split()
+            amap[alias] = actual
 
-        amap = get_aliases(f)
-        f.close()
-
-    revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
-    revs.sort()
-    stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))
+    revs = util.sort([int(r) for r in cmdutil.revrange(repo, opts['rev'])])
+    stats = countrevs(ui, repo, amap, revs, opts.get('progress'))
+    if not stats:
+        return
 
-    # make a list of tuples (name, lines) and sort it in descending order
-    ordered = stats.items()
-    if not ordered:
-        return
-    ordered.sort(lambda x, y: cmp(y[1], x[1]))
-    max_churn = ordered[0][1]
+    stats = util.sort([(-l, u, l) for u,l in stats.items()])
+    maxchurn = float(max(1, stats[0][2]))
+    maxuser = max([len(u) for k, u, l in stats])
 
-    tty_width = get_tty_width()
-    ui.note(_("assuming %i character terminal\n") % tty_width)
-    tty_width -= 1
-
-    max_user_width = max([len(user) for user, churn in ordered])
+    ttywidth = get_tty_width()
+    ui.debug(_("assuming %i character terminal\n") % ttywidth)
+    width = ttywidth - maxuser - 2 - 6 - 2 - 2
 
-    graph_width = tty_width - max_user_width - 1 - 6 - 2 - 2
-
-    for user, churn in ordered:
-        print "%s %6d %s" % (pad(user, max_user_width),
-                             churn,
-                             graph(churn, max_churn, graph_width, '*'))
+    for k, user, churn in stats:
+        print "%s %6d %s" % (pad(user, maxuser), churn,
+                             "*" * int(churn * width / maxchurn))
 
 cmdtable = {
     "churn":
--- a/hgext/convert/__init__.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/__init__.py	Wed Sep 17 11:34:37 2008 +0200
@@ -4,27 +4,29 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
+'''converting foreign VCS repositories to Mercurial'''
 
 import convcmd
 from mercurial import commands
+from mercurial.i18n import _
 
 # Commands definition was moved elsewhere to ease demandload job.
 
 def convert(ui, src, dest=None, revmapfile=None, **opts):
     """Convert a foreign SCM repository to a Mercurial one.
 
-    Accepted source formats:
-    - Mercurial
-    - CVS
-    - Darcs
-    - git
-    - Subversion
-    - Monotone
-    - GNU Arch
+    Accepted source formats [identifiers]:
+    - Mercurial [hg]
+    - CVS [cvs]
+    - Darcs [darcs]
+    - git [git]
+    - Subversion [svn]
+    - Monotone [mtn]
+    - GNU Arch [gnuarch]
 
-    Accepted destination formats:
-    - Mercurial
-    - Subversion (history on branches is not preserved)
+    Accepted destination formats [identifiers]:
+    - Mercurial [hg]
+    - Subversion [svn] (history on branches is not preserved)
 
     If no revision is given, all revisions will be converted. Otherwise,
     convert will only import up to the named revision (given in a format
@@ -84,6 +86,52 @@
 
     --config convert.hg.saverev=True          (boolean)
         allow target to preserve source revision ID
+    --config convert.hg.startrev=0            (hg revision identifier)
+        convert start revision and its descendants
+
+    CVS Source
+    ----------
+
+    CVS source will use a sandbox (i.e. a checked-out copy) from CVS
+    to indicate the starting point of what will be converted. Direct
+    access to the repository files is not needed, unless of course
+    the repository is :local:. The conversion uses the top level
+    directory in the sandbox to find the CVS repository, and then uses
+    CVS rlog commands to find files to convert. This means that unless
+    a filemap is given, all files under the starting directory will be
+    converted, and that any directory reorganisation in the CVS
+    sandbox is ignored.
+
+    Because CVS does not have changesets, it is necessary to collect
+    individual commits to CVS and merge them into changesets. CVS source
+    can use the external 'cvsps' program (this is a legacy option and may
+    be removed in the future) or use its internal changeset merging code.
+    External cvsps is the default, and options may be passed to it by setting
+        --config convert.cvsps='cvsps -A -u --cvs-direct -q'
+    The options shown are the defaults.
+
+    Internal cvsps is selected by setting
+        --config convert.cvsps=builtin
+    and has a few more configurable options:
+        --config convert.cvsps.fuzz=60   (integer)
+            Specify the maximum time (in seconds) that is allowed between
+            commits with identical user and log message in a single
+            changeset. If very large files were checked in as part of
+            a changeset, the default may not be long enough.
+        --config convert.cvsps.mergeto='{{mergetobranch ([-\w]+)}}'
+            Specify a regular expression to which commit log messages are
+            matched. If a match occurs, then the conversion process will
+            insert a dummy revision merging the branch on which this log
+            message occurs to the branch indicated in the regex.
+        --config convert.cvsps.mergefrom='{{mergefrombranch ([-\w]+)}}'
+            Specify a regular expression to which commit log messages are
+            matched. If a match occurs, then the conversion process will
+            add the most recent revision on the branch indicated in the
+            regex as the second parent of the changeset.
+
+    The hgext/convert/cvsps wrapper script allows the builtin changeset
+    merging code to be run without doing a conversion. Its parameters and
+    output are similar to those of cvsps 2.1.
 
     Subversion Source
     -----------------
@@ -134,14 +182,14 @@
 cmdtable = {
     "convert":
         (convert,
-         [('A', 'authors', '', 'username mapping filename'),
-          ('d', 'dest-type', '', 'destination repository type'),
-          ('', 'filemap', '', 'remap file names using contents of file'),
-          ('r', 'rev', '', 'import up to target revision REV'),
-          ('s', 'source-type', '', 'source repository type'),
-          ('', 'splicemap', '', 'splice synthesized history into place'),
-          ('', 'datesort', None, 'try to sort changesets by date')],
-         'hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
+         [('A', 'authors', '', _('username mapping filename')),
+          ('d', 'dest-type', '', _('destination repository type')),
+          ('', 'filemap', '', _('remap file names using contents of file')),
+          ('r', 'rev', '', _('import up to target revision REV')),
+          ('s', 'source-type', '', _('source repository type')),
+          ('', 'splicemap', '', _('splice synthesized history into place')),
+          ('', 'datesort', None, _('try to sort changesets by date'))],
+         _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]')),
     "debugsvnlog":
         (debugsvnlog,
          [],
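
The expanded docstring documents both cvsps paths purely in terms of --config overrides, so a conversion can be driven from a single command line. A hypothetical invocation combining the options described above (paths are made up):

    # convert a CVS sandbox with the builtin changeset-merging code,
    # allowing up to two minutes of skew between member commits
    hg convert --source-type cvs \
       --config convert.cvsps=builtin \
       --config convert.cvsps.fuzz=120 \
       ~/work/cvs-checkout converted-hg
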
--- a/hgext/convert/common.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/common.py	Wed Sep 17 11:34:37 2008 +0200
@@ -153,26 +153,18 @@
         mapping equivalent authors identifiers for each system."""
         return None
 
-    def putfile(self, f, e, data):
-        """Put file for next putcommit().
-        f: path to file
-        e: '', 'x', or 'l' (regular file, executable, or symlink)
-        data: file contents"""
-        raise NotImplementedError()
-
-    def delfile(self, f):
-        """Delete file for next putcommit().
-        f: path to file"""
-        raise NotImplementedError()
-
-    def putcommit(self, files, parents, commit):
+    def putcommit(self, files, copies, parents, commit, source):
         """Create a revision with all changed files listed in 'files'
         and having listed parents. 'commit' is a commit object containing
         at a minimum the author, date, and message for this changeset.
-        Called after putfile() and delfile() calls. Note that the sink
-        repository is not told to update itself to a particular revision
-        (or even what that revision would be) before it receives the
-        file data."""
+        'files' is a list of (path, version) tuples, 'copies' is a dictionary
+        mapping destinations to sources, and 'source' is the source repository.
+        Only getfile() and getmode() should be called on 'source'.
+
+        Note that the sink repository is not told to update itself to
+        a particular revision (or even what that revision would be)
+        before it receives the file data.
+        """
         raise NotImplementedError()
 
     def puttags(self, tags):
@@ -181,7 +173,7 @@
         raise NotImplementedError()
 
     def setbranch(self, branch, pbranches):
-        """Set the current branch name. Called before the first putfile
+        """Set the current branch name. Called before the first putcommit
         on the branch.
         branch: branch name for subsequent commits
         pbranches: (converted parent revision, parent branch) tuples"""
@@ -236,7 +228,7 @@
 
     def _run(self, cmd, *args, **kwargs):
         cmdline = self._cmdline(cmd, *args, **kwargs)
-        self.ui.debug('running: %s\n' % (cmdline,))
+        self.ui.debug(_('running: %s\n') % (cmdline,))
         self.prerun()
         try:
             return util.popen(cmdline)
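
The sink interface loses putfile()/delfile(): a sink now pulls file data itself inside putcommit(), calling only getfile() and getmode() on the passed source. A hypothetical skeleton of a sink written against the new contract (every helper other than putcommit/getfile/getmode is invented for illustration):

    class demosink(converter_sink):
        def putcommit(self, files, copies, parents, commit, source):
            for path, version in files:
                try:
                    data = source.getfile(path, version)
                    mode = source.getmode(path, version)
                except IOError:
                    self.removefile(path)        # hypothetical: file gone in this revision
                    continue
                # copies maps destination -> source path, if this file was copied
                self.writefile(path, data, mode, copies.get(path))   # hypothetical
            return self.newnode(parents, commit)                     # hypothetical commit helper
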
--- a/hgext/convert/convcmd.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/convcmd.py	Wed Sep 17 11:34:37 2008 +0200
@@ -52,8 +52,8 @@
             exceptions.append(inst)
     if not ui.quiet:
         for inst in exceptions:
-            ui.write(_("%s\n") % inst)
-    raise util.Abort('%s: unknown repository type' % path)
+            ui.write("%s\n" % inst)
+    raise util.Abort(_('%s: missing or unsupported repository') % path)
 
 def convertsink(ui, path, type):
     for name, sink in sink_converters:
@@ -62,7 +62,7 @@
                 return sink(ui, path)
         except NoRepo, inst:
             ui.note(_("convert: %s\n") % inst)
-    raise util.Abort('%s: unknown repository type' % path)
+    raise util.Abort(_('%s: unknown repository type') % path)
 
 class converter(object):
     def __init__(self, ui, source, dest, revmapfile, opts):
@@ -184,7 +184,7 @@
     def writeauthormap(self):
         authorfile = self.authorfile
         if authorfile:
-           self.ui.status('Writing author map file %s\n' % authorfile)
+           self.ui.status(_('Writing author map file %s\n') % authorfile)
            ofile = open(authorfile, 'w+')
            for author in self.authors:
                ofile.write("%s=%s\n" % (author, self.authors[author]))
@@ -201,15 +201,15 @@
                 dstauthor = dstauthor.strip()
                 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
                     self.ui.status(
-                        'Overriding mapping for author %s, was %s, will be %s\n'
+                        _('Overriding mapping for author %s, was %s, will be %s\n')
                         % (srcauthor, self.authors[srcauthor], dstauthor))
                 else:
-                    self.ui.debug('Mapping author %s to %s\n'
+                    self.ui.debug(_('Mapping author %s to %s\n')
                                   % (srcauthor, dstauthor))
                     self.authors[srcauthor] = dstauthor
             except IndexError:
                 self.ui.warn(
-                    'Ignoring bad line in author map file %s: %s\n'
+                    _('Ignoring bad line in author map file %s: %s\n')
                     % (authorfile, line.rstrip()))
         afile.close()
 
@@ -221,8 +221,6 @@
 
     def copy(self, rev):
         commit = self.commitcache[rev]
-        do_copies = hasattr(self.dest, 'copyfile')
-        filenames = []
 
         changes = self.source.getchanges(rev)
         if isinstance(changes, basestring):
@@ -241,29 +239,14 @@
                 pbranches.append((self.map[prev],
                                   self.commitcache[prev].branch))
         self.dest.setbranch(commit.branch, pbranches)
-        for f, v in files:
-            filenames.append(f)
-            try:
-                data = self.source.getfile(f, v)
-            except IOError, inst:
-                self.dest.delfile(f)
-            else:
-                e = self.source.getmode(f, v)
-                self.dest.putfile(f, e, data)
-                if do_copies:
-                    if f in copies:
-                        copyf = copies[f]
-                        # Merely marks that a copy happened.
-                        self.dest.copyfile(copyf, f)
-
         try:
             parents = self.splicemap[rev].replace(',', ' ').split()
-            self.ui.status('spliced in %s as parents of %s\n' %
+            self.ui.status(_('spliced in %s as parents of %s\n') %
                            (parents, rev))
             parents = [self.map.get(p, p) for p in parents]
         except KeyError:
             parents = [b[0] for b in pbranches]
-        newnode = self.dest.putcommit(filenames, parents, commit)
+        newnode = self.dest.putcommit(files, copies, parents, commit, self.source)
         self.source.converted(rev, newnode)
         self.map[rev] = newnode
 
@@ -273,15 +256,15 @@
             self.source.before()
             self.dest.before()
             self.source.setrevmap(self.map)
-            self.ui.status("scanning source...\n")
+            self.ui.status(_("scanning source...\n"))
             heads = self.source.getheads()
             parents = self.walktree(heads)
-            self.ui.status("sorting...\n")
+            self.ui.status(_("sorting...\n"))
             t = self.toposort(parents)
             num = len(t)
             c = None
 
-            self.ui.status("converting...\n")
+            self.ui.status(_("converting...\n"))
             for c in t:
                 num -= 1
                 desc = self.commitcache[c].desc
@@ -291,7 +274,7 @@
                 # tolocal() because util._encoding conver() use it as
                 # 'utf-8'
                 self.ui.status("%d %s\n" % (num, recode(desc)))
-                self.ui.note(_("source: %s\n" % recode(c)))
+                self.ui.note(_("source: %s\n") % recode(c))
                 self.copy(c)
 
             tags = self.source.gettags()
@@ -326,7 +309,7 @@
 
     if not dest:
         dest = hg.defaultdest(src) + "-hg"
-        ui.status("assuming destination %s\n" % dest)
+        ui.status(_("assuming destination %s\n") % dest)
 
     destc = convertsink(ui, dest, opts.get('dest_type'))
 
--- a/hgext/convert/cvs.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/cvs.py	Wed Sep 17 11:34:37 2008 +0200
@@ -3,8 +3,10 @@
 import os, locale, re, socket
 from cStringIO import StringIO
 from mercurial import util
+from mercurial.i18n import _
 
 from common import NoRepo, commit, converter_source, checktool
+import cvsps
 
 class convert_cvs(converter_source):
     def __init__(self, ui, path, rev=None):
@@ -14,10 +16,13 @@
         if not os.path.exists(cvs):
             raise NoRepo("%s does not look like a CVS checkout" % path)
 
+        checktool('cvs')
         self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q')
         cvspsexe = self.cmd.split(None, 1)[0]
-        for tool in (cvspsexe, 'cvs'):
-            checktool(tool)
+        self.builtin = cvspsexe == 'builtin'
+
+        if not self.builtin:
+            checktool(cvspsexe)
 
         self.changeset = {}
         self.files = {}
@@ -28,10 +33,11 @@
         self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
         self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
         self.encoding = locale.getpreferredencoding()
-        self._parse()
+
+        self._parse(ui)
         self._connect()
 
-    def _parse(self):
+    def _parse(self, ui):
         if self.changeset:
             return
 
@@ -48,7 +54,7 @@
                     util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
                     cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
                 except util.Abort:
-                    raise util.Abort('revision %s is not a patchset number or date' % self.rev)
+                    raise util.Abort(_('revision %s is not a patchset number or date') % self.rev)
 
         d = os.getcwd()
         try:
@@ -56,80 +62,114 @@
             id = None
             state = 0
             filerevids = {}
-            for l in util.popen(cmd):
-                if state == 0: # header
-                    if l.startswith("PatchSet"):
-                        id = l[9:-2]
-                        if maxrev and int(id) > maxrev:
-                            # ignore everything
-                            state = 3
-                    elif l.startswith("Date"):
-                        date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
-                        date = util.datestr(date)
-                    elif l.startswith("Branch"):
-                        branch = l[8:-1]
-                        self.parent[id] = self.lastbranch.get(branch, 'bad')
-                        self.lastbranch[branch] = id
-                    elif l.startswith("Ancestor branch"):
-                        ancestor = l[17:-1]
-                        # figure out the parent later
-                        self.parent[id] = self.lastbranch[ancestor]
-                    elif l.startswith("Author"):
-                        author = self.recode(l[8:-1])
-                    elif l.startswith("Tag:") or l.startswith("Tags:"):
-                        t = l[l.index(':')+1:]
-                        t = [ut.strip() for ut in t.split(',')]
-                        if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
-                            self.tags.update(dict.fromkeys(t, id))
-                    elif l.startswith("Log:"):
-                        # switch to gathering log
-                        state = 1
-                        log = ""
-                elif state == 1: # log
-                    if l == "Members: \n":
-                        # switch to gathering members
-                        files = {}
-                        oldrevs = []
-                        log = self.recode(log[:-1])
-                        state = 2
-                    else:
-                        # gather log
-                        log += l
-                elif state == 2: # members
-                    if l == "\n": # start of next entry
-                        state = 0
-                        p = [self.parent[id]]
-                        if id == "1":
-                            p = []
-                        if branch == "HEAD":
-                            branch = ""
-                        if branch:
-                            latest = None
-                            # the last changeset that contains a base
-                            # file is our parent
-                            for r in oldrevs:
-                                latest = max(filerevids.get(r, None), latest)
-                            if latest:
-                                p = [latest]
+
+            if self.builtin:
+                # builtin cvsps code
+                ui.status(_('using builtin cvsps\n'))
+
+                db = cvsps.createlog(ui, cache='update')
+                db = cvsps.createchangeset(ui, db,
+                      fuzz=int(ui.config('convert', 'cvsps.fuzz', 60)),
+                      mergeto=ui.config('convert', 'cvsps.mergeto', None),
+                      mergefrom=ui.config('convert', 'cvsps.mergefrom', None))
+
+                for cs in db:
+                    if maxrev and cs.id>maxrev:
+                        break
+                    id = str(cs.id)
+                    cs.author = self.recode(cs.author)
+                    self.lastbranch[cs.branch] = id
+                    cs.comment = self.recode(cs.comment)
+                    date = util.datestr(cs.date)
+                    self.tags.update(dict.fromkeys(cs.tags, id))
+
+                    files = {}
+                    for f in cs.entries:
+                        files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
+                                                  ['', '(DEAD)'][f.dead])
 
-                        # add current commit to set
-                        c = commit(author=author, date=date, parents=p,
-                                   desc=log, branch=branch)
-                        self.changeset[id] = c
-                        self.files[id] = files
-                    else:
-                        colon = l.rfind(':')
-                        file = l[1:colon]
-                        rev = l[colon+1:-2]
-                        oldrev, rev = rev.split("->")
-                        files[file] = rev
+                    # add current commit to set
+                    c = commit(author=cs.author, date=date,
+                             parents=[str(p.id) for p in cs.parents],
+                             desc=cs.comment, branch=cs.branch or '')
+                    self.changeset[id] = c
+                    self.files[id] = files
+            else:
+                # external cvsps
+                for l in util.popen(cmd):
+                    if state == 0: # header
+                        if l.startswith("PatchSet"):
+                            id = l[9:-2]
+                            if maxrev and int(id) > maxrev:
+                                # ignore everything
+                                state = 3
+                        elif l.startswith("Date"):
+                            date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
+                            date = util.datestr(date)
+                        elif l.startswith("Branch"):
+                            branch = l[8:-1]
+                            self.parent[id] = self.lastbranch.get(branch, 'bad')
+                            self.lastbranch[branch] = id
+                        elif l.startswith("Ancestor branch"):
+                            ancestor = l[17:-1]
+                            # figure out the parent later
+                            self.parent[id] = self.lastbranch[ancestor]
+                        elif l.startswith("Author"):
+                            author = self.recode(l[8:-1])
+                        elif l.startswith("Tag:") or l.startswith("Tags:"):
+                            t = l[l.index(':')+1:]
+                            t = [ut.strip() for ut in t.split(',')]
+                            if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
+                                self.tags.update(dict.fromkeys(t, id))
+                        elif l.startswith("Log:"):
+                            # switch to gathering log
+                            state = 1
+                            log = ""
+                    elif state == 1: # log
+                        if l == "Members: \n":
+                            # switch to gathering members
+                            files = {}
+                            oldrevs = []
+                            log = self.recode(log[:-1])
+                            state = 2
+                        else:
+                            # gather log
+                            log += l
+                    elif state == 2: # members
+                        if l == "\n": # start of next entry
+                            state = 0
+                            p = [self.parent[id]]
+                            if id == "1":
+                                p = []
+                            if branch == "HEAD":
+                                branch = ""
+                            if branch:
+                                latest = None
+                                # the last changeset that contains a base
+                                # file is our parent
+                                for r in oldrevs:
+                                    latest = max(filerevids.get(r, None), latest)
+                                if latest:
+                                    p = [latest]
 
-                        # save some information for identifying branch points
-                        oldrevs.append("%s:%s" % (oldrev, file))
-                        filerevids["%s:%s" % (rev, file)] = id
-                elif state == 3:
-                    # swallow all input
-                    continue
+                            # add current commit to set
+                            c = commit(author=author, date=date, parents=p,
+                                       desc=log, branch=branch)
+                            self.changeset[id] = c
+                            self.files[id] = files
+                        else:
+                            colon = l.rfind(':')
+                            file = l[1:colon]
+                            rev = l[colon+1:-2]
+                            oldrev, rev = rev.split("->")
+                            files[file] = rev
+
+                            # save some information for identifying branch points
+                            oldrevs.append("%s:%s" % (oldrev, file))
+                            filerevids["%s:%s" % (rev, file)] = id
+                    elif state == 3:
+                        # swallow all input
+                        continue
 
             self.heads = self.lastbranch.values()
         finally:
@@ -141,7 +181,7 @@
         user, host = None, None
         cmd = ['cvs', 'server']
 
-        self.ui.status("connecting to %s\n" % root)
+        self.ui.status(_("connecting to %s\n") % root)
 
         if root.startswith(":pserver:"):
             root = root[9:]
@@ -181,7 +221,7 @@
                 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                     "END AUTH REQUEST", ""]))
                 if sck.recv(128) != "I LOVE YOU\n":
-                    raise util.Abort("CVS pserver authentication failed")
+                    raise util.Abort(_("CVS pserver authentication failed"))
 
                 self.writep = self.readp = sck.makefile('r+')
 
@@ -224,7 +264,7 @@
         self.writep.flush()
         r = self.readp.readline()
         if not r.startswith("Valid-requests"):
-            raise util.Abort("server sucks")
+            raise util.Abort(_("server sucks"))
         if "UseUnchanged" in r:
             self.writep.write("UseUnchanged\n")
             self.writep.flush()
@@ -243,7 +283,7 @@
             while count > 0:
                 data = fp.read(min(count, chunksize))
                 if not data:
-                    raise util.Abort("%d bytes missing from remote file" % count)
+                    raise util.Abort(_("%d bytes missing from remote file") % count)
                 count -= len(data)
                 output.write(data)
             return output.getvalue()
@@ -278,14 +318,14 @@
                 if line == "ok\n":
                     return (data, "x" in mode and "x" or "")
                 elif line.startswith("E "):
-                    self.ui.warn("cvs server: %s\n" % line[2:])
+                    self.ui.warn(_("cvs server: %s\n") % line[2:])
                 elif line.startswith("Remove"):
                     l = self.readp.readline()
                     l = self.readp.readline()
                     if l != "ok\n":
-                        raise util.Abort("unknown CVS response: %s" % l)
+                        raise util.Abort(_("unknown CVS response: %s") % l)
                 else:
-                    raise util.Abort("unknown CVS response: %s" % line)
+                    raise util.Abort(_("unknown CVS response: %s") % line)
 
     def getfile(self, file, rev):
         data, mode = self._getfile(file, rev)
@@ -297,10 +337,7 @@
 
     def getchanges(self, rev):
         self.modecache = {}
-        files = self.files[rev]
-        cl = files.items()
-        cl.sort()
-        return (cl, {})
+        return util.sort(self.files[rev].items()), {}
 
     def getcommit(self, rev):
         return self.changeset[rev]
@@ -309,7 +346,4 @@
         return self.tags
 
     def getchangedfiles(self, rev, i):
-        files = self.files[rev].keys()
-        files.sort()
-        return files
-
+        return util.sort(self.files[rev].keys())
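
When convert.cvsps is set to 'builtin', _parse() flattens each cvsps changeset into the same 'file -> revision string' map the external parser produced, tagging dead revisions with a '(DEAD)' suffix. A toy illustration of that formatting (the entry objects are faked):

    class fakeentry(object):
        def __init__(self, file, revision, dead):
            self.file, self.revision, self.dead = file, revision, dead

    files = {}
    for f in [fakeentry('src/main.c', (1, 4), False),
              fakeentry('old/junk.c', (1, 2, 2, 1), True)]:
        files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
                                  ['', '(DEAD)'][f.dead])
    print files   # e.g. {'src/main.c': '1.4', 'old/junk.c': '1.2.2.1(DEAD)'}
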
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/cvsps	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#
+# Commandline front-end for cvsps.py
+#
+# Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import sys
+from mercurial import util
+from mercurial.i18n import _
+from optparse import OptionParser, SUPPRESS_HELP
+from hgext.convert.cvsps import createlog, createchangeset, logerror
+
+def main():
+    '''Main program to mimic cvsps.'''
+
+    op = OptionParser(usage='%prog [-bpruvxz] path',
+                      description='Read CVS rlog for current directory or named '
+                                  'path in repository, and convert the log to changesets '
+                                  'based on matching commit log entries and dates.')
+
+    # Options that are ignored for compatibility with cvsps-2.1
+    op.add_option('-A', dest='Ignore', action='store_true', help=SUPPRESS_HELP)
+    op.add_option('--cvs-direct', dest='Ignore', action='store_true', help=SUPPRESS_HELP)
+    op.add_option('-q', dest='Ignore', action='store_true', help=SUPPRESS_HELP)
+
+    # Main options shared with cvsps-2.1
+    op.add_option('-b', dest='Branches', action='append', default=[],
+                  help='Only return changes on specified branches')
+    op.add_option('-p', dest='Prefix', action='store', default='',
+                  help='Prefix to remove from file names')
+    op.add_option('-r', dest='Revisions', action='append', default=[],
+                  help='Only return changes after or between specified tags')
+    op.add_option('-u', dest='Cache', action='store_const', const='update',
+                  help="Update cvs log cache")
+    op.add_option('-v', dest='Verbose', action='count', default=0,
+                  help='Be verbose')
+    op.add_option('-x', dest='Cache', action='store_const', const='write',
+                  help="Create new cvs log cache")
+    op.add_option('-z', dest='Fuzz', action='store', type='int', default=60,
+                  help='Set commit time fuzz', metavar='seconds')
+    op.add_option('--root', dest='Root', action='store', default='',
+                  help='Specify cvsroot', metavar='cvsroot')
+
+    # Options specific to this version
+    op.add_option('--parents', dest='Parents', action='store_true',
+                  help='Show parent changesets')
+    op.add_option('--ancestors', dest='Ancestors', action='store_true',
+                  help='Show current changeset in ancestor branches')
+
+    options, args = op.parse_args()
+
+    # Create a ui object for printing progress messages
+    class UI:
+        def __init__(self, verbose):
+            if verbose:
+                self.status = self.message
+            if verbose>1:
+                self.note = self.message
+            if verbose>2:
+                self.debug = self.message
+        def message(self, msg):
+            sys.stderr.write(msg)
+        def nomessage(self, msg):
+            pass
+        status = nomessage
+        note = nomessage
+        debug = nomessage
+    ui = UI(options.Verbose)
+
+    try:
+        if args:
+            log = []
+            for d in args:
+                log += createlog(ui, d, root=options.Root, cache=options.Cache)
+        else:
+            log = createlog(ui, root=options.Root, cache=options.Cache)
+    except logerror, e:
+        print e
+        return
+
+    changesets = createchangeset(ui, log, options.Fuzz)
+    del log
+
+    # Print changesets (optionally filtered)
+
+    off = len(options.Revisions)
+    branches = {}    # latest version number in each branch
+    ancestors = {}   # parent branch
+    for cs in changesets:
+
+        if options.Ancestors:
+            if cs.branch not in branches and cs.parents and cs.parents[0].id:
+                ancestors[cs.branch] = changesets[cs.parents[0].id-1].branch, cs.parents[0].id
+            branches[cs.branch] = cs.id
+
+        # limit by branches
+        if options.Branches and (cs.branch or 'HEAD') not in options.Branches:
+            continue
+
+        if not off:
+            # Note: trailing spaces on several lines here are needed to have
+            #       bug-for-bug compatibility with cvsps.
+            print '---------------------'
+            print 'PatchSet %d ' % cs.id
+            print 'Date: %s' % util.datestr(cs.date, '%Y/%m/%d %H:%M:%S %1%2')
+            print 'Author: %s' % cs.author
+            print 'Branch: %s' % (cs.branch or 'HEAD')
+            print 'Tag%s: %s ' % (['', 's'][len(cs.tags)>1],
+                                  ','.join(cs.tags) or '(none)')
+            if options.Parents and cs.parents:
+                if len(cs.parents)>1:
+                    print 'Parents: %s' % (','.join([str(p.id) for p in cs.parents]))
+                else:
+                    print 'Parent: %d' % cs.parents[0].id
+
+            if options.Ancestors:
+                b = cs.branch
+                r = []
+                while b:
+                    b, c = ancestors[b]
+                    r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
+                if r:
+                    print 'Ancestors: %s' % (','.join(r))
+
+            print 'Log:'
+            print cs.comment
+            print
+            print 'Members: '
+            for f in cs.entries:
+                fn = f.file
+                if fn.startswith(options.Prefix):
+                    fn = fn[len(options.Prefix):]
+                print '\t%s:%s->%s%s ' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
+                                          '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead])
+            print
+
+        # have we seen the start tag?
+        if options.Revisions and off:
+            if options.Revisions[0] == str(cs.id) or \
+                options.Revisions[0] in cs.tags:
+                off = False
+
+        # see if we reached the end tag
+        if len(options.Revisions)>1 and not off:
+            if options.Revisions[1] == str(cs.id) or \
+                options.Revisions[1] in cs.tags:
+                break
+
+
+if __name__ == '__main__':
+    main()
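
The wrapper is meant to be run from inside a CVS sandbox and prints patchsets in a layout compatible with cvsps 2.1; -u/-x manage the rlog cache, -z sets the fuzz window, and --parents/--ancestors are additions of this version. Hypothetical invocations (paths and tag names made up):

    # refresh the log cache and show patchsets with parent links, 120s fuzz
    cd ~/work/cvs-checkout
    hgext/convert/cvsps -u -z 120 --parents

    # only patchsets on branch RELENG_1, between tags REL_1_0 and REL_1_1
    hgext/convert/cvsps -b RELENG_1 -r REL_1_0 -r REL_1_1
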
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/convert/cvsps.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,548 @@
+#
+# Mercurial built-in replacement for cvsps.
+#
+# Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os
+import re
+import sys
+import cPickle as pickle
+from mercurial import util
+from mercurial.i18n import _
+
+def listsort(list, key):
+    "helper to sort by key in Python 2.3"
+    try:
+        list.sort(key=key)
+    except TypeError:
+        list.sort(lambda l, r: cmp(key(l), key(r)))
+
+class logentry(object):
+    '''Class logentry has the following attributes:
+        .author    - author name as CVS knows it
+        .branch    - name of branch this revision is on
+        .branches  - revision tuple of branches starting at this revision
+        .comment   - commit message
+        .date      - the commit date as a (time, tz) tuple
+        .dead      - true if file revision is dead
+        .file      - Name of file
+        .lines     - a tuple (+lines, -lines) or None
+        .parent    - Previous revision of this entry
+        .rcs       - name of file as returned from CVS
+        .revision  - revision number as tuple
+        .tags      - list of tags on the file
+    '''
+    def __init__(self, **entries):
+        self.__dict__.update(entries)
+
+class logerror(Exception):
+    pass
+
+def createlog(ui, directory=None, root="", rlog=True, cache=None):
+    '''Collect the CVS rlog'''
+
+    # Because we store many duplicate commit log messages, reusing strings
+    # saves a lot of memory and pickle storage space.
+    _scache = {}
+    def scache(s):
+        "return a shared version of a string"
+        return _scache.setdefault(s, s)
+
+    ui.status(_('collecting CVS rlog\n'))
+
+    log = []      # list of logentry objects containing the CVS state
+
+    # patterns to match in CVS (r)log output, by state of use
+    re_00 = re.compile('RCS file: (.+)$')
+    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
+    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
+    re_03 = re.compile("(Cannot access.+CVSROOT)|(can't create temporary directory.+)$")
+    re_10 = re.compile('Working file: (.+)$')
+    re_20 = re.compile('symbolic names:')
+    re_30 = re.compile('\t(.+): ([\\d.]+)$')
+    re_31 = re.compile('----------------------------$')
+    re_32 = re.compile('=============================================================================$')
+    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
+    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?')
+    re_70 = re.compile('branches: (.+);$')
+
+    prefix = ''   # leading path to strip off what we get from CVS
+
+    if directory is None:
+        # Current working directory
+
+        # Get the real directory in the repository
+        try:
+            prefix = file(os.path.join('CVS','Repository')).read().strip()
+            if prefix == ".":
+                prefix = ""
+            directory = prefix
+        except IOError:
+            raise logerror('Not a CVS sandbox')
+
+        if prefix and not prefix.endswith('/'):
+            prefix += '/'
+
+        # Use the Root file in the sandbox, if it exists
+        try:
+            root = file(os.path.join('CVS','Root')).read().strip()
+        except IOError:
+            pass
+
+    if not root:
+        root = os.environ.get('CVSROOT', '')
+
+    # read log cache if one exists
+    oldlog = []
+    date = None
+
+    if cache:
+        cachedir = os.path.expanduser('~/.hg.cvsps')
+        if not os.path.exists(cachedir):
+            os.mkdir(cachedir)
+
+        # The cvsps cache pickle needs a uniquified name, based on the
+        # repository location. The address may have all sorts of nasties
+        # in it, slashes, colons and such. So here we take just the
+        # alphanumerics, concatenated in a way that does not mix up the
+        # various components, so that
+        #    :pserver:user@server:/path
+        # and
+        #    /pserver/user/server/path
+        # are mapped to different cache file names.
+        cachefile = root.split(":") + [directory, "cache"]
+        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
+        cachefile = os.path.join(cachedir,
+                                 '.'.join([s for s in cachefile if s]))
+
+    if cache == 'update':
+        try:
+            ui.note(_('reading cvs log cache %s\n') % cachefile)
+            oldlog = pickle.load(file(cachefile))
+            ui.note(_('cache has %d log entries\n') % len(oldlog))
+        except Exception, e:
+            ui.note(_('error reading cache: %r\n') % e)
+
+        if oldlog:
+            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
+            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
+
+    # build the CVS commandline
+    cmd = ['cvs', '-q']
+    if root:
+        cmd.append('-d%s' % root)
+        p = root.split(':')[-1]
+        if not p.endswith('/'):
+            p += '/'
+        prefix = p + prefix
+    cmd.append(['log', 'rlog'][rlog])
+    if date:
+        # no space between option and date string
+        cmd.append('-d>%s' % date)
+    cmd.append(directory)
+
+    # state machine begins here
+    tags = {}     # dictionary of revisions on current file with their tags
+    state = 0
+    store = False # set when a new record can be appended
+
+    cmd = [util.shellquote(arg) for arg in cmd]
+    ui.note(_("running %s\n") % (' '.join(cmd)))
+    ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
+
+    for line in util.popen(' '.join(cmd)):
+        if line.endswith('\n'):
+            line = line[:-1]
+        #ui.debug('state=%d line=%r\n' % (state, line))
+
+        if state == 0:
+            # initial state, consume input until we see 'RCS file'
+            match = re_00.match(line)
+            if match:
+                rcs = match.group(1)
+                tags = {}
+                if rlog:
+                    filename = rcs[:-2]
+                    if filename.startswith(prefix):
+                        filename = filename[len(prefix):]
+                    if filename.startswith('/'):
+                        filename = filename[1:]
+                    if filename.startswith('Attic/'):
+                        filename = filename[6:]
+                    else:
+                        filename = filename.replace('/Attic/', '/')
+                    state = 2
+                    continue
+                state = 1
+                continue
+            match = re_01.match(line)
+            if match:
+                raise Exception(match.group(1))
+            match = re_02.match(line)
+            if match:
+                raise Exception(match.group(2))
+            if re_03.match(line):
+                raise Exception(line)
+
+        elif state == 1:
+            # expect 'Working file' (only when using log instead of rlog)
+            match = re_10.match(line)
+            assert match, _('RCS file must be followed by working file')
+            filename = match.group(1)
+            state = 2
+
+        elif state == 2:
+            # expect 'symbolic names'
+            if re_20.match(line):
+                state = 3
+
+        elif state == 3:
+            # read the symbolic names and store as tags
+            match = re_30.match(line)
+            if match:
+                rev = [int(x) for x in match.group(2).split('.')]
+
+                # Convert magic branch number to an odd-numbered one
+                revn = len(rev)
+                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
+                    rev = rev[:-2] + rev[-1:]
+                rev = tuple(rev)
+
+                if rev not in tags:
+                    tags[rev] = []
+                tags[rev].append(match.group(1))
+
+            elif re_31.match(line):
+                state = 5
+            elif re_32.match(line):
+                state = 0
+
+        elif state == 4:
+            # expecting '------' separator before first revision
+            if re_31.match(line):
+                state = 5
+            else:
+                assert not re_32.match(line), _('Must have at least some revisions')
+
+        elif state == 5:
+            # expecting revision number and possibly (ignored) lock indication
+            # we create the logentry here from values stored in states 0 to 4,
+            # as this state is re-entered for subsequent revisions of a file.
+            match = re_50.match(line)
+            assert match, _('expected revision number')
+            e = logentry(rcs=scache(rcs), file=scache(filename),
+                    revision=tuple([int(x) for x in match.group(1).split('.')]),
+                    branches=[], parent=None)
+            state = 6
+
+        elif state == 6:
+            # expecting date, author, state, lines changed
+            match = re_60.match(line)
+            assert match, _('revision must be followed by date line')
+            d = match.group(1)
+            if d[2] == '/':
+                # Y2K
+                d = '19' + d
+
+            if len(d.split()) != 3:
+                # cvs log dates always in GMT
+                d = d + ' UTC'
+            e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'])
+            e.author = scache(match.group(2))
+            e.dead = match.group(3).lower() == 'dead'
+
+            if match.group(5):
+                if match.group(6):
+                    e.lines = (int(match.group(5)), int(match.group(6)))
+                else:
+                    e.lines = (int(match.group(5)), 0)
+            elif match.group(6):
+                e.lines = (0, int(match.group(6)))
+            else:
+                e.lines = None
+            e.comment = []
+            state = 7
+
+        elif state == 7:
+            # read the revision numbers of branches that start at this revision
+            # or store the commit log message otherwise
+            m = re_70.match(line)
+            if m:
+                e.branches = [tuple([int(y) for y in x.strip().split('.')])
+                                for x in m.group(1).split(';')]
+                state = 8
+            elif re_31.match(line):
+                state = 5
+                store = True
+            elif re_32.match(line):
+                state = 0
+                store = True
+            else:
+                e.comment.append(line)
+
+        elif state == 8:
+            # store commit log message
+            if re_31.match(line):
+                state = 5
+                store = True
+            elif re_32.match(line):
+                state = 0
+                store = True
+            else:
+                e.comment.append(line)
+
+        if store:
+            # clean up the results and save in the log.
+            store = False
+            e.tags = util.sort([scache(x) for x in tags.get(e.revision, [])])
+            e.comment = scache('\n'.join(e.comment))
+
+            revn = len(e.revision)
+            if revn > 3 and (revn % 2) == 0:
+                e.branch = tags.get(e.revision[:-1], [None])[0]
+            else:
+                e.branch = None
+
+            log.append(e)
+
+            if len(log) % 100 == 0:
+                ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
+
+    listsort(log, key=lambda x:(x.rcs, x.revision))
+
+    # find parent revisions of individual files
+    versions = {}
+    for e in log:
+        branch = e.revision[:-1]
+        p = versions.get((e.rcs, branch), None)
+        if p is None:
+            p = e.revision[:-2]
+        e.parent = p
+        versions[(e.rcs, branch)] = e.revision
+
+    # update the log cache
+    if cache:
+        if log:
+            # join up the old and new logs
+            listsort(log, key=lambda x:x.date)
+
+            if oldlog and oldlog[-1].date >= log[0].date:
+                raise logerror('Log cache overlaps with new log entries,'
+                               ' re-run without cache.')
+
+            log = oldlog + log
+
+            # write the new cachefile
+            ui.note(_('writing cvs log cache %s\n') % cachefile)
+            pickle.dump(log, file(cachefile, 'w'))
+        else:
+            log = oldlog
+
+    ui.status(_('%d log entries\n') % len(log))
+
+    return log
+
+
+class changeset(object):
+    '''Class changeset has the following attributes:
+        .author    - author name as CVS knows it
+        .branch    - name of branch this changeset is on, or None
+        .comment   - commit message
+        .date      - the commit date as a (time,tz) tuple
+        .entries   - list of logentry objects in this changeset
+        .parents   - list of one or two parent changesets
+        .tags      - list of tags on this changeset
+    '''
+    def __init__(self, **entries):
+        self.__dict__.update(entries)
+
+def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
+    '''Convert log into changesets.'''
+
+    ui.status(_('creating changesets\n'))
+
+    # Merge changesets
+
+    listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
+
+    changesets = []
+    files = {}
+    c = None
+    for i, e in enumerate(log):
+
+        # Check if log entry belongs to the current changeset or not.
+        if not (c and
+                  e.comment == c.comment and
+                  e.author == c.author and
+                  e.branch == c.branch and
+                  ((c.date[0] + c.date[1]) <=
+                   (e.date[0] + e.date[1]) <=
+                   (c.date[0] + c.date[1]) + fuzz) and
+                  e.file not in files):
+            c = changeset(comment=e.comment, author=e.author,
+                          branch=e.branch, date=e.date, entries=[])
+            changesets.append(c)
+            files = {}
+            if len(changesets) % 100 == 0:
+                t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
+                ui.status(util.ellipsis(t, 80) + '\n')
+
+        c.entries.append(e)
+        files[e.file] = True
+        c.date = e.date       # changeset date is date of latest commit in it
+
+    # Sort files in each changeset
+
+    for c in changesets:
+        def pathcompare(l, r):
+            'Mimic cvsps sorting order'
+            l = l.split('/')
+            r = r.split('/')
+            nl = len(l)
+            nr = len(r)
+            n = min(nl, nr)
+            for i in range(n):
+                if i + 1 == nl and nl < nr:
+                    return -1
+                elif i + 1 == nr and nl > nr:
+                    return +1
+                elif l[i] < r[i]:
+                    return -1
+                elif l[i] > r[i]:
+                    return +1
+            return 0
+        def entitycompare(l, r):
+            return pathcompare(l.file, r.file)
+
+        c.entries.sort(entitycompare)
+
+    # Sort changesets by date
+
+    def cscmp(l, r):
+        d = sum(l.date) - sum(r.date)
+        if d:
+            return d
+
+        # detect vendor branches and initial commits on a branch
+        le = {}
+        for e in l.entries:
+            le[e.rcs] = e.revision
+        re = {}
+        for e in r.entries:
+            re[e.rcs] = e.revision
+
+        d = 0
+        for e in l.entries:
+            if re.get(e.rcs, None) == e.parent:
+                assert not d
+                d = 1
+                break
+
+        for e in r.entries:
+            if le.get(e.rcs, None) == e.parent:
+                assert not d
+                d = -1
+                break
+
+        return d
+
+    changesets.sort(cscmp)
+
+    # Collect tags
+
+    globaltags = {}
+    for c in changesets:
+        tags = {}
+        for e in c.entries:
+            for tag in e.tags:
+                # remember which is the latest changeset to have this tag
+                globaltags[tag] = c
+
+    for c in changesets:
+        tags = {}
+        for e in c.entries:
+            for tag in e.tags:
+                tags[tag] = True
+        # remember tags only if this is the latest changeset to have it
+        c.tags = util.sort([tag for tag in tags if globaltags[tag] is c])
+
+    # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
+    # by inserting dummy changesets with two parents, and handle
+    # {{mergefrombranch BRANCHNAME}} by setting two parents.
+
+    if mergeto is None:
+        mergeto = r'{{mergetobranch ([-\w]+)}}'
+    if mergeto:
+        mergeto = re.compile(mergeto)
+
+    if mergefrom is None:
+        mergefrom = r'{{mergefrombranch ([-\w]+)}}'
+    if mergefrom:
+        mergefrom = re.compile(mergefrom)
+
+    versions = {}    # changeset index where we saw any particular file version
+    branches = {}    # changeset index where we saw a branch
+    n = len(changesets)
+    i = 0
+    while i<n:
+        c = changesets[i]
+
+        for f in c.entries:
+            versions[(f.rcs, f.revision)] = i
+
+        p = None
+        if c.branch in branches:
+            p = branches[c.branch]
+        else:
+            for f in c.entries:
+                p = max(p, versions.get((f.rcs, f.parent), None))
+
+        c.parents = []
+        if p is not None:
+            c.parents.append(changesets[p])
+
+        if mergefrom:
+            m = mergefrom.search(c.comment)
+            if m:
+                m = m.group(1)
+                if m == 'HEAD':
+                    m = None
+                if m in branches and c.branch != m:
+                    c.parents.append(changesets[branches[m]])
+
+        if mergeto:
+            m = mergeto.search(c.comment)
+            if m:
+                try:
+                    m = m.group(1)
+                    if m == 'HEAD':
+                        m = None
+                except:
+                    m = None   # if no group found then merge to HEAD
+                if m in branches and c.branch != m:
+                    # insert empty changeset for merge
+                    cc = changeset(author=c.author, branch=m, date=c.date,
+                            comment='convert-repo: CVS merge from branch %s' % c.branch,
+                            entries=[], tags=[], parents=[changesets[branches[m]], c])
+                    changesets.insert(i + 1, cc)
+                    branches[m] = i + 1
+
+                    # adjust our loop counters now we have inserted a new entry
+                    n += 1
+                    i += 2
+                    continue
+
+        branches[c.branch] = i
+        i += 1
+
+    # Number changesets
+
+    for i, c in enumerate(changesets):
+        c.id = i + 1
+
+    ui.status(_('%d changeset entries\n') % len(changesets))
+
+    return changesets
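The createchangeset code above folds CVS log entries into one changeset when they share comment, author and branch, fall inside the fuzz window, and do not repeat a file. Below is a minimal standalone sketch of that grouping rule; the Entry class and the sample entries are illustrative stand-ins, not cvsps objects.

# Sketch of the fuzz-window grouping performed by createchangeset above.
# Entry and the sample data are made up for illustration.
class Entry(object):
    def __init__(self, file, comment, author, branch, date):
        self.file, self.comment, self.author = file, comment, author
        self.branch, self.date = branch, date    # date is a (time, tz) tuple

def group(entries, fuzz=60):
    entries = sorted(entries, key=lambda e: (e.comment, e.author, e.branch, e.date))
    changesets, c, files = [], None, set()
    for e in entries:
        same = (c is not None and
                e.comment == c['comment'] and
                e.author == c['author'] and
                e.branch == c['branch'] and
                0 <= sum(e.date) - sum(c['date']) <= fuzz and
                e.file not in files)
        if not same:
            c = dict(comment=e.comment, author=e.author,
                     branch=e.branch, date=e.date, entries=[])
            changesets.append(c)
            files = set()
        c['entries'].append(e)
        files.add(e.file)
        c['date'] = e.date    # changeset date is the date of its latest entry
    return changesets

entries = [Entry('a.c', 'fix bug', 'bob', None, (1000, 0)),
           Entry('b.c', 'fix bug', 'bob', None, (1030, 0)),
           Entry('c.c', 'fix bug', 'bob', None, (2000, 0))]
assert len(group(entries)) == 2    # the third entry falls outside the 60s window

A larger fuzz value would fold the third entry in as well; repeating a file (the e.file not in files check) always starts a new changeset regardless of fuzz.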
--- a/hgext/convert/darcs.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/darcs.py	Wed Sep 17 11:34:37 2008 +0200
@@ -66,7 +66,7 @@
         self.parents[child] = []
 
     def after(self):
-        self.ui.debug('cleaning up %s\n' % self.tmppath)
+        self.ui.debug(_('cleaning up %s\n') % self.tmppath)
         shutil.rmtree(self.tmppath, ignore_errors=True)
 
     def xml(self, cmd, **kwargs):
@@ -110,9 +110,8 @@
                 copies[elt.get('from')] = elt.get('to')
             else:
                 changes.append((elt.text.strip(), rev))
-        changes.sort()
         self.lastrev = rev
-        return changes, copies
+        return util.sort(changes), copies
 
     def getfile(self, name, rev):
         if rev != self.lastrev:
--- a/hgext/convert/gnuarch.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/gnuarch.py	Wed Sep 17 11:34:37 2008 +0200
@@ -23,7 +23,7 @@
         super(gnuarch_source, self).__init__(ui, path, rev=rev)
 
         if not os.path.exists(os.path.join(path, '{arch}')):
-            raise NoRepo(_("%s does not look like a GNU Arch repo" % path))
+            raise NoRepo(_("%s does not look like a GNU Arch repo") % path)
 
         # Could use checktool, but we want to check for baz or tla.
         self.execmd = None
@@ -54,7 +54,7 @@
             output = self.run0('tree-version', '-d', self.path)
         self.treeversion = output.strip()
 
-        self.ui.status(_('analyzing tree version %s...\n' % self.treeversion))
+        self.ui.status(_('analyzing tree version %s...\n') % self.treeversion)
 
         # Get name of temporary directory
         version = self.treeversion.split('/')
@@ -80,7 +80,7 @@
         self.parents[None] = child
 
     def after(self):
-        self.ui.debug(_('cleaning up %s\n' % self.tmppath))
+        self.ui.debug(_('cleaning up %s\n') % self.tmppath)
         shutil.rmtree(self.tmppath, ignore_errors=True)
 
     def getheads(self):
@@ -130,10 +130,8 @@
             for c in cps:
                 copies[c] = cps[c]
 
-        changes.sort()
         self.lastrev = rev
-
-        return changes, copies
+        return util.sort(changes), copies
 
     def getcommit(self, rev):
         changes = self.changes[rev]
@@ -157,7 +155,7 @@
             # Initialise 'base-0' revision
             self._obtainrevision(rev)
         else:
-            self.ui.debug(_('applying revision %s...\n' % rev))
+            self.ui.debug(_('applying revision %s...\n') % rev)
             revision = '%s--%s' % (self.treeversion, rev)
             changeset, status = self.runlines('replay', '-d', self.tmppath,
                                               revision)
@@ -168,8 +166,8 @@
                 self._obtainrevision(rev)
             else:
                 old_rev = self.parents[rev][0]
-                self.ui.debug(_('computing changeset between %s and %s...\n' \
-                                    % (old_rev, rev)))
+                self.ui.debug(_('computing changeset between %s and %s...\n')
+                              % (old_rev, rev))
                 rev_a = '%s--%s' % (self.treeversion, old_rev)
                 rev_b = '%s--%s' % (self.treeversion, rev)
                 self._parsechangeset(changeset, rev)
@@ -219,11 +217,11 @@
         return changes, copies
 
     def _obtainrevision(self, rev):
-        self.ui.debug(_('obtaining revision %s...\n' % rev))
+        self.ui.debug(_('obtaining revision %s...\n') % rev)
         revision = '%s--%s' % (self.treeversion, rev)
         output = self._execute('get', revision, self.tmppath)
         self.checkexit(output)
-        self.ui.debug(_('analysing revision %s...\n' % rev))
+        self.ui.debug(_('analysing revision %s...\n') % rev)
         files = self._readcontents(self.tmppath)
         self.changes[rev].add_files += files
 
--- a/hgext/convert/hg.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/hg.py	Wed Sep 17 11:34:37 2008 +0200
@@ -17,7 +17,7 @@
 from mercurial.i18n import _
 from mercurial.repo import RepoError
 from mercurial.node import bin, hex, nullid
-from mercurial import hg, revlog, util
+from mercurial import hg, revlog, util, context
 
 from common import NoRepo, commit, converter_source, converter_sink
 
@@ -54,11 +54,9 @@
         self.ui.debug(_('run hg sink pre-conversion action\n'))
         self.wlock = self.repo.wlock()
         self.lock = self.repo.lock()
-        self.repo.dirstate.clear()
 
     def after(self):
         self.ui.debug(_('run hg sink post-conversion action\n'))
-        self.repo.dirstate.invalidate()
         self.lock = None
         self.wlock = None
 
@@ -72,21 +70,6 @@
         h = self.repo.changelog.heads()
         return [ hex(x) for x in h ]
 
-    def putfile(self, f, e, data):
-        self.repo.wwrite(f, data, e)
-        if f not in self.repo.dirstate:
-            self.repo.dirstate.normallookup(f)
-
-    def copyfile(self, source, dest):
-        self.repo.copy(source, dest)
-
-    def delfile(self, f):
-        try:
-            util.unlink(self.repo.wjoin(f))
-            #self.repo.remove([f])
-        except OSError:
-            pass
-
     def setbranch(self, branch, pbranches):
         if not self.clonebranches:
             return
@@ -125,13 +108,19 @@
                 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
             self.before()
 
-    def putcommit(self, files, parents, commit):
-        seen = {}
+    def putcommit(self, files, copies, parents, commit, source):
+
+        files = dict(files)
+        def getfilectx(repo, memctx, f):
+            v = files[f]
+            data = source.getfile(f, v)
+            e = source.getmode(f, v)
+            return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))
+
         pl = []
         for p in parents:
-            if p not in seen:
+            if p not in pl:
                 pl.append(p)
-                seen[p] = 1
         parents = pl
         nparents = len(parents)
         if self.filemapmode and nparents == 1:
@@ -152,9 +141,9 @@
         while parents:
             p1 = p2
             p2 = parents.pop(0)
-            a = self.repo.rawcommit(files, text, commit.author, commit.date,
-                                    bin(p1), bin(p2), extra=extra)
-            self.repo.dirstate.clear()
+            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
+                                 commit.author, commit.date, extra)
+            a = self.repo.commitctx(ctx)
             text = "(octopus merge fixup)\n"
             p2 = hex(self.repo.changelog.tip())
 
@@ -163,43 +152,39 @@
             mnode = self.repo.changelog.read(bin(p2))[0]
             if not man.cmp(m1node, man.revision(mnode)):
                 self.repo.rollback()
-                self.repo.dirstate.clear()
                 return parent
         return p2
 
     def puttags(self, tags):
-        try:
-            old = self.repo.wfile(".hgtags").read()
-            oldlines = old.splitlines(1)
-            oldlines.sort()
-        except:
-            oldlines = []
+         try:
+             parentctx = self.repo[self.tagsbranch]
+             tagparent = parentctx.node()
+         except RepoError, inst:
+             parentctx = None
+             tagparent = nullid
 
-        k = tags.keys()
-        k.sort()
-        newlines = []
-        for tag in k:
-            newlines.append("%s %s\n" % (tags[tag], tag))
+         try:
+             oldlines = util.sort(parentctx['.hgtags'].data().splitlines(1))
+         except:
+             oldlines = []
 
-        newlines.sort()
+         newlines = util.sort([("%s %s\n" % (tags[tag], tag)) for tag in tags])
 
-        if newlines != oldlines:
-            self.ui.status("updating tags\n")
-            f = self.repo.wfile(".hgtags", "w")
-            f.write("".join(newlines))
-            f.close()
-            if not oldlines: self.repo.add([".hgtags"])
-            date = "%s 0" % int(time.mktime(time.gmtime()))
-            extra = {}
-            if self.tagsbranch != 'default':
-                extra['branch'] = self.tagsbranch
-            try:
-                tagparent = self.repo.changectx(self.tagsbranch).node()
-            except RepoError, inst:
-                tagparent = nullid
-            self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
-                                date, tagparent, nullid, extra=extra)
-            return hex(self.repo.changelog.tip())
+         if newlines == oldlines:
+             return None
+         data = "".join(newlines)
+
+         def getfilectx(repo, memctx, f):
+            return context.memfilectx(f, data, False, False, None)
+
+         self.ui.status(_("updating tags\n"))
+         date = "%s 0" % int(time.mktime(time.gmtime()))
+         extra = {'branch': self.tagsbranch}
+         ctx = context.memctx(self.repo, (tagparent, None), "update tags",
+                              [".hgtags"], getfilectx, "convert-repo", date,
+                              extra)
+         self.repo.commitctx(ctx)
+         return hex(self.repo.changelog.tip())
 
     def setfilemapmode(self, active):
         self.filemapmode = active
@@ -221,51 +206,75 @@
         self.lastctx = None
         self._changescache = None
         self.convertfp = None
+        # Restrict converted revisions to startrev descendants
+        startnode = ui.config('convert', 'hg.startrev')
+        if startnode is not None:
+            try:
+                startnode = self.repo.lookup(startnode)
+            except repo.RepoError:
+                raise util.Abort(_('%s is not a valid start revision')
+                                 % startnode)
+            startrev = self.repo.changelog.rev(startnode)
+            children = {startnode: 1}
+            for rev in self.repo.changelog.descendants(startrev):
+                children[self.repo.changelog.node(rev)] = 1
+            self.keep = children.__contains__
+        else:
+            self.keep = util.always
 
     def changectx(self, rev):
         if self.lastrev != rev:
-            self.lastctx = self.repo.changectx(rev)
+            self.lastctx = self.repo[rev]
             self.lastrev = rev
         return self.lastctx
 
+    def parents(self, ctx):
+        return [p.node() for p in ctx.parents()
+                if p and self.keep(p.node())]
+
     def getheads(self):
         if self.rev:
-            return [hex(self.repo.changectx(self.rev).node())]
+            heads = [self.repo[self.rev].node()]
         else:
-            return [hex(node) for node in self.repo.heads()]
+            heads = self.repo.heads()
+        return [hex(h) for h in heads if self.keep(h)]
 
     def getfile(self, name, rev):
         try:
-            return self.changectx(rev).filectx(name).data()
+            return self.changectx(rev)[name].data()
         except revlog.LookupError, err:
             raise IOError(err)
 
     def getmode(self, name, rev):
-        m = self.changectx(rev).manifest()
-        return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
+        return self.changectx(rev).manifest().flags(name)
 
     def getchanges(self, rev):
         ctx = self.changectx(rev)
+        parents = self.parents(ctx)
+        if not parents:
+            files = util.sort(ctx.manifest().keys())
+            return [(f, rev) for f in files], {}
         if self._changescache and self._changescache[0] == rev:
             m, a, r = self._changescache[1]
         else:
-            m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
+            m, a, r = self.repo.status(parents[0], ctx.node())[:3]
         changes = [(name, rev) for name in m + a + r]
-        changes.sort()
-        return (changes, self.getcopies(ctx, m + a))
+        return util.sort(changes), self.getcopies(ctx, m + a)
 
     def getcopies(self, ctx, files):
         copies = {}
         for name in files:
             try:
-                copies[name] = ctx.filectx(name).renamed()[0]
+                copynode = ctx.filectx(name).renamed()[0]
+                if self.keep(copynode):
+                    copies[name] = copynode
             except TypeError:
                 pass
         return copies
 
     def getcommit(self, rev):
         ctx = self.changectx(rev)
-        parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
+        parents = [hex(p) for p in self.parents(ctx)]
         if self.saverev:
             crev = rev
         else:
@@ -276,12 +285,18 @@
 
     def gettags(self):
         tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
-        return dict([(name, hex(node)) for name, node in tags])
+        return dict([(name, hex(node)) for name, node in tags
+                     if self.keep(node)])
 
     def getchangedfiles(self, rev, i):
         ctx = self.changectx(rev)
-        i = i or 0
-        changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
+        parents = self.parents(ctx)
+        if not parents and i is None:
+            i = 0
+            changes = [], ctx.manifest().keys(), []
+        else:
+            i = i or 0
+            changes = self.repo.status(parents[i], ctx.node())[:3]
 
         if i == 0:
             self._changescache = (rev, changes)
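The hg sink above now builds each commit in memory with context.memctx and a per-file getfilectx callback instead of writing files to the working directory and calling rawcommit. The standalone sketch below shows that pattern against the Mercurial API as this changeset uses it; the repository path and the README content are made-up examples, not part of the converter.

# Sketch of committing through context.memctx, the pattern putcommit and
# puttags adopt above. 'converted-repo' and the README data are made up.
from mercurial import ui as uimod, hg, context

repo = hg.repository(uimod.ui(), 'converted-repo', create=True)
files = {'README': 'hello converted world\n'}

def getfilectx(repo, memctx, path):
    # called back once per file listed in the memctx, as in the sink above
    return context.memfilectx(path, files[path], False, False, None)

ctx = context.memctx(repo, (repo['tip'].node(), None), 'import README',
                     files.keys(), getfilectx, 'convert-repo', '0 0', {})
node = repo.commitctx(ctx)

commitctx returns the node of the new changeset, the same value the sink captures after each putcommit step above.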
--- a/hgext/convert/subversion.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/convert/subversion.py	Wed Sep 17 11:34:37 2008 +0200
@@ -189,7 +189,7 @@
             try:
                 latest = int(rev)
             except ValueError:
-                raise util.Abort('svn: revision %s is not an integer' % rev)
+                raise util.Abort(_('svn: revision %s is not an integer') % rev)
 
         self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
         try:
@@ -290,7 +290,7 @@
                     self.ui.note(_('ignoring empty branch %s\n') %
                                    branch.encode(self.encoding))
                     continue
-                self.ui.note('found branch %s at %d\n' %
+                self.ui.note(_('found branch %s at %d\n') %
                              (branch, self.revnum(brevid)))
                 self.heads.append(brevid)
 
@@ -420,7 +420,7 @@
                 tagspath = srctagspath
 
         except SubversionException, (inst, num):
-            self.ui.note('no tags found at revision %d\n' % start)
+            self.ui.note(_('no tags found at revision %d\n') % start)
         return tags
 
     def converted(self, rev, destrev):
@@ -473,7 +473,7 @@
         except SubversionException:
             dirent = None
         if not dirent:
-            raise util.Abort('%s not found up to revision %d' % (path, stop))
+            raise util.Abort(_('%s not found up to revision %d') % (path, stop))
 
         # stat() gives us the previous revision on this line of development, but
         # it might be in *another module*. Fetch the log and detect renames down
@@ -489,7 +489,7 @@
                     if not path.startswith(p) or not paths[p].copyfrom_path:
                         continue
                     newpath = paths[p].copyfrom_path + path[len(p):]
-                    self.ui.debug("branch renamed from %s to %s at %d\n" %
+                    self.ui.debug(_("branch renamed from %s to %s at %d\n") %
                                   (path, newpath, revnum))
                     path = newpath
                     break
@@ -528,7 +528,7 @@
         prevmodule = self.prevmodule
         if prevmodule is None:
             prevmodule = ''
-        self.ui.debug("reparent to %s\n" % svn_url)
+        self.ui.debug(_("reparent to %s\n") % svn_url)
         svn.ra.reparent(self.ra, svn_url)
         self.prevmodule = module
         return prevmodule
@@ -560,11 +560,11 @@
                 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                 if not copyfrom_path:
                     continue
-                self.ui.debug("copied to %s from %s@%s\n" %
+                self.ui.debug(_("copied to %s from %s@%s\n") %
                               (entrypath, copyfrom_path, ent.copyfrom_rev))
                 copies[self.recode(entry)] = self.recode(copyfrom_path)
             elif kind == 0: # gone, but had better be a deleted *file*
-                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
+                self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
 
                 # if a branch is created but entries are removed in the same
                 # changeset, get the right fromrev
@@ -582,22 +582,22 @@
                         part = "/".join(parts[:i])
                         info = part, copyfrom.get(part, None)
                         if info[1] is not None:
-                            self.ui.debug("Found parent directory %s\n" % info[1])
+                            self.ui.debug(_("Found parent directory %s\n") % info[1])
                             rc = info
                     return rc
 
-                self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
+                self.ui.debug(_("base, entry %s %s\n") % (basepath, entrypath))
 
                 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
 
                 # need to remove fragment from lookup_parts and replace with copyfrom_path
                 if frompath is not None:
-                    self.ui.debug("munge-o-matic\n")
+                    self.ui.debug(_("munge-o-matic\n"))
                     self.ui.debug(entrypath + '\n')
                     self.ui.debug(entrypath[len(frompath):] + '\n')
                     entrypath = froment.copyfrom_path + entrypath[len(frompath):]
                     fromrev = froment.copyfrom_rev
-                    self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
+                    self.ui.debug(_("Info: %s %s %s %s\n") % (frompath, froment, ent, entrypath))
 
                 # We can avoid the reparent calls if the module has not changed
                 # but it probably does not worth the pain.
@@ -638,7 +638,7 @@
                             else:
                                 entries.append(entry)
                 else:
-                    self.ui.debug('unknown path in revision %d: %s\n' % \
+                    self.ui.debug(_('unknown path in revision %d: %s\n') % \
                                   (revnum, path))
             elif kind == svn.core.svn_node_dir:
                 # Should probably synthesize normal file entries
@@ -655,8 +655,7 @@
                 # This will fail if a directory was copied
                 # from another branch and then some of its files
                 # were deleted in the same transaction.
-                children = self._find_children(path, revnum)
-                children.sort()
+                children = util.sort(self._find_children(path, revnum))
                 for child in children:
                     # Can we move a child directory and its
                     # parent in the same commit? (probably can). Could
@@ -685,7 +684,7 @@
                 if not copyfrompath:
                     continue
                 copyfrom[path] = ent
-                self.ui.debug("mark %s came from %s:%d\n"
+                self.ui.debug(_("mark %s came from %s:%d\n")
                               % (path, copyfrompath, ent.copyfrom_rev))
                 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
                 children.sort()
@@ -716,7 +715,7 @@
             """Return the parsed commit object or None, and True if
             the revision is a branch root.
             """
-            self.ui.debug("parsing revision %d (%d changes)\n" %
+            self.ui.debug(_("parsing revision %d (%d changes)\n") %
                           (revnum, len(orig_paths)))
 
             branched = False
@@ -729,8 +728,7 @@
             parents = []
             # check whether this revision is the start of a branch or part
             # of a branch renaming
-            orig_paths = orig_paths.items()
-            orig_paths.sort()
+            orig_paths = util.sort(orig_paths.items())
             root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
             if root_paths:
                 path, ent = root_paths[-1]
@@ -754,10 +752,10 @@
                         prevmodule, prevnum = self.revsplit(previd)[1:]
                         if prevnum >= self.startrev:
                             parents = [previd]
-                            self.ui.note('found parent of branch %s at %d: %s\n' %
+                            self.ui.note(_('found parent of branch %s at %d: %s\n') %
                                          (self.module, prevnum, prevmodule))
                 else:
-                    self.ui.debug("No copyfrom path, don't know what to do.\n")
+                    self.ui.debug(_("No copyfrom path, don't know what to do.\n"))
 
             paths = []
             # filter out unrelated paths
@@ -796,7 +794,7 @@
             self.child_cset = cset
             return cset, branched
 
-        self.ui.note('fetching revision log for "%s" from %d to %d\n' %
+        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                      (self.module, from_revnum, to_revnum))
 
         try:
@@ -810,11 +808,11 @@
                         lastonbranch = True
                         break
                     if self.is_blacklisted(revnum):
-                        self.ui.note('skipping blacklisted revision %d\n'
+                        self.ui.note(_('skipping blacklisted revision %d\n')
                                      % revnum)
                         continue
                     if paths is None:
-                        self.ui.debug('revision %d has no entries\n' % revnum)
+                        self.ui.debug(_('revision %d has no entries\n') % revnum)
                         continue
                     cset, lastonbranch = parselogentry(paths, revnum, author,
                                                        date, message)
@@ -839,7 +837,7 @@
                     pass
         except SubversionException, (inst, num):
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
-                raise util.Abort('svn: branch has no revision %s' % to_revnum)
+                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
             raise
 
     def _getfile(self, file, rev):
@@ -892,7 +890,7 @@
                 return relative
 
         # The path is outside our tracked tree...
-        self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
+        self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
         return None
 
     def _checkpath(self, path, revnum):
@@ -1034,12 +1032,6 @@
                 if 'x' in flags:
                     self.setexec.append(filename)
 
-    def delfile(self, name):
-        self.delete.append(name)
-
-    def copyfile(self, source, dest):
-        self.copies.append([source, dest])
-
     def _copyfile(self, source, dest):
         # SVN's copy command pukes if the destination file exists, but
         # our copyfile method expects to record a copy that has
@@ -1072,10 +1064,9 @@
         return dirs
 
     def add_dirs(self, files):
-        add_dirs = [d for d in self.dirs_of(files)
+        add_dirs = [d for d in util.sort(self.dirs_of(files))
                     if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
         if add_dirs:
-            add_dirs.sort()
             self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
         return add_dirs
 
@@ -1085,8 +1076,7 @@
         return files
 
     def tidy_dirs(self, names):
-        dirs = list(self.dirs_of(names))
-        dirs.sort()
+        dirs = util.sort(self.dirs_of(names))
         dirs.reverse()
         deleted = []
         for d in dirs:
@@ -1102,7 +1092,20 @@
     def revid(self, rev):
         return u"svn:%s@%s" % (self.uuid, rev)
 
-    def putcommit(self, files, parents, commit):
+    def putcommit(self, files, copies, parents, commit, source):
+        # Apply changes to working copy
+        for f, v in files:
+            try:
+                data = source.getfile(f, v)
+            except IOError, inst:
+                self.delete.append(f)
+            else:
+                e = source.getmode(f, v)
+                self.putfile(f, e, data)
+                if f in copies:
+                    self.copies.append([copies[f], f])
+        files = [f[0] for f in files]
+
         for parent in parents:
             try:
                 return self.revid(self.childmap[parent])
--- a/hgext/extdiff.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/extdiff.py	Wed Sep 17 11:34:37 2008 +0200
@@ -52,7 +52,6 @@
 
 def snapshot_node(ui, repo, files, node, tmproot):
     '''snapshot files as of some revision'''
-    mf = repo.changectx(node).manifest()
     dirname = os.path.basename(repo.root)
     if dirname == "":
         dirname = "root"
@@ -61,17 +60,18 @@
     os.mkdir(base)
     ui.note(_('making snapshot of %d files from rev %s\n') %
             (len(files), short(node)))
+    ctx = repo[node]
     for fn in files:
-        if not fn in mf:
+        wfn = util.pconvert(fn)
+        if not wfn in ctx:
             # skipping new file after a merge ?
             continue
-        wfn = util.pconvert(fn)
         ui.note('  %s\n' % wfn)
         dest = os.path.join(base, wfn)
         destdir = os.path.dirname(dest)
         if not os.path.isdir(destdir):
             os.makedirs(destdir)
-        data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
+        data = repo.wwritedata(wfn, ctx[wfn].data())
         open(dest, 'wb').write(data)
     return dirname
 
@@ -121,9 +121,8 @@
     - just invoke the diff for a single file in the working dir
     '''
     node1, node2 = cmdutil.revpair(repo, opts['rev'])
-    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
-    modified, added, removed, deleted, unknown = repo.status(
-        node1, node2, files, match=matchfn)[:5]
+    matcher = cmdutil.match(repo, pats, opts)
+    modified, added, removed = repo.status(node1, node2, matcher)[:3]
     if not (modified or added or removed):
         return 0
 
@@ -165,13 +164,13 @@
         cmdline = ('%s %s %s %s' %
                    (util.shellquote(diffcmd), ' '.join(diffopts),
                     util.shellquote(dir1), util.shellquote(dir2)))
-        ui.debug('running %r in %s\n' % (cmdline, tmproot))
+        ui.debug(_('running %r in %s\n') % (cmdline, tmproot))
         util.system(cmdline, cwd=tmproot)
 
         for copy_fn, working_fn, mtime in fns_and_mtime:
             if os.path.getmtime(copy_fn) != mtime:
-                ui.debug('File changed while diffing. '
-                         'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
+                ui.debug(_('File changed while diffing. '
+                         'Overwriting: %s (src: %s)\n') % (working_fn, copy_fn))
                 util.copyfile(copy_fn, working_fn)
 
         return 1
--- a/hgext/fetch.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/fetch.py	Wed Sep 17 11:34:37 2008 +0200
@@ -4,6 +4,7 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
+'''pulling, updating and merging in one command'''
 
 from mercurial.i18n import _
 from mercurial.node import nullid, short
@@ -15,7 +16,7 @@
     This finds all changes from the repository at the specified path
     or URL and adds them to the local repository.
 
-    If the pulled changes add a new head, the head is automatically
+    If the pulled changes add a new branch head, the head is automatically
     merged, and the result of the merge is committed.  Otherwise, the
     working directory is updated to include the new changes.
 
@@ -27,23 +28,74 @@
     See 'hg help dates' for a list of formats valid for -d/--date.
     '''
 
-    def postincoming(other, modheads):
+    date = opts.get('date')
+    if date:
+        opts['date'] = util.parsedate(date)
+
+    parent, p2 = repo.dirstate.parents()
+    branch = repo[parent].branch()
+    if parent != repo[branch].node():
+        raise util.Abort(_('working dir not at branch tip '
+                           '(use "hg update" to check out branch tip)'))
+
+    if p2 != nullid:
+        raise util.Abort(_('outstanding uncommitted merge'))
+
+    wlock = lock = None
+    try:
+        wlock = repo.wlock()
+        lock = repo.lock()
+        mod, add, rem, del_ = repo.status()[:4]
+
+        if mod or add or rem:
+            raise util.Abort(_('outstanding uncommitted changes'))
+        if del_:
+            raise util.Abort(_('working directory is missing some files'))
+        if len(repo.branchheads(branch)) > 1:
+            raise util.Abort(_('multiple heads in this branch '
+                               '(use "hg heads ." and "hg merge" to merge)'))
+
+        cmdutil.setremoteconfig(ui, opts)
+
+        other = hg.repository(ui, ui.expandpath(source))
+        ui.status(_('pulling from %s\n') %
+                  util.hidepassword(ui.expandpath(source)))
+        revs = None
+        if opts['rev']:
+            if not other.local():
+                raise util.Abort(_("fetch -r doesn't work for remote "
+                                   "repositories yet"))
+            else:
+                revs = [other.lookup(rev) for rev in opts['rev']]
+
+        # Are there any changes at all?
+        modheads = repo.pull(other, heads=revs)
         if modheads == 0:
             return 0
-        if modheads == 1:
-            return hg.clean(repo, repo.changelog.tip())
-        newheads = repo.heads(parent)
-        newchildren = [n for n in repo.heads(parent) if n != parent]
+
+        # Is this a simple fast-forward along the current branch?
+        newheads = repo.branchheads(branch)
+        newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
+        if len(newheads) == 1:
+            if newchildren[0] != parent:
+                return hg.clean(repo, newchildren[0])
+            else:
+                return
+
+        # Is there more than one additional branch head?
+        newchildren = [n for n in newchildren if n != parent]
         newparent = parent
         if newchildren:
             newparent = newchildren[0]
             hg.clean(repo, newparent)
-        newheads = [n for n in repo.heads() if n != newparent]
+        newheads = [n for n in newheads if n != newparent]
         if len(newheads) > 1:
-            ui.status(_('not merging with %d other new heads '
-                        '(use "hg heads" and "hg merge" to merge them)') %
+            ui.status(_('not merging with %d other new branch heads '
+                        '(use "hg heads ." and "hg merge" to merge them)\n') %
                       (len(newheads) - 1))
             return
+
+        # Otherwise, let's merge.
         err = False
         if newheads:
             # By default, we consider the repository we're pulling
@@ -60,6 +112,7 @@
             ui.status(_('merging with %d:%s\n') %
                       (repo.changelog.rev(secondparent), short(secondparent)))
             err = hg.merge(repo, secondparent, remind=False)
+
         if not err:
             mod, add, rem = repo.status()[:3]
             message = (cmdutil.logmessage(opts) or
@@ -73,45 +126,6 @@
                         'with local\n') % (repo.changelog.rev(n),
                                            short(n)))
 
-    def pull():
-        cmdutil.setremoteconfig(ui, opts)
-
-        other = hg.repository(ui, ui.expandpath(source))
-        ui.status(_('pulling from %s\n') %
-                  util.hidepassword(ui.expandpath(source)))
-        revs = None
-        if opts['rev']:
-            if not other.local():
-                raise util.Abort(_("fetch -r doesn't work for remote "
-                                   "repositories yet"))
-            else:
-                revs = [other.lookup(rev) for rev in opts['rev']]
-        modheads = repo.pull(other, heads=revs)
-        return postincoming(other, modheads)
-
-    date = opts.get('date')
-    if date:
-        opts['date'] = util.parsedate(date)
-
-    parent, p2 = repo.dirstate.parents()
-    if parent != repo.changelog.tip():
-        raise util.Abort(_('working dir not at tip '
-                           '(use "hg update" to check out tip)'))
-    if p2 != nullid:
-        raise util.Abort(_('outstanding uncommitted merge'))
-    wlock = lock = None
-    try:
-        wlock = repo.wlock()
-        lock = repo.lock()
-        mod, add, rem, del_ = repo.status()[:4]
-        if mod or add or rem:
-            raise util.Abort(_('outstanding uncommitted changes'))
-        if del_:
-            raise util.Abort(_('working directory is missing some files'))
-        if len(repo.heads()) > 1:
-            raise util.Abort(_('multiple heads in this repository '
-                               '(use "hg heads" and "hg merge" to merge)'))
-        return pull()
     finally:
         del lock, wlock
 
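After pulling, the rewritten fetch above either fast-forwards, merges a single additional branch head, or refuses when several new heads appear. The helper below is only an illustrative restatement of that decision, not code from the extension; the node values are whatever identifiers the caller supplies.

# Illustrative restatement of the post-pull decision in fetch above; the
# function name, arguments and returned strings are examples, not extension API.
def plan(parent, branchheads, descendants):
    """parent: working directory parent; branchheads: heads of the current
    branch after the pull; descendants: heads among the pulled changes that
    descend from parent (what fetch computes with nodesbetween)."""
    if len(branchheads) == 1:
        # a single head: fast-forward if it moved, otherwise nothing to do
        if descendants and descendants[0] != parent:
            return 'update to %s' % descendants[0]
        return 'nothing to do'
    children = [n for n in descendants if n != parent]
    newparent = children[0] if children else parent
    others = [n for n in branchheads if n != newparent]
    if len(others) > 1:
        return 'not merging with %d other new branch heads' % len(others)
    if others:
        return 'merge with %s, then commit' % others[0]
    return 'nothing to do'

assert plan('p1', ['h1'], ['h1']) == 'update to h1'
assert plan('p1', ['h1', 'h2'], ['h1']) == 'merge with h2, then commit'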
--- a/hgext/gpg.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/gpg.py	Wed Sep 17 11:34:37 2008 +0200
@@ -239,7 +239,7 @@
         repo.opener("localsigs", "ab").write(sigmessage)
         return
 
-    for x in repo.status()[:5]:
+    for x in repo.status(unknown=True)[:5]:
         if ".hgsigs" in x and not opts["force"]:
             raise util.Abort(_("working copy of .hgsigs is changed "
                                "(please commit .hgsigs manually "
--- a/hgext/graphlog.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/graphlog.py	Wed Sep 17 11:34:37 2008 +0200
@@ -4,6 +4,7 @@
 #
 # This software may be used and distributed according to the terms of
 # the GNU General Public License, incorporated herein by reference.
+'''show revision graphs in terminal windows'''
 
 import os
 import sys
@@ -12,6 +13,7 @@
 from mercurial.i18n import _
 from mercurial.node import nullrev
 from mercurial.util import Abort, canonpath
+from mercurial import util
 
 def revision_grapher(repo, start_rev, stop_rev):
     """incremental revision grapher
@@ -52,8 +54,7 @@
         for parent in parents:
             if parent not in next_revs:
                 parents_to_add.append(parent)
-        parents_to_add.sort()
-        next_revs[rev_index:rev_index + 1] = parents_to_add
+        next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add)
 
         edges = []
         for parent in parents:
@@ -88,7 +89,7 @@
     assert start_rev >= stop_rev
     curr_rev = start_rev
     revs = []
-    filerev = repo.file(path).count() - 1
+    filerev = len(repo.file(path)) - 1
     while filerev >= 0:
         fctx = repo.filectx(path, fileid=filerev)
 
@@ -104,8 +105,7 @@
         for parent in parents:
             if parent not in next_revs:
                 parents_to_add.append(parent)
-        parents_to_add.sort()
-        next_revs[rev_index:rev_index + 1] = parents_to_add
+        next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add)
 
         edges = []
         for parent in parents:
@@ -197,7 +197,7 @@
         revs = revrange(repo, rev_opt)
         return (max(revs), min(revs))
     else:
-        return (repo.changelog.count() - 1, 0)
+        return (len(repo) - 1, 0)
 
 def graphlog(ui, repo, path=None, **opts):
     """show revision history alongside an ASCII revision graph
--- a/hgext/hgk.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/hgk.py	Wed Sep 17 11:34:37 2008 +0200
@@ -4,60 +4,59 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
-#
-# The hgk extension allows browsing the history of a repository in a
-# graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
-# not distributed with Mercurial.)
-#
-# hgk consists of two parts: a Tcl script that does the displaying and
-# querying of information, and an extension to mercurial named hgk.py,
-# which provides hooks for hgk to get information. hgk can be found in
-# the contrib directory, and hgk.py can be found in the hgext
-# directory.
-#
-# To load the hgext.py extension, add it to your .hgrc file (you have
-# to use your global $HOME/.hgrc file, not one in a repository). You
-# can specify an absolute path:
-#
-#   [extensions]
-#   hgk=/usr/local/lib/hgk.py
-#
-# Mercurial can also scan the default python library path for a file
-# named 'hgk.py' if you set hgk empty:
-#
-#   [extensions]
-#   hgk=
-#
-# The hg view command will launch the hgk Tcl script. For this command
-# to work, hgk must be in your search path. Alternately, you can
-# specify the path to hgk in your .hgrc file:
-#
-#   [hgk]
-#   path=/location/of/hgk
-#
-# hgk can make use of the extdiff extension to visualize
-# revisions. Assuming you had already configured extdiff vdiff
-# command, just add:
-#
-#   [hgk]
-#   vdiff=vdiff
-#
-# Revisions context menu will now display additional entries to fire
-# vdiff on hovered and selected revisions.
+'''browsing the repository in a graphical way
+
+The hgk extension allows browsing the history of a repository in a
+graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
+not distributed with Mercurial.)
+
+hgk consists of two parts: a Tcl script that does the displaying and
+querying of information, and an extension to mercurial named hgk.py,
+which provides hooks for hgk to get information. hgk can be found in
+the contrib directory, and hgk.py can be found in the hgext directory.
+
+To load the hgk extension, add it to your .hgrc file (you have
+to use your global $HOME/.hgrc file, not one in a repository). You
+can specify an absolute path:
+
+  [extensions]
+  hgk=/usr/local/lib/hgk.py
+
+Mercurial can also scan the default python library path for a file
+named 'hgk.py' if you set hgk empty:
+
+  [extensions]
+  hgk=
+
+The hg view command will launch the hgk Tcl script. For this command
+to work, hgk must be in your search path. Alternately, you can
+specify the path to hgk in your .hgrc file:
+
+  [hgk]
+  path=/location/of/hgk
+
+hgk can make use of the extdiff extension to visualize revisions.
+Assuming you have already configured the extdiff vdiff command, just add:
+
+  [hgk]
+  vdiff=vdiff
+
+The revisions context menu will now display additional entries to fire
+vdiff on hovered and selected revisions.'''
 
 import os
-from mercurial import commands, util, patch, revlog
+from mercurial import commands, util, patch, revlog, cmdutil
 from mercurial.node import nullid, nullrev, short
+from mercurial.i18n import _
 
 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
     """diff trees from two commits"""
     def __difftree(repo, node1, node2, files=[]):
         assert node2 is not None
-        mmap = repo.changectx(node1).manifest()
-        mmap2 = repo.changectx(node2).manifest()
-        status = repo.status(node1, node2, files=files)[:5]
-        modified, added, removed, deleted, unknown = status
-
+        mmap = repo[node1].manifest()
+        mmap2 = repo[node2].manifest()
+        m = cmdutil.match(repo, files)
+        modified, added, removed  = repo.status(node1, node2, m)[:3]
         empty = short(nullid)
 
         for f in modified:
@@ -92,8 +91,8 @@
         if opts['patch']:
             if opts['pretty']:
                 catcommit(ui, repo, node2, "")
-            patch.diff(repo, node1, node2,
-                       files=files,
+            m = cmdutil.match(repo, files)
+            patch.diff(repo, node1, node2, match=m,
                        opts=patch.diffopts(ui, {'git': True}))
         else:
             __difftree(repo, node1, node2, files=files)
@@ -103,11 +102,11 @@
 def catcommit(ui, repo, n, prefix, ctx=None):
     nlprefix = '\n' + prefix;
     if ctx is None:
-        ctx = repo.changectx(n)
-    (p1, p2) = ctx.parents()
+        ctx = repo[n]
     ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
-    if p1: ui.write("parent %s\n" % short(p1.node()))
-    if p2: ui.write("parent %s\n" % short(p2.node()))
+    for p in ctx.parents():
+        ui.write("parent %s\n" % p)
+
     date = ctx.date()
     description = ctx.description().replace("\0", "")
     lines = description.splitlines()
@@ -151,12 +150,12 @@
 
     else:
         if not type or not r:
-            ui.warn("cat-file: type or revision not supplied\n")
+            ui.warn(_("cat-file: type or revision not supplied\n"))
             commands.help_(ui, 'cat-file')
 
     while r:
         if type != "commit":
-            ui.warn("aborting hg cat-file only understands commits\n")
+            ui.warn(_("aborting hg cat-file only understands commits\n"))
             return 1;
         n = repo.lookup(r)
         catcommit(ui, repo, n, prefix)
@@ -175,7 +174,7 @@
 # you can specify a commit to stop at by starting the sha1 with ^
 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
     def chlogwalk():
-        count = repo.changelog.count()
+        count = len(repo)
         i = count
         l = [0] * 100
         chunk = 100
@@ -191,7 +190,7 @@
                     l[chunk - x:] = [0] * (chunk - x)
                     break
                 if full != None:
-                    l[x] = repo.changectx(i + x)
+                    l[x] = repo[i + x]
                     l[x].changeset() # force reading
                 else:
                     l[x] = 1
@@ -318,40 +317,40 @@
     os.chdir(repo.root)
     optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
     cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
-    ui.debug("running %s\n" % cmd)
+    ui.debug(_("running %s\n") % cmd)
     util.system(cmd)
 
 cmdtable = {
     "^view":
         (view,
-         [('l', 'limit', '', 'limit number of changes displayed')],
-         'hg view [-l LIMIT] [REVRANGE]'),
+         [('l', 'limit', '', _('limit number of changes displayed'))],
+         _('hg view [-l LIMIT] [REVRANGE]')),
     "debug-diff-tree":
         (difftree,
-         [('p', 'patch', None, 'generate patch'),
-          ('r', 'recursive', None, 'recursive'),
-          ('P', 'pretty', None, 'pretty'),
-          ('s', 'stdin', None, 'stdin'),
-          ('C', 'copy', None, 'detect copies'),
-          ('S', 'search', "", 'search')],
-         'hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...'),
+         [('p', 'patch', None, _('generate patch')),
+          ('r', 'recursive', None, _('recursive')),
+          ('P', 'pretty', None, _('pretty')),
+          ('s', 'stdin', None, _('stdin')),
+          ('C', 'copy', None, _('detect copies')),
+          ('S', 'search', "", _('search'))],
+         _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')),
     "debug-cat-file":
         (catfile,
-         [('s', 'stdin', None, 'stdin')],
-         'hg debug-cat-file [OPTION]... TYPE FILE'),
+         [('s', 'stdin', None, _('stdin'))],
+         _('hg debug-cat-file [OPTION]... TYPE FILE')),
     "debug-config":
-        (config, [], 'hg debug-config'),
+        (config, [], _('hg debug-config')),
     "debug-merge-base":
-        (base, [], 'hg debug-merge-base node node'),
+        (base, [], _('hg debug-merge-base node node')),
     "debug-rev-parse":
         (revparse,
-         [('', 'default', '', 'ignored')],
-         'hg debug-rev-parse REV'),
+         [('', 'default', '', _('ignored'))],
+         _('hg debug-rev-parse REV')),
     "debug-rev-list":
         (revlist,
-         [('H', 'header', None, 'header'),
-          ('t', 'topo-order', None, 'topo-order'),
-          ('p', 'parents', None, 'parents'),
-          ('n', 'max-count', 0, 'max-count')],
-         'hg debug-rev-list [options] revs'),
+         [('H', 'header', None, _('header')),
+          ('t', 'topo-order', None, _('topo-order')),
+          ('p', 'parents', None, _('parents')),
+          ('n', 'max-count', 0, _('max-count'))],
+         _('hg debug-rev-list [options] revs')),
 }
--- a/hgext/highlight.py	Wed Sep 17 11:14:06 2008 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,98 +0,0 @@
-"""
-This is Mercurial extension for syntax highlighting in the file
-revision view of hgweb.
-
-It depends on the pygments syntax highlighting library:
-http://pygments.org/
-
-To enable the extension add this to hgrc:
-
-[extensions]
-hgext.highlight =
-
-There is a single configuration option:
-
-[web]
-pygments_style = <style>
-
-The default is 'colorful'.  If this is changed the corresponding CSS
-file should be re-generated by running
-
-# pygmentize -f html -S <newstyle>
-
-
--- Adam Hupp <adam@hupp.org>
-
-
-"""
-
-from mercurial import demandimport
-demandimport.ignore.extend(['pkgutil',
-                            'pkg_resources',
-                            '__main__',])
-
-from mercurial.hgweb.hgweb_mod import hgweb
-from mercurial import util
-from mercurial.templatefilters import filters
-
-from pygments import highlight
-from pygments.util import ClassNotFound
-from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
-from pygments.formatters import HtmlFormatter
-
-SYNTAX_CSS = ('\n<link rel="stylesheet" href="#staticurl#highlight.css" '
-              'type="text/css" />')
-
-def pygmentize(self, tmpl, fctx, field):
-    # append a <link ...> to the syntax highlighting css
-    old_header = ''.join(tmpl('header'))
-    if SYNTAX_CSS not in old_header:
-        new_header =  old_header + SYNTAX_CSS
-        tmpl.cache['header'] = new_header
-
-    text = fctx.data()
-    if util.binary(text):
-        return
-
-    style = self.config("web", "pygments_style", "colorful")
-    # To get multi-line strings right, we can't format line-by-line
-    try:
-        lexer = guess_lexer_for_filename(fctx.path(), text[:1024],
-                                         encoding=util._encoding)
-    except (ClassNotFound, ValueError):
-        try:
-            lexer = guess_lexer(text[:1024], encoding=util._encoding)
-        except (ClassNotFound, ValueError):
-            lexer = TextLexer(encoding=util._encoding)
-
-    formatter = HtmlFormatter(style=style, encoding=util._encoding)
-
-    colorized = highlight(text, lexer, formatter)
-    # strip wrapping div
-    colorized = colorized[:colorized.find('\n</pre>')]
-    colorized = colorized[colorized.find('<pre>')+5:]
-    coloriter = iter(colorized.splitlines())
-
-    filters['colorize'] = lambda x: coloriter.next()
-
-    oldl = tmpl.cache[field]
-    newl = oldl.replace('line|escape', 'line|colorize')
-    tmpl.cache[field] = newl
-
-def filerevision_highlight(self, tmpl, fctx):
-    pygmentize(self, tmpl, fctx, 'fileline')
-
-    return realrevision(self, tmpl, fctx)
-
-def fileannotate_highlight(self, tmpl, fctx):
-    pygmentize(self, tmpl, fctx, 'annotateline')
-
-    return realannotate(self, tmpl, fctx)
-
-# monkeypatch in the new version
-# should be safer than overriding the method in a derived class
-# and then patching the class
-realrevision = hgweb.filerevision
-hgweb.filerevision = filerevision_highlight
-realannotate = hgweb.fileannotate
-hgweb.fileannotate = fileannotate_highlight
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/highlight/__init__.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,61 @@
+"""a mercurial extension for syntax highlighting in hgweb
+
+It depends on the pygments syntax highlighting library:
+http://pygments.org/
+
+To enable the extension add this to hgrc:
+
+[extensions]
+hgext.highlight =
+
+There is a single configuration option:
+
+[web]
+pygments_style = <style>
+
+The default is 'colorful'.
+
+-- Adam Hupp <adam@hupp.org>
+"""
+
+import highlight
+from mercurial.hgweb import webcommands, webutil, common
+
+web_filerevision = webcommands._filerevision
+web_annotate = webcommands.annotate
+
+def filerevision_highlight(web, tmpl, fctx):
+    mt = ''.join(tmpl('mimetype', encoding=web.encoding))
+    # Only pygmentize when the mimetype contains 'html', so we match both
+    # 'text/html' and, in the future, 'application/xhtml+xml' without having
+    # to touch the extension when a template's mimetype changes. hgweb also
+    # serves raw files through rawfile() without calling us, so we cannot
+    # clash with the file's own content-type here even if we pygmentize an
+    # HTML file.
+    if 'html' in mt:
+        style = web.config('web', 'pygments_style', 'colorful')
+        highlight.pygmentize('fileline', fctx, style, tmpl)
+    return web_filerevision(web, tmpl, fctx)
+
+def annotate_highlight(web, req, tmpl):
+    mt = ''.join(tmpl('mimetype', encoding=web.encoding))
+    if 'html' in mt:
+        fctx = webutil.filectx(web.repo, req)
+        style = web.config('web', 'pygments_style', 'colorful')
+        highlight.pygmentize('annotateline', fctx, style, tmpl)
+    return web_annotate(web, req, tmpl)
+
+def generate_css(web, req, tmpl):
+    pg_style = web.config('web', 'pygments_style', 'colorful')
+    fmter = highlight.HtmlFormatter(style = pg_style)
+    req.respond(common.HTTP_OK, 'text/css')
+    return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
+
+
+# monkeypatch in the new version
+
+webcommands._filerevision = filerevision_highlight
+webcommands.annotate = annotate_highlight
+webcommands.highlightcss = generate_css
+webcommands.__all__.append('highlightcss')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/highlight/highlight.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,54 @@
+# highlight extension implementation file
+#
+# The original module was split into an interface and an implementation
+# file to defer pygments loading and speed up extension setup.
+
+from mercurial import demandimport
+demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__',])
+
+from mercurial import util
+from mercurial.templatefilters import filters
+
+from pygments import highlight
+from pygments.util import ClassNotFound
+from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
+from pygments.formatters import HtmlFormatter
+
+SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
+              'type="text/css" />')
+
+def pygmentize(field, fctx, style, tmpl):
+
+    # append a <link ...> to the syntax highlighting css
+    old_header = ''.join(tmpl('header'))
+    if SYNTAX_CSS not in old_header:
+        new_header =  old_header + SYNTAX_CSS
+        tmpl.cache['header'] = new_header
+
+    text = fctx.data()
+    if util.binary(text):
+        return
+
+    # To get multi-line strings right, we can't format line-by-line
+    try:
+        lexer = guess_lexer_for_filename(fctx.path(), text[:1024],
+                                         encoding=util._encoding)
+    except (ClassNotFound, ValueError):
+        try:
+            lexer = guess_lexer(text[:1024], encoding=util._encoding)
+        except (ClassNotFound, ValueError):
+            lexer = TextLexer(encoding=util._encoding)
+
+    formatter = HtmlFormatter(style=style, encoding=util._encoding)
+
+    colorized = highlight(text, lexer, formatter)
+    # strip wrapping div
+    colorized = colorized[:colorized.find('\n</pre>')]
+    colorized = colorized[colorized.find('<pre>')+5:]
+    coloriter = iter(colorized.splitlines())
+
+    filters['colorize'] = lambda x: coloriter.next()
+
+    oldl = tmpl.cache[field]
+    newl = oldl.replace('line|escape', 'line|colorize')
+    tmpl.cache[field] = newl
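For reference, the lexer selection above can be exercised outside hgweb; this is a minimal sketch assuming only that pygments is installed (the sample path and text are made up, not from this changeset):

    from pygments import highlight
    from pygments.util import ClassNotFound
    from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
    from pygments.formatters import HtmlFormatter

    path, text = 'example.py', 'def f():\n    return 42\n'    # sample input
    try:
        # prefer the filename, then fall back to content-based guessing
        lexer = guess_lexer_for_filename(path, text[:1024])
    except (ClassNotFound, ValueError):
        try:
            lexer = guess_lexer(text[:1024])
        except (ClassNotFound, ValueError):
            lexer = TextLexer()                                # last resort: plain text
    html = highlight(text, lexer, HtmlFormatter(style='colorful'))
    print(html.splitlines()[0])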
--- a/hgext/imerge.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/imerge.py	Wed Sep 17 11:34:37 2008 +0200
@@ -74,7 +74,7 @@
 
         status = statusfile.read().split('\0')
         if len(status) < 3:
-            raise util.Abort('invalid imerge status file')
+            raise util.Abort(_('invalid imerge status file'))
 
         try:
             parents = [self.repo.changectx(n) for n in status[:2]]
@@ -154,7 +154,7 @@
         dp = self.repo.dirstate.parents()
         p1, p2 = self.wctx.parents()
         if p1.node() != dp[0] or p2.node() != dp[1]:
-            raise util.Abort('imerge state does not match working directory')
+            raise util.Abort(_('imerge state does not match working directory'))
 
     def next(self):
         remaining = self.remaining()
@@ -164,7 +164,7 @@
         resolved = dict.fromkeys(self.resolved)
         for fn in files:
             if fn not in self.conflicts:
-                raise util.Abort('%s is not in the merge set' % fn)
+                raise util.Abort(_('%s is not in the merge set') % fn)
             resolved[fn] = True
         self.resolved = resolved.keys()
         self.resolved.sort()
@@ -175,7 +175,7 @@
         resolved = dict.fromkeys(self.resolved)
         for fn in files:
             if fn not in resolved:
-                raise util.Abort('%s is not resolved' % fn)
+                raise util.Abort(_('%s is not resolved') % fn)
             del resolved[fn]
         self.resolved = resolved.keys()
         self.resolved.sort()
@@ -194,11 +194,11 @@
 
 def load(im, source):
     if im.merging():
-        raise util.Abort('there is already a merge in progress '
-                         '(update -C <rev> to abort it)' )
+        raise util.Abort(_('there is already a merge in progress '
+                           '(update -C <rev> to abort it)'))
     m, a, r, d =  im.repo.status()[:4]
     if m or a or r or d:
-        raise util.Abort('working directory has uncommitted changes')
+        raise util.Abort(_('working directory has uncommitted changes'))
 
     rc = im.unpickle(source)
     if not rc:
@@ -243,7 +243,7 @@
 
 def resolve(im, *files):
     if not files:
-        raise util.Abort('resolve requires at least one filename')
+        raise util.Abort(_('resolve requires at least one filename'))
     return im.resolve(files)
 
 def save(im, dest):
@@ -286,7 +286,7 @@
 
 def unresolve(im, *files):
     if not files:
-        raise util.Abort('unresolve requires at least one filename')
+        raise util.Abort(_('unresolve requires at least one filename'))
     return im.unresolve(files)
 
 subcmdtable = {
@@ -380,7 +380,7 @@
     else:
         rev = opts.get('rev')
         if rev and args:
-            raise util.Abort('please specify just one revision')
+            raise util.Abort(_('please specify just one revision'))
 
         if len(args) == 2 and args[0] == 'load':
             pass
@@ -403,5 +403,5 @@
     (imerge,
      [('r', 'rev', '', _('revision to merge')),
       ('a', 'auto', None, _('automatically merge where possible'))],
-      'hg imerge [command]')
+      _('hg imerge [command]'))
 }
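The imerge hunks above only mark user-visible strings for translation. The convention they follow, sketched here with plain gettext and a made-up filename, is to translate the format string first and interpolate afterwards, so translators see the placeholder intact:

    import gettext
    _ = gettext.gettext        # mercurial.i18n._ is roughly this, bound to hg's catalog

    fn = 'foo.c'               # example filename
    print(_('%s is not in the merge set') % fn)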
--- a/hgext/inotify/__init__.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/inotify/__init__.py	Wed Sep 17 11:34:37 2008 +0200
@@ -50,12 +50,12 @@
         # to recurse.
         inotifyserver = False
 
-        def status(self, files, match, list_ignored, list_clean,
-                   list_unknown=True):
+        def status(self, match, ignored, clean, unknown=True):
+            files = match.files()
             try:
-                if not list_ignored and not self.inotifyserver:
+                if not ignored and not self.inotifyserver:
                     result = client.query(ui, repo, files, match, False,
-                                          list_clean, list_unknown)
+                                          clean, unknown)
                     if result is not None:
                         return result
             except (OSError, socket.error), err:
@@ -81,7 +81,7 @@
                     if query:
                         try:
                             return query(ui, repo, files or [], match,
-                                         list_ignored, list_clean, list_unknown)
+                                         ignored, clean, unknown)
                         except socket.error, err:
                             ui.warn(_('could not talk to new inotify '
                                            'server: %s\n') % err[-1])
@@ -94,8 +94,7 @@
                 self.status = super(inotifydirstate, self).status
 
             return super(inotifydirstate, self).status(
-                files, match or util.always, list_ignored, list_clean,
-                list_unknown)
+                match, ignored, clean, unknown)
 
     repo.dirstate.__class__ = inotifydirstate
 
--- a/hgext/inotify/client.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/inotify/client.py	Wed Sep 17 11:34:37 2008 +0200
@@ -11,7 +11,7 @@
 import common
 import os, select, socket, stat, struct, sys
 
-def query(ui, repo, names, match, list_ignored, list_clean, list_unknown=True):
+def query(ui, repo, names, match, ignored, clean, unknown=True):
     sock = socket.socket(socket.AF_UNIX)
     sockpath = repo.join('inotify.sock')
     try:
@@ -27,10 +27,10 @@
         for n in names or []:
             yield n
         states = 'almrx!'
-        if list_ignored:
+        if ignored:
             raise ValueError('this is insanity')
-        if list_clean: states += 'n'
-        if list_unknown: states += '?'
+        if clean: states += 'n'
+        if unknown: states += '?'
         yield states
 
     req = '\0'.join(genquery())
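As a rough illustration (not part of the patch), the request built above is a NUL-separated list of names followed by a string of state flags; with the renamed keyword arguments the states string comes out like this:

    names = ['foo.c']                  # example query
    clean, unknown = True, False       # the renamed keyword arguments

    states = 'almrx!'                  # base states, as in genquery() above
    if clean:
        states += 'n'
    if unknown:
        states += '?'
    req = '\0'.join(names + [states])
    print(repr(req))                   # 'foo.c\x00almrx!n'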
--- a/hgext/inotify/server.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/inotify/server.py	Wed Sep 17 11:34:37 2008 +0200
@@ -6,7 +6,7 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from mercurial.i18n import gettext as _
+from mercurial.i18n import _
 from mercurial import osutil, ui, util
 import common
 import errno, os, select, socket, stat, struct, sys, tempfile, time
@@ -290,10 +290,10 @@
                     del self.dir(self.statustrees[oldstatus], root)[fn]
         if self.ui.debugflag and oldstatus != status:
             if isdir:
-                self.ui.note('status: %r dir(%d) -> %s\n' %
+                self.ui.note(_('status: %r dir(%d) -> %s\n') %
                              (wfn, len(oldstatus), status))
             else:
-                self.ui.note('status: %r %s -> %s\n' %
+                self.ui.note(_('status: %r %s -> %s\n') %
                              (wfn, oldstatus, status))
         if not isdir:
             if status and status != 'i':
@@ -382,7 +382,7 @@
 
         if self.repo.dirstate.ignorefunc is not None:
             self.repo.dirstate.ignorefunc = None
-            self.ui.note('rescanning due to .hgignore change\n')
+            self.ui.note(_('rescanning due to .hgignore change\n'))
             self.scan()
 
     def getstat(self, wpath):
@@ -467,7 +467,7 @@
 
     def process_delete(self, wpath, evt):
         if self.ui.debugflag:
-            self.ui.note(('%s event: deleted %s\n') %
+            self.ui.note(_('%s event: deleted %s\n') %
                          (self.event_time(), wpath))
 
         if evt.mask & inotify.IN_ISDIR:
@@ -490,12 +490,12 @@
 
     def handle_event(self, fd, event):
         if self.ui.debugflag:
-            self.ui.note('%s readable: %d bytes\n' %
+            self.ui.note(_('%s readable: %d bytes\n') %
                          (self.event_time(), self.threshold.readable()))
         if not self.threshold():
             if self.registered:
                 if self.ui.debugflag:
-                    self.ui.note('%s below threshold - unhooking\n' %
+                    self.ui.note(_('%s below threshold - unhooking\n') %
                                  (self.event_time()))
                 self.master.poll.unregister(fd)
                 self.registered = False
@@ -506,7 +506,7 @@
     def read_events(self, bufsize=None):
         events = self.watcher.read(bufsize)
         if self.ui.debugflag:
-            self.ui.note('%s reading %d events\n' %
+            self.ui.note(_('%s reading %d events\n') %
                          (self.event_time(), len(events)))
         for evt in events:
             wpath = self.wpath(evt)
@@ -523,7 +523,7 @@
     def handle_timeout(self):
         if not self.registered:
             if self.ui.debugflag:
-                self.ui.note('%s hooking back up with %d bytes readable\n' %
+                self.ui.note(_('%s hooking back up with %d bytes readable\n') %
                              (self.event_time(), self.threshold.readable()))
             self.read_events(0)
             self.master.poll.register(self, select.POLLIN)
@@ -531,12 +531,10 @@
 
         if self.eventq:
             if self.ui.debugflag:
-                self.ui.note('%s processing %d deferred events as %d\n' %
+                self.ui.note(_('%s processing %d deferred events as %d\n') %
                              (self.event_time(), self.deferred,
                               len(self.eventq)))
-            eventq = self.eventq.items()
-            eventq.sort()
-            for wpath, evts in eventq:
+            for wpath, evts in util.sort(self.eventq.items()):
                 for evt in evts:
                     self.deferred_event(wpath, evt)
             self.eventq.clear()
@@ -694,9 +692,9 @@
             try:
                 if self.ui.debugflag:
                     if timeout is None:
-                        self.ui.note('polling: no timeout\n')
+                        self.ui.note(_('polling: no timeout\n'))
                     else:
-                        self.ui.note('polling: %sms timeout\n' % timeout)
+                        self.ui.note(_('polling: %sms timeout\n') % timeout)
                 events = self.poll.poll(timeout)
             except select.error, err:
                 if err[0] == errno.EINTR:
--- a/hgext/interhg.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/interhg.py	Wed Sep 17 11:34:37 2008 +0200
@@ -28,6 +28,7 @@
 import re
 from mercurial.hgweb import hgweb_mod
 from mercurial import templatefilters
+from mercurial.i18n import _
 
 orig_escape = templatefilters.filters["escape"]
 
@@ -56,7 +57,7 @@
         match = re.match(r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
                          % (delim, delim, delim), pattern)
         if not match:
-            self.repo.ui.warn("interhg: invalid pattern for %s: %s\n"
+            self.repo.ui.warn(_("interhg: invalid pattern for %s: %s\n")
                               % (key, pattern))
             continue
 
@@ -76,7 +77,7 @@
             regexp = re.compile(regexp, flags)
             interhg_table.append((regexp, format))
         except re.error:
-            self.repo.ui.warn("interhg: invalid regexp for %s: %s\n"
+            self.repo.ui.warn(_("interhg: invalid regexp for %s: %s\n")
                               % (key, regexp))
     return orig_refresh(self)
 
--- a/hgext/keyword.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/keyword.py	Wed Sep 17 11:34:37 2008 +0200
@@ -78,7 +78,7 @@
 "Log = {desc}" expands to the first line of the changeset description.
 '''
 
-from mercurial import commands, cmdutil, context, dispatch, filelog, revlog
+from mercurial import commands, cmdutil, dispatch, filelog, revlog
 from mercurial import patch, localrepo, templater, templatefilters, util
 from mercurial.hgweb import webcommands
 from mercurial.node import nullid, hex
@@ -88,64 +88,20 @@
 commands.optionalrepo += ' kwdemo'
 
 # hg commands that do not act on keywords
-nokwcommands = ('add addremove bundle copy export grep incoming init'
-                ' log outgoing push rename rollback tip'
+nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
+                ' log outgoing push rename rollback tip verify'
                 ' convert email glog')
 
 # hg commands that trigger expansion only when writing to working dir,
 # not when reading filelog, and unexpand when reading from working dir
-restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
+restricted = 'merge record resolve qfold qimport qnew qpush qrefresh qrecord'
 
 def utcdate(date):
     '''Returns hgdate in cvs-like UTC format.'''
     return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
 
-
 # make keyword tools accessible
-kwtools = {'templater': None, 'hgcmd': None}
-
-# store originals of monkeypatches
-_patchfile_init = patch.patchfile.__init__
-_patch_diff = patch.diff
-_dispatch_parse = dispatch._parse
-
-def _kwpatchfile_init(self, ui, fname, missing=False):
-    '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
-    rejects or conflicts due to expanded keywords in working dir.'''
-    _patchfile_init(self, ui, fname, missing=missing)
-    # shrink keywords read from working dir
-    kwt = kwtools['templater']
-    self.lines = kwt.shrinklines(self.fname, self.lines)
-
-def _kw_diff(repo, node1=None, node2=None, files=None, match=util.always,
-             fp=None, changes=None, opts=None):
-    '''Monkeypatch patch.diff to avoid expansion except when
-    comparing against working dir.'''
-    if node2 is not None:
-        kwtools['templater'].matcher = util.never
-    elif node1 is not None and node1 != repo.changectx().node():
-        kwtools['templater'].restrict = True
-    _patch_diff(repo, node1=node1, node2=node2, files=files, match=match,
-                fp=fp, changes=changes, opts=opts)
-
-def _kwweb_changeset(web, req, tmpl):
-    '''Wraps webcommands.changeset turning off keyword expansion.'''
-    kwtools['templater'].matcher = util.never
-    return web.changeset(tmpl, web.changectx(req))
-
-def _kwweb_filediff(web, req, tmpl):
-    '''Wraps webcommands.filediff turning off keyword expansion.'''
-    kwtools['templater'].matcher = util.never
-    return web.filediff(tmpl, web.filectx(req))
-
-def _kwdispatch_parse(ui, args):
-    '''Monkeypatch dispatch._parse to obtain running hg command.'''
-    cmd, func, args, options, cmdoptions = _dispatch_parse(ui, args)
-    kwtools['hgcmd'] = cmd
-    return cmd, func, args, options, cmdoptions
-
-# dispatch._parse is run before reposetup, so wrap it here
-dispatch._parse = _kwdispatch_parse
+kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
 
 
 class kwtemplater(object):
@@ -163,15 +119,16 @@
         'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
     }
 
-    def __init__(self, ui, repo, inc, exc):
+    def __init__(self, ui, repo):
         self.ui = ui
         self.repo = repo
-        self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1]
+        self.matcher = util.matcher(repo.root,
+                                    inc=kwtools['inc'], exc=kwtools['exc'])[1]
         self.restrict = kwtools['hgcmd'] in restricted.split()
 
         kwmaps = self.ui.configitems('keywordmaps')
         if kwmaps: # override default templates
-            kwmaps = [(k, templater.parsestring(v, quoted=False))
+            kwmaps = [(k, templater.parsestring(v, False))
                       for (k, v) in kwmaps]
             self.templates = dict(kwmaps)
         escaped = map(re.escape, self.templates.keys())
@@ -185,7 +142,7 @@
     def getnode(self, path, fnode):
         '''Derives changenode from file path and filenode.'''
         # used by kwfilelog.read and kwexpand
-        c = context.filectx(self.repo, path, fileid=fnode)
+        c = self.repo.filectx(path, fileid=fnode)
         return c.node()
 
     def substitute(self, data, path, node, subfunc):
@@ -206,25 +163,26 @@
             return self.substitute(data, path, changenode, self.re_kw.sub)
         return data
 
-    def iskwfile(self, path, islink):
+    def iskwfile(self, path, flagfunc):
         '''Returns true if path matches [keyword] pattern
         and is not a symbolic link.
         Caveat: localrepository._link fails on Windows.'''
-        return self.matcher(path) and not islink(path)
+        return self.matcher(path) and not 'l' in flagfunc(path)
 
-    def overwrite(self, node=None, expand=True, files=None):
+    def overwrite(self, node, expand, files):
         '''Overwrites selected files expanding/shrinking keywords.'''
-        ctx = self.repo.changectx(node)
-        mf = ctx.manifest()
         if node is not None:     # commit
+            ctx = self.repo[node]
+            mf = ctx.manifest()
             files = [f for f in ctx.files() if f in mf]
             notify = self.ui.debug
         else:                    # kwexpand/kwshrink
+            ctx = self.repo['.']
+            mf = ctx.manifest()
             notify = self.ui.note
-        candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
+        candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
         if candidates:
             self.restrict = True # do not expand when reading
-            candidates.sort()
             action = expand and 'expanding' or 'shrinking'
             for f in candidates:
                 fp = self.repo.file(f)
@@ -271,9 +229,9 @@
     Subclass of filelog to hook into its read, add, cmp methods.
     Keywords are "stored" unexpanded, and processed on reading.
     '''
-    def __init__(self, opener, path):
+    def __init__(self, opener, kwt, path):
         super(kwfilelog, self).__init__(opener, path)
-        self.kwt = kwtools['templater']
+        self.kwt = kwt
         self.path = path
 
     def read(self, node):
@@ -284,7 +242,7 @@
     def add(self, text, meta, tr, link, p1=None, p2=None):
         '''Removes keyword substitutions when adding to filelog.'''
         text = self.kwt.shrink(self.path, text)
-        return super(kwfilelog, self).add(text, meta, tr, link, p1=p1, p2=p2)
+        return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
 
     def cmp(self, node, text):
         '''Removes keyword substitutions for comparison.'''
@@ -294,28 +252,30 @@
             return t2 != text
         return revlog.revlog.cmp(self, node, text)
 
-def _status(ui, repo, kwt, *pats, **opts):
+def _status(ui, repo, kwt, unknown, *pats, **opts):
     '''Bails out if [keyword] configuration is not active.
     Returns status of working directory.'''
     if kwt:
-        files, match, anypats = cmdutil.matchpats(repo, pats, opts)
-        return repo.status(files=files, match=match, list_clean=True)
+        matcher = cmdutil.match(repo, pats, opts)
+        return repo.status(match=matcher, unknown=unknown, clean=True)
     if ui.configitems('keyword'):
         raise util.Abort(_('[keyword] patterns cannot match'))
     raise util.Abort(_('no [keyword] patterns configured'))
 
 def _kwfwrite(ui, repo, expand, *pats, **opts):
     '''Selects files and passes them to kwtemplater.overwrite.'''
+    if repo.dirstate.parents()[1] != nullid:
+        raise util.Abort(_('outstanding uncommitted merge'))
     kwt = kwtools['templater']
-    status = _status(ui, repo, kwt, *pats, **opts)
-    modified, added, removed, deleted, unknown, ignored, clean = status
+    status = _status(ui, repo, kwt, False, *pats, **opts)
+    modified, added, removed, deleted = status[:4]
     if modified or added or removed or deleted:
-        raise util.Abort(_('outstanding uncommitted changes in given files'))
+        raise util.Abort(_('outstanding uncommitted changes'))
     wlock = lock = None
     try:
         wlock = repo.wlock()
         lock = repo.lock()
-        kwt.overwrite(expand=expand, files=clean)
+        kwt.overwrite(None, expand, status[6])
     finally:
         del wlock, lock
 
@@ -345,7 +305,7 @@
     branchname = 'demobranch'
     tmpdir = tempfile.mkdtemp('', 'kwdemo.')
     ui.note(_('creating temporary repo at %s\n') % tmpdir)
-    repo = localrepo.localrepository(ui, path=tmpdir, create=True)
+    repo = localrepo.localrepository(ui, tmpdir, True)
     ui.setconfig('keyword', fn, '')
     if args or opts.get('rcfile'):
         kwstatus = 'custom'
@@ -367,6 +327,7 @@
         ui.readconfig(repo.join('hgrc'))
     if not opts.get('default'):
         kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
+    uisetup(ui)
     reposetup(ui, repo)
     for k, v in ui.configitems('extensions'):
         if k.endswith('keyword'):
@@ -418,15 +379,11 @@
     That is, files matched by [keyword] config patterns but not symlinks.
     '''
     kwt = kwtools['templater']
-    status = _status(ui, repo, kwt, *pats, **opts)
+    status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
     modified, added, removed, deleted, unknown, ignored, clean = status
-    files = modified + added + clean
-    if opts.get('untracked'):
-        files += unknown
-    files.sort()
-    wctx = repo.workingctx()
-    islink = lambda p: 'l' in wctx.fileflags(p)
-    kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
+    files = util.sort(modified + added + clean + unknown)
+    wctx = repo[None]
+    kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
     cwd = pats and repo.getcwd() or ''
     kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
     if opts.get('all') or opts.get('ignore'):
@@ -448,46 +405,57 @@
     _kwfwrite(ui, repo, False, *pats, **opts)
 
 
+def uisetup(ui):
+    '''Collects [keyword] config in kwtools.
+    Monkeypatches dispatch._parse if needed.'''
+
+    for pat, opt in ui.configitems('keyword'):
+        if opt != 'ignore':
+            kwtools['inc'].append(pat)
+        else:
+            kwtools['exc'].append(pat)
+
+    if kwtools['inc']:
+        def kwdispatch_parse(ui, args):
+            '''Monkeypatch dispatch._parse to obtain running hg command.'''
+            cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
+            kwtools['hgcmd'] = cmd
+            return cmd, func, args, options, cmdoptions
+
+        dispatch_parse = dispatch._parse
+        dispatch._parse = kwdispatch_parse
+
 def reposetup(ui, repo):
     '''Sets up repo as kwrepo for keyword substitution.
     Overrides file method to return kwfilelog instead of filelog
     if file matches user configuration.
     Wraps commit to overwrite configured files with updated
     keyword substitutions.
-    This is done for local repos only, and only if there are
-    files configured at all for keyword substitution.'''
+    Monkeypatches patch and webcommands.'''
 
     try:
-        if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
+        if (not repo.local() or not kwtools['inc']
+            or kwtools['hgcmd'] in nokwcommands.split()
             or '.hg' in util.splitpath(repo.root)
             or repo._url.startswith('bundle:')):
             return
     except AttributeError:
         pass
 
-    inc, exc = [], ['.hg*']
-    for pat, opt in ui.configitems('keyword'):
-        if opt != 'ignore':
-            inc.append(pat)
-        else:
-            exc.append(pat)
-    if not inc:
-        return
-
-    kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
+    kwtools['templater'] = kwt = kwtemplater(ui, repo)
 
     class kwrepo(repo.__class__):
         def file(self, f):
             if f[0] == '/':
                 f = f[1:]
-            return kwfilelog(self.sopener, f)
+            return kwfilelog(self.sopener, kwt, f)
 
         def wread(self, filename):
             data = super(kwrepo, self).wread(filename)
             return kwt.wread(filename, data)
 
         def commit(self, files=None, text='', user=None, date=None,
-                   match=util.always, force=False, force_editor=False,
+                   match=None, force=False, force_editor=False,
                    p1=None, p2=None, extra={}, empty_ok=False):
             wlock = lock = None
             _p1 = _p2 = None
@@ -512,28 +480,66 @@
                     else:
                         _p2 = hex(_p2)
 
-                node = super(kwrepo,
-                             self).commit(files=files, text=text, user=user,
-                                          date=date, match=match, force=force,
-                                          force_editor=force_editor,
-                                          p1=p1, p2=p2, extra=extra,
-                                          empty_ok=empty_ok)
+                n = super(kwrepo, self).commit(files, text, user, date, match,
+                                               force, force_editor, p1, p2,
+                                               extra, empty_ok)
 
                 # restore commit hooks
                 for name, cmd in commithooks.iteritems():
                     ui.setconfig('hooks', name, cmd)
-                if node is not None:
-                    kwt.overwrite(node=node)
-                    repo.hook('commit', node=node, parent1=_p1, parent2=_p2)
-                return node
+                if n is not None:
+                    kwt.overwrite(n, True, None)
+                    repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
+                return n
             finally:
                 del wlock, lock
 
+    # monkeypatches
+    def kwpatchfile_init(self, ui, fname, missing=False):
+        '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
+        rejects or conflicts due to expanded keywords in working dir.'''
+        patchfile_init(self, ui, fname, missing)
+        # shrink keywords read from working dir
+        self.lines = kwt.shrinklines(self.fname, self.lines)
+
+    def kw_diff(repo, node1=None, node2=None, match=None,
+                fp=None, changes=None, opts=None):
+        '''Monkeypatch patch.diff to avoid expansion except when
+        comparing against working dir.'''
+        if node2 is not None:
+            kwt.matcher = util.never
+        elif node1 is not None and node1 != repo['.'].node():
+            kwt.restrict = True
+        patch_diff(repo, node1, node2, match, fp, changes, opts)
+
+    def kwweb_annotate(web, req, tmpl):
+        '''Wraps webcommands.annotate turning off keyword expansion.'''
+        kwt.matcher = util.never
+        return webcommands_annotate(web, req, tmpl)
+
+    def kwweb_changeset(web, req, tmpl):
+        '''Wraps webcommands.changeset turning off keyword expansion.'''
+        kwt.matcher = util.never
+        return webcommands_changeset(web, req, tmpl)
+
+    def kwweb_filediff(web, req, tmpl):
+        '''Wraps webcommands.filediff turning off keyword expansion.'''
+        kwt.matcher = util.never
+        return webcommands_filediff(web, req, tmpl)
+
     repo.__class__ = kwrepo
-    patch.patchfile.__init__ = _kwpatchfile_init
-    patch.diff = _kw_diff
-    webcommands.changeset = webcommands.rev = _kwweb_changeset
-    webcommands.filediff = webcommands.diff = _kwweb_filediff
+
+    patchfile_init = patch.patchfile.__init__
+    patch_diff = patch.diff
+    webcommands_annotate = webcommands.annotate
+    webcommands_changeset = webcommands.changeset
+    webcommands_filediff = webcommands.filediff
+
+    patch.patchfile.__init__ = kwpatchfile_init
+    patch.diff = kw_diff
+    webcommands.annotate = kwweb_annotate
+    webcommands.changeset = webcommands.rev = kwweb_changeset
+    webcommands.filediff = webcommands.diff = kwweb_filediff
 
 
 cmdtable = {
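With uisetup() now reading the [keyword] section itself, a hypothetical hgrc fragment exercising the inc/exc collection and a keywordmaps override would look like this (the file patterns are examples; the Header template mirrors the default shown in kwtemplater.templates above):

    [extensions]
    hgext.keyword =

    [keyword]
    # expand keywords in Python files, but ignore anything matching x*
    **.py =
    x*    = ignore

    [keywordmaps]
    Header = {root}/{file},v {node|short} {date|utcdate} {author|user}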
--- a/hgext/mq.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/mq.py	Wed Sep 17 11:34:37 2008 +0200
@@ -143,9 +143,8 @@
             bad = self.check_guard(guard)
             if bad:
                 raise util.Abort(bad)
-        guards = dict.fromkeys(guards).keys()
-        guards.sort()
-        self.ui.debug('active guards: %s\n' % ' '.join(guards))
+        guards = util.sort(util.unique(guards))
+        self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
         self.active_guards = guards
         self.guards_dirty = True
 
@@ -318,14 +317,12 @@
         try:
             os.unlink(undo)
         except OSError, inst:
-            self.ui.warn('error removing undo: %s\n' % str(inst))
+            self.ui.warn(_('error removing undo: %s\n') % str(inst))
 
     def printdiff(self, repo, node1, node2=None, files=None,
                   fp=None, changes=None, opts={}):
-        fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
-
-        patch.diff(repo, node1, node2, fns, match=matchfn,
-                   fp=fp, changes=changes, opts=self.diffopts())
+        m = cmdutil.match(repo, files, opts)
+        patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
 
     def mergeone(self, repo, mergeq, head, patch, rev):
         # first try just applying the patch
@@ -338,13 +335,13 @@
         if n is None:
             raise util.Abort(_("apply failed for patch %s") % patch)
 
-        self.ui.warn("patch didn't work out, merging %s\n" % patch)
+        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
 
         # apply failed, strip away that rev and merge.
         hg.clean(repo, head)
         self.strip(repo, n, update=False, backup='strip')
 
-        ctx = repo.changectx(rev)
+        ctx = repo[rev]
         ret = hg.merge(repo, rev)
         if ret:
             raise util.Abort(_("update returned %d") % ret)
@@ -403,7 +400,7 @@
         for patch in series:
             patch = mergeq.lookup(patch, strict=True)
             if not patch:
-                self.ui.warn("patch %s does not exist\n" % patch)
+                self.ui.warn(_("patch %s does not exist\n") % patch)
                 return (1, None)
             pushable, reason = self.pushable(patch)
             if not pushable:
@@ -411,7 +408,7 @@
                 continue
             info = mergeq.isapplied(patch)
             if not info:
-                self.ui.warn("patch %s is not applied\n" % patch)
+                self.ui.warn(_("patch %s is not applied\n") % patch)
                 return (1, None)
             rev = revlog.bin(info[1])
             (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
@@ -433,7 +430,7 @@
         except Exception, inst:
             self.ui.note(str(inst) + '\n')
             if not self.ui.verbose:
-                self.ui.warn("patch failed, unable to continue (try -v)\n")
+                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
             return (False, files, False)
 
         return (True, files, fuzz)
@@ -474,21 +471,21 @@
             if not pushable:
                 self.explain_pushable(patchname, all_patches=True)
                 continue
-            self.ui.warn("applying %s\n" % patchname)
+            self.ui.warn(_("applying %s\n") % patchname)
             pf = os.path.join(patchdir, patchname)
 
             try:
                 message, comments, user, date, patchfound = self.readheaders(patchname)
             except:
-                self.ui.warn("Unable to read %s\n" % patchname)
+                self.ui.warn(_("Unable to read %s\n") % patchname)
                 err = 1
                 break
 
             if not message:
-                message = "imported patch %s\n" % patchname
+                message = _("imported patch %s\n") % patchname
             else:
                 if list:
-                    message.append("\nimported patch %s" % patchname)
+                    message.append(_("\nimported patch %s") % patchname)
                 message = '\n'.join(message)
 
             (patcherr, files, fuzz) = self.patch(repo, pf)
@@ -510,8 +507,10 @@
                     repo.dirstate.merge(f)
                 p1, p2 = repo.dirstate.parents()
                 repo.dirstate.setparents(p1, merge)
+
             files = patch.updatedir(self.ui, repo, files)
-            n = repo.commit(files, message, user, date, match=util.never,
+            match = cmdutil.matchfiles(repo, files or [])
+            n = repo.commit(files, message, user, date, match=match,
                             force=True)
 
             if n == None:
@@ -522,19 +521,53 @@
 
             if patcherr:
                 if not patchfound:
-                    self.ui.warn("patch %s is empty\n" % patchname)
+                    self.ui.warn(_("patch %s is empty\n") % patchname)
                     err = 0
                 else:
-                    self.ui.warn("patch failed, rejects left in working dir\n")
+                    self.ui.warn(_("patch failed, rejects left in working dir\n"))
                     err = 1
                 break
 
             if fuzz and strict:
-                self.ui.warn("fuzz found when applying patch, stopping\n")
+                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                 err = 1
                 break
         return (err, n)
 
+    def _clean_series(self, patches):
+        indices = util.sort([self.find_series(p) for p in patches])
+        for i in indices[-1::-1]:
+            del self.full_series[i]
+        self.parse_series()
+        self.series_dirty = 1
+
+    def finish(self, repo, revs):
+        revs.sort()
+        firstrev = repo[self.applied[0].rev].rev()
+        appliedbase = 0
+        patches = []
+        for rev in util.sort(revs):
+            if rev < firstrev:
+                raise util.Abort(_('revision %d is not managed') % rev)
+            base = revlog.bin(self.applied[appliedbase].rev)
+            node = repo.changelog.node(rev)
+            if node != base:
+                raise util.Abort(_('cannot delete revision %d above '
+                                   'applied patches') % rev)
+            patches.append(self.applied[appliedbase].name)
+            appliedbase += 1
+
+        r = self.qrepo()
+        if r:
+            r.remove(patches, True)
+        else:
+            for p in patches:
+                os.unlink(self.join(p))
+
+        del self.applied[:appliedbase]
+        self.applied_dirty = 1
+        self._clean_series(patches)
+
     def delete(self, repo, patches, opts):
         if not patches and not opts.get('rev'):
             raise util.Abort(_('qdelete requires at least one revision or '
@@ -580,12 +613,7 @@
         if appliedbase:
             del self.applied[:appliedbase]
             self.applied_dirty = 1
-        indices = [self.find_series(p) for p in realpatches]
-        indices.sort()
-        for i in indices[-1::-1]:
-            del self.full_series[i]
-        self.parse_series()
-        self.series_dirty = 1
+        self._clean_series(realpatches)
 
     def check_toppatch(self, repo):
         if len(self.applied) > 0:
@@ -623,11 +651,11 @@
         if os.path.exists(self.join(patch)):
             raise util.Abort(_('patch "%s" already exists') % patch)
         if opts.get('include') or opts.get('exclude') or pats:
-            fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
-            m, a, r, d = repo.status(files=fns, match=match)[:4]
+            match = cmdutil.match(repo, pats, opts)
+            m, a, r, d = repo.status(match=match)[:4]
         else:
             m, a, r, d = self.check_localchanges(repo, force)
-            fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
+            match = cmdutil.match(repo, m + a + r)
         commitfiles = m + a + r
         self.check_toppatch(repo)
         wlock = repo.wlock()
@@ -665,14 +693,14 @@
         finally:
             del wlock
 
-    def strip(self, repo, rev, update=True, backup="all"):
+    def strip(self, repo, rev, update=True, backup="all", force=None):
         wlock = lock = None
         try:
             wlock = repo.wlock()
             lock = repo.lock()
 
             if update:
-                self.check_localchanges(repo, refresh=False)
+                self.check_localchanges(repo, force=force, refresh=False)
                 urev = self.qparents(repo, rev)
                 hg.clean(repo, urev)
                 repo.dirstate.write()
@@ -822,7 +850,7 @@
                 self.ui.warn(_('cleaning up working directory...'))
                 node = repo.dirstate.parents()[0]
                 hg.revert(repo, node, None)
-                unknown = repo.status()[4]
+                unknown = repo.status(unknown=True)[4]
                 # only remove unknown files that we know we touched or
                 # created while patching
                 for f in unknown:
@@ -867,7 +895,7 @@
                 rr = [ revlog.bin(x.rev) for x in self.applied ]
                 for p in parents:
                     if p in rr:
-                        self.ui.warn("qpop: forcing dirstate update\n")
+                        self.ui.warn(_("qpop: forcing dirstate update\n"))
                         update = True
 
             if not force and update:
@@ -883,7 +911,7 @@
             else:
                 popi = info[0] + 1
                 if popi >= end:
-                    self.ui.warn("qpop: %s is already at the top\n" % patch)
+                    self.ui.warn(_("qpop: %s is already at the top\n") % patch)
                     return
             info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
 
@@ -894,8 +922,8 @@
                 top = self.check_toppatch(repo)
 
             if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
-                raise util.Abort("popping would remove a revision not "
-                                 "managed by this patch queue")
+                raise util.Abort(_("popping would remove a revision not "
+                                   "managed by this patch queue"))
 
             # we know there are no local changes, so we can make a simplified
             # form of hg.update.
@@ -903,9 +931,9 @@
                 qp = self.qparents(repo, rev)
                 changes = repo.changelog.read(qp)
                 mmap = repo.manifest.read(changes[0])
-                m, a, r, d, u = repo.status(qp, top)[:5]
+                m, a, r, d = repo.status(qp, top)[:4]
                 if d:
-                    raise util.Abort("deletions found between repo revs")
+                    raise util.Abort(_("deletions found between repo revs"))
                 for f in m:
                     getfile(f, mmap[f], mmap.flags(f))
                 for f in r:
@@ -925,27 +953,24 @@
             del self.applied[start:end]
             self.strip(repo, rev, update=False, backup='strip')
             if len(self.applied):
-                self.ui.write("Now at: %s\n" % self.applied[-1].name)
+                self.ui.write(_("Now at: %s\n") % self.applied[-1].name)
             else:
-                self.ui.write("Patch queue now empty\n")
+                self.ui.write(_("Patch queue now empty\n"))
         finally:
             del wlock
 
     def diff(self, repo, pats, opts):
         top = self.check_toppatch(repo)
         if not top:
-            self.ui.write("No patches applied\n")
+            self.ui.write(_("No patches applied\n"))
             return
         qp = self.qparents(repo, top)
-        if opts.get('git'):
-            self.diffopts().git = True
-        if opts.get('unified') is not None:
-            self.diffopts().context = opts['unified']
+        self._diffopts = patch.diffopts(self.ui, opts)
         self.printdiff(repo, qp, files=pats, opts=opts)
 
     def refresh(self, repo, pats=None, **opts):
         if len(self.applied) == 0:
-            self.ui.write("No patches applied\n")
+            self.ui.write(_("No patches applied\n"))
             return 1
         newdate = opts.get('date')
         if newdate:
@@ -956,7 +981,7 @@
             (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
             top = revlog.bin(top)
             if repo.changelog.heads(top) != [top]:
-                raise util.Abort("cannot refresh a revision with children")
+                raise util.Abort(_("cannot refresh a revision with children"))
             cparents = repo.changelog.parents(top)
             patchparent = self.qparents(repo, top)
             message, comments, user, date, patchfound = self.readheaders(patchfn)
@@ -1026,7 +1051,7 @@
 
             if opts.get('git'):
                 self.diffopts().git = True
-            fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
+            matchfn = cmdutil.match(repo, pats, opts)
             tip = repo.changelog.tip()
             if top == tip:
                 # if the top of our patch queue is also the tip, there is an
@@ -1039,21 +1064,19 @@
                 # patch already
                 #
                 # this should really read:
-                #   mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
+                #   mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
                 # but we do it backwards to take advantage of manifest/chlog
                 # caching against the next repo.status call
                 #
-                mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
+                mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
                 changes = repo.changelog.read(tip)
                 man = repo.manifest.read(changes[0])
                 aaa = aa[:]
                 if opts.get('short'):
-                    filelist = mm + aa + dd
-                    match = dict.fromkeys(filelist).__contains__
+                    match = cmdutil.matchfiles(repo, mm + aa + dd)
                 else:
-                    filelist = None
-                    match = util.always
-                m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
+                    match = cmdutil.matchall(repo)
+                m, a, r, d = repo.status(match=match)[:4]
 
                 # we might end up with files that were added between
                 # tip and the dirstate parent, but then changed in the
@@ -1086,9 +1109,9 @@
                 m = util.unique(mm)
                 r = util.unique(dd)
                 a = util.unique(aa)
-                c = [filter(matchfn, l) for l in (m, a, r, [], u)]
-                filelist = util.unique(c[0] + c[1] + c[2])
-                patch.diff(repo, patchparent, files=filelist, match=matchfn,
+                c = [filter(matchfn, l) for l in (m, a, r)]
+                match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2]))
+                patch.diff(repo, patchparent, match=match,
                            fp=patchf, changes=c, opts=self.diffopts())
                 patchf.close()
 
@@ -1146,7 +1169,7 @@
                 self.applied_dirty = 1
                 self.strip(repo, top, update=False,
                            backup='strip')
-                n = repo.commit(filelist, message, user, date, match=matchfn,
+                n = repo.commit(match.files(), message, user, date, match=match,
                                 force=1)
                 self.applied.append(statusentry(revlog.hex(n), patchfn))
                 self.removeundo(repo)
@@ -1236,8 +1259,7 @@
                                    self.guards_path)
                         and not fl.startswith('.')):
                         msng_list.append(fl)
-            msng_list.sort()
-            for x in msng_list:
+            for x in util.sort(msng_list):
                 pfx = self.ui.verbose and ('D ') or ''
                 self.ui.write("%s%s\n" % (pfx, displayname(x)))
 
@@ -1274,9 +1296,9 @@
                 else:
                     series.append(file_)
         if datastart == None:
-            self.ui.warn("No saved patch data found\n")
+            self.ui.warn(_("No saved patch data found\n"))
             return 1
-        self.ui.warn("restoring status: %s\n" % lines[0])
+        self.ui.warn(_("restoring status: %s\n") % lines[0])
         self.full_series = series
         self.applied = applied
         self.parse_series()
@@ -1285,9 +1307,9 @@
         heads = repo.changelog.heads()
         if delete:
             if rev not in heads:
-                self.ui.warn("save entry has children, leaving it alone\n")
+                self.ui.warn(_("save entry has children, leaving it alone\n"))
             else:
-                self.ui.warn("removing save entry %s\n" % short(rev))
+                self.ui.warn(_("removing save entry %s\n") % short(rev))
                 pp = repo.dirstate.parents()
                 if rev in pp:
                     update = True
@@ -1295,27 +1317,27 @@
                     update = False
                 self.strip(repo, rev, update=update, backup='strip')
         if qpp:
-            self.ui.warn("saved queue repository parents: %s %s\n" %
+            self.ui.warn(_("saved queue repository parents: %s %s\n") %
                          (short(qpp[0]), short(qpp[1])))
             if qupdate:
                 self.ui.status(_("queue directory updating\n"))
                 r = self.qrepo()
                 if not r:
-                    self.ui.warn("Unable to load queue repository\n")
+                    self.ui.warn(_("Unable to load queue repository\n"))
                     return 1
                 hg.clean(r, qpp[0])
 
     def save(self, repo, msg=None):
         if len(self.applied) == 0:
-            self.ui.warn("save: no patches applied, exiting\n")
+            self.ui.warn(_("save: no patches applied, exiting\n"))
             return 1
         if self.issaveline(self.applied[-1]):
-            self.ui.warn("status is already saved\n")
+            self.ui.warn(_("status is already saved\n"))
             return 1
 
         ar = [ ':' + x for x in self.full_series ]
         if not msg:
-            msg = "hg patches saved state"
+            msg = _("hg patches saved state")
         else:
             msg = "hg patches: " + msg.rstrip('\r\n')
         r = self.qrepo()
@@ -1327,7 +1349,7 @@
                    "\n".join(ar) + '\n' or "")
         n = repo.commit(None, text, user=None, force=1)
         if not n:
-            self.ui.warn("repo commit failed\n")
+            self.ui.warn(_("repo commit failed\n"))
             return 1
         self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
         self.applied_dirty = 1
@@ -1472,7 +1494,7 @@
                     else:
                         text = file(filename, 'rb').read()
                 except IOError:
-                    raise util.Abort(_("unable to read %s") % patchname)
+                    raise util.Abort(_("unable to read %s") % filename)
                 if not patchname:
                     patchname = normname(os.path.basename(filename))
                 self.check_reserved_name(patchname)
@@ -1499,9 +1521,8 @@
     the --rev parameter. At least one patch or revision is required.
 
     With --rev, mq will stop managing the named revisions (converting
-    them to regular mercurial changesets). The patches must be applied
-    and at the base of the stack. This option is useful when the patches
-    have been applied upstream.
+    them to regular mercurial changesets). The qfinish command should be
+    used as an alternative to qdel -r, as the latter option is deprecated.
 
     With --keep, the patch files are preserved in the patch directory."""
     q = repo.mq
@@ -1938,10 +1959,10 @@
         else:
             newpath, i = lastsavename(q.path)
         if not newpath:
-            ui.warn("no saved queues found, please use -n\n")
+            ui.warn(_("no saved queues found, please use -n\n"))
             return 1
         mergeq = queue(ui, repo.join(""), newpath)
-        ui.warn("merging with queue at: %s\n" % mergeq.path)
+        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
     ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
                  mergeq=mergeq)
     return ret
@@ -1955,7 +1976,7 @@
     localupdate = True
     if opts['name']:
         q = queue(ui, repo.join(""), repo.join(opts['name']))
-        ui.warn('using patch queue: %s\n' % q.path)
+        ui.warn(_('using patch queue: %s\n') % q.path)
         localupdate = False
     else:
         q = repo.mq
@@ -2054,7 +2075,7 @@
                                        'use -f to force') % newpath)
         else:
             newpath = savename(path)
-        ui.warn("copy %s to %s\n" % (path, newpath))
+        ui.warn(_("copy %s to %s\n") % (path, newpath))
         util.copyfiles(path, newpath)
     if opts['empty']:
         try:
@@ -2086,7 +2107,7 @@
     elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
         update = False
 
-    repo.mq.strip(repo, rev, backup=backup, update=update)
+    repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
     return 0
 
 def select(ui, repo, *args, **opts):
@@ -2191,6 +2212,35 @@
         finally:
             q.save_dirty()
 
+def finish(ui, repo, *revrange, **opts):
+    """move applied patches into repository history
+
+    Finishes the specified revisions (corresponding to applied patches) by
+    moving them out of mq control into regular repository history.
+
+    Accepts a revision range or the --applied option. If --applied is
+    specified, all applied mq revisions are removed from mq control.
+    Otherwise, the given revisions must be at the base of the stack of
+    applied patches.
+
+    This can be especially useful if your changes have been applied to an
+    upstream repository, or if you are about to push your changes upstream.
+    """
+    if not opts['applied'] and not revrange:
+        raise util.Abort(_('no revisions specified'))
+    elif opts['applied']:
+        revrange = ('qbase:qtip',) + revrange
+
+    q = repo.mq
+    if not q.applied:
+        ui.status(_('no patches applied\n'))
+        return 0
+
+    revs = cmdutil.revrange(repo, revrange)
+    q.finish(repo, revs)
+    q.save_dirty()
+    return 0
+
 def reposetup(ui, repo):
     class mqrepo(repo.__class__):
         def abort_if_wdir_patched(self, errmsg, force=False):
@@ -2228,7 +2278,7 @@
             mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
 
             if mqtags[-1][0] not in self.changelog.nodemap:
-                self.ui.warn('mq status file refers to unknown node %s\n'
+                self.ui.warn(_('mq status file refers to unknown node %s\n')
                              % revlog.short(mqtags[-1][0]))
                 return tagscache
 
@@ -2237,7 +2287,8 @@
             mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
             for patch in mqtags:
                 if patch[1] in tagscache:
-                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
+                    self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
+                                 % patch[1])
                 else:
                     tagscache[patch[1]] = patch[0]
 
@@ -2251,7 +2302,7 @@
             cl = self.changelog
             qbasenode = revlog.bin(q.applied[0].rev)
             if qbasenode not in cl.nodemap:
-                self.ui.warn('mq status file refers to unknown node %s\n'
+                self.ui.warn(_('mq status file refers to unknown node %s\n')
                              % revlog.short(qbasenode))
                 return super(mqrepo, self)._branchtags(partial, lrev)
 
@@ -2267,7 +2318,7 @@
             # we might as well use it, but we won't save it.
 
             # update the cache up to the tip
-            self._updatebranchcache(partial, start, cl.count())
+            self._updatebranchcache(partial, start, len(cl))
 
             return partial
 
@@ -2294,10 +2345,8 @@
          _('hg qcommit [OPTION]... [FILE]...')),
     "^qdiff":
         (diff,
-         [('g', 'git', None, _('use git extended diff format')),
-          ('U', 'unified', 3, _('number of lines of context to show')),
-         ] + commands.walkopts,
-         _('hg qdiff [-I] [-X] [-U NUM] [-g] [FILE]...')),
+         commands.diffopts + commands.diffopts2 + commands.walkopts,
+         _('hg qdiff [OPTION]... [FILE]...')),
     "qdelete|qremove|qrm":
         (delete,
          [('k', 'keep', None, _('keep patch file')),
@@ -2321,15 +2370,15 @@
     'qheader': (header, [], _('hg qheader [PATCH]')),
     "^qimport":
         (qimport,
-         [('e', 'existing', None, 'import file in patch dir'),
-          ('n', 'name', '', 'patch file name'),
-          ('f', 'force', None, 'overwrite existing files'),
-          ('r', 'rev', [], 'place existing revisions under mq control'),
+         [('e', 'existing', None, _('import file in patch dir')),
+          ('n', 'name', '', _('patch file name')),
+          ('f', 'force', None, _('overwrite existing files')),
+          ('r', 'rev', [], _('place existing revisions under mq control')),
           ('g', 'git', None, _('use git extended diff format'))],
          _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
     "^qinit":
         (init,
-         [('c', 'create-repo', None, 'create queue repository')],
+         [('c', 'create-repo', None, _('create queue repository'))],
          _('hg qinit [-c]')),
     "qnew":
         (new,
@@ -2397,9 +2446,14 @@
          _('hg qseries [-ms]')),
     "^strip":
         (strip,
-         [('b', 'backup', None, _('bundle unrelated changesets')),
+         [('f', 'force', None, _('force removal with local changes')),
+          ('b', 'backup', None, _('bundle unrelated changesets')),
           ('n', 'nobackup', None, _('no backups'))],
          _('hg strip [-f] [-b] [-n] REV')),
     "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
     "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
+    "qfinish":
+        (finish,
+         [('a', 'applied', None, _('finish all applied changesets'))],
+         _('hg qfinish [-a] [REV...]')),
 }
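A brief usage sketch for the new qfinish command (the revisions are examples); per the code above, --applied simply prepends the qbase:qtip range:

    $ hg qfinish -a              # move every applied patch into regular history
    $ hg qfinish qbase:qtip      # the same, spelled as an explicit revision range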
--- a/hgext/notify.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/notify.py	Wed Sep 17 11:34:37 2008 +0200
@@ -156,9 +156,7 @@
             if fnmatch.fnmatch(self.repo.root, pat):
                 for user in users.split(','):
                     subs[self.fixmail(user)] = 1
-        subs = subs.keys()
-        subs.sort()
-        return subs
+        return util.sort(subs)
 
     def url(self, path=None):
         return self.ui.config('web', 'baseurl') + (path or self.root)
@@ -235,9 +233,11 @@
     def diff(self, node, ref):
         maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
         prev = self.repo.changelog.parents(node)[0]
+
         self.ui.pushbuffer()
         patch.diff(self.repo, prev, ref, opts=patch.diffopts(self.ui))
-        difflines = self.ui.popbuffer().splitlines(1)
+        difflines = self.ui.popbuffer().splitlines()
+
         if self.ui.configbool('notify', 'diffstat', True):
             s = patch.diffstat(difflines)
             # s may be nil, don't include the header if it is
@@ -251,7 +251,7 @@
             difflines = difflines[:maxdiff]
         elif difflines:
             self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
-        self.ui.write(*difflines)
+        self.ui.write("\n".join(difflines))
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     '''send email notifications to interested subscribers.
@@ -269,11 +269,11 @@
     node = bin(node)
     ui.pushbuffer()
     if hooktype == 'changegroup':
-        start = repo.changelog.rev(node)
-        end = repo.changelog.count()
+        start = repo[node].rev()
+        end = len(repo)
         count = end - start
         for rev in xrange(start, end):
-            n.node(repo.changelog.node(rev))
+            n.node(repo[rev].node())
         n.diff(node, repo.changelog.tip())
     else:
         count = 1
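The diff handling touched above is governed by two existing [notify] settings read in diff(); a hypothetical hgrc fragment using the defaults from the code would be:

    [notify]
    maxdiff  = 300     # truncate emailed diffs longer than this many lines
    diffstat = True    # prepend a diffstat summary to the diff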
--- a/hgext/pager.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/pager.py	Wed Sep 17 11:34:37 2008 +0200
@@ -10,26 +10,56 @@
 #   [extension]
 #   hgext.pager =
 #
-# To set the pager that should be used, set the application variable:
-#
-#   [pager]
-#   pager = LESS='FSRX' less
-#
-# If no pager is set, the pager extensions uses the environment
-# variable $PAGER. If neither pager.pager, nor $PAGER is set, no pager
-# is used.
-#
-# If you notice "BROKEN PIPE" error messages, you can disable them
-# by setting:
-#
-#   [pager]
-#   quiet = True
+# Run "hg help pager" to get info on configuration.
+
+'''browse command output with external pager
+
+To set the pager that should be used, set the application variable:
+
+  [pager]
+  pager = LESS='FSRX' less
+
+If no pager is set, the pager extension uses the environment
+variable $PAGER. If neither pager.pager nor $PAGER is set, no pager
+is used.
+
+If you notice "BROKEN PIPE" error messages, you can disable them
+by setting:
+
+  [pager]
+  quiet = True
+
+You can disable the pager for certain commands by adding them to the
+pager.ignore list:
+
+  [pager]
+  ignore = version, help, update
+
+You can also enable the pager only for certain commands using pager.attend:
+
+  [pager]
+  attend = log
+
+If pager.attend is present, pager.ignore will be ignored.
+
+To ignore global commands like "hg version" or "hg help", you have to specify
+them in the global .hgrc.
+'''
 
 import sys, os, signal
+from mercurial import dispatch, util
 
 def uisetup(ui):
-    p = ui.config("pager", "pager", os.environ.get("PAGER"))
-    if p and sys.stdout.isatty() and '--debugger' not in sys.argv:
-        if ui.configbool('pager', 'quiet'):
-            signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-        sys.stderr = sys.stdout = os.popen(p, "wb")
+    def pagecmd(ui, options, cmd, cmdfunc):
+        p = ui.config("pager", "pager", os.environ.get("PAGER"))
+        if p and sys.stdout.isatty() and '--debugger' not in sys.argv:
+            attend = ui.configlist('pager', 'attend')
+            if (cmd in attend or
+                (cmd not in ui.configlist('pager', 'ignore') and not attend)):
+                sys.stderr = sys.stdout = util.popen(p, "wb")
+                if ui.configbool('pager', 'quiet'):
+                    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+        return oldrun(ui, options, cmd, cmdfunc)
+
+    oldrun = dispatch._runcommand
+    dispatch._runcommand = pagecmd
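
The rewritten pager extension installs itself by replacing dispatch._runcommand with a wrapper that decides whether to spawn a pager and then calls the saved original. A minimal sketch of that wrap-and-delegate pattern; wrapattr and decide are illustrative names, not Mercurial API:

    def wrapattr(owner, name, decide):
        # replace the named attribute with a wrapper: decide() runs
        # first, then the saved original is called unchanged
        original = getattr(owner, name)
        def wrapped(*args, **kwargs):
            decide(*args, **kwargs)
            return original(*args, **kwargs)
        setattr(owner, name, wrapped)
        return original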
--- a/hgext/patchbomb.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/patchbomb.py	Wed Sep 17 11:34:37 2008 +0200
@@ -1,72 +1,69 @@
-# Command for sending a collection of Mercurial changesets as a series
-# of patch emails.
-#
-# The series is started off with a "[PATCH 0 of N]" introduction,
-# which describes the series as a whole.
-#
-# Each patch email has a Subject line of "[PATCH M of N] ...", using
-# the first line of the changeset description as the subject text.
-# The message contains two or three body parts:
-#
-#   The remainder of the changeset description.
-#
-#   [Optional] If the diffstat program is installed, the result of
-#   running diffstat on the patch.
-#
-#   The patch itself, as generated by "hg export".
-#
-# Each message refers to all of its predecessors using the In-Reply-To
-# and References headers, so they will show up as a sequence in
-# threaded mail and news readers, and in mail archives.
-#
-# For each changeset, you will be prompted with a diffstat summary and
-# the changeset summary, so you can be sure you are sending the right
-# changes.
-#
-# To enable this extension:
-#
-#   [extensions]
-#   hgext.patchbomb =
-#
-# To configure other defaults, add a section like this to your hgrc
-# file:
-#
-#   [email]
-#   from = My Name <my@email>
-#   to = recipient1, recipient2, ...
-#   cc = cc1, cc2, ...
-#   bcc = bcc1, bcc2, ...
-#
-# Then you can use the "hg email" command to mail a series of changesets
-# as a patchbomb.
-#
-# To avoid sending patches prematurely, it is a good idea to first run
-# the "email" command with the "-n" option (test only).  You will be
-# prompted for an email recipient address, a subject an an introductory
-# message describing the patches of your patchbomb.  Then when all is
-# done, patchbomb messages are displayed. If PAGER environment variable
-# is set, your pager will be fired up once for each patchbomb message, so
-# you can verify everything is alright.
-#
-# The "-m" (mbox) option is also very useful.  Instead of previewing
-# each patchbomb message in a pager or sending the messages directly,
-# it will create a UNIX mailbox file with the patch emails.  This
-# mailbox file can be previewed with any mail user agent which supports
-# UNIX mbox files, i.e. with mutt:
-#
-#   % mutt -R -f mbox
-#
-# When you are previewing the patchbomb messages, you can use `formail'
-# (a utility that is commonly installed as part of the procmail package),
-# to send each message out:
-#
-#  % formail -s sendmail -bm -t < mbox
-#
-# That should be all.  Now your patchbomb is on its way out.
+'''sending Mercurial changesets as a series of patch emails
+
+The series is started off with a "[PATCH 0 of N]" introduction,
+which describes the series as a whole.
+
+Each patch email has a Subject line of "[PATCH M of N] ...", using
+the first line of the changeset description as the subject text.
+The message contains two or three body parts:
+
+  The remainder of the changeset description.
+
+  [Optional] If the diffstat program is installed, the result of
+  running diffstat on the patch.
+
+  The patch itself, as generated by "hg export".
+
+Each message refers to all of its predecessors using the In-Reply-To
+and References headers, so they will show up as a sequence in
+threaded mail and news readers, and in mail archives.
+
+For each changeset, you will be prompted with a diffstat summary and
+the changeset summary, so you can be sure you are sending the right changes.
+
+To enable this extension:
+
+  [extensions]
+  hgext.patchbomb =
+
+To configure other defaults, add a section like this to your hgrc file:
 
-import os, errno, socket, tempfile
+  [email]
+  from = My Name <my@email>
+  to = recipient1, recipient2, ...
+  cc = cc1, cc2, ...
+  bcc = bcc1, bcc2, ...
+
+Then you can use the "hg email" command to mail a series of changesets
+as a patchbomb.
+
+To avoid sending patches prematurely, it is a good idea to first run
+the "email" command with the "-n" option (test only).  You will be
+prompted for an email recipient address, a subject and an introductory
+message describing the patches of your patchbomb.  Then when all is
+done, the patchbomb messages are displayed. If the PAGER environment
+variable is set, your pager will be fired up once for each patchbomb
+message, so you can verify everything is all right.
+
+The "-m" (mbox) option is also very useful.  Instead of previewing
+each patchbomb message in a pager or sending the messages directly,
+it will create a UNIX mailbox file with the patch emails.  This
+mailbox file can be previewed with any mail user agent which supports
+UNIX mbox files, e.g. with mutt:
+
+  % mutt -R -f mbox
+
+When you are previewing the patchbomb messages, you can use `formail'
+(a utility that is commonly installed as part of the procmail package)
+to send each message out:
+
+  % formail -s sendmail -bm -t < mbox
+
+That should be all. Now your patchbomb is on its way out.'''
+
+import os, errno, socket, tempfile, cStringIO
 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
-import email.Utils, email.Encoders
+import email.Utils, email.Encoders, email.Generator
 from mercurial import cmdutil, commands, hg, mail, patch, util
 from mercurial.i18n import _
 from mercurial.node import bin
@@ -401,14 +398,15 @@
         if bcc:
             m['Bcc'] = ', '.join(bcc)
         if opts.get('test'):
-            ui.status('Displaying ', m['Subject'], ' ...\n')
+            ui.status(_('Displaying '), m['Subject'], ' ...\n')
             ui.flush()
             if 'PAGER' in os.environ:
-                fp = os.popen(os.environ['PAGER'], 'w')
+                fp = util.popen(os.environ['PAGER'], 'w')
             else:
                 fp = ui
+            generator = email.Generator.Generator(fp, mangle_from_=False)
             try:
-                fp.write(m.as_string(0))
+                generator.flatten(m, 0)
                 fp.write('\n')
             except IOError, inst:
                 if inst.errno != errno.EPIPE:
@@ -416,20 +414,24 @@
             if fp is not ui:
                 fp.close()
         elif opts.get('mbox'):
-            ui.status('Writing ', m['Subject'], ' ...\n')
+            ui.status(_('Writing '), m['Subject'], ' ...\n')
             fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
+            generator = email.Generator.Generator(fp, mangle_from_=True)
             date = util.datestr(start_time, '%a %b %d %H:%M:%S %Y')
             fp.write('From %s %s\n' % (sender_addr, date))
-            fp.write(m.as_string(0))
+            generator.flatten(m, 0)
             fp.write('\n\n')
             fp.close()
         else:
             if not sendmail:
                 sendmail = mail.connect(ui)
-            ui.status('Sending ', m['Subject'], ' ...\n')
+            ui.status(_('Sending '), m['Subject'], ' ...\n')
             # Exim does not remove the Bcc field
             del m['Bcc']
-            sendmail(sender, to + bcc + cc, m.as_string(0))
+            fp = cStringIO.StringIO()
+            generator = email.Generator.Generator(fp, mangle_from_=False)
+            generator.flatten(m, 0)
+            sendmail(sender, to + bcc + cc, fp.getvalue())
 
 cmdtable = {
     "email":
--- a/hgext/purge.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/purge.py	Wed Sep 17 11:34:37 2008 +0200
@@ -27,79 +27,10 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 
-from mercurial import util, commands
+from mercurial import util, commands, cmdutil
 from mercurial.i18n import _
 import os
 
-def dopurge(ui, repo, dirs=None, act=True, ignored=False,
-            abort_on_err=False, eol='\n',
-            force=False, include=None, exclude=None):
-    def error(msg):
-        if abort_on_err:
-            raise util.Abort(msg)
-        else:
-            ui.warn(_('warning: %s\n') % msg)
-
-    def remove(remove_func, name):
-        if act:
-            try:
-                remove_func(os.path.join(repo.root, name))
-            except OSError, e:
-                error(_('%s cannot be removed') % name)
-        else:
-            ui.write('%s%s' % (name, eol))
-
-    if not force:
-        _check_fs(ui, repo)
-
-    directories = []
-    files = []
-    missing = []
-    roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs,
-                                            include, exclude)
-    for src, f, st in repo.dirstate.statwalk(files=roots, match=match,
-                                             ignored=ignored, directories=True):
-        if src == 'd':
-            directories.append(f)
-        elif src == 'm':
-            missing.append(f)
-        elif src == 'f' and f not in repo.dirstate:
-            files.append(f)
-
-    directories.sort()
-
-    for f in files:
-        if f not in repo.dirstate:
-            ui.note(_('Removing file %s\n') % f)
-            remove(os.remove, f)
-
-    for f in directories[::-1]:
-        if match(f) and not os.listdir(repo.wjoin(f)):
-            ui.note(_('Removing directory %s\n') % f)
-            remove(os.rmdir, f)
-
-def _check_fs(ui, repo):
-    """Abort if there is the chance of having problems with name-mangling fs
-
-    In a name mangling filesystem (e.g. a case insensitive one)
-    dirstate.walk() can yield filenames different from the ones
-    stored in the dirstate. This already confuses the status and
-    add commands, but with purge this may cause data loss.
-
-    To prevent this, this function will abort if there are uncommitted
-    changes.
-    """
-
-    # We can't use (files, match) to do a partial walk here - we wouldn't
-    # notice a modified README file if the user ran "hg purge readme"
-    modified, added, removed, deleted = repo.status()[:4]
-    if modified or added or removed or deleted:
-        if not util.checkfolding(repo.path) and not ui.quiet:
-            ui.warn(_("Purging on name mangling filesystems is not "
-                      "fully supported.\n"))
-        raise util.Abort(_("outstanding uncommitted changes"))
-
-
 def purge(ui, repo, *dirs, **opts):
     '''removes files not tracked by mercurial
 
@@ -125,25 +56,42 @@
     files that this program would delete use the --print option.
     '''
     act = not opts['print']
-    ignored = bool(opts['all'])
-    abort_on_err = bool(opts['abort_on_err'])
-    eol = opts['print0'] and '\0' or '\n'
-    if eol == '\0':
-        # --print0 implies --print
-        act = False
-    force = bool(opts['force'])
-    include = opts['include']
-    exclude = opts['exclude']
-    dopurge(ui, repo, dirs, act, ignored, abort_on_err,
-            eol, force, include, exclude)
+    eol = '\n'
+    if opts['print0']:
+        eol = '\0'
+        act = False # --print0 implies --print
 
+    def remove(remove_func, name):
+        if act:
+            try:
+                remove_func(os.path.join(repo.root, name))
+            except OSError, e:
+                m = _('%s cannot be removed') % name
+                if opts['abort_on_err']:
+                    raise util.Abort(m)
+                ui.warn(_('warning: %s\n') % m)
+        else:
+            ui.write('%s%s' % (name, eol))
+
+    directories = []
+    match = cmdutil.match(repo, dirs, opts)
+    match.dir = directories.append
+    status = repo.status(match=match, ignored=opts['all'], unknown=True)
+
+    for f in util.sort(status[4] + status[5]):
+        ui.note(_('Removing file %s\n') % f)
+        remove(os.remove, f)
+
+    for f in util.sort(directories)[::-1]:
+        if match(f) and not os.listdir(repo.wjoin(f)):
+            ui.note(_('Removing directory %s\n') % f)
+            remove(os.rmdir, f)
 
 cmdtable = {
     'purge|clean':
         (purge,
          [('a', 'abort-on-err', None, _('abort if an error occurs')),
           ('',  'all', None, _('purge ignored files too')),
-          ('f', 'force', None, _('purge even when there are uncommitted changes')),
           ('p', 'print', None, _('print the file names instead of deleting them')),
           ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
                                   ' (implies -p)')),
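
The new purge implementation deletes unknown (and optionally ignored) files first, then walks the collected directories deepest first so a parent is only removed once it is empty. A standard-library sketch of that second pass; root and directories are illustrative inputs:

    import os

    def removeemptydirs(root, directories):
        # reversed lexicographic order visits 'a/b' before 'a', so a
        # child directory is removed before its parent is examined
        for d in sorted(directories, reverse=True):
            path = os.path.join(root, d)
            if os.path.isdir(path) and not os.listdir(path):
                os.rmdir(path)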
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/rebase.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,401 @@
+# rebase.py - rebasing feature for mercurial
+#
+# Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+''' Rebasing feature
+
+This extension lets you rebase changesets in an existing Mercurial repository.
+
+For more information:
+http://www.selenic.com/mercurial/wiki/index.cgi/RebaseProject
+'''
+
+from mercurial import util, repair, merge, cmdutil, dispatch, commands
+from mercurial.commands import templateopts
+from mercurial.node import nullrev
+from mercurial.i18n import _
+import os, errno
+
+def rebase(ui, repo, **opts):
+    """move changeset (and descendants) to a different branch
+
+    Rebase uses repeated merging to graft changesets from one part of history
+    onto another. This can be useful for linearizing local changes relative to
+    a master development tree.
+
+    If a rebase is interrupted to manually resolve a merge, it can be continued
+    with --continue or aborted with --abort.
+    """
+    originalwd = target = source = None
+    external = nullrev
+    state = skipped = {}
+
+    lock = wlock = None
+    try:
+        lock = repo.lock()
+        wlock = repo.wlock()
+
+        # Validate input and define rebasing points
+        destf = opts.get('dest', None)
+        srcf = opts.get('source', None)
+        basef = opts.get('base', None)
+        contf = opts.get('continue')
+        abortf = opts.get('abort')
+        collapsef = opts.get('collapse', False)
+        if contf or abortf:
+            if contf and abortf:
+                raise dispatch.ParseError('rebase',
+                                    _('cannot use both abort and continue'))
+            if collapsef:
+                raise dispatch.ParseError('rebase',
+                        _('cannot use collapse with continue or abort'))
+
+            if (srcf or basef or destf):
+                raise dispatch.ParseError('rebase',
+                    _('abort and continue do not allow specifying revisions'))
+
+            originalwd, target, state, collapsef, external = restorestatus(repo)
+            if abortf:
+                abort(repo, originalwd, target, state)
+                return
+        else:
+            if srcf and basef:
+                raise dispatch.ParseError('rebase', _('cannot specify both a '
+                                                        'revision and a base'))
+            cmdutil.bail_if_changed(repo)
+            result = buildstate(repo, destf, srcf, basef, collapsef)
+            if result:
+                originalwd, target, state, external = result
+            else: # Empty state built, nothing to rebase
+                repo.ui.status(_('nothing to rebase\n'))
+                return
+
+        # Rebase
+        targetancestors = list(repo.changelog.ancestors(target))
+        targetancestors.append(target)
+
+        for rev in util.sort(state):
+            if state[rev] == -1:
+                storestatus(repo, originalwd, target, state, collapsef,
+                                                                external)
+                rebasenode(repo, rev, target, state, skipped, targetancestors,
+                                                                collapsef)
+        ui.note(_('rebase merging completed\n'))
+
+        if collapsef:
+            p1, p2 = defineparents(repo, min(state), target,
+                                                        state, targetancestors)
+            concludenode(repo, rev, p1, external, state, collapsef,
+                                                last=True, skipped=skipped)
+
+        if 'qtip' in repo.tags():
+            updatemq(repo, state, skipped, **opts)
+
+        if not opts.get('keep'):
+            # Remove revisions that are no longer useful
+            if (util.set(repo.changelog.descendants(min(state)))
+                                                    - util.set(state.keys())):
+                ui.warn(_("warning: new changesets detected on source branch, "
+                                                        "not stripping\n"))
+            else:
+                repair.strip(repo.ui, repo, repo[min(state)].node(), "strip")
+
+        clearstatus(repo)
+        ui.status(_("rebase completed\n"))
+        if skipped:
+            ui.note(_("%d revisions have been skipped\n") % len(skipped))
+    finally:
+        del lock, wlock
+
+def concludenode(repo, rev, p1, p2, state, collapse, last=False, skipped={}):
+    """Skip commit if collapsing has been required and rev is not the last
+    revision, commit otherwise
+    """
+    repo.dirstate.setparents(repo[p1].node(), repo[p2].node())
+
+    if collapse and not last:
+        return None
+
+    # Commit, record the old nodeid
+    m, a, r = repo.status()[:3]
+    newrev = nullrev
+    try:
+        if last:
+            commitmsg = 'Collapsed revision'
+            for rebased in state:
+                if rebased not in skipped:
+                    commitmsg += '\n* %s' % repo[rebased].description()
+            commitmsg = repo.ui.edit(commitmsg, repo.ui.username())
+        else:
+            commitmsg = repo[rev].description()
+        # Commit might fail if unresolved files exist
+        newrev = repo.commit(m+a+r,
+                            text=commitmsg,
+                            user=repo[rev].user(),
+                            date=repo[rev].date(),
+                            extra={'rebase_source': repo[rev].hex()})
+        return newrev
+    except util.Abort:
+        # Invalidate the previous setparents
+        repo.dirstate.invalidate()
+        raise
+
+def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse):
+    'Rebase a single revision'
+    repo.ui.debug(_("rebasing %d:%s\n") % (rev, repo[rev].node()))
+
+    p1, p2 = defineparents(repo, rev, target, state, targetancestors)
+
+    # Merge phase
+    if len(repo.parents()) != 2:
+        # Update to target and merge it with local
+        merge.update(repo, p1, False, True, False)
+        repo.dirstate.write()
+        stats = merge.update(repo, rev, True, False, False)
+
+        if stats[3] > 0:
+            raise util.Abort(_('fix unresolved conflicts with hg resolve then '
+                                                'run hg rebase --continue'))
+    else: # we have an interrupted rebase
+        repo.ui.debug(_('resuming interrupted rebase\n'))
+
+
+    newrev = concludenode(repo, rev, p1, p2, state, collapse)
+
+    # Update the state
+    if newrev is not None:
+        state[rev] = repo[newrev].rev()
+    else:
+        if not collapse:
+            repo.ui.note(_('no changes, revision %d skipped\n') % rev)
+            repo.ui.debug(_('next revision set to %s\n') % p1)
+            skipped[rev] = True
+        state[rev] = p1
+
+def defineparents(repo, rev, target, state, targetancestors):
+    'Return the new parent relationship of the revision that will be rebased'
+    parents = repo[rev].parents()
+    p1 = p2 = nullrev
+
+    P1n = parents[0].rev()
+    if P1n in targetancestors:
+        p1 = target
+    elif P1n in state:
+        p1 = state[P1n]
+    else: # P1n external
+        p1 = target
+        p2 = P1n
+
+    if len(parents) == 2 and parents[1].rev() not in targetancestors:
+        P2n = parents[1].rev()
+        # interesting second parent
+        if P2n in state:
+            if p1 == target: # P1n in targetancestors or external
+                p1 = state[P2n]
+            else:
+                p2 = state[P2n]
+        else: # P2n external
+            if p2 != nullrev: # P1n external too => rev is a merged revision
+                raise util.Abort(_('cannot use revision %d as base, result '
+                        'would have 3 parents') % rev)
+            p2 = P2n
+    return p1, p2
+
+def updatemq(repo, state, skipped, **opts):
+    'Update rebased mq patches - finalize and then import them'
+    mqrebase = {}
+    for p in repo.mq.applied:
+        if repo[p.rev].rev() in state:
+            repo.ui.debug(_('revision %d is an mq patch (%s), finalize it.\n') %
+                                        (repo[p.rev].rev(), p.name))
+            mqrebase[repo[p.rev].rev()] = p.name
+
+    if mqrebase:
+        repo.mq.finish(repo, mqrebase.keys())
+
+        # We must start import from the newest revision
+        mq = mqrebase.keys()
+        mq.sort()
+        mq.reverse()
+        for rev in mq:
+            if rev not in skipped:
+                repo.ui.debug(_('import mq patch %d (%s)\n')
+                              % (state[rev], mqrebase[rev]))
+                repo.mq.qimport(repo, (), patchname=mqrebase[rev],
+                            git=opts.get('git', False),rev=[str(state[rev])])
+        repo.mq.save_dirty()
+
+def storestatus(repo, originalwd, target, state, collapse, external):
+    'Store the current status to allow recovery'
+    f = repo.opener("rebasestate", "w")
+    f.write(repo[originalwd].hex() + '\n')
+    f.write(repo[target].hex() + '\n')
+    f.write(repo[external].hex() + '\n')
+    f.write('%d\n' % int(collapse))
+    for d, v in state.items():
+        oldrev = repo[d].hex()
+        newrev = repo[v].hex()
+        f.write("%s:%s\n" % (oldrev, newrev))
+    f.close()
+    repo.ui.debug(_('rebase status stored\n'))
+
+def clearstatus(repo):
+    'Remove the status files'
+    if os.path.exists(repo.join("rebasestate")):
+        util.unlink(repo.join("rebasestate"))
+
+def restorestatus(repo):
+    'Restore a previously stored status'
+    try:
+        target = None
+        collapse = False
+        external = nullrev
+        state = {}
+        f = repo.opener("rebasestate")
+        for i, l in enumerate(f.read().splitlines()):
+            if i == 0:
+                originalwd = repo[l].rev()
+            elif i == 1:
+                target = repo[l].rev()
+            elif i == 2:
+                external = repo[l].rev()
+            elif i == 3:
+                collapse = bool(int(l))
+            else:
+                oldrev, newrev = l.split(':')
+                state[repo[oldrev].rev()] = repo[newrev].rev()
+        repo.ui.debug(_('rebase status resumed\n'))
+        return originalwd, target, state, collapse, external
+    except IOError, err:
+        if err.errno != errno.ENOENT:
+            raise
+        raise util.Abort(_('no rebase in progress'))
+
+def abort(repo, originalwd, target, state):
+    'Restore the repository to its original state'
+    if util.set(repo.changelog.descendants(target)) - util.set(state.values()):
+        repo.ui.warn(_("warning: new changesets detected on target branch, "
+                                                    "not stripping\n"))
+    else:
+        # Strip from the first rebased revision
+        merge.update(repo, repo[originalwd].rev(), False, True, False)
+        rebased = filter(lambda x: x > -1, state.values())
+        if rebased:
+            strippoint = min(rebased)
+            repair.strip(repo.ui, repo, repo[strippoint].node(), "strip")
+        clearstatus(repo)
+        repo.ui.status(_('rebase aborted\n'))
+
+def buildstate(repo, dest, src, base, collapse):
+    'Define which revisions are going to be rebased and where'
+    state = {}
+    targetancestors = util.set()
+
+    if not dest:
+        # Destination defaults to the latest revision in the current branch
+        branch = repo[None].branch()
+        dest = repo[branch].rev()
+    else:
+        if 'qtip' in repo.tags() and (repo[dest].hex() in
+                                [s.rev for s in repo.mq.applied]):
+            raise util.Abort(_('cannot rebase onto an applied mq patch'))
+        dest = repo[dest].rev()
+
+    if src:
+        commonbase = repo[src].ancestor(repo[dest])
+        if commonbase == repo[src]:
+            raise util.Abort(_('cannot rebase an ancestor'))
+        if commonbase == repo[dest]:
+            raise util.Abort(_('cannot rebase a descendant'))
+        source = repo[src].rev()
+    else:
+        if base:
+            cwd = repo[base].rev()
+        else:
+            cwd = repo['.'].rev()
+
+        if cwd == dest:
+            repo.ui.debug(_('already working on current\n'))
+            return None
+
+        targetancestors = util.set(repo.changelog.ancestors(dest))
+        if cwd in targetancestors:
+            repo.ui.debug(_('already working on the current branch\n'))
+            return None
+
+        cwdancestors = util.set(repo.changelog.ancestors(cwd))
+        cwdancestors.add(cwd)
+        rebasingbranch = cwdancestors - targetancestors
+        source = min(rebasingbranch)
+
+    repo.ui.debug(_('rebase onto %d starting from %d\n') % (dest, source))
+    state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
+    external = nullrev
+    if collapse:
+        if not targetancestors:
+            targetancestors = util.set(repo.changelog.ancestors(dest))
+        for rev in state:
+            # Check external parents and fail if there is more than one
+            for p in repo[rev].parents():
+                if (p.rev() not in state and p.rev() != source
+                            and p.rev() not in targetancestors):
+                    if external != nullrev:
+                        raise util.Abort(_('unable to collapse, there is more '
+                                'than one external parent'))
+                    external = p.rev()
+
+    state[source] = nullrev
+    return repo['.'].rev(), repo[dest].rev(), state, external
+
+def pulldelegate(pullfunction, repo, *args, **opts):
+    'Call rebase after pull if the latter has been invoked with --rebase'
+    if opts.get('rebase'):
+        if opts.get('update'):
+            raise util.Abort(_('--update and --rebase are not compatible'))
+
+        cmdutil.bail_if_changed(repo)
+        revsprepull = len(repo)
+        pullfunction(repo.ui, repo, *args, **opts)
+        revspostpull = len(repo)
+        if revspostpull > revsprepull:
+            rebase(repo.ui, repo, **opts)
+    else:
+        pullfunction(repo.ui, repo, *args, **opts)
+
+def uisetup(ui):
+    'Replace pull with a decorator to provide --rebase option'
+    # cribbed from color.py
+    aliases, entry = cmdutil.findcmd(ui, 'pull', commands.table)
+    for candidatekey, candidateentry in commands.table.iteritems():
+        if candidateentry is entry:
+            cmdkey, cmdentry = candidatekey, entry
+            break
+
+    decorator = lambda ui, repo, *args, **opts: \
+                    pulldelegate(cmdentry[0], repo, *args, **opts)
+    # make sure 'hg help cmd' still works
+    decorator.__doc__ = cmdentry[0].__doc__
+    decoratorentry = (decorator,) + cmdentry[1:]
+    rebaseopt = ('', 'rebase', None,
+                            _("rebase working directory to branch head"))
+    decoratorentry[1].append(rebaseopt)
+    commands.table[cmdkey] = decoratorentry
+
+cmdtable = {
+"rebase":
+        (rebase,
+        [
+        ('', 'keep', False, _('keep original revisions')),
+        ('s', 'source', '', _('rebase from a given revision')),
+        ('b', 'base', '', _('rebase from the base of a given revision')),
+        ('d', 'dest', '', _('rebase onto a given revision')),
+        ('', 'collapse', False, _('collapse the rebased revisions')),
+        ('c', 'continue', False, _('continue an interrupted rebase')),
+        ('a', 'abort', False, _('abort an interrupted rebase')),] +
+         templateopts,
+        _('hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | '
+                                                                '[--keep]')),
+}
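
storestatus() and restorestatus() above define .hg/rebasestate as three node hashes, a collapse flag and then oldnode:newnode pairs, one per line. A plain-Python sketch of parsing that layout from a string (the hash-to-local-revision lookup done via repo[...] is omitted):

    def parserebasestate(data):
        # line 0: original working dir, 1: target, 2: external parent,
        # 3: collapse flag, remainder: "oldnode:newnode" pairs
        lines = data.splitlines()
        originalwd, target, external = lines[0], lines[1], lines[2]
        collapse = bool(int(lines[3]))
        state = {}
        for l in lines[4:]:
            oldnode, newnode = l.split(':')
            state[oldnode] = newnode
        return originalwd, target, external, collapse, state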
--- a/hgext/record.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/record.py	Wed Sep 17 11:34:37 2008 +0200
@@ -7,7 +7,7 @@
 
 '''interactive change selection during commit or qrefresh'''
 
-from mercurial.i18n import _
+from mercurial.i18n import gettext, _
 from mercurial import cmdutil, commands, extensions, hg, mdiff, patch
 from mercurial import util
 import copy, cStringIO, errno, operator, os, re, tempfile
@@ -282,22 +282,24 @@
         if resp_file[0] is not None:
             return resp_file[0]
         while True:
-            r = (ui.prompt(query + _(' [Ynsfdaq?] '), '(?i)[Ynsfdaq?]?$')
-                 or 'y').lower()
-            if r == '?':
-                c = record.__doc__.find('y - record this change')
-                for l in record.__doc__[c:].splitlines():
-                    if l: ui.write(_(l.strip()), '\n')
+            choices = _('[Ynsfdaq?]')
+            r = (ui.prompt("%s %s " % (query, choices), '(?i)%s?$' % choices)
+                 or _('y')).lower()
+            if r == _('?'):
+                doc = gettext(record.__doc__)
+                c = doc.find(_('y - record this change'))
+                for l in doc[c:].splitlines():
+                    if l: ui.write(l.strip(), '\n')
                 continue
-            elif r == 's':
+            elif r == _('s'):
                 r = resp_file[0] = 'n'
-            elif r == 'f':
+            elif r == _('f'):
                 r = resp_file[0] = 'y'
-            elif r == 'd':
+            elif r == _('d'):
                 r = resp_all[0] = 'n'
-            elif r == 'a':
+            elif r == _('a'):
                 r = resp_all[0] = 'y'
-            elif r == 'q':
+            elif r == _('q'):
                 raise util.Abort(_('user quit'))
             return r
     while chunks:
@@ -315,7 +317,7 @@
                 chunk.pretty(ui)
             r = prompt(_('examine changes to %s?') %
                        _(' and ').join(map(repr, chunk.files())))
-            if r == 'y':
+            if r == _('y'):
                 applied[chunk.filename()] = [chunk]
                 if chunk.allhunks():
                     applied[chunk.filename()] += consumefile()
@@ -327,7 +329,7 @@
                 chunk.pretty(ui)
             r = prompt(_('record this change to %r?') %
                        chunk.filename())
-            if r == 'y':
+            if r == _('y'):
                 if fixoffset:
                     chunk = copy.copy(chunk)
                     chunk.toline += fixoffset
@@ -391,7 +393,7 @@
     if not ui.interactive:
         raise util.Abort(_('running non-interactively, use commit instead'))
 
-    def recordfunc(ui, repo, files, message, match, opts):
+    def recordfunc(ui, repo, message, match, opts):
         """This is generic record driver.
 
         It's job is to interactively filter local changes, and accordingly
@@ -404,16 +406,16 @@
         In the end we'll record intresting changes, and everything else will be
         left in place, so the user can continue his work.
         """
-        if files:
+        if match.files():
             changes = None
         else:
-            changes = repo.status(files=files, match=match)[:5]
-            modified, added, removed = changes[:3]
-            files = modified + added + removed
+            changes = repo.status(match=match)[:3]
+            modified, added, removed = changes
+            match = cmdutil.matchfiles(repo, modified + added + removed)
         diffopts = mdiff.diffopts(git=True, nodates=True)
         fp = cStringIO.StringIO()
-        patch.diff(repo, repo.dirstate.parents()[0], files=files,
-                   match=match, changes=changes, opts=diffopts, fp=fp)
+        patch.diff(repo, repo.dirstate.parents()[0], match=match,
+                   changes=changes, opts=diffopts, fp=fp)
         fp.seek(0)
 
         # 1. filter patch, so we have intending-to apply subset of it
@@ -425,14 +427,15 @@
             try: contenders.update(dict.fromkeys(h.files()))
             except AttributeError: pass
 
-        newfiles = [f for f in files if f in contenders]
+        newfiles = [f for f in match.files() if f in contenders]
 
         if not newfiles:
             ui.status(_('no changes to record\n'))
             return 0
 
         if changes is None:
-            changes = repo.status(files=newfiles, match=match)[:5]
+            match = cmdutil.matchfiles(repo, newfiles)
+            changes = repo.status(match=match)
         modified = dict.fromkeys(changes[0])
 
         # 2. backup changed files, so we can restore them in the end
@@ -451,7 +454,7 @@
                 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                                dir=backupdir)
                 os.close(fd)
-                ui.debug('backup %r as %r\n' % (f, tmpname))
+                ui.debug(_('backup %r as %r\n') % (f, tmpname))
                 util.copyfile(repo.wjoin(f), tmpname)
                 backups[f] = tmpname
 
@@ -469,7 +472,7 @@
             # 3b. (apply)
             if dopatch:
                 try:
-                    ui.debug('applying patch\n')
+                    ui.debug(_('applying patch\n'))
                     ui.debug(fp.getvalue())
                     patch.internalpatch(fp, ui, 1, repo.root)
                 except patch.PatchError, err:
@@ -497,7 +500,7 @@
             # 5. finally restore backed-up files
             try:
                 for realname, tmpname in backups.iteritems():
-                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
+                    ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
                     util.copyfile(tmpname, repo.wjoin(realname))
                     os.unlink(tmpname)
                 os.rmdir(backupdir)
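
Before reverting and selectively re-applying hunks, record copies every modified file to a temporary backup and restores from it in step 5. A standard-library sketch of the backup half; shutil.copyfile stands in for util.copyfile, and paths/backupdir are illustrative:

    import os, shutil, tempfile

    def backupfiles(paths, backupdir):
        # copy each file to a unique temp name inside backupdir,
        # returning {original: backup} for a later restore pass
        backups = {}
        for p in paths:
            fd, tmpname = tempfile.mkstemp(
                prefix=os.path.basename(p) + '.', dir=backupdir)
            os.close(fd)
            shutil.copyfile(p, tmpname)
            backups[p] = tmpname
        return backups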
--- a/hgext/transplant.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/transplant.py	Wed Sep 17 11:34:37 2008 +0200
@@ -88,9 +88,7 @@
 
     def apply(self, repo, source, revmap, merges, opts={}):
         '''apply the revisions in revmap one by one in revision order'''
-        revs = revmap.keys()
-        revs.sort()
-
+        revs = util.sort(revmap)
         p1, p2 = repo.dirstate.parents()
         pulls = []
         diffopts = patch.diffopts(self.ui, opts)
@@ -171,7 +169,7 @@
     def filter(self, filter, changelog, patchfile):
         '''arbitrarily rewrite changeset before applying it'''
 
-        self.ui.status('filtering %s\n' % patchfile)
+        self.ui.status(_('filtering %s\n') % patchfile)
         user, date, msg = (changelog[1], changelog[2], changelog[4])
 
         fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
@@ -310,9 +308,7 @@
         if not os.path.isdir(self.path):
             os.mkdir(self.path)
         series = self.opener('series', 'w')
-        revs = revmap.keys()
-        revs.sort()
-        for rev in revs:
+        for rev in util.sort(revmap):
             series.write(revlog.hex(revmap[rev]) + '\n')
         if merges:
             series.write('# Merges\n')
@@ -572,10 +568,6 @@
         for r in merges:
             revmap[source.changelog.rev(r)] = r
 
-        revs = revmap.keys()
-        revs.sort()
-        pulls = []
-
         tp.apply(repo, source, revmap, merges, opts)
     finally:
         if bundle:
--- a/hgext/win32mbcs.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/win32mbcs.py	Wed Sep 17 11:34:37 2008 +0200
@@ -1,35 +1,43 @@
-# win32mbcs.py -- MBCS filename support for Mercurial on Windows
+# win32mbcs.py -- MBCS filename support for Mercurial
 #
 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
 #
-# Version: 0.1
+# Version: 0.2
 # Author:  Shun-ichi Goto <shunichi.goto@gmail.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 #
-"""Allow to use shift_jis/big5 filenames on Windows.
-
-There is a well known issue "0x5c problem" on Windows.  It is a
-trouble on handling path name as raw encoded byte sequence of
-problematic encodings like shift_jis or big5.  The primary intent
-of this extension is to allow using such a encoding on Mercurial
-without strange file operation error.
+"""Allow to use MBCS path with problematic encoding.
 
-By enabling this extension, hook mechanism is activated and some
-functions are altered.  Usually, this encoding is your local encoding
-on your system by default. So you can get benefit simply by enabling
-this extension.
-
-The encoding for filename is same one for terminal by default.  You
-can change the encoding by setting HGENCODING environment variable.
+Some MBCS encodings are not good for some path operations
+(i.e. splitting path, case conversion, etc.) with their encoded bytes.
+We call such an encoding (e.g. shift_jis and big5) a "problematic
+encoding".  This extension can be used to fix the issue with those
+encodings by wrapping some functions to convert to unicode strings
+before path operations.
 
 This extension is usefull for:
- * Japanese Windows user using shift_jis encoding.
- * Chinese Windows user using big5 encoding.
- * Users who want to use a repository created with such a encoding.
+ * Japanese Windows users using shift_jis encoding.
+ * Chinese Windows users using big5 encoding.
+ * All users who use a repository with one of problematic encodings
+   on case-insensitive file system.
+
+This extension is not needed for:
+ * Any user who uses only ASCII chars in paths.
+ * Any user who does not use any of the problematic encodings.
 
-Note: Unix people does not need to use this extension.
+Note that there are some limitations on using this extension:
+ * You should use a single encoding in one repository.
+ * You should set the same encoding for the repository via locale or HGENCODING.
+
+To use this extension, enable the extension in .hg/hgrc or ~/.hgrc:
+
+  [extensions]
+  hgext.win32mbcs =
+
+Path encoding conversions are done between unicode and util._encoding,
+which is decided by Mercurial from the current locale setting or HGENCODING.
 
 """
 
@@ -37,122 +45,78 @@
 from mercurial.i18n import _
 from mercurial import util
 
-__all__ = ['install', 'uninstall', 'reposetup']
+def decode(arg):
+   if isinstance(arg, str):
+       uarg = arg.decode(util._encoding)
+       if arg == uarg.encode(util._encoding):
+           return uarg
+       raise UnicodeError("Not local encoding")
+   elif isinstance(arg, tuple):
+       return tuple(map(decode, arg))
+   elif isinstance(arg, list):
+       return map(decode, arg)
+   return arg
+
+def encode(arg):
+   if isinstance(arg, unicode):
+       return arg.encode(util._encoding)
+   elif isinstance(arg, tuple):
+       return tuple(map(encode, arg))
+   elif isinstance(arg, list):
+       return map(encode, arg)
+   return arg
+
+def wrapper(func, args):
+   # check argument is unicode, then call original
+   for arg in args:
+       if isinstance(arg, unicode):
+           return func(*args)
 
+   try:
+       # convert arguments to unicode, call func, then convert back
+       return encode(func(*decode(args)))
+   except UnicodeError:
+       # abort: the argument bytes are not in util._encoding, so a
+       # unicode round trip would corrupt the path
+       raise util.Abort(_("[win32mbcs] filename conversion failed with"
+                          " %s encoding\n") % (util._encoding))
+
+def wrapname(name):
+   idx = name.rfind('.')
+   module = name[:idx]
+   name = name[idx+1:]
+   module = eval(module)
+   func = getattr(module, name)
+   def f(*args):
+       return wrapper(func, args)
+   try:
+      f.__name__ = func.__name__               # setting __name__ fails on Python 2.3
+   except Exception:
+      pass
+   setattr(module, name, f)
+
+# List of functions to be wrapped.
+# NOTE: os.path.dirname() and os.path.basename() are safe because
+#       they use result of os.path.split()
+funcs = '''os.path.join os.path.split os.path.splitext
+ os.path.splitunc os.path.normpath os.path.normcase os.makedirs
+ util.endswithsep util.splitpath util.checkcase util.fspath'''
 
 # codec and alias names of sjis and big5 to be faked.
-_problematic_encodings = util.frozenset([
-        'big5', 'big5-tw', 'csbig5',
-        'big5hkscs', 'big5-hkscs', 'hkscs',
-        'cp932', '932', 'ms932', 'mskanji', 'ms-kanji',
-        'shift_jis', 'csshiftjis', 'shiftjis', 'sjis', 's_jis',
-        'shift_jis_2004', 'shiftjis2004', 'sjis_2004', 'sjis2004',
-        'shift_jisx0213', 'shiftjisx0213', 'sjisx0213', 's_jisx0213',
-        ])
-
-# attribute name to store original function
-_ORIGINAL = '_original'
-
-_ui = None
-
-def decode_with_check(arg):
-    if isinstance(arg, tuple):
-        return tuple(map(decode_with_check, arg))
-    elif isinstance(arg, list):
-        return map(decode_with_check, arg)
-    elif isinstance(arg, str):
-        uarg = arg.decode(util._encoding)
-        if arg == uarg.encode(util._encoding):
-            return uarg
-        else:
-            raise UnicodeError("Not local encoding")
-    else:
-        return arg
-
-def encode_with_check(arg):
-    if isinstance(arg, tuple):
-        return tuple(map(encode_with_check, arg))
-    elif isinstance(arg, list):
-        return map(encode_with_check, arg)
-    elif isinstance(arg, unicode):
-        ret = arg.encode(util._encoding)
-        return ret
-    else:
-        return arg
-
-def wrap(func):
-
-    def wrapped(*args):
-        # check argument is unicode, then call original
-        for arg in args:
-            if isinstance(arg, unicode):
-                return func(*args)
-        # make decoded argument list into uargs
-        try:
-            args = decode_with_check(args)
-        except UnicodeError, exc:
-            # If not encoded with _local_fs_encoding, report it then
-            # continue with calling original function.
-            _ui.warn(_("WARNING: [win32mbcs] filename conversion fail for" +
-                     " %s: '%s'\n") % (util._encoding, args))
-            return func(*args)
-        # call as unicode operation, then return with encoding
-        return encode_with_check(func(*args))
-
-    # fake is only for relevant environment.
-    if hasattr(func, _ORIGINAL) or \
-            util._encoding.lower() not in _problematic_encodings:
-        return func
-    else:
-        f = wrapped
-        f.__name__ = func.__name__
-        setattr(f, _ORIGINAL, func)   # hold original to restore
-        return f
-
-def unwrap(func):
-    return getattr(func, _ORIGINAL, func)
-
-def install():
-    # wrap some python functions and mercurial functions
-    # to handle raw bytes on Windows.
-    # NOTE: dirname and basename is safe because they use result
-    # of os.path.split()
-    global _ui
-    if not _ui:
-        from mercurial import ui
-        _ui = ui.ui()
-    os.path.join = wrap(os.path.join)
-    os.path.split = wrap(os.path.split)
-    os.path.splitext = wrap(os.path.splitext)
-    os.path.splitunc = wrap(os.path.splitunc)
-    os.path.normpath = wrap(os.path.normpath)
-    os.path.normcase = wrap(os.path.normcase)
-    os.makedirs = wrap(os.makedirs)
-    util.endswithsep = wrap(util.endswithsep)
-    util.splitpath = wrap(util.splitpath)
-
-def uninstall():
-    # restore original functions.
-    os.path.join = unwrap(os.path.join)
-    os.path.split = unwrap(os.path.split)
-    os.path.splitext = unwrap(os.path.splitext)
-    os.path.splitunc = unwrap(os.path.splitunc)
-    os.path.normpath = unwrap(os.path.normpath)
-    os.path.normcase = unwrap(os.path.normcase)
-    os.makedirs = unwrap(os.makedirs)
-    util.endswithsep = unwrap(util.endswithsep)
-    util.splitpath = unwrap(util.splitpath)
-
+problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
+ hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
+ sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
+ shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213'''
 
 def reposetup(ui, repo):
-    # TODO: decide use of config section for this extension
-    global _ui
-    _ui = ui
-    if not os.path.supports_unicode_filenames:
-        ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
-        return
-    # install features of this extension
-    install()
-    ui.debug(_("[win32mbcs] activeted with encoding: %s\n") % util._encoding)
+   # TODO: decide use of config section for this extension
+   if not os.path.supports_unicode_filenames:
+       ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
+       return
 
-# win32mbcs.py ends here
+   # fake is only for relevant environment.
+   if util._encoding.lower() in problematic_encodings.split():
+       for f in funcs.split():
+           wrapname(f)
+       ui.debug(_("[win32mbcs] activated with encoding: %s\n") % util._encoding)
+
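
decode() above only accepts byte strings that survive a decode/encode round trip in util._encoding; anything else raises UnicodeError so wrapper() can abort. A Python 2 sketch of just that check, with the encoding passed in explicitly:

    def roundtrips(s, encoding):
        # True only if decoding and re-encoding reproduces the bytes,
        # i.e. the string really is in the given encoding
        try:
            return s == s.decode(encoding).encode(encoding)
        except UnicodeError:
            return False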
--- a/hgext/win32text.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/hgext/win32text.py	Wed Sep 17 11:34:37 2008 +0200
@@ -1,4 +1,4 @@
-# win32text.py - LF <-> CRLF translation utilities for Windows users
+# win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
@@ -9,95 +9,133 @@
 # hgext.win32text =
 # [encode]
 # ** = cleverencode:
+# # or ** = macencode:
 # [decode]
 # ** = cleverdecode:
+# # or ** = macdecode:
 #
-# If not doing conversion, to make sure you do not commit CRLF by accident:
+# If not doing conversion, to make sure you do not commit CRLF/CR by accident:
 #
 # [hooks]
 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
+# # or pretxncommit.cr = python:hgext.win32text.forbidcr
 #
-# To do the same check on a server to prevent CRLF from being pushed or pulled:
+# To do the same check on a server to prevent CRLF/CR from being pushed or
+# pulled:
 #
 # [hooks]
 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
+# # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
 
 from mercurial.i18n import gettext as _
 from mercurial.node import bin, short
+from mercurial import util
 import re
 
 # regexp for single LF without CR preceding.
 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
 
-def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
-    # warn if already has CRLF in repository.
+newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
+filterstr = {'\r\n': 'clever', '\r': 'mac'}
+
+def checknewline(s, newline, ui=None, repo=None, filename=None):
+    # warn if already has 'newline' in repository.
     # it might cause unexpected eol conversion.
     # see issue 302:
     #   http://www.selenic.com/mercurial/bts/issue302
-    if '\r\n' in s and ui and filename and repo:
-        ui.warn(_('WARNING: %s already has CRLF line endings\n'
+    if newline in s and ui and filename and repo:
+        ui.warn(_('WARNING: %s already has %s line endings\n'
                   'and does not need EOL conversion by the win32text plugin.\n'
                   'Before your next commit, please reconsider your '
                   'encode/decode settings in \nMercurial.ini or %s.\n') %
-                (filename, repo.join('hgrc')))
+                (filename, newlinestr[newline], repo.join('hgrc')))
+
+def dumbdecode(s, cmd, **kwargs):
+    checknewline(s, '\r\n', **kwargs)
     # replace single LF to CRLF
     return re_single_lf.sub('\\1\r\n', s)
 
 def dumbencode(s, cmd):
     return s.replace('\r\n', '\n')
 
-def clevertest(s, cmd):
-    if '\0' in s: return False
-    return True
+def macdumbdecode(s, cmd, **kwargs):
+    checknewline(s, '\r', **kwargs)
+    return s.replace('\n', '\r')
+
+def macdumbencode(s, cmd):
+    return s.replace('\r', '\n')
 
 def cleverdecode(s, cmd, **kwargs):
-    if clevertest(s, cmd):
+    if not util.binary(s):
         return dumbdecode(s, cmd, **kwargs)
     return s
 
 def cleverencode(s, cmd):
-    if clevertest(s, cmd):
+    if not util.binary(s):
         return dumbencode(s, cmd)
     return s
 
+def macdecode(s, cmd, **kwargs):
+    if not util.binary(s):
+        return macdumbdecode(s, cmd, **kwargs)
+    return s
+
+def macencode(s, cmd):
+    if not util.binary(s):
+        return macdumbencode(s, cmd)
+    return s
+
 _filters = {
     'dumbdecode:': dumbdecode,
     'dumbencode:': dumbencode,
     'cleverdecode:': cleverdecode,
     'cleverencode:': cleverencode,
+    'macdumbdecode:': macdumbdecode,
+    'macdumbencode:': macdumbencode,
+    'macdecode:': macdecode,
+    'macencode:': macencode,
     }
 
-def forbidcrlf(ui, repo, hooktype, node, **kwargs):
+def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
     halt = False
-    for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
-        c = repo.changectx(rev)
+    for rev in xrange(repo[node].rev(), len(repo)):
+        c = repo[rev]
         for f in c.files():
             if f not in c:
                 continue
             data = c[f].data()
-            if '\0' not in data and '\r\n' in data:
+            if not util.binary(data) and newline in data:
                 if not halt:
                     ui.warn(_('Attempt to commit or push text file(s) '
-                              'using CRLF line endings\n'))
+                              'using %s line endings\n') %
+                              newlinestr[newline])
                 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
                 halt = True
     if halt and hooktype == 'pretxnchangegroup':
+        crlf = newlinestr[newline].lower()
+        filter = filterstr[newline]
         ui.warn(_('\nTo prevent this mistake in your local repository,\n'
                   'add to Mercurial.ini or .hg/hgrc:\n'
                   '\n'
                   '[hooks]\n'
-                  'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
+                  'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
                   '\n'
                   'and also consider adding:\n'
                   '\n'
                   '[extensions]\n'
                   'hgext.win32text =\n'
                   '[encode]\n'
-                  '** = cleverencode:\n'
+                  '** = %sencode:\n'
                   '[decode]\n'
-                  '** = cleverdecode:\n'))
+                  '** = %sdecode:\n') % (crlf, crlf, filter, filter))
     return halt
 
+def forbidcrlf(ui, repo, hooktype, node, **kwargs):
+    return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
+
+def forbidcr(ui, repo, hooktype, node, **kwargs):
+    return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
+
 def reposetup(ui, repo):
     if not repo.local():
         return
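
The win32text filters hinge on re_single_lf, which matches an LF that is not preceded by a CR so existing CRLF pairs are not doubled. A standalone sketch of the two conversion directions used by dumbdecode and dumbencode:

    import re

    # same pattern as win32text above: LF without a preceding CR
    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

    def tocrlf(s):
        # working-copy direction (dumbdecode): bare LF -> CRLF
        return re_single_lf.sub('\\1\r\n', s)

    def tolf(s):
        # repository direction (dumbencode): CRLF -> LF
        return s.replace('\r\n', '\n')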
--- a/mercurial/archival.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/archival.py	Wed Sep 17 11:34:37 2008 +0200
@@ -52,7 +52,8 @@
         def _write_gzip_header(self):
             self.fileobj.write('\037\213')             # magic header
             self.fileobj.write('\010')                 # compression method
-            fname = self.filename[:-3]
+            # Python 2.6 deprecates self.filename
+            fname = getattr(self, 'name', None) or self.filename
             flags = 0
             if fname:
                 flags = gzip.FNAME
@@ -207,18 +208,17 @@
             data = repo.wwritedata(name, data)
         archiver.addfile(name, mode, islink, data)
 
-    ctx = repo.changectx(node)
     if kind not in archivers:
-        raise util.Abort(_("unknown archive type '%s'" % kind))
+        raise util.Abort(_("unknown archive type '%s'") % kind)
+
+    ctx = repo[node]
     archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
-    m = ctx.manifest()
-    items = m.items()
-    items.sort()
+
     if repo.ui.configbool("ui", "archivemeta", True):
         write('.hg_archival.txt', 0644, False,
               lambda: 'repo: %s\nnode: %s\n' % (
                   hex(repo.changelog.node(0)), hex(node)))
-    for filename, filenode in items:
-        write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
-              lambda: repo.file(filename).read(filenode))
+    for f in ctx:
+        ff = ctx.flags(f)
+        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
     archiver.done()
--- a/mercurial/bundlerepo.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/bundlerepo.py	Wed Sep 17 11:34:37 2008 +0200
@@ -12,7 +12,7 @@
 
 from node import hex, nullid, short
 from i18n import _
-import changegroup, util, os, struct, bz2, tempfile, shutil, mdiff
+import changegroup, util, os, struct, bz2, zlib, tempfile, shutil, mdiff
 import repo, localrepo, changelog, manifest, filelog, revlog
 
 class bundlerevlog(revlog.revlog):
@@ -34,12 +34,12 @@
             for chunk in changegroup.chunkiter(bundlefile):
                 pos = bundlefile.tell()
                 yield chunk, pos - len(chunk)
-        n = self.count()
+        n = len(self)
         prev = None
         for chunk, start in chunkpositer():
             size = len(chunk)
             if size < 80:
-                raise util.Abort("invalid changegroup")
+                raise util.Abort(_("invalid changegroup"))
             start += 80
             size -= 80
             node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
@@ -127,7 +127,7 @@
 
     def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
         raise NotImplementedError
-    def addgroup(self, revs, linkmapper, transaction, unique=0):
+    def addgroup(self, revs, linkmapper, transaction):
         raise NotImplementedError
     def strip(self, rev, minlink):
         raise NotImplementedError
@@ -173,14 +173,17 @@
             raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
         elif not header.startswith("HG10"):
             raise util.Abort(_("%s: unknown bundle version") % bundlename)
-        elif header == "HG10BZ":
+        elif (header == "HG10BZ") or (header == "HG10GZ"):
             fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                             suffix=".hg10un", dir=self.path)
             self.tempfile = temp
             fptemp = os.fdopen(fdtemp, 'wb')
             def generator(f):
-                zd = bz2.BZ2Decompressor()
-                zd.decompress("BZ")
+                if header == "HG10BZ":
+                    zd = bz2.BZ2Decompressor()
+                    zd.decompress("BZ")
+                elif header == "HG10GZ":
+                    zd = zlib.decompressobj()
                 for chunk in f:
                     yield zd.decompress(chunk)
             gen = generator(util.filechunkiter(self.bundlefile, 4096))
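
The HG10GZ branch decompresses the bundle incrementally with zlib.decompressobj, mirroring the existing bz2 path. A standard-library sketch of that streaming pattern; the final flush() is an extra safety step not present in the generator above:

    import zlib

    def decompresschunks(chunks):
        # feed compressed chunks through one decompressobj so data can
        # span chunk boundaries; flush() returns any buffered tail
        zd = zlib.decompressobj()
        for chunk in chunks:
            yield zd.decompress(chunk)
        tail = zd.flush()
        if tail:
            yield tail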
--- a/mercurial/byterange.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/byterange.py	Wed Sep 17 11:34:37 2008 +0200
@@ -110,7 +110,7 @@
         in self.fo.  This includes methods."""
         if hasattr(self.fo, name):
             return getattr(self.fo, name)
-        raise AttributeError, name
+        raise AttributeError(name)
 
     def tell(self):
         """Return the position within the range.
@@ -257,7 +257,7 @@
     def ftp_open(self, req):
         host = req.get_host()
         if not host:
-            raise IOError, ('ftp error', 'no host given')
+            raise IOError('ftp error', 'no host given')
         host, port = splitport(host)
         if port is None:
             port = ftplib.FTP_PORT
@@ -329,7 +329,7 @@
             headers = mimetools.Message(sf)
             return addinfourl(fp, headers, req.get_full_url())
         except ftplib.all_errors, msg:
-            raise IOError, ('ftp error', msg), sys.exc_info()[2]
+            raise IOError('ftp error', msg), None, sys.exc_info()[2]
 
     def connect_ftp(self, user, passwd, host, port, dirs):
         fw = ftpwrapper(user, passwd, host, port, dirs)
@@ -359,7 +359,7 @@
             try:
                 self.ftp.nlst(file)
             except ftplib.error_perm, reason:
-                raise IOError, ('ftp error', reason), sys.exc_info()[2]
+                raise IOError('ftp error', reason), None, sys.exc_info()[2]
             # Restore the transfer mode!
             self.ftp.voidcmd(cmd)
             # Try to retrieve as a file
@@ -373,7 +373,7 @@
                     fp = RangeableFileObject(fp, (rest,''))
                     return (fp, retrlen)
                 elif not str(reason).startswith('550'):
-                    raise IOError, ('ftp error', reason), sys.exc_info()[2]
+                    raise IOError('ftp error', reason), None, sys.exc_info()[2]
         if not conn:
             # Set transfer mode to ASCII!
             self.ftp.voidcmd('TYPE A')
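
The rewritten raises above build the exception instance directly but keep
Python 2's three-expression form so the traceback of the original ftplib
error is preserved. A self-contained sketch of the idiom (illustrative only;
the inner error is hypothetical):

    import sys

    def reraise_as_ioerror(msg):
        try:
            raise RuntimeError("low-level failure")   # stand-in for an ftplib error
        except RuntimeError:
            # instance, None, traceback: change the exception type while
            # keeping the original traceback
            raise IOError('ftp error', msg), None, sys.exc_info()[2]
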
--- a/mercurial/changelog.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/changelog.py	Wed Sep 17 11:34:37 2008 +0200
@@ -83,7 +83,7 @@
         "delay visibility of index updates to other readers"
         self._realopener = self.opener
         self.opener = self._delayopener
-        self._delaycount = self.count()
+        self._delaycount = len(self)
         self._delaybuf = []
         self._delayname = None
 
@@ -109,7 +109,7 @@
         # if we're doing an initial clone, divert to another file
         if self._delaycount == 0:
             self._delayname = fp.name
-            if not self.count():
+            if not len(self):
                 # make sure to truncate the file
                 mode = mode.replace('a', 'w')
             return self._realopener(name + ".a", mode)
@@ -131,9 +131,7 @@
 
     def encode_extra(self, d):
         # keys must be sorted to produce a deterministic changelog entry
-        keys = d.keys()
-        keys.sort()
-        items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
+        items = [_string_escape('%s:%s' % (k, d[k])) for k in util.sort(d)]
         return "\0".join(items)
 
     def read(self, node):
@@ -176,7 +174,7 @@
         files = l[3:]
         return (manifest, user, (time, timezone), files, desc, extra)
 
-    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
+    def add(self, manifest, files, desc, transaction, p1=None, p2=None,
                   user=None, date=None, extra={}):
 
         user = user.strip()
@@ -193,7 +191,6 @@
         if extra:
             extra = self.encode_extra(extra)
             parseddate = "%s %s" % (parseddate, extra)
-        list.sort()
-        l = [hex(manifest), user, parseddate] + list + ["", desc]
+        l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc]
         text = "\n".join(l)
-        return self.addrevision(text, transaction, self.count(), p1, p2)
+        return self.addrevision(text, transaction, len(self), p1, p2)
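
Sorting the extra keys keeps the encoded string, and therefore the changeset
hash, deterministic for identical input. A toy illustration (simplified: it
uses sorted() and omits the _string_escape step used above):

    def encode_extra(d):
        return "\0".join(['%s:%s' % (k, d[k]) for k in sorted(d)])

    assert encode_extra({'branch': 'default', 'close': '1'}) == \
           encode_extra({'close': '1', 'branch': 'default'})
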
--- a/mercurial/cmdutil.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/cmdutil.py	Wed Sep 17 11:34:37 2008 +0200
@@ -9,6 +9,7 @@
 from i18n import _
 import os, sys, bisect, stat
 import mdiff, bdiff, util, templater, templatefilters, patch, errno
+import match as _match
 
 revrangesep = ':'
 
@@ -125,7 +126,7 @@
         if revrangesep in revs[0]:
             start, end = revs[0].split(revrangesep, 1)
             start = revfix(repo, start, 0)
-            end = revfix(repo, end, repo.changelog.count() - 1)
+            end = revfix(repo, end, len(repo) - 1)
         else:
             start = revfix(repo, revs[0], None)
     elif len(revs) == 2:
@@ -150,7 +151,7 @@
         if revrangesep in spec:
             start, end = spec.split(revrangesep, 1)
             start = revfix(repo, start, 0)
-            end = revfix(repo, end, repo.changelog.count() - 1)
+            end = revfix(repo, end, len(repo) - 1)
             step = start > end and -1 or 1
             for rev in xrange(start, end+step, step):
                 if rev in seen:
@@ -223,27 +224,28 @@
                               pathname),
                 mode)
 
-def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
-    cwd = repo.getcwd()
-    return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
-                           opts.get('exclude'), globbed=globbed,
-                           default=default)
+def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
+    if not globbed and default == 'relpath':
+        pats = util.expand_glob(pats or [])
+    m = _match.match(repo.root, repo.getcwd(), pats,
+                    opts.get('include'), opts.get('exclude'), default)
+    def badfn(f, msg):
+        repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
+        return False
+    m.bad = badfn
+    return m
 
-def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
-         default=None):
-    files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
-                                        default=default)
-    exact = dict.fromkeys(files)
-    cwd = repo.getcwd()
-    for src, fn in repo.walk(node=node, files=files, match=matchfn,
-                             badmatch=badmatch):
-        yield src, fn, repo.pathto(fn, cwd), fn in exact
+def matchall(repo):
+    return _match.always(repo.root, repo.getcwd())
+
+def matchfiles(repo, files):
+    return _match.exact(repo.root, repo.getcwd(), files)
 
 def findrenames(repo, added=None, removed=None, threshold=0.5):
     '''find renamed files -- yields (before, after, score) tuples'''
     if added is None or removed is None:
         added, removed = repo.status()[1:3]
-    ctx = repo.changectx()
+    ctx = repo['.']
     for a in added:
         aa = repo.wread(a)
         bestname, bestscore = None, threshold
@@ -275,16 +277,19 @@
     add, remove = [], []
     mapping = {}
     audit_path = util.path_auditor(repo.root)
-    for src, abs, rel, exact in walk(repo, pats, opts):
+    m = match(repo, pats, opts)
+    for abs in repo.walk(m):
         target = repo.wjoin(abs)
         good = True
         try:
             audit_path(abs)
         except:
             good = False
-        if src == 'f' and good and abs not in repo.dirstate:
+        rel = m.rel(abs)
+        exact = m.exact(abs)
+        if good and abs not in repo.dirstate:
             add.append(abs)
-            mapping[abs] = rel, exact
+            mapping[abs] = rel, m.exact(abs)
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
         if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
@@ -319,8 +324,11 @@
 
     def walkpat(pat):
         srcs = []
-        for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
+        m = match(repo, [pat], opts, globbed=True)
+        for abs in repo.walk(m):
             state = repo.dirstate[abs]
+            rel = m.rel(abs)
+            exact = m.exact(abs)
             if state in '?r':
                 if exact and state == '?':
                     ui.warn(_('%s: not copying - file is not managed\n') % rel)
@@ -645,9 +653,7 @@
             self.ui.write(_("copies:      %s\n") % ' '.join(copies))
 
         if extra and self.ui.debugflag:
-            extraitems = extra.items()
-            extraitems.sort()
-            for key, value in extraitems:
+            for key, value in util.sort(extra.items()):
                 self.ui.write(_("extra:       %s=%s\n")
                               % (key, value.encode('string_escape')))
 
@@ -791,9 +797,7 @@
             return showlist('tag', self.repo.nodetags(changenode), **args)
 
         def showextras(**args):
-            extras = changes[5].items()
-            extras.sort()
-            for key, value in extras:
+            for key, value in util.sort(changes[5].items()):
                 args = args.copy()
                 args.update(dict(key=key, value=value))
                 yield self.t('extra', **args)
@@ -889,7 +893,7 @@
     # options
     patch = False
     if opts.get('patch'):
-        patch = matchfn or util.always
+        patch = matchfn or matchall(repo)
 
     tmpl = opts.get('template')
     mapfile = None
@@ -922,7 +926,7 @@
 def finddate(ui, repo, date):
     """Find the tipmost changeset that matches the given date spec"""
     df = util.matchdate(date)
-    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
+    get = util.cachefunc(lambda r: repo[r].changeset())
     changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
     results = {}
     for st, rev, fns in changeiter:
@@ -932,7 +936,7 @@
                 results[rev] = d
         elif st == 'iter':
             if rev in results:
-                ui.status("Found revision %s from %s\n" %
+                ui.status(_("Found revision %s from %s\n") %
                           (rev, util.datestr(results[rev])))
                 return str(rev)
 
@@ -977,31 +981,31 @@
                 if windowsize < sizelimit:
                     windowsize *= 2
 
-    files, matchfn, anypats = matchpats(repo, pats, opts)
+    m = match(repo, pats, opts)
     follow = opts.get('follow') or opts.get('follow_first')
 
-    if repo.changelog.count() == 0:
-        return [], matchfn
+    if not len(repo):
+        return [], m
 
     if follow:
-        defrange = '%s:0' % repo.changectx().rev()
+        defrange = '%s:0' % repo['.'].rev()
     else:
         defrange = '-1:0'
     revs = revrange(repo, opts['rev'] or [defrange])
     wanted = {}
-    slowpath = anypats or opts.get('removed')
+    slowpath = m.anypats() or opts.get('removed')
     fncache = {}
 
-    if not slowpath and not files:
+    if not slowpath and not m.files():
         # No files, no patterns.  Display all revs.
         wanted = dict.fromkeys(revs)
     copies = []
     if not slowpath:
         # Only files, no patterns.  Check the history of each file.
         def filerevgen(filelog, node):
-            cl_count = repo.changelog.count()
+            cl_count = len(repo)
             if node is None:
-                last = filelog.count() - 1
+                last = len(filelog) - 1
             else:
                 last = filelog.rev(node)
             for i, window in increasing_windows(last, nullrev):
@@ -1017,14 +1021,14 @@
                     if rev[0] < cl_count:
                         yield rev
         def iterfiles():
-            for filename in files:
+            for filename in m.files():
                 yield filename, None
             for filename_node in copies:
                 yield filename_node
         minrev, maxrev = min(revs), max(revs)
         for file_, node in iterfiles():
             filelog = repo.file(file_)
-            if filelog.count() == 0:
+            if not len(filelog):
                 if node is None:
                     # A zero count may be a directory or deleted file, so
                     # try to find matching entries on the slow path.
@@ -1050,13 +1054,12 @@
 
         # The slow path checks files modified in every changeset.
         def changerevgen():
-            for i, window in increasing_windows(repo.changelog.count()-1,
-                                                nullrev):
+            for i, window in increasing_windows(len(repo) - 1, nullrev):
                 for j in xrange(i - window, i + 1):
                     yield j, change(j)[3]
 
         for rev, changefiles in changerevgen():
-            matches = filter(matchfn, changefiles)
+            matches = filter(m, changefiles)
             if matches:
                 fncache[rev] = matches
                 wanted[rev] = 1
@@ -1109,7 +1112,7 @@
                 del wanted[x]
 
     def iterate():
-        if follow and not files:
+        if follow and not m.files():
             ff = followfilter(onlyfirst=opts.get('follow_first'))
             def want(rev):
                 if ff.match(rev) and rev in wanted:
@@ -1122,20 +1125,18 @@
         for i, window in increasing_windows(0, len(revs)):
             yield 'window', revs[0] < revs[-1], revs[-1]
             nrevs = [rev for rev in revs[i:i+window] if want(rev)]
-            srevs = list(nrevs)
-            srevs.sort()
-            for rev in srevs:
+            for rev in util.sort(list(nrevs)):
                 fns = fncache.get(rev)
                 if not fns:
                     def fns_generator():
                         for f in change(rev)[3]:
-                            if matchfn(f):
+                            if m(f):
                                 yield f
                     fns = fns_generator()
                 yield 'add', rev, fns
             for rev in nrevs:
                 yield 'iter', rev, None
-    return iterate(), matchfn
+    return iterate(), m
 
 def commit(ui, repo, commitfunc, pats, opts):
     '''commit the specified files or all outstanding changes'''
@@ -1149,13 +1150,18 @@
     if opts.get('addremove'):
         addremove(repo, pats, opts)
 
-    fns, match, anypats = matchpats(repo, pats, opts)
+    m = match(repo, pats, opts)
     if pats:
-        status = repo.status(files=fns, match=match)
-        modified, added, removed, deleted, unknown = status[:5]
-        files = modified + added + removed
+        modified, added, removed = repo.status(match=m)[:3]
+        files = util.sort(modified + added + removed)
         slist = None
-        for f in fns:
+
+        def is_dir(f):
+            name = f + '/'
+            i = bisect.bisect(files, name)
+            return i < len(files) and files[i].startswith(name)
+
+        for f in m.files():
             if f == '.':
                 continue
             if f not in files:
@@ -1164,14 +1170,11 @@
                 try:
                     mode = os.lstat(rf)[stat.ST_MODE]
                 except OSError:
+                    if is_dir(f): # deleted directory ?
+                        continue
                     raise util.Abort(_("file %s not found!") % rel)
                 if stat.S_ISDIR(mode):
-                    name = f + '/'
-                    if slist is None:
-                        slist = list(files)
-                        slist.sort()
-                    i = bisect.bisect(slist, name)
-                    if i >= len(slist) or not slist[i].startswith(name):
+                    if not is_dir(f):
                         raise util.Abort(_("no match under directory %s!")
                                          % rel)
                 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
@@ -1179,9 +1182,8 @@
                                        "unsupported file type!") % rel)
                 elif f not in repo.dirstate:
                     raise util.Abort(_("file %s not tracked!") % rel)
-    else:
-        files = []
+        m = matchfiles(repo, files)
     try:
-        return commitfunc(ui, repo, files, message, match, opts)
+        return commitfunc(ui, repo, message, m, opts)
     except ValueError, inst:
         raise util.Abort(str(inst))
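
Throughout this file the (files, matchfn, anypats) triple from matchpats()
gives way to a single matcher object. A condensed sketch of the new calling
pattern (assuming a repo object plus pats and opts as in the commands above;
not itself part of the changeset):

    from mercurial import cmdutil

    def walk_with_matcher(repo, pats, opts):
        """Yield (abs, rel, exact) much like the old walk() helper."""
        m = cmdutil.match(repo, pats, opts)
        for abs in repo.walk(m):
            yield abs, m.rel(abs), m.exact(abs)
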
--- a/mercurial/commands.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/commands.py	Wed Sep 17 11:34:37 2008 +0200
@@ -7,12 +7,13 @@
 
 from node import hex, nullid, nullrev, short
 from repo import RepoError, NoCapability
-from i18n import _
+from i18n import _, gettext
 import os, re, sys, urllib
 import hg, util, revlog, bundlerepo, extensions, copies
 import difflib, patch, time, help, mdiff, tempfile
 import version, socket
 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
+import merge as merge_
 
 # Commands start here, listed alphabetically
 
@@ -30,15 +31,16 @@
     rejected = None
     exacts = {}
     names = []
-    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
-                                             badmatch=util.always):
-        if exact:
+    m = cmdutil.match(repo, pats, opts)
+    m.bad = lambda x,y: True
+    for abs in repo.walk(m):
+        if m.exact(abs):
             if ui.verbose:
-                ui.status(_('adding %s\n') % rel)
+                ui.status(_('adding %s\n') % m.rel(abs))
             names.append(abs)
             exacts[abs] = 1
         elif abs not in repo.dirstate:
-            ui.status(_('adding %s\n') % rel)
+            ui.status(_('adding %s\n') % m.rel(abs))
             names.append(abs)
     if not opts.get('dry_run'):
         rejected = repo.add(names)
@@ -53,11 +55,11 @@
     New files are ignored if they match any of the patterns in .hgignore. As
     with add, these changes take effect at the next commit.
 
-    Use the -s option to detect renamed files.  With a parameter > 0,
+    Use the -s option to detect renamed files. With a parameter > 0,
     this compares every removed file with every added file and records
-    those similar enough as renames.  This option takes a percentage
+    those similar enough as renames. This option takes a percentage
     between 0 (disabled) and 100 (files must be identical) as its
-    parameter.  Detecting renamed files this way can be expensive.
+    parameter. Detecting renamed files this way can be expensive.
     """
     try:
         sim = float(opts.get('similarity') or 0)
@@ -105,13 +107,13 @@
         lastfunc = funcmap[-1]
         funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
 
-    ctx = repo.changectx(opts['rev'])
-
-    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
-                                             node=ctx.node()):
-        fctx = ctx.filectx(abs)
+    ctx = repo[opts['rev']]
+
+    m = cmdutil.match(repo, pats, opts)
+    for abs in ctx.walk(m):
+        fctx = ctx[abs]
         if not opts['text'] and util.binary(fctx.data()):
-            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
+            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
             continue
 
         lines = fctx.annotate(follow=opts.get('follow'),
@@ -134,7 +136,7 @@
     By default, the revision used is the parent of the working
     directory; use "-r" to specify a different revision.
 
-    To specify the type of archive to create, use "-t".  Valid
+    To specify the type of archive to create, use "-t". Valid
     types are:
 
     "files" (default): a directory full of files
@@ -148,18 +150,18 @@
     using a format string; see "hg help export" for details.
 
     Each member added to an archive file has a directory prefix
-    prepended.  Use "-p" to specify a format string for the prefix.
+    prepended. Use "-p" to specify a format string for the prefix.
     The default is the basename of the archive, with suffixes removed.
     '''
 
-    ctx = repo.changectx(opts['rev'])
+    ctx = repo[opts['rev']]
     if not ctx:
         raise util.Abort(_('repository has no revisions'))
     node = ctx.node()
     dest = cmdutil.make_filename(repo, dest, node)
     if os.path.realpath(dest) == repo.root:
         raise util.Abort(_('repository root cannot be destination'))
-    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
+    matchfn = cmdutil.match(repo, [], opts)
     kind = opts.get('type') or 'files'
     prefix = opts['prefix']
     if dest == '-':
@@ -174,20 +176,20 @@
 def backout(ui, repo, node=None, rev=None, **opts):
     '''reverse effect of earlier changeset
 
-    Commit the backed out changes as a new changeset.  The new
+    Commit the backed out changes as a new changeset. The new
     changeset is a child of the backed out changeset.
 
     If you back out a changeset other than the tip, a new head is
-    created.  This head will be the new tip and you should merge this
+    created. This head will be the new tip and you should merge this
     backout changeset with another head (current one by default).
 
     The --merge option remembers the parent of the working directory
     before starting the backout, then merges the new head with that
-    changeset afterwards.  This saves you from doing the merge by
-    hand.  The result of this merge is not committed, as for a normal
+    changeset afterwards. This saves you from doing the merge by
+    hand. The result of this merge is not committed, as for a normal
     merge.
 
-    See 'hg help dates' for a list of formats valid for -d/--date.
+    See 'hg help dates' for a list of formats valid for -d/--date.
     '''
     if rev and node:
         raise util.Abort(_("please specify just one revision"))
@@ -270,7 +272,7 @@
     Once you have performed tests, mark the working directory as bad
     or good and bisect will either update to another candidate changeset
     or announce that it has found the bad revision.
-    
+
     As a shortcut, you can also use the revision argument to mark a
     revision as good or bad without checking it out first.
     """
@@ -371,12 +373,19 @@
     Unless --force is specified, branch will not let you set a
     branch name that shadows an existing branch.
 
+    Use --clean to reset the working directory branch to that of the
+    parent of the working directory, negating a previous branch change.
+
     Use the command 'hg update' to switch to an existing branch.
     """
 
-    if label:
+    if opts.get('clean'):
+        label = repo[None].parents()[0].branch()
+        repo.dirstate.setbranch(label)
+        ui.status(_('reset working directory to branch %s\n') % label)
+    elif label:
         if not opts.get('force') and label in repo.branchtags():
-            if label not in [p.branch() for p in repo.workingctx().parents()]:
+            if label not in [p.branch() for p in repo.parents()]:
                 raise util.Abort(_('a branch of the same name already exists'
                                    ' (use --force to override)'))
         repo.dirstate.setbranch(util.fromlocal(label))
@@ -388,18 +397,17 @@
     """list repository named branches
 
     List the repository's named branches, indicating which ones are
-    inactive.  If active is specified, only show active branches.
+    inactive. If active is specified, only show active branches.
 
     A branch is considered active if it contains repository heads.
 
     Use the command 'hg update' to switch to an existing branch.
     """
     hexfunc = ui.debugflag and hex or short
-    activebranches = [util.tolocal(repo.changectx(n).branch())
+    activebranches = [util.tolocal(repo[n].branch())
                             for n in repo.heads()]
-    branches = [(tag in activebranches, repo.changelog.rev(node), tag)
-                            for tag, node in repo.branchtags().items()]
-    branches.sort()
+    branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
+                          for tag, node in repo.branchtags().items()])
     branches.reverse()
 
     for isactive, node, tag in branches:
@@ -420,8 +428,9 @@
 
     If no destination repository is specified the destination is
     assumed to have all the nodes specified by one or more --base
-    parameters.  To create a bundle containing all changesets, use
-    --all (or --base null).
+    parameters. To create a bundle containing all changesets, use
+    --all (or --base null). To change the compression method applied,
+    use the -t option (by default, bundles are compressed using bz2).
 
     The bundle file can then be transferred using conventional means and
     applied to another repository with the unbundle or pull command.
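
The new -t option is a plain name-to-header mapping; a minimal sketch of the
lookup the hunk below implements (illustrative only):

    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}

    def bundleheader(name):
        try:
            return btypes[name.lower()]
        except KeyError:
            raise ValueError('unknown bundle type specified with --type')
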
@@ -475,7 +484,14 @@
         cg = repo.changegroupsubset(o, revs, 'bundle')
     else:
         cg = repo.changegroup(o, 'bundle')
-    changegroup.writebundle(cg, fname, "HG10BZ")
+
+    bundletype = opts.get('type', 'bzip2').lower()
+    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
+    bundletype = btypes.get(bundletype)
+    if bundletype not in changegroup.bundletypes:
+        raise util.Abort(_('unknown bundle type specified with --type'))
+
+    changegroup.writebundle(cg, fname, bundletype)
 
 def cat(ui, repo, file1, *pats, **opts):
     """output the current or given revision of files
@@ -485,19 +501,19 @@
     or tip if no revision is checked out.
 
     Output may be to a file, in which case the name of the file is
-    given using a format string.  The formatting rules are the same as
+    given using a format string. The formatting rules are the same as
     for the export command, with the following additions:
 
     %s   basename of file being printed
     %d   dirname of file being printed, or '.' if in repo root
     %p   root-relative path name of file being printed
     """
-    ctx = repo.changectx(opts['rev'])
+    ctx = repo[opts['rev']]
     err = 1
-    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
-                                             ctx.node()):
+    m = cmdutil.match(repo, (file1,) + pats, opts)
+    for abs in ctx.walk(m):
         fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
-        data = ctx.filectx(abs).data()
+        data = ctx[abs].data()
         if opts.get('decode'):
             data = repo.wwritedata(abs, data)
         fp.write(data)
@@ -517,20 +533,22 @@
 
     For efficiency, hardlinks are used for cloning whenever the source
     and destination are on the same filesystem (note this applies only
-    to the repository data, not to the checked out files).  Some
+    to the repository data, not to the checked out files). Some
     filesystems, such as AFS, implement hardlinking incorrectly, but
-    do not report errors.  In these cases, use the --pull option to
+    do not report errors. In these cases, use the --pull option to
     avoid hardlinking.
 
-    You can safely clone repositories and checked out files using full
-    hardlinks with
+    In some cases, you can clone repositories and checked out files
+    using full hardlinks with
 
       $ cp -al REPO REPOCLONE
 
-    which is the fastest way to clone. However, the operation is not
-    atomic (making sure REPO is not modified during the operation is
-    up to you) and you have to make sure your editor breaks hardlinks
-    (Emacs and most Linux Kernel tools do so).
+    This is the fastest way to clone, but it is not always safe. The
+    operation is not atomic (making sure REPO is not modified during
+    the operation is up to you) and you have to make sure your editor
+    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
+    this is not compatible with certain extensions that place their
+    metadata under the .hg directory, such as mq.
 
     If you use the -r option to clone up to a specific revision, no
     subsequent revisions will be present in the cloned repository.
@@ -569,9 +587,9 @@
 
     See 'hg help dates' for a list of formats valid for -d/--date.
     """
-    def commitfunc(ui, repo, files, message, match, opts):
-        return repo.commit(files, message, opts['user'], opts['date'], match,
-                           force_editor=opts.get('force_editor'))
+    def commitfunc(ui, repo, message, match, opts):
+        return repo.commit(match.files(), message, opts['user'], opts['date'],
+                           match, force_editor=opts.get('force_editor'))
 
     node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
     if not node:
@@ -581,21 +599,26 @@
     parents = cl.parentrevs(rev)
     if rev - 1 in parents:
         # one of the parents was the old tip
-        return
-    if (parents == (nullrev, nullrev) or
-        len(cl.heads(cl.node(parents[0]))) > 1 and
-        (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
+        pass
+    elif (parents == (nullrev, nullrev) or
+          len(cl.heads(cl.node(parents[0]))) > 1 and
+          (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
         ui.status(_('created new head\n'))
 
+    if ui.debugflag:
+        ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
+    elif ui.verbose:
+        ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
+
 def copy(ui, repo, *pats, **opts):
     """mark files as copied for the next commit
 
-    Mark dest as having copies of source files.  If dest is a
-    directory, copies are put in that directory.  If dest is a file,
+    Mark dest as having copies of source files. If dest is a
+    directory, copies are put in that directory. If dest is a file,
     there can only be one source.
 
     By default, this command copies the contents of files as they
-    stand in the working directory.  If invoked with --after, the
+    stand in the working directory. If invoked with --after, the
     operation is recorded, but no copying is performed.
 
     This command takes effect in the next commit. To undo a copy
@@ -642,35 +665,30 @@
         ui.write("%s\n" % "\n".join(options))
         return
 
-    clist = cmdutil.findpossible(ui, cmd, table).keys()
-    clist.sort()
-    ui.write("%s\n" % "\n".join(clist))
+    ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table))))
 
 def debugfsinfo(ui, path = "."):
     file('.debugfsinfo', 'w').write('')
     ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
     ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
-    ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
+    ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                                 and 'yes' or 'no'))
     os.unlink('.debugfsinfo')
 
-def debugrebuildstate(ui, repo, rev=""):
+def debugrebuildstate(ui, repo, rev="tip"):
     """rebuild the dirstate as it would look like for the given revision"""
-    if rev == "":
-        rev = repo.changelog.tip()
-    ctx = repo.changectx(rev)
-    files = ctx.manifest()
+    ctx = repo[rev]
     wlock = repo.wlock()
     try:
-        repo.dirstate.rebuild(rev, files)
+        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
     finally:
         del wlock
 
 def debugcheckstate(ui, repo):
     """validate the correctness of the current dirstate"""
     parent1, parent2 = repo.dirstate.parents()
-    m1 = repo.changectx(parent1).manifest()
-    m2 = repo.changectx(parent2).manifest()
+    m1 = repo[parent1].manifest()
+    m2 = repo[parent2].manifest()
     errors = 0
     for f in repo.dirstate:
         state = repo.dirstate[f]
@@ -737,11 +755,9 @@
 
 def debugstate(ui, repo, nodates=None):
     """show the contents of the current dirstate"""
-    k = repo.dirstate._map.items()
-    k.sort()
     timestr = ""
     showdate = not nodates
-    for file_, ent in k:
+    for file_, ent in util.sort(repo.dirstate._map.items()):
         if showdate:
             if ent[3] == -1:
                 # Pad or slice to locale representation
@@ -783,7 +799,7 @@
     r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
     ui.write("   rev    offset  length   base linkrev" +
              " nodeid       p1           p2\n")
-    for i in xrange(r.count()):
+    for i in r:
         node = r.node(i)
         try:
             pp = r.parents(node)
@@ -797,7 +813,7 @@
     """dump an index DAG as a .dot file"""
     r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
     ui.write("digraph G {\n")
-    for i in xrange(r.count()):
+    for i in r:
         node = r.node(i)
         pp = r.parents(node)
         ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
@@ -920,26 +936,28 @@
 def debugrename(ui, repo, file1, *pats, **opts):
     """dump rename information"""
 
-    ctx = repo.changectx(opts.get('rev', 'tip'))
-    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
-                                             ctx.node()):
-        fctx = ctx.filectx(abs)
-        m = fctx.filelog().renamed(fctx.filenode())
-        if m:
-            ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
+    ctx = repo[opts.get('rev')]
+    m = cmdutil.match(repo, (file1,) + pats, opts)
+    for abs in ctx.walk(m):
+        fctx = ctx[abs]
+        o = fctx.filelog().renamed(fctx.filenode())
+        rel = m.rel(abs)
+        if o:
+            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
         else:
             ui.write(_("%s not renamed\n") % rel)
 
 def debugwalk(ui, repo, *pats, **opts):
     """show how files match on given patterns"""
-    items = list(cmdutil.walk(repo, pats, opts))
+    m = cmdutil.match(repo, pats, opts)
+    items = list(repo.walk(m))
     if not items:
         return
-    fmt = '%%s  %%-%ds  %%-%ds  %%s' % (
-        max([len(abs) for (src, abs, rel, exact) in items]),
-        max([len(rel) for (src, abs, rel, exact) in items]))
-    for src, abs, rel, exact in items:
-        line = fmt % (src, abs, rel, exact and 'exact' or '')
+    fmt = 'f  %%-%ds  %%-%ds  %%s' % (
+        max([len(abs) for abs in items]),
+        max([len(m.rel(abs)) for abs in items]))
+    for abs in items:
+        line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
         ui.write("%s\n" % line.rstrip())
 
 def diff(ui, repo, *pats, **opts):
@@ -965,10 +983,8 @@
     """
     node1, node2 = cmdutil.revpair(repo, opts['rev'])
 
-    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
-
-    patch.diff(repo, node1, node2, fns, match=matchfn,
-               opts=patch.diffopts(ui, opts))
+    m = cmdutil.match(repo, pats, opts)
+    patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
 
 def export(ui, repo, *changesets, **opts):
     """dump the header and diffs for one or more changesets
@@ -982,7 +998,7 @@
     as it will compare the merge changeset against its first parent only.
 
     Output may be to a file, in which case the name of the file is
-    given using a format string.  The formatting rules are as follows:
+    given using a format string. The formatting rules are as follows:
 
     %%   literal "%" character
     %H   changeset hash (40 bytes of hexadecimal)
@@ -1016,13 +1032,13 @@
 
     Search revisions of files for a regular expression.
 
-    This command behaves differently than Unix grep.  It only accepts
-    Python/Perl regexps.  It searches repository history, not the
-    working directory.  It always prints the revision number in which
+    This command behaves differently than Unix grep. It only accepts
+    Python/Perl regexps. It searches repository history, not the
+    working directory. It always prints the revision number in which
     a match appears.
 
     By default, grep only prints output for the first revision of a
-    file in which it finds a match.  To get it to print every revision
+    file in which it finds a match. To get it to print every revision
     that contains a change in match status ("-" for a match that
     becomes a non-match, or "+" for a non-match that becomes a match),
     use the --all flag.
@@ -1066,6 +1082,9 @@
             self.colstart = colstart
             self.colend = colend
 
+        def __hash__(self):
+            return hash((self.linenum, self.line))
+
         def __eq__(self, other):
             return self.line == other.line
 
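
The added __hash__ matters because these line states end up in hashed
containers while grep diffs lists of them. As a general rule __hash__ must
agree with __eq__; a minimal illustration of that rule (simplified, and
unlike the class above it compares and hashes the same fields):

    class linestate(object):
        def __init__(self, line, linenum):
            self.line, self.linenum = line, linenum
        def __eq__(self, other):
            return (self.line, self.linenum) == (other.line, other.linenum)
        def __hash__(self):
            # hash exactly what __eq__ compares so equal states collide
            return hash((self.line, self.linenum))
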
@@ -1126,7 +1145,7 @@
 
     fstate = {}
     skip = {}
-    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
+    get = util.cachefunc(lambda r: repo[r].changeset())
     changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
     found = False
     follow = opts.get('follow')
@@ -1134,7 +1153,7 @@
         if st == 'window':
             matches.clear()
         elif st == 'add':
-            ctx = repo.changectx(rev)
+            ctx = repo[rev]
             matches[rev] = {}
             for fn in fns:
                 if fn in skip:
@@ -1149,9 +1168,7 @@
                 except revlog.LookupError:
                     pass
         elif st == 'iter':
-            states = matches[rev].items()
-            states.sort()
-            for fn, m in states:
+            for fn, m in util.sort(matches[rev].items()):
                 copy = copies.get(rev, {}).get(fn)
                 if fn in skip:
                     if copy:
@@ -1169,9 +1186,7 @@
                     fstate[copy] = m
                 prev[fn] = rev
 
-    fstate = fstate.items()
-    fstate.sort()
-    for fn, state in fstate:
+    for fn, state in util.sort(fstate.items()):
         if fn in skip:
             continue
         if fn not in copies.get(prev[fn], {}):
@@ -1192,7 +1207,7 @@
     are the usual targets for update and merge operations.
 
     Branch heads are changesets that have a given branch tag, but have
-    no child changesets with that tag.  They are usually where
+    no child changesets with that tag. They are usually where
     development on the given branch takes place.
     """
     if opts['rev']:
@@ -1206,7 +1221,7 @@
         heads = []
         visitedset = util.set()
         for branchrev in branchrevs:
-            branch = repo.changectx(branchrev).branch()
+            branch = repo[branchrev].branch()
             if branch in visitedset:
                 continue
             visitedset.add(branch)
@@ -1258,7 +1273,14 @@
         if with_version:
             version_(ui)
             ui.write('\n')
-        aliases, i = cmdutil.findcmd(ui, name, table)
+
+        try:
+            aliases, i = cmdutil.findcmd(ui, name, table)
+        except cmdutil.AmbiguousCommand, inst:
+            select = lambda c: c.lstrip('^').startswith(inst.args[0])
+            helplist(_('list of commands:\n\n'), select)
+            return
+
         # synopsis
         ui.write("%s\n" % i[2])
 
@@ -1267,7 +1289,7 @@
             ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
 
         # description
-        doc = i[0].__doc__
+        doc = gettext(i[0].__doc__)
         if not doc:
             doc = _("(No help text available)")
         if ui.quiet:
@@ -1293,7 +1315,7 @@
             f = f.lstrip("^")
             if not ui.debugflag and f.startswith("debug"):
                 continue
-            doc = e[0].__doc__
+            doc = gettext(e[0].__doc__)
             if not doc:
                 doc = _("(No help text available)")
             h[f] = doc.splitlines(0)[0].rstrip()
@@ -1304,8 +1326,7 @@
             return
 
         ui.status(header)
-        fns = h.keys()
-        fns.sort()
+        fns = util.sort(h)
         m = max(map(len, fns))
         for f in fns:
             if ui.verbose:
@@ -1318,17 +1339,13 @@
             addglobalopts(True)
 
     def helptopic(name):
-        v = None
-        for i in help.helptable:
-            l = i.split('|')
-            if name in l:
-                v = i
-                header = l[-1]
-        if not v:
+        for names, header, doc in help.helptable:
+            if name in names:
+                break
+        else:
             raise cmdutil.UnknownCommand(name)
 
         # description
-        doc = help.helptable[v]
         if not doc:
             doc = _("(No help text available)")
         if callable(doc):
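
The rewritten lookup iterates the new (names, header, doc) tuples and relies
on Python's for/else: the else branch runs only when the loop finishes
without a break, i.e. no topic matched. A standalone sketch with a
hypothetical table entry:

    def find_topic(helptable, name):
        for names, header, doc in helptable:
            if name in names:
                break
        else:
            raise KeyError(name)
        return header, doc

    helptable = [(('dates',), 'Date Formats', 'how dates are parsed...')]
    assert find_topic(helptable, 'dates')[0] == 'Date Formats'
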
@@ -1343,7 +1360,8 @@
         except KeyError:
             raise cmdutil.UnknownCommand(name)
 
-        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
+        doc = gettext(mod.__doc__) or _('No help text available')
+        doc = doc.splitlines(0)
         ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
         for d in doc[1:]:
             ui.write(d, '\n')
@@ -1399,6 +1417,15 @@
                                          and _(" (default: %s)") % default
                                          or "")))
 
+    if ui.verbose:
+        ui.write(_("\nspecial help topics:\n"))
+        topics = []
+        for names, header, doc in help.helptable:
+            topics.append((", ".join(names), header))
+        topics_len = max([len(s[0]) for s in topics])
+        for t, desc in topics:
+            ui.write(" %-*s  %s\n" % (topics_len, t, desc))
+
     if opt_output:
         opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
         for first, second in opt_output:
@@ -1441,7 +1468,7 @@
                 "can't query remote revision number, branch, or tags")
         output = [hexfunc(srepo.lookup(rev))]
     elif not rev:
-        ctx = repo.workingctx()
+        ctx = repo[None]
         parents = ctx.parents()
         changed = False
         if default or id or num:
@@ -1453,7 +1480,7 @@
             output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                     (changed) and "+" or ""))
     else:
-        ctx = repo.changectx(rev)
+        ctx = repo[rev]
         if default or id:
             output = [hexfunc(ctx.node())]
         if num:
@@ -1485,15 +1512,15 @@
     If there are outstanding changes in the working directory, import
     will abort unless given the -f flag.
 
-    You can import a patch straight from a mail message.  Even patches
+    You can import a patch straight from a mail message. Even patches
     as attachments work (body part must be type text/plain or
-    text/x-patch to be used).  From and Subject headers of email
-    message are used as default committer and commit message.  All
+    text/x-patch to be used). From and Subject headers of email
+    message are used as default committer and commit message. All
     text/plain body parts before first diff are added to commit
     message.
 
     If the imported patch was generated by hg export, user and description
-    from patch override values from message headers and body.  Values
+    from patch override values from message headers and body. Values
     given on command line with -m and -u override these.
 
     If --exact is specified, import will set the working directory
@@ -1550,7 +1577,7 @@
                     message = None
                 ui.debug(_('message:\n%s\n') % message)
 
-                wp = repo.workingctx().parents()
+                wp = repo.parents()
                 if opts.get('exact'):
                     if not nodeid or not p1:
                         raise util.Abort(_('not a mercurial patch'))
@@ -1662,7 +1689,7 @@
 def init(ui, dest=".", **opts):
     """create a new repository in the given directory
 
-    Initialize a new repository in the given directory.  If the given
+    Initialize a new repository in the given directory. If the given
     directory does not exist, it is created.
 
     If no directory is given, the current directory is used.
@@ -1680,7 +1707,7 @@
     Print all files under Mercurial control whose names match the
     given patterns.
 
-    This command searches the entire repository by default.  To search
+    This command searches the entire repository by default. To search
     just the current directory and its subdirectories, use
     "--include .".
 
@@ -1693,24 +1720,18 @@
     that contain white space as multiple filenames.
     """
     end = opts['print0'] and '\0' or '\n'
-    rev = opts['rev']
-    if rev:
-        node = repo.lookup(rev)
-    else:
-        node = None
+    rev = opts.get('rev') or None
 
     ret = 1
-    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
-                                             badmatch=util.always,
-                                             default='relglob'):
-        if src == 'b':
-            continue
-        if not node and abs not in repo.dirstate:
+    m = cmdutil.match(repo, pats, opts, default='relglob')
+    m.bad = lambda x,y: False
+    for abs in repo[rev].walk(m):
+        if not rev and abs not in repo.dirstate:
             continue
         if opts['fullpath']:
             ui.write(os.path.join(repo.root, abs), end)
         else:
-            ui.write(((pats and rel) or abs), end)
+            ui.write(((pats and m.rel(abs)) or abs), end)
         ret = 0
 
     return ret
@@ -1722,7 +1743,7 @@
     project.
 
     File history is shown without following rename or copy history of
-    files.  Use -f/--follow with a file name to follow history across
+    files. Use -f/--follow with a file name to follow history across
     renames and copies. --follow without a file name will only show
     ancestors or descendants of the starting revision. --follow-first
     only follows the first parent of merge revisions.
@@ -1745,7 +1766,7 @@
 
     """
 
-    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
+    get = util.cachefunc(lambda r: repo[r].changeset())
     changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
 
     limit = cmdutil.loglimit(opts)
@@ -1754,7 +1775,7 @@
     if opts['copies'] and opts['rev']:
         endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
     else:
-        endrev = repo.changelog.count()
+        endrev = len(repo)
     rcache = {}
     ncache = {}
     def getrenamed(fn, rev):
@@ -1766,7 +1787,7 @@
             rcache[fn] = {}
             ncache[fn] = {}
             fl = repo.file(fn)
-            for i in xrange(fl.count()):
+            for i in fl:
                 node = fl.node(i)
                 lr = fl.linkrev(node)
                 renamed = fl.renamed(node)
@@ -1782,7 +1803,7 @@
         # filectx logic.
 
         try:
-            return repo.changectx(rev).filectx(fn).renamed()
+            return repo[rev][fn].renamed()
         except revlog.LookupError:
             pass
         return None
@@ -1858,17 +1879,13 @@
     if not node:
         node = rev
 
-    m = repo.changectx(node).manifest()
-    files = m.keys()
-    files.sort()
-
-    for f in files:
+    decor = {'l':'644 @ ', 'x':'755 * ', '':'644   '}
+    ctx = repo[node]
+    for f in ctx:
         if ui.debugflag:
-            ui.write("%40s " % hex(m[f]))
+            ui.write("%40s " % hex(ctx.manifest()[f]))
         if ui.verbose:
-            type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
-            perm = m.execf(f) and "755" or "644"
-            ui.write("%3s %1s " % (perm, type))
+            ui.write(decor[ctx.flags(f)])
         ui.write("%s\n" % f)
 
 def merge(ui, repo, node=None, force=None, rev=None):
@@ -1880,8 +1897,8 @@
     performed before any further updates are allowed.
 
     If no revision is specified, the working directory's parent is a
-    head revision, and the repository contains exactly one other head,
-    the other head is merged with by default.  Otherwise, an explicit
+    head revision, and the current branch contains exactly one other head,
+    the other head is merged with by default. Otherwise, an explicit
     revision to merge with must be provided.
     """
 
@@ -1891,22 +1908,28 @@
         node = rev
 
     if not node:
-        heads = repo.heads()
-        if len(heads) > 2:
-            raise util.Abort(_('repo has %d heads - '
-                               'please merge with an explicit rev') %
-                             len(heads))
+        branch = repo.changectx(None).branch()
+        bheads = repo.branchheads(branch)
+        if len(bheads) > 2:
+            raise util.Abort(_("branch '%s' has %d heads - "
+                               "please merge with an explicit rev") %
+                             (branch, len(bheads)))
+
         parent = repo.dirstate.parents()[0]
-        if len(heads) == 1:
+        if len(bheads) == 1:
+            if len(repo.heads()) > 1:
+                raise util.Abort(_("branch '%s' has one head - "
+                                   "please merge with an explicit rev") %
+                                 branch)
             msg = _('there is nothing to merge')
-            if parent != repo.lookup(repo.workingctx().branch()):
+            if parent != repo.lookup(repo[None].branch()):
                 msg = _('%s - use "hg update" instead') % msg
             raise util.Abort(msg)
 
-        if parent not in heads:
+        if parent not in bheads:
             raise util.Abort(_('working dir not at a head rev - '
                                'use "hg update" or merge with an explicit rev'))
-        node = parent == heads[0] and heads[-1] or heads[0]
+        node = parent == bheads[0] and bheads[-1] or bheads[0]
     return hg.merge(repo, node, force=force)
 
 def outgoing(ui, repo, dest=None, **opts):
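
With the branch restricted to exactly two heads, the merge target is simply
whichever head is not the working directory parent; restating the selection
above as a tiny helper (illustrative only):

    def pick_other_head(parent, bheads):
        # bheads has exactly two entries and parent is one of them
        return parent == bheads[0] and bheads[-1] or bheads[0]
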
@@ -1956,15 +1979,15 @@
     """
     rev = opts.get('rev')
     if rev:
-        ctx = repo.changectx(rev)
+        ctx = repo[rev]
     else:
-        ctx = repo.workingctx()
+        ctx = repo[None]
 
     if file_:
-        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
-        if anypats or len(files) != 1:
+        m = cmdutil.match(repo, (file_,), opts)
+        if m.anypats() or len(m.files()) != 1:
             raise util.Abort(_('can only specify an explicit file name'))
-        file_ = files[0]
+        file_ = m.files()[0]
         filenodes = []
         for cp in ctx.parents():
             if not cp:
@@ -1992,7 +2015,7 @@
     definition of available names.
 
     Path names are defined in the [paths] section of /etc/mercurial/hgrc
-    and $HOME/.hgrc.  If run inside a repository, .hg/hgrc is used, too.
+    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
     """
     if search:
         for name, path in ui.configitems("paths"):
@@ -2030,9 +2053,9 @@
     Valid URLs are of the form:
 
       local/filesystem/path (or file://local/filesystem/path)
-      http://[user@]host[:port]/[path]
-      https://[user@]host[:port]/[path]
-      ssh://[user@]host[:port]/[path]
+      http://[user[:pass]@]host[:port]/[path]
+      https://[user[:pass]@]host[:port]/[path]
+      ssh://[user[:pass]@]host[:port]/[path]
       static-http://host[:port]/[path]
 
     Paths in the local filesystem can either point to Mercurial
@@ -2092,9 +2115,9 @@
     Valid URLs are of the form:
 
       local/filesystem/path (or file://local/filesystem/path)
-      ssh://[user@]host[:port]/[path]
-      http://[user@]host[:port]/[path]
-      https://[user@]host[:port]/[path]
+      ssh://[user[:pass]@]host[:port]/[path]
+      http://[user[:pass]@]host[:port]/[path]
+      https://[user[:pass]@]host[:port]/[path]
 
     An optional identifier after # indicates a particular branch, tag,
     or changeset to push. If -r is used, the named changeset and all its
@@ -2111,7 +2134,7 @@
     cmdutil.setremoteconfig(ui, opts)
 
     other = hg.repository(ui, dest)
-    ui.status('pushing to %s\n' % util.hidepassword(dest))
+    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
     if revs:
         revs = [repo.lookup(rev) for rev in revs]
     r = repo.push(other, opts['force'], revs=revs)
@@ -2134,7 +2157,7 @@
 
     message = cmdutil.logmessage(opts)
 
-    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
+    files = cmdutil.match(repo, pats, opts).files()
     if opts['files']:
         files += open(opts['files']).read().splitlines()
 
@@ -2186,47 +2209,28 @@
     if not pats and not after:
         raise util.Abort(_('no files specified'))
 
-    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
-    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
-    modified, added, removed, deleted, unknown = mardu
-
-    remove, forget = [], []
-    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
-
-        reason = None
-        if abs in removed or abs in unknown:
-            continue
-
-        # last column
-        elif abs in deleted:
-            remove.append(abs)
-
-        # rest of the third row
-        elif after and not force:
-            reason = _('still exists (use -f to force removal)')
-
-        # rest of the first column
-        elif abs in added:
-            if not force:
-                reason = _('has been marked for add (use -f to force removal)')
-            else:
-                forget.append(abs)
-
-        # rest of the third column
-        elif abs in modified:
-            if not force:
-                reason = _('is modified (use -f to force removal)')
-            else:
-                remove.append(abs)
-
-        # rest of the second column
-        elif not reason:
-            remove.append(abs)
-
-        if reason:
-            ui.warn(_('not removing %s: file %s\n') % (rel, reason))
-        elif ui.verbose or not exact:
-            ui.status(_('removing %s\n') % rel)
+    m = cmdutil.match(repo, pats, opts)
+    s = repo.status(match=m, clean=True)
+    modified, added, deleted, clean = s[0], s[1], s[3], s[6]
+
+    def warn(files, reason):
+        for f in files:
+            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
+                    % (m.rel(f), reason))
+
+    if force:
+        remove, forget = modified + deleted + clean, added
+    elif after:
+        remove, forget = deleted, []
+        warn(modified + added + clean, _('still exists'))
+    else:
+        remove, forget = deleted + clean, []
+        warn(modified, _('is modified'))
+        warn(added, _('has been marked for add'))
+
+    for f in util.sort(remove + forget):
+        if ui.verbose or not m.exact(f):
+            ui.status(_('removing %s\n') % m.rel(f))
 
     repo.forget(forget)
     repo.remove(remove, unlink=not after)
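
The rewritten remove builds its work lists from status buckets instead of
walking files one by one. A compact restatement of which buckets are removed
and which are merely forgotten (illustrative, warnings omitted):

    def classify(modified, added, deleted, clean, force=False, after=False):
        if force:
            return modified + deleted + clean, added   # remove, forget
        if after:
            return deleted, []        # only files already gone from disk
        return deleted + clean, []
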
@@ -2234,12 +2238,12 @@
 def rename(ui, repo, *pats, **opts):
     """rename files; equivalent of copy + remove
 
-    Mark dest as copies of sources; mark sources for deletion.  If
-    dest is a directory, copies are put in that directory.  If dest is
+    Mark dest as copies of sources; mark sources for deletion. If
+    dest is a directory, copies are put in that directory. If dest is
     a file, there can only be one source.
 
     By default, this command copies the contents of files as they
-    stand in the working directory.  If invoked with --after, the
+    stand in the working directory. If invoked with --after, the
     operation is recorded, but no copying is performed.
 
     This command takes effect in the next commit. To undo a rename
@@ -2251,6 +2255,39 @@
     finally:
         del wlock
 
+def resolve(ui, repo, *pats, **opts):
+    """resolve file merges from a branch merge or update
+
+    This command will attempt to resolve unresolved merges from the
+    last update or merge command. This will use the local file
+    revision preserved at the last update or merge to cleanly retry
+    the file merge attempt. With no file or options specified, this
+    command will attempt to resolve all unresolved files.
+
+    The codes used to show the status of files are:
+    U = unresolved
+    R = resolved
+    """
+
+    if len([x for x in opts if opts[x]]) > 1:
+        raise util.Abort(_("too many options specified"))
+
+    ms = merge_.mergestate(repo)
+    m = cmdutil.match(repo, pats, opts)
+
+    for f in ms:
+        if m(f):
+            if opts.get("list"):
+                ui.write("%s %s\n" % (ms[f].upper(), f))
+            elif opts.get("mark"):
+                ms.mark(f, "r")
+            elif opts.get("unmark"):
+                ms.mark(f, "u")
+            else:
+                wctx = repo[None]
+                mctx = wctx.parents()[-1]
+                ms.resolve(f, wctx, mctx)
+
 def revert(ui, repo, *pats, **opts):
     """restore individual files or dirs to an earlier state
 
@@ -2269,13 +2306,13 @@
     back" some or all of an earlier change.
     See 'hg help dates' for a list of formats valid for -d/--date.
 
-    Revert modifies the working directory.  It does not commit any
-    changes, or change the parent of the working directory.  If you
+    Revert modifies the working directory. It does not commit any
+    changes, or change the parent of the working directory. If you
     revert to a revision other than the parent of the working
     directory, the reverted files will thus appear modified
     afterwards.
 
-    If a file has been deleted, it is restored.  If the executable
+    If a file has been deleted, it is restored. If the executable
     mode of a file was changed, it is reset.
 
     If names are given, all files matching the names are reverted.
@@ -2298,7 +2335,7 @@
     if not opts['rev'] and p2 != nullid:
         raise util.Abort(_('uncommitted merge - please provide a '
                            'specific revision'))
-    ctx = repo.changectx(opts['rev'])
+    ctx = repo[opts['rev']]
     node = ctx.node()
     mf = ctx.manifest()
     if node == parent:
@@ -2316,30 +2353,32 @@
     try:
         # walk dirstate.
         files = []
-        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
-                                                 badmatch=mf.has_key):
-            names[abs] = (rel, exact)
-            if src != 'b':
-                files.append(abs)
+
+        m = cmdutil.match(repo, pats, opts)
+        m.bad = lambda x,y: False
+        for abs in repo.walk(m):
+            names[abs] = m.rel(abs), m.exact(abs)
 
         # walk target manifest.
 
-        def badmatch(path):
+        def badfn(path, msg):
             if path in names:
-                return True
+                return False
             path_ = path + '/'
             for f in names:
                 if f.startswith(path_):
-                    return True
+                    return False
+            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
             return False
 
-        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
-                                                 badmatch=badmatch):
-            if abs in names or src == 'b':
-                continue
-            names[abs] = (rel, exact)
-
-        changes = repo.status(files=files, match=names.has_key)[:4]
+        m = cmdutil.match(repo, pats, opts)
+        m.bad = badfn
+        for abs in repo[node].walk(m):
+            if abs not in names:
+                names[abs] = m.rel(abs), m.exact(abs)
+
+        m = cmdutil.matchfiles(repo, names)
+        changes = repo.status(match=m)[:4]
         modified, added, removed, deleted = map(dict.fromkeys, changes)
 
         # if f is a rename, also revert the source
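
The rewritten walk above relies on the matcher's bad() callback: it is invoked for explicitly named files that cannot be found, and its return value decides whether the walker should still report the name. A minimal sketch of overriding it (the warning text is illustrative):

    m = cmdutil.match(repo, pats, opts)
    def badfn(path, msg):
        repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
        return False                      # do not force-match the missing file
    m.bad = badfn
    for abs in repo.walk(m):
        pass                              # only files that exist reach here
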
@@ -2373,10 +2412,7 @@
             (deleted, revert, remove, False, False),
             )
 
-        entries = names.items()
-        entries.sort()
-
-        for abs, (rel, exact) in entries:
+        for abs, (rel, exact) in util.sort(names.items()):
             mfentry = mf.get(abs)
             target = repo.wjoin(abs)
             def handle(xlist, dobackup):
@@ -2414,7 +2450,7 @@
                 if pmf is None:
                     # only need parent manifest in this unlikely case,
                     # so do not read by default
-                    pmf = repo.changectx(parent).manifest()
+                    pmf = repo[parent].manifest()
                 if abs in pmf:
                     if mfentry:
                         # if version of file is same in parent and target
@@ -2428,7 +2464,7 @@
         if not opts.get('dry_run'):
             def checkout(f):
                 fc = ctx[f]
-                repo.wwrite(f, fc.data(), fc.fileflags())
+                repo.wwrite(f, fc.data(), fc.flags())
 
             audit_path = util.path_auditor(repo.root)
             for f in remove[0]:
@@ -2511,7 +2547,7 @@
     Start a local HTTP repository browser and pull server.
 
     By default, the server logs accesses to stdout and errors to
-    stderr.  Use the "-A" and "-E" options to log to files.
+    stderr. Use the "-A" and "-E" options to log to files.
     """
 
     if opts["stdio"]:
@@ -2550,8 +2586,17 @@
             if port == ':80':
                 port = ''
 
-            ui.status(_('listening at http://%s%s/%s (%s:%d)\n') %
-                      (self.httpd.fqaddr, port, prefix, self.httpd.addr, self.httpd.port))
+            bindaddr = self.httpd.addr
+            if bindaddr == '0.0.0.0':
+                bindaddr = '*'
+            elif ':' in bindaddr: # IPv6
+                bindaddr = '[%s]' % bindaddr
+
+            fqaddr = self.httpd.fqaddr
+            if ':' in fqaddr:
+                fqaddr = '[%s]' % fqaddr
+            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
+                      (fqaddr, port, prefix, bindaddr, self.httpd.port))
 
         def run(self):
             self.httpd.serve_forever()
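
The banner change wraps IPv6 literals in brackets so the host:port pair stays unambiguous. The rule in isolation (the helper name is illustrative):

    def fmtaddr(host, port):
        # bracket IPv6 literals, leave IPv4 addresses and hostnames alone
        if ':' in host:
            host = '[%s]' % host
        return '%s:%d' % (host, port)

    # fmtaddr('::1', 8000)       -> '[::1]:8000'
    # fmtaddr('127.0.0.1', 8000) -> '127.0.0.1:8000'
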
@@ -2563,10 +2608,10 @@
 def status(ui, repo, *pats, **opts):
     """show changed files in the working directory
 
-    Show status of files in the repository.  If names are given, only
-    files that match are shown.  Files that are clean or ignored or
+    Show status of files in the repository. If names are given, only
+    files that match are shown. Files that are clean or ignored or
     source of a copy/move operation, are not listed unless -c (clean),
-    -i (ignored), -C (copies) or -A is given.  Unless options described
+    -i (ignored), -C (copies) or -A is given. Unless options described
     with "show only ..." are given, the options -mardu are used.
 
     Option -q/--quiet hides untracked (unknown and ignored) files
@@ -2591,65 +2636,45 @@
       = the previous added file was copied from here
     """
 
-    all = opts['all']
     node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
-
-    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
     cwd = (pats and repo.getcwd()) or ''
-    modified, added, removed, deleted, unknown, ignored, clean = [
-        n for n in repo.status(node1=node1, node2=node2, files=files,
-                               match=matchfn,
-                               list_ignored=opts['ignored']
-                                            or all and not ui.quiet,
-                               list_clean=opts['clean'] or all,
-                               list_unknown=opts['unknown']
-                                            or not (ui.quiet or
-                                                    opts['modified'] or
-                                                    opts['added'] or
-                                                    opts['removed'] or
-                                                    opts['deleted'] or
-                                                    opts['ignored']))]
-
-    changetypes = (('modified', 'M', modified),
-                   ('added', 'A', added),
-                   ('removed', 'R', removed),
-                   ('deleted', '!', deleted),
-                   ('unknown', '?', unknown),
-                   ('ignored', 'I', ignored))
-
-    explicit_changetypes = changetypes + (('clean', 'C', clean),)
-
+    end = opts['print0'] and '\0' or '\n'
     copy = {}
-    showcopy = {}
-    if ((all or opts.get('copies')) and not opts.get('no_status')):
-        if opts.get('rev') == []:
-            # fast path, more correct with merge parents
-            showcopy = copy = repo.dirstate.copies().copy()
-        else:
-            ctxn = repo.changectx(nullid)
-            ctx1 = repo.changectx(node1)
-            ctx2 = repo.changectx(node2)
-            if node2 is None:
-                ctx2 = repo.workingctx()
-            copy, diverge = copies.copies(repo, ctx1, ctx2, ctxn)
-            for k, v in copy.items():
+    states = 'modified added removed deleted unknown ignored clean'.split()
+    show = [k for k in states if opts[k]]
+    if opts['all']:
+        show += ui.quiet and (states[:4] + ['clean']) or states
+    if not show:
+        show = ui.quiet and states[:4] or states[:5]
+
+    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
+                       'ignored' in show, 'clean' in show, 'unknown' in show)
+    changestates = zip(states, 'MAR!?IC', stat)
+
+    if (opts['all'] or opts['copies']) and not opts['no_status']:
+        ctxn = repo[nullid]
+        ctx1 = repo[node1]
+        ctx2 = repo[node2]
+        added = stat[1]
+        if node2 is None:
+            added = stat[0] + stat[1] # merged?
+
+        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
+            if k in added:
+                copy[k] = v
+            elif v in added:
                 copy[v] = k
 
-    end = opts['print0'] and '\0' or '\n'
-
-    for opt, char, changes in ([ct for ct in explicit_changetypes
-                                if all or opts[ct[0]]]
-                               or changetypes):
-
-        if opts['no_status']:
-            format = "%%s%s" % end
-        else:
+    for state, char, files in changestates:
+        if state in show:
             format = "%s %%s%s" % (char, end)
-
-        for f in changes:
-            ui.write(format % repo.pathto(f, cwd))
-            if f in copy and (f in added or f in showcopy):
-                ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
+            if opts['no_status']:
+                format = "%%s%s" % end
+
+            for f in files:
+                ui.write(format % repo.pathto(f, cwd))
+                if f in copy:
+                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
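
The status rewrite pairs the state names with their one-letter codes positionally, so no hand-maintained changetypes table is needed. The pairing in isolation:

    states = 'modified added removed deleted unknown ignored clean'.split()
    for state, char in zip(states, 'MAR!?IC'):
        print char, state                 # M modified, A added, ..., C clean
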
 
 def tag(ui, repo, name1, *names, **opts):
     """add one or more tags for the current or given revision
@@ -2666,13 +2691,13 @@
     To facilitate version control, distribution, and merging of tags,
     they are stored as a file named ".hgtags" which is managed
     similarly to other project files and can be hand-edited if
-    necessary.  The file '.hg/localtags' is used for local tags (not
+    necessary. The file '.hg/localtags' is used for local tags (not
     shared among repositories).
 
     See 'hg help dates' for a list of formats valid for -d/--date.
     """
 
-    rev_ = None
+    rev_ = "."
     names = (name1,) + names
     if len(names) != len(dict.fromkeys(names)):
         raise util.Abort(_('tag names must be unique'))
@@ -2703,7 +2728,7 @@
     if not rev_ and repo.dirstate.parents()[1] != nullid:
         raise util.Abort(_('uncommitted merge - please provide a '
                            'specific revision'))
-    r = repo.changectx(rev_).node()
+    r = repo[rev_].node()
 
     if not message:
         message = (_('Added tag %s for changeset %s') %
@@ -2760,7 +2785,7 @@
     that repository becomes the current tip. The "tip" tag is special
     and cannot be renamed or assigned to a different changeset.
     """
-    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
+    cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1)
 
 def unbundle(ui, repo, fname1, *fnames, **opts):
     """apply one or more changegroup files
@@ -2788,8 +2813,8 @@
 def update(ui, repo, node=None, rev=None, clean=False, date=None):
     """update working directory
 
-    Update the working directory to the specified revision, or the
-    tip of the current branch if none is specified. Use null as
+    Update the repository's working directory to the specified revision,
+    or the tip of the current branch if none is specified. Use null as
     the revision to remove the working copy (like 'hg clone -U').
 
     If the requested revision is a descendant of the working
@@ -2901,6 +2926,23 @@
     ('M', 'no-merges', None, _('do not show merges')),
 ] + templateopts
 
+diffopts = [
+    ('a', 'text', None, _('treat all files as text')),
+    ('g', 'git', None, _('use git extended diff format')),
+    ('', 'nodates', None, _("don't include dates in diff headers"))
+]
+
+diffopts2 = [
+    ('p', 'show-function', None, _('show which function each change is in')),
+    ('w', 'ignore-all-space', None,
+     _('ignore white space when comparing lines')),
+    ('b', 'ignore-space-change', None,
+     _('ignore changes in the amount of white space')),
+    ('B', 'ignore-blank-lines', None,
+     _('ignore changes whose lines are all blank')),
+    ('U', 'unified', '', _('number of lines of context to show'))
+]
+
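Factoring the diff flags into module-level diffopts/diffopts2 lists lets other command tables reuse them. A hedged sketch of how an out-of-tree extension might do so (the command itself is hypothetical, not part of this changeset):

    from mercurial import commands
    from mercurial.i18n import _

    def dummydiff(ui, repo, *pats, **opts):
        """hypothetical command accepting the standard diff options"""
        ui.write("text=%r git=%r\n" % (opts.get('text'), opts.get('git')))

    cmdtable = {
        'dummydiff':
            (dummydiff,
             [('r', 'rev', [], _('revision'))] + commands.diffopts,
             _('hg dummydiff [OPTION]...')),
    }
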
 table = {
     "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
     "addremove":
@@ -2949,8 +2991,9 @@
     "branch":
         (branch,
          [('f', 'force', None,
-           _('set branch name even if it shadows an existing branch'))],
-         _('hg branch [-f] [NAME]')),
+           _('set branch name even if it shadows an existing branch')),
+          ('C', 'clean', None, _('reset branch name to parent branch name'))],
+         _('hg branch [-fC] [NAME]')),
     "branches":
         (branches,
          [('a', 'active', False,
@@ -2964,8 +3007,8 @@
            _('a changeset up to which you would like to bundle')),
           ('', 'base', [],
            _('a base changeset to specify instead of a destination')),
-          ('a', 'all', None,
-           _('bundle all changesets in the repository')),
+          ('a', 'all', None, _('bundle all changesets in the repository')),
+          ('t', 'type', 'bzip2', _('bundle compression type to use')),
          ] + remoteopts,
          _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
     "cat":
@@ -3040,29 +3083,14 @@
     "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
     "^diff":
         (diff,
-         [('r', 'rev', [], _('revision')),
-          ('a', 'text', None, _('treat all files as text')),
-          ('p', 'show-function', None,
-           _('show which function each change is in')),
-          ('g', 'git', None, _('use git extended diff format')),
-          ('', 'nodates', None, _("don't include dates in diff headers")),
-          ('w', 'ignore-all-space', None,
-           _('ignore white space when comparing lines')),
-          ('b', 'ignore-space-change', None,
-           _('ignore changes in the amount of white space')),
-          ('B', 'ignore-blank-lines', None,
-           _('ignore changes whose lines are all blank')),
-          ('U', 'unified', '',
-           _('number of lines of context to show'))
-         ] + walkopts,
+         [('r', 'rev', [], _('revision'))
+         ] + diffopts + diffopts2 + walkopts,
          _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
     "^export":
         (export,
          [('o', 'output', '', _('print output to file with formatted name')),
-          ('a', 'text', None, _('treat all files as text')),
-          ('g', 'git', None, _('use git extended diff format')),
-          ('', 'nodates', None, _("don't include dates in diff headers")),
-          ('', 'switch-parent', None, _('diff against the second parent'))],
+          ('', 'switch-parent', None, _('diff against the second parent'))
+          ] + diffopts,
          _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
     "grep":
         (grep,
@@ -3206,6 +3234,12 @@
            _('forcibly copy over an existing managed file')),
          ] + walkopts + dryrunopts,
          _('hg rename [OPTION]... SOURCE... DEST')),
+    "resolve":
+        (resolve,
+         [('l', 'list', None, _('list state of files needing merge')),
+          ('m', 'mark', None, _('mark files as resolved')),
+          ('u', 'unmark', None, _('unmark files as resolved'))],
+          _('hg resolve [OPTION] [FILES...]')),
     "revert":
         (revert,
          [('a', 'all', None, _('revert all changes when no arguments given')),
@@ -3280,7 +3314,7 @@
          _('hg unbundle [-u] FILE...')),
     "^update|up|checkout|co":
         (update,
-         [('C', 'clean', None, _('overwrite locally modified files')),
+         [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
           ('d', 'date', '', _('tipmost revision matching date')),
           ('r', 'rev', '', _('revision'))],
          _('hg update [-C] [-d DATE] [[-r] REV]')),
--- a/mercurial/context.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/context.py	Wed Sep 17 11:34:37 2008 +0200
@@ -5,35 +5,36 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from node import nullid, nullrev, short
+from node import nullid, nullrev, short, hex
 from i18n import _
 import ancestor, bdiff, revlog, util, os, errno
 
 class changectx(object):
     """A changecontext object makes access to data related to a particular
     changeset convenient."""
-    def __init__(self, repo, changeid=None):
+    def __init__(self, repo, changeid=''):
         """changeid is a revision number, node, or tag"""
+        if changeid == '':
+            changeid = '.'
         self._repo = repo
-
-        if not changeid and changeid != 0:
-            p1, p2 = self._repo.dirstate.parents()
-            self._rev = self._repo.changelog.rev(p1)
-            if self._rev == -1:
-                changeid = 'tip'
-            else:
-                self._node = p1
-                return
-
         self._node = self._repo.lookup(changeid)
         self._rev = self._repo.changelog.rev(self._node)
 
     def __str__(self):
         return short(self.node())
 
+    def __int__(self):
+        return self.rev()
+
     def __repr__(self):
         return "<changectx %s>" % str(self)
 
+    def __hash__(self):
+        try:
+            return hash(self._rev)
+        except AttributeError:
+            return id(self)
+
     def __eq__(self, other):
         try:
             return self._rev == other._rev
@@ -57,8 +58,14 @@
             md = self._repo.manifest.readdelta(self._changeset[0])
             self._manifestdelta = md
             return self._manifestdelta
+        elif name == '_parents':
+            p = self._repo.changelog.parents(self._node)
+            if p[1] == nullid:
+                p = p[:-1]
+            self._parents = [changectx(self._repo, x) for x in p]
+            return self._parents
         else:
-            raise AttributeError, name
+            raise AttributeError(name)
 
     def __contains__(self, key):
         return key in self._manifest
@@ -67,9 +74,7 @@
         return self.filectx(key)
 
     def __iter__(self):
-        a = self._manifest.keys()
-        a.sort()
-        for f in a:
+        for f in util.sort(self._manifest):
             yield f
 
     def changeset(self): return self._changeset
@@ -77,6 +82,7 @@
 
     def rev(self): return self._rev
     def node(self): return self._node
+    def hex(self): return hex(self._node)
     def user(self): return self._changeset[1]
     def date(self): return self._changeset[2]
     def files(self): return self._changeset[3]
@@ -87,14 +93,21 @@
 
     def parents(self):
         """return contexts for each parent changeset"""
-        p = self._repo.changelog.parents(self._node)
-        return [changectx(self._repo, x) for x in p]
+        return self._parents
 
     def children(self):
         """return contexts for each child changeset"""
         c = self._repo.changelog.children(self._node)
         return [changectx(self._repo, x) for x in c]
 
+    def ancestors(self):
+        for a in self._repo.changelog.ancestors(self._rev):
+            yield changectx(self._repo, a)
+
+    def descendants(self):
+        for d in self._repo.changelog.descendants(self._rev):
+            yield changectx(self._repo, d)
+
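The new ancestors()/descendants() generators wrap the changelog walkers in changectx objects. A small usage sketch (the revision chosen is illustrative; repo is assumed to exist):

    ctx = repo['tip']
    for a in ctx.ancestors():
        print a.rev(), str(a)             # every ancestor as a changectx
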
     def _fileinfo(self, path):
         if '_manifest' in self.__dict__:
             try:
@@ -115,7 +128,7 @@
     def filenode(self, path):
         return self._fileinfo(path)[0]
 
-    def fileflags(self, path):
+    def flags(self, path):
         try:
             return self._fileinfo(path)[1]
         except revlog.LookupError:
@@ -128,15 +141,6 @@
         return filectx(self._repo, path, fileid=fileid,
                        changectx=self, filelog=filelog)
 
-    def filectxs(self):
-        """generate a file context for each file in this changeset's
-           manifest"""
-        mf = self.manifest()
-        m = mf.keys()
-        m.sort()
-        for f in m:
-            yield self.filectx(f, fileid=mf[f])
-
     def ancestor(self, c2):
         """
         return the ancestor context of self and c2
@@ -144,6 +148,23 @@
         n = self._repo.changelog.ancestor(self._node, c2._node)
         return changectx(self._repo, n)
 
+    def walk(self, match):
+        fdict = dict.fromkeys(match.files())
+        # for dirstate.walk, files=['.'] means "walk the whole tree".
+        # follow that here, too
+        fdict.pop('.', None)
+        for fn in self:
+            for ffn in fdict:
+                # match if the file is the exact name or a directory
+                if ffn == fn or fn.startswith("%s/" % ffn):
+                    del fdict[ffn]
+                    break
+            if match(fn):
+                yield fn
+        for fn in util.sort(fdict):
+            if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
+                yield fn
+
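changectx.walk() yields the files of the revision's manifest that a matcher selects, routing explicitly named files missing from that revision through match.bad(). A minimal usage sketch (the pattern is illustrative; repo is assumed to exist):

    from mercurial import cmdutil

    ctx = repo['.']
    m = cmdutil.match(repo, ['src'], {})
    for f in ctx.walk(m):
        print f
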
 class filectx(object):
     """A filecontext object makes access to data related to a particular
        filerevision convenient."""
@@ -194,7 +215,7 @@
             self._repopath = self._path
             return self._repopath
         else:
-            raise AttributeError, name
+            raise AttributeError(name)
 
     def __nonzero__(self):
         try:
@@ -210,6 +231,12 @@
     def __repr__(self):
         return "<filectx %s>" % str(self)
 
+    def __hash__(self):
+        try:
+            return hash((self._path, self._fileid))
+        except AttributeError:
+            return id(self)
+
     def __eq__(self, other):
         try:
             return (self._path == other._path
@@ -228,9 +255,7 @@
 
     def filerev(self): return self._filerev
     def filenode(self): return self._filenode
-    def fileflags(self): return self._changectx.fileflags(self._path)
-    def isexec(self): return 'x' in self.fileflags()
-    def islink(self): return 'l' in self.fileflags()
+    def flags(self): return self._changectx.flags(self._path)
     def filelog(self): return self._filelog
 
     def rev(self):
@@ -376,12 +401,11 @@
         # sort by revision (per file) which is a topological order
         visit = []
         for f in files:
-            fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
+            fn = [(n.rev(), n) for n in needed if n._path == f]
             visit.extend(fn)
-        visit.sort()
+
         hist = {}
-
-        for r, f in visit:
+        for r, f in util.sort(visit):
             curr = decorate(f.data(), f)
             for p in parents(f):
                 if p != nullid:
@@ -432,11 +456,41 @@
 
 class workingctx(changectx):
     """A workingctx object makes access to data related to
-    the current working directory convenient."""
-    def __init__(self, repo):
+    the current working directory convenient.
+    parents - a pair of parent nodeids, or None to use the dirstate.
+    date - any valid date string or (unixtime, offset), or None.
+    user - username string, or None.
+    extra - a dictionary of extra values, or None.
+    changes - a list of file lists as returned by localrepo.status()
+               or None to use the repository status.
+    """
+    def __init__(self, repo, parents=None, text="", user=None, date=None,
+                 extra=None, changes=None):
         self._repo = repo
         self._rev = None
         self._node = None
+        self._text = text
+        if date:
+            self._date = util.parsedate(date)
+        if user:
+            self._user = user
+        if parents:
+            self._parents = [changectx(self._repo, p) for p in parents]
+        if changes:
+            self._status = list(changes)
+
+        self._extra = {}
+        if extra:
+            self._extra = extra.copy()
+        if 'branch' not in self._extra:
+            branch = self._repo.dirstate.branch()
+            try:
+                branch = branch.decode('UTF-8').encode('UTF-8')
+            except UnicodeDecodeError:
+                raise util.Abort(_('branch name not in UTF-8!'))
+            self._extra['branch'] = branch
+        if self._extra['branch'] == '':
+            self._extra['branch'] = 'default'
 
     def __str__(self):
         return str(self._parents[0]) + "+"
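
workingctx can now carry explicit commit metadata rather than always consulting the dirstate and ui. A hedged construction sketch (the text and user values are illustrative; repo is assumed to exist):

    from mercurial import context

    wctx = context.workingctx(repo, text="frobnicate the widget",
                              user="Jane Doe <jane@example.com>")
    print wctx.description(), wctx.user(), wctx.branch()
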
@@ -444,34 +498,44 @@
     def __nonzero__(self):
         return True
 
+    def __contains__(self, key):
+        return self._dirstate[key] not in "?r"
+
     def __getattr__(self, name):
-        if name == '_parents':
-            self._parents = self._repo.parents()
-            return self._parents
         if name == '_status':
-            self._status = self._repo.status()
+            self._status = self._repo.status(unknown=True)
             return self._status
+        elif name == '_user':
+            self._user = self._repo.ui.username()
+            return self._user
+        elif name == '_date':
+            self._date = util.makedate()
+            return self._date
         if name == '_manifest':
             self._buildmanifest()
             return self._manifest
+        elif name == '_parents':
+            p = self._repo.dirstate.parents()
+            if p[1] == nullid:
+                p = p[:-1]
+            self._parents = [changectx(self._repo, x) for x in p]
+            return self._parents
         else:
-            raise AttributeError, name
+            raise AttributeError(name)
 
     def _buildmanifest(self):
         """generate a manifest corresponding to the working directory"""
 
         man = self._parents[0].manifest().copy()
         copied = self._repo.dirstate.copies()
-        is_exec = util.execfunc(self._repo.root,
-                                lambda p: man.execf(copied.get(p,p)))
-        is_link = util.linkfunc(self._repo.root,
-                                lambda p: man.linkf(copied.get(p,p)))
+        cf = lambda x: man.flags(copied.get(x, x))
+        ff = self._repo.dirstate.flagfunc(cf)
         modified, added, removed, deleted, unknown = self._status[:5]
         for i, l in (("a", added), ("m", modified), ("u", unknown)):
             for f in l:
                 man[f] = man.get(copied.get(f, f), nullid) + i
                 try:
-                    man.set(f, is_exec(f), is_link(f))
+                    man.set(f, ff(f))
                 except OSError:
                     pass
 
@@ -483,13 +547,11 @@
 
     def manifest(self): return self._manifest
 
-    def user(self): return self._repo.ui.username()
-    def date(self): return util.makedate()
-    def description(self): return ""
+    def user(self): return self._user or self._repo.ui.username()
+    def date(self): return self._date
+    def description(self): return self._text
     def files(self):
-        f = self.modified() + self.added() + self.removed()
-        f.sort()
-        return f
+        return util.sort(self._status[0] + self._status[1] + self._status[2])
 
     def modified(self): return self._status[0]
     def added(self): return self._status[1]
@@ -497,21 +559,18 @@
     def deleted(self): return self._status[3]
     def unknown(self): return self._status[4]
     def clean(self): return self._status[5]
-    def branch(self): return self._repo.dirstate.branch()
+    def branch(self): return self._extra['branch']
+    def extra(self): return self._extra
 
     def tags(self):
         t = []
         [t.extend(p.tags()) for p in self.parents()]
         return t
 
-    def parents(self):
-        """return contexts for each parent changeset"""
-        return self._parents
-
     def children(self):
         return []
 
-    def fileflags(self, path):
+    def flags(self, path):
         if '_manifest' in self.__dict__:
             try:
                 return self._manifest.flags(path)
@@ -521,12 +580,9 @@
         pnode = self._parents[0].changeset()[0]
         orig = self._repo.dirstate.copies().get(path, path)
         node, flag = self._repo.manifest.find(pnode, orig)
-        is_link = util.linkfunc(self._repo.root,
-                                lambda p: flag and 'l' in flag)
-        is_exec = util.execfunc(self._repo.root,
-                                lambda p: flag and 'x' in flag)
         try:
-            return (is_link(path) and 'l' or '') + (is_exec(path) and 'x' or '')
+            ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
+            return ff(path)
         except OSError:
             pass
 
@@ -543,6 +599,9 @@
         """return the ancestor context of self and c2"""
         return self._parents[0].ancestor(c2) # punt on two parents for now
 
+    def walk(self, match):
+        return util.sort(self._repo.dirstate.walk(match, True, False).keys())
+
 class workingfilectx(filectx):
     """A workingfilectx object makes access to data related to a particular
        file in the working directory convenient."""
@@ -571,7 +630,7 @@
             self._filelog = self._repo.file(self._repopath)
             return self._filelog
         else:
-            raise AttributeError, name
+            raise AttributeError(name)
 
     def __nonzero__(self):
         return True
@@ -625,3 +684,92 @@
             return (t, tz)
 
     def cmp(self, text): return self._repo.wread(self._path) == text
+
+class memctx(object):
+    """A memctx is a subset of changectx supposed to be built on memory
+    and passed to commit functions.
+
+    NOTE: this interface and the related memfilectx are experimental and
+    may change without notice.
+
+    parents - a pair of parent nodeids.
+    filectxfn - a callable taking (repo, memctx, path) arguments and
+    returning a memfilectx object.
+    date - any valid date string or (unixtime, offset), or None.
+    user - username string, or None.
+    extra - a dictionary of extra values, or None.
+    """
+    def __init__(self, repo, parents, text, files, filectxfn, user=None,
+                 date=None, extra=None):
+        self._repo = repo
+        self._rev = None
+        self._node = None
+        self._text = text
+        self._date = date and util.parsedate(date) or util.makedate()
+        self._user = user
+        parents = [(p or nullid) for p in parents]
+        p1, p2 = parents
+        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
+        files = util.sort(list(files))
+        self._status = [files, [], [], [], []]
+        self._filectxfn = filectxfn
+
+        self._extra = extra and extra.copy() or {}
+        if 'branch' not in self._extra:
+            self._extra['branch'] = 'default'
+        elif self._extra.get('branch') == '':
+            self._extra['branch'] = 'default'
+
+    def __str__(self):
+        return str(self._parents[0]) + "+"
+
+    def __int__(self):
+        return self._rev
+
+    def __nonzero__(self):
+        return True
+
+    def user(self): return self._user or self._repo.ui.username()
+    def date(self): return self._date
+    def description(self): return self._text
+    def files(self): return self.modified()
+    def modified(self): return self._status[0]
+    def added(self): return self._status[1]
+    def removed(self): return self._status[2]
+    def deleted(self): return self._status[3]
+    def unknown(self): return self._status[4]
+    def clean(self): return self._status[5]
+    def branch(self): return self._extra['branch']
+    def extra(self): return self._extra
+    def flags(self, f): return self[f].flags()
+
+    def parents(self):
+        """return contexts for each parent changeset"""
+        return self._parents
+
+    def filectx(self, path, filelog=None):
+        """get a file context from the working directory"""
+        return self._filectxfn(self._repo, self, path)
+
+class memfilectx(object):
+    """A memfilectx is a subset of filectx supposed to be built by client
+    code and passed to commit functions.
+    """
+    def __init__(self, path, data, islink, isexec, copied):
+        """copied is the source file path, or None."""
+        self._path = path
+        self._data = data
+        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
+        self._copied = None
+        if copied:
+            self._copied = (copied, nullid)
+
+    def __nonzero__(self): return True
+    def __str__(self): return "%s@%s" % (self.path(), self._changectx)
+    def path(self): return self._path
+    def data(self): return self._data
+    def flags(self): return self._flags
+    def isexec(self): return 'x' in self._flags
+    def islink(self): return 'l' in self._flags
+    def renamed(self): return self._copied
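
memctx and memfilectx let a caller describe a commit entirely in memory; filectxfn is asked for one memfilectx per path. A hedged sketch, assuming the repository exposes a commit entry point that accepts a memctx (the commitctx name and the file contents are assumptions, not part of this hunk):

    from mercurial import context

    sources = {'hello.txt': 'hello, world\n'}

    def getfilectx(repo, mctx, path):
        # serve file contents from the in-memory dict above
        return context.memfilectx(path, sources[path],
                                  islink=False, isexec=False, copied=None)

    p1 = repo['tip'].node()
    mctx = context.memctx(repo, (p1, None), "in-memory commit",
                          sources.keys(), getfilectx,
                          user="Jane Doe <jane@example.com>")
    # repo.commitctx(mctx)                # assumed commit entry point
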
+
--- a/mercurial/copies.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/copies.py	Wed Sep 17 11:34:37 2008 +0200
@@ -11,9 +11,7 @@
 
 def _nonoverlap(d1, d2, d3):
     "Return list of elements in d1 not in d2 or d3"
-    l = [d for d in d1 if d not in d3 and d not in d2]
-    l.sort()
-    return l
+    return util.sort([d for d in d1 if d not in d3 and d not in d2])
 
 def _dirname(f):
     s = f.rfind("/")
@@ -49,9 +47,7 @@
         visit += [(p, depth - 1) for p in fc.parents()]
 
     # return old names sorted by depth
-    old = old.values()
-    old.sort()
-    return [o[1] for o in old]
+    return [o[1] for o in util.sort(old.values())]
 
 def _findlimit(repo, a, b):
     "find the earliest revision that's an ancestor of a or b but not both"
@@ -67,7 +63,7 @@
     #   - quit when interesting revs is zero
 
     cl = repo.changelog
-    working = cl.count() # pseudo rev for the working directory
+    working = len(cl) # pseudo rev for the working directory
     if a is None:
         a = working
     if b is None:
@@ -109,6 +105,10 @@
     if not c1 or not c2 or c1 == c2:
         return {}, {}
 
+    # avoid silly behavior for parent -> working dir
+    if c2.node() == None and c1.node() == repo.dirstate.parents()[0]:
+        return repo.dirstate.copies(), {}
+
     limit = _findlimit(repo, c1.rev(), c2.rev())
     m1 = c1.manifest()
     m2 = c2.manifest()
--- a/mercurial/dirstate.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/dirstate.py	Wed Sep 17 11:34:37 2008 +0200
@@ -9,17 +9,24 @@
 
 from node import nullid
 from i18n import _
-import struct, os, bisect, stat, strutil, util, errno, ignore
+import struct, os, stat, util, errno, ignore
 import cStringIO, osutil, sys
 
 _unknown = ('?', 0, 0, 0)
 _format = ">cllll"
 
+def _finddirs(path):
+    pos = path.rfind('/')
+    while pos != -1:
+        yield path[:pos]
+        pos = path.rfind('/', 0, pos)
+
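_finddirs yields the ancestor directories of a path, nearest first; it backs both the shadow checks in _addpath and the reference counts kept in _dirs. For example (evaluated within this module):

    list(_finddirs('a/b/c.txt'))          # -> ['a/b', 'a']
    list(_finddirs('top.txt'))            # -> []
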
 class dirstate(object):
 
     def __init__(self, opener, ui, root):
         self._opener = opener
         self._root = root
+        self._rootdir = os.path.join(root, '')
         self._dirty = False
         self._dirtypl = False
         self._ui = ui
@@ -31,6 +38,13 @@
         elif name == '_copymap':
             self._read()
             return self._copymap
+        elif name == '_foldmap':
+            _foldmap = {}
+            for name in self._map:
+                norm = os.path.normcase(os.path.normpath(name))
+                _foldmap[norm] = name
+            self._foldmap = _foldmap
+            return self._foldmap
         elif name == '_branch':
             try:
                 self._branch = (self._opener("branch").read().strip()
@@ -48,10 +62,18 @@
                 if err.errno != errno.ENOENT: raise
             return self._pl
         elif name == '_dirs':
-            self._dirs = {}
-            for f in self._map:
-                if self[f] != 'r':
-                    self._incpath(f)
+            dirs = {}
+            for f,s in self._map.iteritems():
+                if s[0] != 'r':
+                    pos = f.rfind('/')
+                    while pos != -1:
+                        f = f[:pos]
+                        if f in dirs:
+                            dirs[f] += 1
+                            break
+                        dirs[f] = 1
+                        pos = f.rfind('/')
+            self._dirs = dirs
             return self._dirs
         elif name == '_ignore':
             files = [self._join('.hgignore')]
@@ -63,14 +85,56 @@
         elif name == '_slash':
             self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
             return self._slash
+        elif name == '_checklink':
+            self._checklink = util.checklink(self._root)
+            return self._checklink
         elif name == '_checkexec':
             self._checkexec = util.checkexec(self._root)
             return self._checkexec
+        elif name == '_checkcase':
+            self._checkcase = not util.checkcase(self._join('.hg'))
+            return self._checkcase
+        elif name == 'normalize':
+            if self._checkcase:
+                self.normalize = self._normalize
+            else:
+                self.normalize = lambda x: x
+            return self.normalize
         else:
-            raise AttributeError, name
+            raise AttributeError(name)
 
     def _join(self, f):
-        return os.path.join(self._root, f)
+        # much faster than os.path.join()
+        # it's safe because f is always a relative path
+        return self._rootdir + f
+
+    def flagfunc(self, fallback):
+        if self._checklink:
+            if self._checkexec:
+                def f(x):
+                    p = self._join(x)
+                    if os.path.islink(p):
+                        return 'l'
+                    if util.is_exec(p):
+                        return 'x'
+                    return ''
+                return f
+            def f(x):
+                if os.path.islink(self._join(x)):
+                    return 'l'
+                if 'x' in fallback(x):
+                    return 'x'
+                return ''
+            return f
+        if self._checkexec:
+            def f(x):
+                if 'l' in fallback(x):
+                    return 'l'
+                if util.is_exec(self._join(x)):
+                    return 'x'
+                return ''
+            return f
+        return fallback
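
flagfunc() builds the cheapest correct flag reader for the platform: symlink and exec bits are read from the filesystem where supported, and from the supplied fallback (typically a manifest lookup) otherwise. A usage sketch mirroring the workingctx call site (repo and the file name are assumptions):

    man = repo['.'].manifest()
    ff = repo.dirstate.flagfunc(lambda p: man.flags(p))
    print ff('some/file')                 # '', 'x' or 'l'
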
 
     def getcwd(self):
         cwd = os.getcwd()
@@ -106,9 +170,7 @@
         return key in self._map
 
     def __iter__(self):
-        a = self._map.keys()
-        a.sort()
-        for x in a:
+        for x in util.sort(self._map):
             yield x
 
     def parents(self):
@@ -161,7 +223,7 @@
             dmap[f] = e # we hold onto e[4] because making a subtuple is slow
 
     def invalidate(self):
-        for a in "_map _copymap _branch _pl _dirs _ignore".split():
+        for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
             if a in self.__dict__:
                 delattr(self, a)
         self._dirty = False
@@ -178,67 +240,39 @@
     def copies(self):
         return self._copymap
 
-    def _incpath(self, path):
-        c = path.rfind('/')
-        if c >= 0:
+    def _droppath(self, f):
+        if self[f] not in "?r" and "_dirs" in self.__dict__:
             dirs = self._dirs
-            base = path[:c]
-            if base not in dirs:
-                self._incpath(base)
-                dirs[base] = 1
-            else:
-                dirs[base] += 1
-
-    def _decpath(self, path):
-        c = path.rfind('/')
-        if c >= 0:
-            base = path[:c]
-            dirs = self._dirs
-            if dirs[base] == 1:
-                del dirs[base]
-                self._decpath(base)
-            else:
+            for base in _finddirs(f):
+                if dirs[base] == 1:
+                    del dirs[base]
+                    return
                 dirs[base] -= 1
 
-    def _incpathcheck(self, f):
-        if '\r' in f or '\n' in f:
-            raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
-                             % f)
-        # shadows
-        if f in self._dirs:
-            raise util.Abort(_('directory %r already in dirstate') % f)
-        for c in strutil.rfindall(f, '/'):
-            d = f[:c]
-            if d in self._dirs:
-                break
-            if d in self._map and self[d] != 'r':
-                raise util.Abort(_('file %r in dirstate clashes with %r') %
-                                 (d, f))
-        self._incpath(f)
-
-    def _changepath(self, f, newstate, relaxed=False):
-        # handle upcoming path changes
+    def _addpath(self, f, check=False):
         oldstate = self[f]
-        if oldstate not in "?r" and newstate in "?r":
-            if "_dirs" in self.__dict__:
-                self._decpath(f)
-            return
-        if oldstate in "?r" and newstate not in "?r":
-            if relaxed and oldstate == '?':
-                # XXX
-                # in relaxed mode we assume the caller knows
-                # what it is doing, workaround for updating
-                # dir-to-file revisions
-                if "_dirs" in self.__dict__:
-                    self._incpath(f)
-                return
-            self._incpathcheck(f)
-            return
+        if check or oldstate == "r":
+            if '\r' in f or '\n' in f:
+                raise util.Abort(
+                    _("'\\n' and '\\r' disallowed in filenames: %r") % f)
+            if f in self._dirs:
+                raise util.Abort(_('directory %r already in dirstate') % f)
+            # shadows
+            for d in _finddirs(f):
+                if d in self._dirs:
+                    break
+                if d in self._map and self[d] != 'r':
+                    raise util.Abort(
+                        _('file %r in dirstate clashes with %r') % (d, f))
+        if oldstate in "?r" and "_dirs" in self.__dict__:
+            dirs = self._dirs
+            for base in _finddirs(f):
+                dirs[base] = dirs.get(base, 0) + 1
 
     def normal(self, f):
         'mark a file normal and clean'
         self._dirty = True
-        self._changepath(f, 'n', True)
+        self._addpath(f)
         s = os.lstat(self._join(f))
         self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
         if f in self._copymap:
@@ -262,7 +296,7 @@
             if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                 return
         self._dirty = True
-        self._changepath(f, 'n', True)
+        self._addpath(f)
         self._map[f] = ('n', 0, -1, -1, 0)
         if f in self._copymap:
             del self._copymap[f]
@@ -270,7 +304,7 @@
     def normaldirty(self, f):
         'mark a file normal, but dirty'
         self._dirty = True
-        self._changepath(f, 'n', True)
+        self._addpath(f)
         self._map[f] = ('n', 0, -2, -1, 0)
         if f in self._copymap:
             del self._copymap[f]
@@ -278,7 +312,7 @@
     def add(self, f):
         'mark a file added'
         self._dirty = True
-        self._changepath(f, 'a')
+        self._addpath(f, True)
         self._map[f] = ('a', 0, -1, -1, 0)
         if f in self._copymap:
             del self._copymap[f]
@@ -286,7 +320,7 @@
     def remove(self, f):
         'mark a file removed'
         self._dirty = True
-        self._changepath(f, 'r')
+        self._droppath(f)
         size = 0
         if self._pl[1] != nullid and f in self._map:
             entry = self._map[f]
@@ -302,7 +336,7 @@
         'mark a file merged'
         self._dirty = True
         s = os.lstat(self._join(f))
-        self._changepath(f, 'm', True)
+        self._addpath(f)
         self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
         if f in self._copymap:
             del self._copymap[f]
@@ -311,11 +345,19 @@
         'forget a file'
         self._dirty = True
         try:
-            self._changepath(f, '?')
+            self._droppath(f)
             del self._map[f]
         except KeyError:
             self._ui.warn(_("not in dirstate: %s\n") % f)
 
+    def _normalize(self, path):
+        norm_path = os.path.normcase(os.path.normpath(path))
+        if norm_path not in self._foldmap:
+            if not os.path.exists(os.path.join(self._root, path)):
+                return path
+            self._foldmap[norm_path] = util.fspath(path, self._root)
+        return self._foldmap[norm_path]
+
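On case-insensitive filesystems _normalize maps whatever spelling the user typed back to the spelling recorded in the dirstate, via the lazily built _foldmap; elsewhere normalize is the identity function. Illustration (the filenames are made up):

    norm = repo.dirstate.normalize
    norm('readme.TXT')    # -> 'README.txt' when that spelling is tracked on a
                          #    case-insensitive filesystem; unchanged otherwise
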
     def clear(self):
         self._map = {}
         if "_dirs" in self.__dict__:
@@ -327,7 +369,7 @@
     def rebuild(self, parent, files):
         self.clear()
         for f in files:
-            if files.execf(f):
+            if 'x' in files.flags(f):
                 self._map[f] = ('n', 0777, -1, 0, 0)
             else:
                 self._map[f] = ('n', 0666, -1, 0, 0)
@@ -364,40 +406,33 @@
         st.rename()
         self._dirty = self._dirtypl = False
 
-    def _filter(self, files):
-        ret = {}
-        unknown = []
-
-        for x in files:
-            if x == '.':
-                return self._map.copy()
-            if x not in self._map:
-                unknown.append(x)
-            else:
-                ret[x] = self._map[x]
-
-        if not unknown:
-            return ret
+    def _dirignore(self, f):
+        if f == '.':
+            return False
+        if self._ignore(f):
+            return True
+        for p in _finddirs(f):
+            if self._ignore(p):
+                return True
+        return False
 
-        b = self._map.keys()
-        b.sort()
-        blen = len(b)
+    def walk(self, match, unknown, ignored):
+        '''
+        walk recursively through the directory tree, finding all files
+        matched by the match function
+
+        results are returned as a dictionary mapping each matched
+        filename to its stat result, or to None if no stat result is
+        available (for instance, the file exists only in the dirstate).
+        '''
 
-        for x in unknown:
-            bs = bisect.bisect(b, "%s%s" % (x, '/'))
-            while bs < blen:
-                s = b[bs]
-                if len(s) > len(x) and s.startswith(x):
-                    ret[s] = self._map[s]
-                else:
-                    break
-                bs += 1
-        return ret
+        def fwarn(f, msg):
+            self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
+            return False
+        badfn = fwarn
+        if hasattr(match, 'bad'):
+            badfn = match.bad
 
-    def _supported(self, f, mode, verbose=False):
-        if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
-            return True
-        if verbose:
+        def badtype(f, mode):
             kind = 'unknown'
             if stat.S_ISCHR(mode): kind = _('character device')
             elif stat.S_ISBLK(mode): kind = _('block device')
@@ -406,173 +441,117 @@
             elif stat.S_ISDIR(mode): kind = _('directory')
             self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                           % (self.pathto(f), kind))
-        return False
 
-    def _dirignore(self, f):
-        if f == '.':
-            return False
-        if self._ignore(f):
-            return True
-        for c in strutil.findall(f, '/'):
-            if self._ignore(f[:c]):
-                return True
-        return False
-
-    def walk(self, files=None, match=util.always, badmatch=None):
-        # filter out the stat
-        for src, f, st in self.statwalk(files, match, badmatch=badmatch):
-            yield src, f
-
-    def statwalk(self, files=None, match=util.always, unknown=True,
-                 ignored=False, badmatch=None, directories=False):
-        '''
-        walk recursively through the directory tree, finding all files
-        matched by the match function
-
-        results are yielded in a tuple (src, filename, st), where src
-        is one of:
-        'f' the file was found in the directory tree
-        'd' the file is a directory of the tree
-        'm' the file was only in the dirstate and not in the tree
-        'b' file was not found and matched badmatch
-
-        and st is the stat result if the file was found in the directory.
-        '''
-
-        # walk all files by default
-        if not files:
-            files = ['.']
-            dc = self._map.copy()
-        else:
-            files = util.unique(files)
-            dc = self._filter(files)
-
-        def imatch(file_):
-            if file_ not in dc and self._ignore(file_):
-                return False
-            return match(file_)
-
-        # TODO: don't walk unknown directories if unknown and ignored are False
         ignore = self._ignore
         dirignore = self._dirignore
         if ignored:
-            imatch = match
             ignore = util.never
             dirignore = util.never
+        elif not unknown:
+            # if unknown and ignored are False, skip step 2
+            ignore = util.always
+            dirignore = util.always
 
-        # self._root may end with a path separator when self._root == '/'
-        common_prefix_len = len(self._root)
-        if not util.endswithsep(self._root):
-            common_prefix_len += 1
-
+        matchfn = match.matchfn
+        dmap = self._map
         normpath = util.normpath
+        normalize = self.normalize
         listdir = osutil.listdir
         lstat = os.lstat
-        bisect_left = bisect.bisect_left
-        isdir = os.path.isdir
         pconvert = util.pconvert
-        join = os.path.join
-        s_isdir = stat.S_ISDIR
-        supported = self._supported
-        _join = self._join
-        known = {'.hg': 1}
+        getkind = stat.S_IFMT
+        dirkind = stat.S_IFDIR
+        regkind = stat.S_IFREG
+        lnkkind = stat.S_IFLNK
+        join = self._join
+        work = []
+        wadd = work.append
+
+        files = util.unique(match.files())
+        if not files or '.' in files:
+            files = ['']
+        results = {'.hg': None}
 
-        # recursion free walker, faster than os.walk.
-        def findfiles(s):
-            work = [s]
-            wadd = work.append
-            found = []
-            add = found.append
-            if directories:
-                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
-            while work:
-                top = work.pop()
-                entries = listdir(top, stat=True)
-                # nd is the top of the repository dir tree
-                nd = normpath(top[common_prefix_len:])
-                if nd == '.':
-                    nd = ''
+        # step 1: find all explicit files
+        for ff in util.sort(files):
+            nf = normalize(normpath(ff))
+            if nf in results:
+                continue
+
+            try:
+                st = lstat(join(nf))
+                kind = getkind(st.st_mode)
+                if kind == dirkind:
+                    if not dirignore(nf):
+                        wadd(nf)
+                elif kind == regkind or kind == lnkkind:
+                    results[nf] = st
                 else:
-                    # do not recurse into a repo contained in this
-                    # one. use bisect to find .hg directory so speed
-                    # is good on big directory.
-                    names = [e[0] for e in entries]
-                    hg = bisect_left(names, '.hg')
-                    if hg < len(names) and names[hg] == '.hg':
-                        if isdir(join(top, '.hg')):
-                            continue
-                for f, kind, st in entries:
-                    np = pconvert(join(nd, f))
-                    if np in known:
-                        continue
-                    known[np] = 1
-                    p = join(top, f)
-                    # don't trip over symlinks
-                    if kind == stat.S_IFDIR:
-                        if not ignore(np):
-                            wadd(p)
-                            if directories:
-                                add((np, 'd', st))
-                        if np in dc and match(np):
-                            add((np, 'm', st))
-                    elif imatch(np):
-                        if supported(np, st.st_mode):
-                            add((np, 'f', st))
-                        elif np in dc:
-                            add((np, 'm', st))
-            found.sort()
-            return found
-
-        # step one, find all files that match our criteria
-        files.sort()
-        for ff in files:
-            nf = normpath(ff)
-            f = _join(ff)
-            try:
-                st = lstat(f)
+                    badtype(ff, kind)
+                    if nf in dmap:
+                        results[nf] = None
             except OSError, inst:
-                found = False
-                for fn in dc:
-                    if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
-                        found = True
+                keep = False
+                prefix = nf + "/"
+                for fn in dmap:
+                    if nf == fn or fn.startswith(prefix):
+                        keep = True
                         break
-                if not found:
-                    if inst.errno != errno.ENOENT or not badmatch:
-                        self._ui.warn('%s: %s\n' %
-                                      (self.pathto(ff), inst.strerror))
-                    elif badmatch and badmatch(ff) and imatch(nf):
-                        yield 'b', ff, None
-                continue
-            if s_isdir(st.st_mode):
-                if not dirignore(nf):
-                    for f, src, st in findfiles(f):
-                        yield src, f, st
+                if not keep:
+                    if inst.errno != errno.ENOENT:
+                        fwarn(ff, inst.strerror)
+                    elif badfn(ff, inst.strerror):
+                        if (nf in dmap or not ignore(nf)) and matchfn(nf):
+                            results[nf] = None
+
+        # step 2: visit subdirectories
+        while work:
+            nd = work.pop()
+            if hasattr(match, 'dir'):
+                match.dir(nd)
+            if nd == '.':
+                nd = ''
+                entries = listdir(join(nd), stat=True)
             else:
-                if nf in known:
-                    continue
-                known[nf] = 1
-                if match(nf):
-                    if supported(ff, st.st_mode, verbose=True):
-                        yield 'f', nf, st
-                    elif ff in dc:
-                        yield 'm', nf, st
+                entries = listdir(join(nd), stat=True, skip ='.hg')
+            for f, kind, st in entries:
+                nf = normalize(nd and (nd + "/" + f) or f)
+                if nf not in results:
+                    if kind == dirkind:
+                        if not ignore(nf):
+                            wadd(nf)
+                        if nf in dmap and matchfn(nf):
+                            results[nf] = None
+                    elif kind == regkind or kind == lnkkind:
+                        if nf in dmap:
+                            if matchfn(nf):
+                                results[nf] = st
+                        elif matchfn(nf) and not ignore(nf):
+                            results[nf] = st
+                    elif nf in dmap and matchfn(nf):
+                        results[nf] = None
 
-        # step two run through anything left in the dc hash and yield
-        # if we haven't already seen it
-        ks = dc.keys()
-        ks.sort()
-        for k in ks:
-            if k in known:
-                continue
-            known[k] = 1
-            if imatch(k):
-                yield 'm', k, None
+        # step 3: report unseen items in the dmap hash
+        visit = [f for f in dmap if f not in results and match(f)]
+        for nf in util.sort(visit):
+            results[nf] = None
+            try:
+                st = lstat(join(nf))
+                kind = getkind(st.st_mode)
+                if kind == regkind or kind == lnkkind:
+                    results[nf] = st
+            except OSError, inst:
+                if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
+                    raise
 
-    def status(self, files, match, list_ignored, list_clean, list_unknown=True):
+        del results['.hg']
+        return results
+
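The rewritten walk returns one dictionary instead of yielding (src, filename, st) tuples: step 1 stats explicitly named files, step 2 scans subdirectories, and step 3 accounts for dirstate entries never seen on disk (mapped to None). A usage sketch matching the workingctx.walk call site (repo is assumed to exist):

    from mercurial import cmdutil, util

    m = cmdutil.match(repo, [], {})
    results = repo.dirstate.walk(m, True, False)   # unknown=True, ignored=False
    for fn in util.sort(results):
        st = results[fn]                  # a stat result, or None
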
+    def status(self, match, ignored, clean, unknown):
+        listignored, listclean, listunknown = ignored, clean, unknown
         lookup, modified, added, unknown, ignored = [], [], [], [], []
         removed, deleted, clean = [], [], []
 
-        files = files or []
         _join = self._join
         lstat = os.lstat
         cmap = self._copymap
@@ -586,38 +565,20 @@
         dadd = deleted.append
         cadd = clean.append
 
-        for src, fn, st in self.statwalk(files, match, unknown=list_unknown,
-                                         ignored=list_ignored):
-            if fn in dmap:
-                type_, mode, size, time, foo = dmap[fn]
-            else:
-                if (list_ignored or fn in files) and self._dirignore(fn):
-                    if list_ignored:
+        for fn, st in self.walk(match, listunknown, listignored).iteritems():
+            if fn not in dmap:
+                if (listignored or match.exact(fn)) and self._dirignore(fn):
+                    if listignored:
                         iadd(fn)
-                elif list_unknown:
+                elif listunknown:
                     uadd(fn)
                 continue
-            if src == 'm':
-                nonexistent = True
-                if not st:
-                    try:
-                        st = lstat(_join(fn))
-                    except OSError, inst:
-                        if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
-                            raise
-                        st = None
-                    # We need to re-check that it is a valid file
-                    if st and self._supported(fn, st.st_mode):
-                        nonexistent = False
-                # XXX: what to do with file no longer present in the fs
-                # who are not removed in the dirstate ?
-                if nonexistent and type_ in "nma":
-                    dadd(fn)
-                    continue
-            # check the common case first
-            if type_ == 'n':
-                if not st:
-                    st = lstat(_join(fn))
+
+            state, mode, size, time, foo = dmap[fn]
+
+            if not st and state in "nma":
+                dadd(fn)
+            elif state == 'n':
                 if (size >= 0 and
                     (size != st.st_size
                      or ((mode ^ st.st_mode) & 0100 and self._checkexec))
@@ -626,13 +587,13 @@
                     madd(fn)
                 elif time != int(st.st_mtime):
                     ladd(fn)
-                elif list_clean:
+                elif listclean:
                     cadd(fn)
-            elif type_ == 'm':
+            elif state == 'm':
                 madd(fn)
-            elif type_ == 'a':
+            elif state == 'a':
                 aadd(fn)
-            elif type_ == 'r':
+            elif state == 'r':
                 radd(fn)
 
         return (lookup, modified, added, removed, deleted, unknown, ignored,
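
The status() loop above reduces each dirstate entry to a status bucket from
its recorded state and the stat result that walk() returned for it. A minimal
standalone sketch of that classification, with made-up file names and plain
dicts in place of the real dirstate, and with the size/mode/mtime comparison
for 'n' entries elided:

    # toy dmap: filename -> dirstate state ('n'ormal, 'a'dded, 'r'emoved, 'm'erge)
    dmap = {'kept.txt': 'n', 'gone.txt': 'n', 'new.txt': 'a'}
    # toy walk() results: filename -> stat object, or None if gone from disk
    walked = {'kept.txt': object(), 'gone.txt': None, 'new.txt': object()}

    deleted, added, modified = [], [], []
    for fn, st in walked.iteritems():
        state = dmap[fn]
        if st is None and state in "nma":
            deleted.append(fn)       # still tracked, but missing on disk
        elif state == 'n':
            pass                     # size/mode/mtime decide clean vs. modified
        elif state == 'm':
            modified.append(fn)
        elif state == 'a':
            added.append(fn)

    print deleted, added             # -> ['gone.txt'] ['new.txt']
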
--- a/mercurial/dispatch.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/dispatch.py	Wed Sep 17 11:34:37 2008 +0200
@@ -146,6 +146,8 @@
         ui.warn(_("** or mercurial@selenic.com\n"))
         ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
                % version.get_version())
+        ui.warn(_("** Extensions loaded: %s\n")
+               % ", ".join([x[0] for x in extensions.extensions()]))
         raise
 
     return -1
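
extensions.extensions() yields (name, module) pairs, so the new crash-report
line keeps only the names. A small illustration with stand-in pairs instead
of real extension modules:

    pairs = [('mq', None), ('graphlog', None)]   # stand-in for extensions.extensions()
    print "** Extensions loaded: %s" % ", ".join([x[0] for x in pairs])
    # -> ** Extensions loaded: mq, graphlog
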
--- a/mercurial/extensions.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/extensions.py	Wed Sep 17 11:34:37 2008 +0200
@@ -29,7 +29,7 @@
         raise KeyError(name)
 
 def load(ui, name, path):
-    if name.startswith('hgext.'):
+    if name.startswith('hgext.') or name.startswith('hgext/'):
         shortname = name[6:]
     else:
         shortname = name
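
With the additional startswith() check, the dotted and the slash-separated
spelling of a bundled extension now map to the same short name:

    for name in ('hgext.mq', 'hgext/mq', 'mq'):
        if name.startswith('hgext.') or name.startswith('hgext/'):
            shortname = name[6:]
        else:
            shortname = name
        print "%-10s -> %s" % (name, shortname)   # all three end up as 'mq'
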
--- a/mercurial/filemerge.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/filemerge.py	Wed Sep 17 11:34:37 2008 +0200
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from node import nullrev
+from node import nullrev, short
 from i18n import _
 import util, os, tempfile, simplemerge, re, filecmp
 
@@ -63,8 +63,7 @@
         if t not in tools:
             tools[t] = int(_toolstr(ui, t, "priority", "0"))
     names = tools.keys()
-    tools = [(-p,t) for t,p in tools.items()]
-    tools.sort()
+    tools = util.sort([(-p,t) for t,p in tools.items()])
     uimerge = ui.config("ui", "merge")
     if uimerge:
         if uimerge not in names:
@@ -101,13 +100,14 @@
             if newdata != data:
                 open(file, "wb").write(newdata)
 
-def filemerge(repo, fw, fd, fo, wctx, mctx):
+def filemerge(repo, mynode, orig, fcd, fco, fca):
     """perform a 3-way merge in the working directory
 
-    fw = original filename in the working directory
-    fd = destination filename in the working directory
-    fo = filename in other parent
-    wctx, mctx = working and merge changecontexts
+    mynode = parent node before merge
+    orig = original local filename before merge
+    fco = other file context
+    fca = ancestor file context
+    fcd = local file context for current/destination file
     """
 
     def temp(prefix, ctx):
@@ -125,29 +125,27 @@
         except IOError:
             return False
 
-    fco = mctx.filectx(fo)
-    if not fco.cmp(wctx.filectx(fd).data()): # files identical?
+    if not fco.cmp(fcd.data()): # files identical?
         return None
 
     ui = repo.ui
-    fcm = wctx.filectx(fw)
-    fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
-    binary = isbin(fcm) or isbin(fco) or isbin(fca)
-    symlink = fcm.islink() or fco.islink()
-    tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
+    fd = fcd.path()
+    binary = isbin(fcd) or isbin(fco) or isbin(fca)
+    symlink = 'l' in fcd.flags() + fco.flags()
+    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
     ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
-               (tool, fw, binary, symlink))
+               (tool, fd, binary, symlink))
 
     if not tool:
         tool = "internal:local"
         if ui.prompt(_(" no tool found to merge %s\n"
-                       "keep (l)ocal or take (o)ther?") % fw,
+                       "keep (l)ocal or take (o)ther?") % fd,
                      _("[lo]"), _("l")) != _("l"):
             tool = "internal:other"
     if tool == "internal:local":
         return 0
     if tool == "internal:other":
-        repo.wwrite(fd, fco.data(), fco.fileflags())
+        repo.wwrite(fd, fco.data(), fco.flags())
         return 0
     if tool == "internal:fail":
         return 1
@@ -160,11 +158,12 @@
     back = a + ".orig"
     util.copyfile(a, back)
 
-    if fw != fo:
-        repo.ui.status(_("merging %s and %s\n") % (fw, fo))
+    if orig != fco.path():
+        repo.ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
     else:
-        repo.ui.status(_("merging %s\n") % fw)
-    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
+        repo.ui.status(_("merging %s\n") % fd)
+
+    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))
 
     # do we attempt to simplemerge first?
     if _toolbool(ui, tool, "premerge", not (binary or symlink)):
@@ -178,11 +177,11 @@
         util.copyfile(back, a) # restore from backup and try again
 
     env = dict(HG_FILE=fd,
-               HG_MY_NODE=str(wctx.parents()[0]),
-               HG_OTHER_NODE=str(mctx),
-               HG_MY_ISLINK=fcm.islink(),
-               HG_OTHER_ISLINK=fco.islink(),
-               HG_BASE_ISLINK=fca.islink())
+               HG_MY_NODE=short(mynode),
+               HG_OTHER_NODE=str(fco.changectx()),
+               HG_MY_ISLINK='l' in fcd.flags(),
+               HG_OTHER_ISLINK='l' in fco.flags(),
+               HG_BASE_ISLINK='l' in fca.flags())
 
     if tool == "internal:merge":
         r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
@@ -196,7 +195,7 @@
         r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
 
     if not r and _toolbool(ui, tool, "checkconflicts"):
-        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
+        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()):
             r = 1
 
     if not r and _toolbool(ui, tool, "checkchanged"):
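
The tool ordering above relies on sorting (-priority, name) pairs, so tools
with a higher priority come first and ties fall back to alphabetical order.
A quick sketch with plain sorted() standing in for util.sort and made-up
tool priorities:

    tools = {'kdiff3': 7, 'meld': 7, 'internal:merge': 0}
    ordered = sorted([(-p, t) for t, p in tools.items()])
    print [t for p, t in ordered]   # -> ['kdiff3', 'meld', 'internal:merge']
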
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/graphmod.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,75 @@
+# Revision graph generator for Mercurial
+#
+# Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
+# Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+from node import nullrev, short
+import ui, hg, util, templatefilters
+
+def graph(repo, start_rev, stop_rev):
+    """incremental revision grapher
+
+    This generator function walks through the revision history from
+    revision start_rev to revision stop_rev (which must be less than
+    or equal to start_rev) and for each revision emits tuples with the
+    following elements:
+
+      - Current changeset context (changectx)
+      - Column and color for the current node
+      - Edges: a list of (col, next_col, color) tuples indicating the edges
+        between the current node and its parents
+    """
+
+    assert start_rev >= stop_rev
+    assert stop_rev >= 0
+    curr_rev = start_rev
+    revs = []
+    cl = repo.changelog
+    colors = {}
+    new_color = 1
+
+    while curr_rev >= stop_rev:
+        node = cl.node(curr_rev)
+
+        # Compute revs and next_revs
+        if curr_rev not in revs:
+            revs.append(curr_rev) # new head
+            colors[curr_rev] = new_color
+            new_color += 1
+
+        idx = revs.index(curr_rev)
+        color = colors.pop(curr_rev)
+        next = revs[:]
+
+        # Add parents to next_revs
+        parents = [x for x in cl.parentrevs(curr_rev) if x != nullrev]
+        addparents = [p for p in parents if p not in next]
+        next[idx:idx + 1] = addparents
+
+        # Set colors for the parents
+        for i, p in enumerate(addparents):
+            if not i:
+                colors[p] = color
+            else:
+                colors[p] = new_color
+                new_color += 1
+
+        # Add edges to the graph
+        edges = []
+        for col, r in enumerate(revs):
+            if r in next:
+                edges.append((col, next.index(r), colors[r]))
+            elif r == curr_rev:
+                for p in parents:
+                    edges.append((col, next.index(p), colors[p]))
+
+        # Yield and move on
+        yield (repo[curr_rev], (idx, color), edges)
+        revs = next
+        curr_rev -= 1
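
A minimal sketch of driving the new generator from a script, assuming a
local, non-empty repository in the current directory; it walks the last few
revisions and prints the column layout computed for each node:

    from mercurial import ui, hg, graphmod

    repo = hg.repository(ui.ui(), '.')
    tip = len(repo.changelog) - 1
    for ctx, (col, color), edges in graphmod.graph(repo, tip, max(0, tip - 5)):
        print "rev %d at column %d, edges %r" % (ctx.rev(), col, edges)
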
--- a/mercurial/hbisect.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hbisect.py	Wed Sep 17 11:34:37 2008 +0200
@@ -30,12 +30,12 @@
         badrev = min([changelog.rev(n) for n in bad])
         goodrevs = [changelog.rev(n) for n in good]
         # build ancestors array
-        ancestors = [[]] * (changelog.count() + 1) # an extra for [-1]
+        ancestors = [[]] * (len(changelog) + 1) # an extra for [-1]
 
         # clear good revs from array
         for node in goodrevs:
             ancestors[node] = None
-        for rev in xrange(changelog.count(), -1, -1):
+        for rev in xrange(len(changelog), -1, -1):
             if ancestors[rev] is None:
                 for prev in clparents(rev):
                     ancestors[prev] = None
--- a/mercurial/help.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/help.py	Wed Sep 17 11:34:37 2008 +0200
@@ -5,9 +5,11 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-helptable = {
-    "dates|Date Formats":
-    r'''
+from i18n import _
+
+helptable = (
+    (["dates"], _("Date Formats"),
+     _(r'''
     Some commands allow the user to specify a date:
     backout, commit, import, tag: Specify the commit date.
     log, revert, update: Select revision(s) by date.
@@ -21,7 +23,7 @@
     "13:18" (today assumed)
     "3:39" (3:39AM assumed)
     "3:39pm" (15:39)
-    "2006-12-6 13:18:29" (ISO 8601 format)
+    "2006-12-06 13:18:29" (ISO 8601 format)
     "2006-12-6 13:18"
     "2006-12-6"
     "12-6"
@@ -43,10 +45,56 @@
     ">{date}" - on or after a given date
     "{date} to {date}" - a date range, inclusive
     "-{days}" - within a given number of days of today
-    ''',
+    ''')),
+
+    (["patterns"], _("File Name Patterns"),
+     _(r'''
+    Mercurial accepts several notations for identifying one or more
+    files at a time.
+
+    By default, Mercurial treats filenames as shell-style extended
+    glob patterns.
+
+    Alternate pattern notations must be specified explicitly.
+
+    To use a plain path name without any pattern matching, start a
+    name with "path:".  These path names must match completely, from
+    the root of the current repository.
+
+    To use an extended glob, start a name with "glob:".  Globs are
+    rooted at the current directory; a glob such as "*.c" will match
+    files ending in ".c" in the current directory only.
+
+    The supported glob syntax extensions are "**" to match any string
+    across path separators, and "{a,b}" to mean "a or b".
 
-    'environment|env|Environment Variables':
-    r'''
+    To use a Perl/Python regular expression, start a name with "re:".
+    Regexp pattern matching is anchored at the root of the repository.
+
+    Plain examples:
+
+    path:foo/bar   a name bar in a directory named foo in the root of
+                   the repository
+    path:path:name a file or directory named "path:name"
+
+    Glob examples:
+
+    glob:*.c       any name ending in ".c" in the current directory
+    *.c            any name ending in ".c" in the current directory
+    **.c           any name ending in ".c" in the current directory, or
+                   any subdirectory
+    foo/*.c        any name ending in ".c" in the directory foo
+    foo/**.c       any name ending in ".c" in the directory foo, or any
+                   subdirectory
+
+    Regexp examples:
+
+    re:.*\.c$      any name ending in ".c", anywhere in the repository
+
+    ''')),
+
+    (['environment', 'env'], _('Environment Variables'),
+     _(r'''
 HG::
     Path to the 'hg' executable, automatically passed when running hooks,
     extensions or external tools. If unset or empty, an executable named
@@ -114,51 +162,57 @@
 PYTHONPATH::
     This is used by Python to find imported modules and may need to be set
     appropriately if Mercurial is not installed system-wide.
-    ''',
+    ''')),
 
-    "patterns|File Name Patterns": r'''
-    Mercurial accepts several notations for identifying one or more
-    files at a time.
+    (['revs', 'revisions'], _('Specifying Single Revisions'),
+     _(r'''
+    Mercurial accepts several notations for identifying individual
+    revisions.
 
-    By default, Mercurial treats filenames as shell-style extended
-    glob patterns.
-
-    Alternate pattern notations must be specified explicitly.
+    A plain integer is treated as a revision number. Negative
+    integers are treated as offsets from the tip, with -1 denoting the
+    tip.
 
-    To use a plain path name without any pattern matching, start a
-    name with "path:".  These path names must match completely, from
-    the root of the current repository.
+    A 40-digit hexadecimal string is treated as a unique revision
+    identifier.
 
-    To use an extended glob, start a name with "glob:".  Globs are
-    rooted at the current directory; a glob such as "*.c" will match
-    files ending in ".c" in the current directory only.
+    A hexadecimal string less than 40 characters long is treated as a
+    unique revision identifier, and referred to as a short-form
+    identifier. A short-form identifier is only valid if it is the
+    prefix of one full-length identifier.
 
-    The supported glob syntax extensions are "**" to match any string
-    across path separators, and "{a,b}" to mean "a or b".
+    Any other string is treated as a tag name, which is a symbolic
+    name associated with a revision identifier. Tag names may not
+    contain the ":" character.
+
+    The reserved name "tip" is a special tag that always identifies
+    the most recent revision.
 
-    To use a Perl/Python regular expression, start a name with "re:".
-    Regexp pattern matching is anchored at the root of the repository.
-
-    Plain examples:
+    The reserved name "null" indicates the null revision. This is the
+    revision of an empty repository, and the parent of revision 0.
 
-    path:foo/bar   a name bar in a directory named foo in the root of
-                   the repository
-    path:path:name a file or directory named "path:name"
-
-    Glob examples:
+    The reserved name "." indicates the working directory parent. If
+    no working directory is checked out, it is equivalent to null.
+    If an uncommitted merge is in progress, "." is the revision of
+    the first parent.
+    ''')),
 
-    glob:*.c       any name ending in ".c" in the current directory
-    *.c            any name ending in ".c" in the current directory
-    **.c           any name ending in ".c" in the current directory, or
-                   any subdirectory
-    foo/*.c        any name ending in ".c" in the directory foo
-    foo/**.c       any name ending in ".c" in the directory foo, or any
-                   subdirectory
+    (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'),
+     _(r'''
+    When Mercurial accepts more than one revision, they may be
+    specified individually, or provided as a continuous range,
+    separated by the ":" character.
 
-    Regexp examples:
-
-    re:.*\.c$      any name ending in ".c", anywhere in the repository
+    The syntax of range notation is [BEGIN]:[END], where BEGIN and END
+    are revision identifiers. Both BEGIN and END are optional. If
+    BEGIN is not specified, it defaults to revision number 0. If END
+    is not specified, it defaults to the tip. The range ":" thus
+    means "all revisions".
 
-''',
-}
+    If BEGIN is greater than END, revisions are treated in reverse
+    order.
 
+    A range acts as a closed interval. This means that a range of 3:5
+    gives 3, 4 and 5. Similarly, a range of 4:2 gives 4, 3, and 2.
+    ''')),
+)
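
helptable is now an ordered tuple of (aliases, heading, doc) entries instead
of a dict keyed by "alias|Title" strings, so looking up a topic becomes a
simple scan. A hypothetical helper (findtopic is not part of this patch)
showing the shape of the new structure:

    def findtopic(name):
        """Return (heading, text) for the first topic matching name."""
        for aliases, heading, doc in helptable:
            if name in aliases:
                return heading, doc
        return None

    # findtopic('dates')     -> ('Date Formats', '...')
    # findtopic('multirevs') -> ('Specifying Multiple Revisions', '...')
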
--- a/mercurial/hg.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hg.py	Wed Sep 17 11:34:37 2008 +0200
@@ -16,7 +16,7 @@
     return (os.path.isfile(util.drop_scheme('file', path)) and
             bundlerepo or localrepo)
 
-def parseurl(url, revs):
+def parseurl(url, revs=[]):
     '''parse url#branch, returning url, branch + revs'''
 
     if '#' not in url:
@@ -69,6 +69,15 @@
     '''return default destination of clone if none is given'''
     return os.path.basename(os.path.normpath(source))
 
+def localpath(path):
+    if path.startswith('file://localhost/'):
+        return path[16:]
+    if path.startswith('file://'):
+        return path[7:]
+    if path.startswith('file:'):
+        return path[5:]
+    return path
+
 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
           stream=False):
     """Make a copy of an existing repository.
@@ -100,7 +109,8 @@
     rev: revision to clone up to (implies pull=True)
 
     update: update working directory after clone completes, if
-    destination is local repository
+    destination is a local repository (True means update to default rev,
+    anything else is treated as a revision)
     """
 
     if isinstance(source, str):
@@ -116,15 +126,6 @@
         dest = defaultdest(source)
         ui.status(_("destination directory: %s\n") % dest)
 
-    def localpath(path):
-        if path.startswith('file://localhost/'):
-            return path[16:]
-        if path.startswith('file://'):
-            return path[7:]
-        if path.startswith('file:'):
-            return path[5:]
-        return path
-
     dest = localpath(dest)
     source = localpath(source)
 
@@ -163,13 +164,6 @@
                 copy = False
 
         if copy:
-            def force_copy(src, dst):
-                if not os.path.exists(src):
-                    # Tolerate empty source repository and optional files
-                    return
-                util.copyfiles(src, dst)
-
-            src_store = os.path.realpath(src_repo.spath)
             if not os.path.exists(dest):
                 os.mkdir(dest)
             try:
@@ -181,28 +175,18 @@
                     raise util.Abort(_("destination '%s' already exists")
                                      % dest)
                 raise
-            if src_repo.spath != src_repo.path:
-                # XXX racy
-                dummy_changelog = os.path.join(dest_path, "00changelog.i")
-                # copy the dummy changelog
-                force_copy(src_repo.join("00changelog.i"), dummy_changelog)
-                dest_store = os.path.join(dest_path, "store")
-                os.mkdir(dest_store)
-            else:
-                dest_store = dest_path
-            # copy the requires file
-            force_copy(src_repo.join("requires"),
-                       os.path.join(dest_path, "requires"))
-            # we lock here to avoid premature writing to the target
-            dest_lock = lock.lock(os.path.join(dest_store, "lock"))
 
-            files = ("data",
-                     "00manifest.d", "00manifest.i",
-                     "00changelog.d", "00changelog.i")
-            for f in files:
-                src = os.path.join(src_store, f)
-                dst = os.path.join(dest_store, f)
-                force_copy(src, dst)
+            for f in src_repo.store.copylist():
+                src = os.path.join(src_repo.path, f)
+                dst = os.path.join(dest_path, f)
+                dstbase = os.path.dirname(dst)
+                if dstbase and not os.path.exists(dstbase):
+                    os.mkdir(dstbase)
+                if os.path.exists(src):
+                    if dst.endswith('data'):
+                        # lock to avoid premature writing to the target
+                        dest_lock = lock.lock(os.path.join(dstbase, "lock"))
+                    util.copyfiles(src, dst)
 
             # we need to re-init the repo after manually copying the data
             # into it
@@ -244,7 +228,9 @@
 
             if update:
                 dest_repo.ui.status(_("updating working directory\n"))
-                if not checkout:
+                if update is not True:
+                    checkout = update
+                elif not checkout:
                     try:
                         checkout = dest_repo.lookup("default")
                     except:
@@ -271,15 +257,7 @@
     stats = _merge.update(repo, node, False, False, None)
     _showstats(repo, stats)
     if stats[3]:
-        repo.ui.status(_("There are unresolved merges with"
-                         " locally modified files.\n"))
-        if stats[1]:
-            repo.ui.status(_("You can finish the partial merge using:\n"))
-        else:
-            repo.ui.status(_("You can redo the full merge using:\n"))
-        # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
-        repo.ui.status(_("  hg update %s\n  hg update %s\n")
-                       % (pl[0].rev(), repo.changectx(node).rev()))
+        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     return stats[3] > 0
 
 def clean(repo, node, show_stats=True):
@@ -294,11 +272,7 @@
     _showstats(repo, stats)
     if stats[3]:
         pl = repo.parents()
-        repo.ui.status(_("There are unresolved merges,"
-                         " you can redo the full merge using:\n"
-                         "  hg update -C %s\n"
-                         "  hg merge %s\n")
-                       % (pl[0].rev(), pl[1].rev()))
+        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     elif remind:
         repo.ui.status(_("(branch merge, don't forget to commit)\n"))
     return stats[3] > 0
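
localpath() is now a module-level helper; its behaviour follows directly
from the prefix checks above:

    print localpath('file:///tmp/repo')            # -> /tmp/repo
    print localpath('file://localhost/tmp/repo')   # -> /tmp/repo
    print localpath('file:repo')                   # -> repo
    print localpath('/already/local')              # -> /already/local (unchanged)

In the same file, clone()'s update argument now accepts a revision as well
as True, so a caller can check out something other than the default head
once the clone completes.
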
--- a/mercurial/hgweb/common.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/common.py	Wed Sep 17 11:34:37 2008 +0200
@@ -10,14 +10,17 @@
 
 HTTP_OK = 200
 HTTP_BAD_REQUEST = 400
+HTTP_UNAUTHORIZED = 401
+HTTP_FORBIDDEN = 403
 HTTP_NOT_FOUND = 404
+HTTP_METHOD_NOT_ALLOWED = 405
 HTTP_SERVER_ERROR = 500
 
 class ErrorResponse(Exception):
     def __init__(self, code, message=None):
         Exception.__init__(self)
         self.code = code
-        if message:
+        if message is not None:
             self.message = message
         else:
             self.message = _statusmessage(code)
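
The "is not None" check lets callers pass an explicitly empty message (as the
permission code in hgweb_mod.py below does) without it being replaced by the
default status text. A small illustration, assuming _statusmessage() maps 404
to the usual "Not Found" reason phrase:

    err = ErrorResponse(HTTP_OK, '')
    print repr(err.message)      # -> '' (kept, not replaced by the default)

    err = ErrorResponse(HTTP_NOT_FOUND)
    print err.message            # -> Not Found (filled in from the code)
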
--- a/mercurial/hgweb/hgweb_mod.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/hgweb_mod.py	Wed Sep 17 11:34:37 2008 +0200
@@ -6,79 +6,23 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import os, mimetypes, re, mimetools, cStringIO
-from mercurial.node import hex, nullid, short
+import os, mimetypes
+from mercurial.node import hex, nullid
 from mercurial.repo import RepoError
-from mercurial import mdiff, ui, hg, util, archival, patch, hook
-from mercurial import revlog, templater, templatefilters, changegroup
-from common import get_mtime, style_map, paritygen, countgen, get_contact
-from common import ErrorResponse
+from mercurial import mdiff, ui, hg, util, patch, hook
+from mercurial import revlog, templater, templatefilters
+from common import get_mtime, style_map, paritygen, countgen, ErrorResponse
 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
+from common import HTTP_UNAUTHORIZED, HTTP_METHOD_NOT_ALLOWED
 from request import wsgirequest
-import webcommands, protocol
-
-shortcuts = {
-    'cl': [('cmd', ['changelog']), ('rev', None)],
-    'sl': [('cmd', ['shortlog']), ('rev', None)],
-    'cs': [('cmd', ['changeset']), ('node', None)],
-    'f': [('cmd', ['file']), ('filenode', None)],
-    'fl': [('cmd', ['filelog']), ('filenode', None)],
-    'fd': [('cmd', ['filediff']), ('node', None)],
-    'fa': [('cmd', ['annotate']), ('filenode', None)],
-    'mf': [('cmd', ['manifest']), ('manifest', None)],
-    'ca': [('cmd', ['archive']), ('node', None)],
-    'tags': [('cmd', ['tags'])],
-    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
-    'static': [('cmd', ['static']), ('file', None)]
-}
-
-def _up(p):
-    if p[0] != "/":
-        p = "/" + p
-    if p[-1] == "/":
-        p = p[:-1]
-    up = os.path.dirname(p)
-    if up == "/":
-        return "/"
-    return up + "/"
+import webcommands, protocol, webutil
 
-def revnavgen(pos, pagelen, limit, nodefunc):
-    def seq(factor, limit=None):
-        if limit:
-            yield limit
-            if limit >= 20 and limit <= 40:
-                yield 50
-        else:
-            yield 1 * factor
-            yield 3 * factor
-        for f in seq(factor * 10):
-            yield f
-
-    def nav(**map):
-        l = []
-        last = 0
-        for f in seq(1, pagelen):
-            if f < pagelen or f <= last:
-                continue
-            if f > limit:
-                break
-            last = f
-            if pos + f < limit:
-                l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
-            if pos - f >= 0:
-                l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
-
-        try:
-            yield {"label": "(0)", "node": hex(nodefunc('0').node())}
-
-            for label, node in l:
-                yield {"label": label, "node": node}
-
-            yield {"label": "tip", "node": "tip"}
-        except RepoError:
-            pass
-
-    return nav
+perms = {
+    'changegroup': 'pull',
+    'changegroupsubset': 'pull',
+    'unbundle': 'push',
+    'stream_out': 'pull',
+}
 
 class hgweb(object):
     def __init__(self, repo, name=None):
@@ -93,7 +37,6 @@
         self.reponame = name
         self.archives = 'zip', 'gz', 'bz2'
         self.stripecount = 1
-        self._capabilities = None
         # a repo owner may set web.templates in .hg/hgrc to get any file
         # readable by the user running the CGI script
         self.templatepath = self.config("web", "templates",
@@ -125,18 +68,6 @@
             self.maxfiles = int(self.config("web", "maxfiles", 10))
             self.allowpull = self.configbool("web", "allowpull", True)
             self.encoding = self.config("web", "encoding", util._encoding)
-            self._capabilities = None
-
-    def capabilities(self):
-        if self._capabilities is not None:
-            return self._capabilities
-        caps = ['lookup', 'changegroupsubset']
-        if self.configbool('server', 'uncompressed'):
-            caps.append('stream=%d' % self.repo.changelog.version)
-        if changegroup.bundlepriority:
-            caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
-        self._capabilities = caps
-        return caps
 
     def run(self):
         if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
@@ -146,22 +77,28 @@
 
     def __call__(self, env, respond):
         req = wsgirequest(env, respond)
-        self.run_wsgi(req)
-        return req
+        return self.run_wsgi(req)
 
     def run_wsgi(self, req):
 
         self.refresh()
 
-        # expand form shortcuts
+        # process this if it's a protocol request
+        # protocol bits don't need to create any URLs
+        # and the clients always use the old URL structure
 
-        for k in shortcuts.iterkeys():
-            if k in req.form:
-                for name, value in shortcuts[k]:
-                    if value is None:
-                        value = req.form[k]
-                    req.form[name] = value
-                del req.form[k]
+        cmd = req.form.get('cmd', [''])[0]
+        if cmd and cmd in protocol.__all__:
+            try:
+                if cmd in perms:
+                    self.check_perm(req, perms[cmd])
+                method = getattr(protocol, cmd)
+                return method(self.repo, req)
+            except ErrorResponse, inst:
+                req.respond(inst.code, protocol.HGTYPE)
+                if not inst.message:
+                    return []
+                return '0\n%s\n' % inst.message,
 
         # work with CGI variables to create coherent structure
         # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
@@ -194,8 +131,10 @@
                 cmd = cmd[style+1:]
 
             # avoid accepting e.g. style parameter as command
-            if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
+            if hasattr(webcommands, cmd):
                 req.form['cmd'] = [cmd]
+            else:
+                cmd = ''
 
             if args and args[0]:
                 node = args.pop(0)
@@ -213,30 +152,13 @@
                         req.form['node'] = [fn[:-len(ext)]]
                         req.form['type'] = [type_]
 
-        # process this if it's a protocol request
-
-        cmd = req.form.get('cmd', [''])[0]
-        if cmd in protocol.__all__:
-            method = getattr(protocol, cmd)
-            method(self, req)
-            return
-
         # process the web interface request
 
         try:
 
             tmpl = self.templater(req)
-            try:
-                ctype = tmpl('mimetype', encoding=self.encoding)
-                ctype = templater.stringify(ctype)
-            except KeyError:
-                # old templates with inline HTTP headers?
-                if 'mimetype' in tmpl:
-                    raise
-                header = tmpl('header', encoding=self.encoding)
-                header_file = cStringIO.StringIO(templater.stringify(header))
-                msg = mimetools.Message(header_file, 0)
-                ctype = msg['content-type']
+            ctype = tmpl('mimetype', encoding=self.encoding)
+            ctype = templater.stringify(ctype)
 
             if cmd == '':
                 req.form['cmd'] = [tmpl.cache['default']]
@@ -252,21 +174,20 @@
                 content = getattr(webcommands, cmd)(self, req, tmpl)
                 req.respond(HTTP_OK, ctype)
 
-            req.write(content)
-            del tmpl
+            return ''.join(content),
 
         except revlog.LookupError, err:
             req.respond(HTTP_NOT_FOUND, ctype)
             msg = str(err)
             if 'manifest' not in msg:
                 msg = 'revision not found: %s' % err.name
-            req.write(tmpl('error', error=msg))
+            return ''.join(tmpl('error', error=msg)),
         except (RepoError, revlog.RevlogError), inst:
             req.respond(HTTP_SERVER_ERROR, ctype)
-            req.write(tmpl('error', error=str(inst)))
+            return ''.join(tmpl('error', error=str(inst))),
         except ErrorResponse, inst:
             req.respond(inst.code, ctype)
-            req.write(tmpl('error', error=inst.message))
+            return ''.join(tmpl('error', error=inst.message)),
 
     def templater(self, req):
 
@@ -291,13 +212,7 @@
         # some functions for the templater
 
         def header(**map):
-            header = tmpl('header', encoding=self.encoding, **map)
-            if 'mimetype' not in tmpl:
-                # old template with inline HTTP headers
-                header_file = cStringIO.StringIO(templater.stringify(header))
-                msg = mimetools.Message(header_file, 0)
-                header = header_file.read()
-            yield header
+            yield tmpl('header', encoding=self.encoding, **map)
 
         def footer(**map):
             yield tmpl("footer", **map)
@@ -355,54 +270,6 @@
         if len(files) > self.maxfiles:
             yield tmpl("fileellipses")
 
-    def siblings(self, siblings=[], hiderev=None, **args):
-        siblings = [s for s in siblings if s.node() != nullid]
-        if len(siblings) == 1 and siblings[0].rev() == hiderev:
-            return
-        for s in siblings:
-            d = {'node': hex(s.node()), 'rev': s.rev()}
-            if hasattr(s, 'path'):
-                d['file'] = s.path()
-            d.update(args)
-            yield d
-
-    def renamelink(self, fl, node):
-        r = fl.renamed(node)
-        if r:
-            return [dict(file=r[0], node=hex(r[1]))]
-        return []
-
-    def nodetagsdict(self, node):
-        return [{"name": i} for i in self.repo.nodetags(node)]
-
-    def nodebranchdict(self, ctx):
-        branches = []
-        branch = ctx.branch()
-        # If this is an empty repo, ctx.node() == nullid,
-        # ctx.branch() == 'default', but branchtags() is
-        # an empty dict. Using dict.get avoids a traceback.
-        if self.repo.branchtags().get(branch) == ctx.node():
-            branches.append({"name": branch})
-        return branches
-
-    def nodeinbranch(self, ctx):
-        branches = []
-        branch = ctx.branch()
-        if branch != 'default' and self.repo.branchtags().get(branch) != ctx.node():
-            branches.append({"name": branch})
-        return branches
-
-    def nodebranchnodefault(self, ctx):
-        branches = []
-        branch = ctx.branch()
-        if branch != 'default':
-            branches.append({"name": branch})
-        return branches
-
-    def showtag(self, tmpl, t1, node=nullid, **args):
-        for t in self.repo.nodetags(node):
-            yield tmpl(t1, tag=t, **args)
-
     def diff(self, tmpl, node1, node2, files):
         def filterfiles(filters, files):
             l = [x for x in files if x in filters]
@@ -443,8 +310,8 @@
                            linenumber="% 8s" % lineno)
 
         r = self.repo
-        c1 = r.changectx(node1)
-        c2 = r.changectx(node2)
+        c1 = r[node1]
+        c2 = r[node2]
         date1 = util.datestr(c1.date())
         date2 = util.datestr(c2.date())
 
@@ -470,524 +337,39 @@
             yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                           opts=diffopts), f, tn)
 
-    def changelog(self, tmpl, ctx, shortlog=False):
-        def changelist(limit=0,**map):
-            cl = self.repo.changelog
-            l = [] # build a list in forward order for efficiency
-            for i in xrange(start, end):
-                ctx = self.repo.changectx(i)
-                n = ctx.node()
-                showtags = self.showtag(tmpl, 'changelogtag', n)
-
-                l.insert(0, {"parity": parity.next(),
-                             "author": ctx.user(),
-                             "parent": self.siblings(ctx.parents(), i - 1),
-                             "child": self.siblings(ctx.children(), i + 1),
-                             "changelogtag": showtags,
-                             "desc": ctx.description(),
-                             "date": ctx.date(),
-                             "files": self.listfilediffs(tmpl, ctx.files(), n),
-                             "rev": i,
-                             "node": hex(n),
-                             "tags": self.nodetagsdict(n),
-                             "inbranch": self.nodeinbranch(ctx),
-                             "branches": self.nodebranchdict(ctx)})
-
-            if limit > 0:
-                l = l[:limit]
-
-            for e in l:
-                yield e
-
-        maxchanges = shortlog and self.maxshortchanges or self.maxchanges
-        cl = self.repo.changelog
-        count = cl.count()
-        pos = ctx.rev()
-        start = max(0, pos - maxchanges + 1)
-        end = min(count, start + maxchanges)
-        pos = end - 1
-        parity = paritygen(self.stripecount, offset=start-end)
-
-        changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
-
-        return tmpl(shortlog and 'shortlog' or 'changelog',
-                    changenav=changenav,
-                    node=hex(cl.tip()),
-                    rev=pos, changesets=count,
-                    entries=lambda **x: changelist(limit=0,**x),
-                    latestentry=lambda **x: changelist(limit=1,**x),
-                    archives=self.archivelist("tip"))
-
-    def search(self, tmpl, query):
-
-        def changelist(**map):
-            cl = self.repo.changelog
-            count = 0
-            qw = query.lower().split()
-
-            def revgen():
-                for i in xrange(cl.count() - 1, 0, -100):
-                    l = []
-                    for j in xrange(max(0, i - 100), i + 1):
-                        ctx = self.repo.changectx(j)
-                        l.append(ctx)
-                    l.reverse()
-                    for e in l:
-                        yield e
-
-            for ctx in revgen():
-                miss = 0
-                for q in qw:
-                    if not (q in ctx.user().lower() or
-                            q in ctx.description().lower() or
-                            q in " ".join(ctx.files()).lower()):
-                        miss = 1
-                        break
-                if miss:
-                    continue
-
-                count += 1
-                n = ctx.node()
-                showtags = self.showtag(tmpl, 'changelogtag', n)
-
-                yield tmpl('searchentry',
-                           parity=parity.next(),
-                           author=ctx.user(),
-                           parent=self.siblings(ctx.parents()),
-                           child=self.siblings(ctx.children()),
-                           changelogtag=showtags,
-                           desc=ctx.description(),
-                           date=ctx.date(),
-                           files=self.listfilediffs(tmpl, ctx.files(), n),
-                           rev=ctx.rev(),
-                           node=hex(n),
-                           tags=self.nodetagsdict(n),
-                           inbranch=self.nodeinbranch(ctx),
-                           branches=self.nodebranchdict(ctx))
-
-                if count >= self.maxchanges:
-                    break
-
-        cl = self.repo.changelog
-        parity = paritygen(self.stripecount)
-
-        return tmpl('search',
-                    query=query,
-                    node=hex(cl.tip()),
-                    entries=changelist,
-                    archives=self.archivelist("tip"))
-
-    def changeset(self, tmpl, ctx):
-        n = ctx.node()
-        showtags = self.showtag(tmpl, 'changesettag', n)
-        parents = ctx.parents()
-        p1 = parents[0].node()
-
-        files = []
-        parity = paritygen(self.stripecount)
-        for f in ctx.files():
-            files.append(tmpl("filenodelink",
-                              node=hex(n), file=f,
-                              parity=parity.next()))
-
-        def diff(**map):
-            yield self.diff(tmpl, p1, n, None)
-
-        return tmpl('changeset',
-                    diff=diff,
-                    rev=ctx.rev(),
-                    node=hex(n),
-                    parent=self.siblings(parents),
-                    child=self.siblings(ctx.children()),
-                    changesettag=showtags,
-                    author=ctx.user(),
-                    desc=ctx.description(),
-                    date=ctx.date(),
-                    files=files,
-                    archives=self.archivelist(hex(n)),
-                    tags=self.nodetagsdict(n),
-                    branch=self.nodebranchnodefault(ctx),
-                    inbranch=self.nodeinbranch(ctx),
-                    branches=self.nodebranchdict(ctx))
-
-    def filelog(self, tmpl, fctx):
-        f = fctx.path()
-        fl = fctx.filelog()
-        count = fl.count()
-        pagelen = self.maxshortchanges
-        pos = fctx.filerev()
-        start = max(0, pos - pagelen + 1)
-        end = min(count, start + pagelen)
-        pos = end - 1
-        parity = paritygen(self.stripecount, offset=start-end)
-
-        def entries(limit=0, **map):
-            l = []
-
-            for i in xrange(start, end):
-                ctx = fctx.filectx(i)
-                n = fl.node(i)
-
-                l.insert(0, {"parity": parity.next(),
-                             "filerev": i,
-                             "file": f,
-                             "node": hex(ctx.node()),
-                             "author": ctx.user(),
-                             "date": ctx.date(),
-                             "rename": self.renamelink(fl, n),
-                             "parent": self.siblings(fctx.parents()),
-                             "child": self.siblings(fctx.children()),
-                             "desc": ctx.description()})
-
-            if limit > 0:
-                l = l[:limit]
-
-            for e in l:
-                yield e
-
-        nodefunc = lambda x: fctx.filectx(fileid=x)
-        nav = revnavgen(pos, pagelen, count, nodefunc)
-        return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
-                    entries=lambda **x: entries(limit=0, **x),
-                    latestentry=lambda **x: entries(limit=1, **x))
-
-    def filerevision(self, tmpl, fctx):
-        f = fctx.path()
-        text = fctx.data()
-        fl = fctx.filelog()
-        n = fctx.filenode()
-        parity = paritygen(self.stripecount)
-
-        if util.binary(text):
-            mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
-            text = '(binary:%s)' % mt
-
-        def lines():
-            for lineno, t in enumerate(text.splitlines(1)):
-                yield {"line": t,
-                       "lineid": "l%d" % (lineno + 1),
-                       "linenumber": "% 6d" % (lineno + 1),
-                       "parity": parity.next()}
-
-        return tmpl("filerevision",
-                    file=f,
-                    path=_up(f),
-                    text=lines(),
-                    rev=fctx.rev(),
-                    node=hex(fctx.node()),
-                    author=fctx.user(),
-                    date=fctx.date(),
-                    desc=fctx.description(),
-                    branch=self.nodebranchnodefault(fctx),
-                    parent=self.siblings(fctx.parents()),
-                    child=self.siblings(fctx.children()),
-                    rename=self.renamelink(fl, n),
-                    permissions=fctx.manifest().flags(f))
-
-    def fileannotate(self, tmpl, fctx):
-        f = fctx.path()
-        n = fctx.filenode()
-        fl = fctx.filelog()
-        parity = paritygen(self.stripecount)
-
-        def annotate(**map):
-            last = None
-            if util.binary(fctx.data()):
-                mt = (mimetypes.guess_type(fctx.path())[0]
-                      or 'application/octet-stream')
-                lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
-                                    '(binary:%s)' % mt)])
-            else:
-                lines = enumerate(fctx.annotate(follow=True, linenumber=True))
-            for lineno, ((f, targetline), l) in lines:
-                fnode = f.filenode()
-                name = self.repo.ui.shortuser(f.user())
-
-                if last != fnode:
-                    last = fnode
-
-                yield {"parity": parity.next(),
-                       "node": hex(f.node()),
-                       "rev": f.rev(),
-                       "author": name,
-                       "file": f.path(),
-                       "targetline": targetline,
-                       "line": l,
-                       "lineid": "l%d" % (lineno + 1),
-                       "linenumber": "% 6d" % (lineno + 1)}
-
-        return tmpl("fileannotate",
-                    file=f,
-                    annotate=annotate,
-                    path=_up(f),
-                    rev=fctx.rev(),
-                    node=hex(fctx.node()),
-                    author=fctx.user(),
-                    date=fctx.date(),
-                    desc=fctx.description(),
-                    rename=self.renamelink(fl, n),
-                    branch=self.nodebranchnodefault(fctx),
-                    parent=self.siblings(fctx.parents()),
-                    child=self.siblings(fctx.children()),
-                    permissions=fctx.manifest().flags(f))
-
-    def manifest(self, tmpl, ctx, path):
-        mf = ctx.manifest()
-        node = ctx.node()
-
-        files = {}
-        parity = paritygen(self.stripecount)
-
-        if path and path[-1] != "/":
-            path += "/"
-        l = len(path)
-        abspath = "/" + path
-
-        for f, n in mf.items():
-            if f[:l] != path:
-                continue
-            remain = f[l:]
-            if "/" in remain:
-                short = remain[:remain.index("/") + 1] # bleah
-                files[short] = (f, None)
-            else:
-                short = os.path.basename(remain)
-                files[short] = (f, n)
-
-        if not files:
-            raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
-
-        def filelist(**map):
-            fl = files.keys()
-            fl.sort()
-            for f in fl:
-                full, fnode = files[f]
-                if not fnode:
-                    continue
-
-                fctx = ctx.filectx(full)
-                yield {"file": full,
-                       "parity": parity.next(),
-                       "basename": f,
-                       "date": fctx.changectx().date(),
-                       "size": fctx.size(),
-                       "permissions": mf.flags(full)}
-
-        def dirlist(**map):
-            fl = files.keys()
-            fl.sort()
-            for f in fl:
-                full, fnode = files[f]
-                if fnode:
-                    continue
-
-                yield {"parity": parity.next(),
-                       "path": "%s%s" % (abspath, f),
-                       "basename": f[:-1]}
-
-        return tmpl("manifest",
-                    rev=ctx.rev(),
-                    node=hex(node),
-                    path=abspath,
-                    up=_up(abspath),
-                    upparity=parity.next(),
-                    fentries=filelist,
-                    dentries=dirlist,
-                    archives=self.archivelist(hex(node)),
-                    tags=self.nodetagsdict(node),
-                    inbranch=self.nodeinbranch(ctx),
-                    branches=self.nodebranchdict(ctx))
-
-    def tags(self, tmpl):
-        i = self.repo.tagslist()
-        i.reverse()
-        parity = paritygen(self.stripecount)
-
-        def entries(notip=False,limit=0, **map):
-            count = 0
-            for k, n in i:
-                if notip and k == "tip":
-                    continue
-                if limit > 0 and count >= limit:
-                    continue
-                count = count + 1
-                yield {"parity": parity.next(),
-                       "tag": k,
-                       "date": self.repo.changectx(n).date(),
-                       "node": hex(n)}
-
-        return tmpl("tags",
-                    node=hex(self.repo.changelog.tip()),
-                    entries=lambda **x: entries(False,0, **x),
-                    entriesnotip=lambda **x: entries(True,0, **x),
-                    latestentry=lambda **x: entries(True,1, **x))
-
-    def summary(self, tmpl):
-        i = self.repo.tagslist()
-        i.reverse()
-
-        def tagentries(**map):
-            parity = paritygen(self.stripecount)
-            count = 0
-            for k, n in i:
-                if k == "tip": # skip tip
-                    continue;
-
-                count += 1
-                if count > 10: # limit to 10 tags
-                    break;
-
-                yield tmpl("tagentry",
-                           parity=parity.next(),
-                           tag=k,
-                           node=hex(n),
-                           date=self.repo.changectx(n).date())
-
-
-        def branches(**map):
-            parity = paritygen(self.stripecount)
-
-            b = self.repo.branchtags()
-            l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
-            l.sort()
-
-            for r,n,t in l:
-                ctx = self.repo.changectx(n)
-
-                yield {'parity': parity.next(),
-                       'branch': t,
-                       'node': hex(n),
-                       'date': ctx.date()}
-
-        def changelist(**map):
-            parity = paritygen(self.stripecount, offset=start-end)
-            l = [] # build a list in forward order for efficiency
-            for i in xrange(start, end):
-                ctx = self.repo.changectx(i)
-                n = ctx.node()
-                hn = hex(n)
-
-                l.insert(0, tmpl(
-                   'shortlogentry',
-                    parity=parity.next(),
-                    author=ctx.user(),
-                    desc=ctx.description(),
-                    date=ctx.date(),
-                    rev=i,
-                    node=hn,
-                    tags=self.nodetagsdict(n),
-                    inbranch=self.nodeinbranch(ctx),
-                    branches=self.nodebranchdict(ctx)))
-
-            yield l
-
-        cl = self.repo.changelog
-        count = cl.count()
-        start = max(0, count - self.maxchanges)
-        end = min(count, start + self.maxchanges)
-
-        return tmpl("summary",
-                    desc=self.config("web", "description", "unknown"),
-                    owner=get_contact(self.config) or "unknown",
-                    lastchange=cl.read(cl.tip())[2],
-                    tags=tagentries,
-                    branches=branches,
-                    shortlog=changelist,
-                    node=hex(cl.tip()),
-                    archives=self.archivelist("tip"))
-
-    def filediff(self, tmpl, fctx):
-        n = fctx.node()
-        path = fctx.path()
-        parents = fctx.parents()
-        p1 = parents and parents[0].node() or nullid
-
-        def diff(**map):
-            yield self.diff(tmpl, p1, n, [path])
-
-        return tmpl("filediff",
-                    file=path,
-                    node=hex(n),
-                    rev=fctx.rev(),
-                    branch=self.nodebranchnodefault(fctx),
-                    parent=self.siblings(parents),
-                    child=self.siblings(fctx.children()),
-                    diff=diff)
-
     archive_specs = {
         'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
         'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
         'zip': ('application/zip', 'zip', '.zip', None),
         }
 
-    def archive(self, tmpl, req, key, type_):
-        reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
-        cnode = self.repo.lookup(key)
-        arch_version = key
-        if cnode == key or key == 'tip':
-            arch_version = short(cnode)
-        name = "%s-%s" % (reponame, arch_version)
-        mimetype, artype, extension, encoding = self.archive_specs[type_]
-        headers = [
-            ('Content-Type', mimetype),
-            ('Content-Disposition', 'attachment; filename=%s%s' %
-                (name, extension))
-        ]
-        if encoding:
-            headers.append(('Content-Encoding', encoding))
-        req.header(headers)
-        req.respond(HTTP_OK)
-        archival.archive(self.repo, req, cnode, artype, prefix=name)
+    def check_perm(self, req, op):
+        '''Check permission for operation based on request data (including
+        authentication info). Raise ErrorResponse if the operation is not
+        allowed.'''
 
-    # add tags to things
-    # tags -> list of changesets corresponding to tags
-    # find tag, changeset, file
-
-    def cleanpath(self, path):
-        path = path.lstrip('/')
-        return util.canonpath(self.repo.root, '', path)
+        if op == 'pull' and not self.allowpull:
+            raise ErrorResponse(HTTP_OK, '')
+        elif op == 'pull':
+            return
 
-    def changectx(self, req):
-        if 'node' in req.form:
-            changeid = req.form['node'][0]
-        elif 'manifest' in req.form:
-            changeid = req.form['manifest'][0]
-        else:
-            changeid = self.repo.changelog.count() - 1
-
-        try:
-            ctx = self.repo.changectx(changeid)
-        except RepoError:
-            man = self.repo.manifest
-            mn = man.lookup(changeid)
-            ctx = self.repo.changectx(man.linkrev(mn))
-
-        return ctx
+        # enforce that you can only push using POST requests
+        if req.env['REQUEST_METHOD'] != 'POST':
+            msg = 'push requires POST request'
+            raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)
 
-    def filectx(self, req):
-        path = self.cleanpath(req.form['file'][0])
-        if 'node' in req.form:
-            changeid = req.form['node'][0]
-        else:
-            changeid = req.form['filenode'][0]
-        try:
-            ctx = self.repo.changectx(changeid)
-            fctx = ctx.filectx(path)
-        except RepoError:
-            fctx = self.repo.filectx(path, fileid=changeid)
-
-        return fctx
-
-    def check_perm(self, req, op, default):
-        '''check permission for operation based on user auth.
-        return true if op allowed, else false.
-        default is policy to use if no config given.'''
+        # require ssl by default for pushing, so that auth info cannot be
+        # sniffed and replayed
+        scheme = req.env.get('wsgi.url_scheme')
+        if self.configbool('web', 'push_ssl', True) and scheme != 'https':
+            raise ErrorResponse(HTTP_OK, 'ssl required')
 
         user = req.env.get('REMOTE_USER')
 
-        deny = self.configlist('web', 'deny_' + op)
+        deny = self.configlist('web', 'deny_push')
         if deny and (not user or deny == ['*'] or user in deny):
-            return False
+            raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
 
-        allow = self.configlist('web', 'allow_' + op)
-        return (allow and (allow == ['*'] or user in allow)) or default
+        allow = self.configlist('web', 'allow_push')
+        result = allow and (allow == ['*'] or user in allow)
+        if not result:
+            raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
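
Protocol commands are now gated through the perms map before they run:
pull-type commands only need web.allowpull, while pushes go through the POST,
SSL and allow/deny checks in check_perm(). A condensed sketch of the dispatch
path, using hypothetical web (an hgweb instance) and req objects:

    cmd = req.form.get('cmd', [''])[0]        # e.g. 'unbundle' for a push
    if cmd in perms:                          # perms: command -> 'pull' or 'push'
        try:
            web.check_perm(req, perms[cmd])   # raises ErrorResponse when denied
        except ErrorResponse, inst:
            req.respond(inst.code, protocol.HGTYPE)

The hgrc settings consulted on the push path are web.push_ssl, web.deny_push
and web.allow_push.
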
--- a/mercurial/hgweb/hgwebdir_mod.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py	Wed Sep 17 11:34:37 2008 +0200
@@ -6,7 +6,7 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import os, mimetools, cStringIO
+import os
 from mercurial.i18n import gettext as _
 from mercurial.repo import RepoError
 from mercurial import ui, hg, util, templater, templatefilters
@@ -33,8 +33,7 @@
             self.repos = cleannames(config)
             self.repos_sorted = ('', False)
         elif isinstance(config, dict):
-            self.repos = cleannames(config.items())
-            self.repos.sort()
+            self.repos = util.sort(cleannames(config.items()))
         else:
             if isinstance(config, util.configparser):
                 cp = config
@@ -71,8 +70,7 @@
 
     def __call__(self, env, respond):
         req = wsgirequest(env, respond)
-        self.run_wsgi(req)
-        return req
+        return self.run_wsgi(req)
 
     def run_wsgi(self, req):
 
@@ -81,17 +79,8 @@
 
                 virtual = req.env.get("PATH_INFO", "").strip('/')
                 tmpl = self.templater(req)
-                try:
-                    ctype = tmpl('mimetype', encoding=util._encoding)
-                    ctype = templater.stringify(ctype)
-                except KeyError:
-                    # old templates with inline HTTP headers?
-                    if 'mimetype' in tmpl:
-                        raise
-                    header = tmpl('header', encoding=util._encoding)
-                    header_file = cStringIO.StringIO(templater.stringify(header))
-                    msg = mimetools.Message(header_file, 0)
-                    ctype = msg['content-type']
+                ctype = tmpl('mimetype', encoding=util._encoding)
+                ctype = templater.stringify(ctype)
 
                 # a static file
                 if virtual.startswith('static/') or 'static' in req.form:
@@ -100,14 +89,12 @@
                         fname = virtual[7:]
                     else:
                         fname = req.form['static'][0]
-                    req.write(staticfile(static, fname, req))
-                    return
+                    return staticfile(static, fname, req)
 
                 # top-level index
                 elif not virtual:
                     req.respond(HTTP_OK, ctype)
-                    req.write(self.makeindex(req, tmpl))
-                    return
+                    return ''.join(self.makeindex(req, tmpl)),
 
                 # nested indexes and hgwebs
 
@@ -118,8 +105,7 @@
                         req.env['REPO_NAME'] = virtual
                         try:
                             repo = hg.repository(self.parentui, real)
-                            hgweb(repo).run_wsgi(req)
-                            return
+                            return hgweb(repo).run_wsgi(req)
                         except IOError, inst:
                             msg = inst.strerror
                             raise ErrorResponse(HTTP_SERVER_ERROR, msg)
@@ -130,8 +116,7 @@
                     subdir = virtual + '/'
                     if [r for r in repos if r.startswith(subdir)]:
                         req.respond(HTTP_OK, ctype)
-                        req.write(self.makeindex(req, tmpl, subdir))
-                        return
+                        return ''.join(self.makeindex(req, tmpl, subdir)),
 
                     up = virtual.rfind('/')
                     if up < 0:
@@ -140,11 +125,11 @@
 
                 # prefixes not found
                 req.respond(HTTP_NOT_FOUND, ctype)
-                req.write(tmpl("notfound", repo=virtual))
+                return ''.join(tmpl("notfound", repo=virtual)),
 
             except ErrorResponse, err:
                 req.respond(err.code, ctype)
-                req.write(tmpl('error', error=err.message or ''))
+                return ''.join(tmpl('error', error=err.message or '')),
         finally:
             tmpl = None
 
@@ -182,7 +167,7 @@
                 try:
                     u.readconfig(os.path.join(path, '.hg', 'hgrc'))
                 except Exception, e:
-                    u.warn(_('error reading %s/.hg/hgrc: %s\n' % (path, e)))
+                    u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
                     continue
                 def get(section, name, default=None):
                     return u.config(section, name, default, untrusted=True)
@@ -257,13 +242,7 @@
     def templater(self, req):
 
         def header(**map):
-            header = tmpl('header', encoding=util._encoding, **map)
-            if 'mimetype' not in tmpl:
-                # old template with inline HTTP headers
-                header_file = cStringIO.StringIO(templater.stringify(header))
-                msg = mimetools.Message(header_file, 0)
-                header = header_file.read()
-            yield header
+            yield tmpl('header', encoding=util._encoding, **map)
 
         def footer(**map):
             yield tmpl("footer", **map)
--- a/mercurial/hgweb/protocol.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/protocol.py	Wed Sep 17 11:34:37 2008 +0200
@@ -21,69 +21,65 @@
 
 HGTYPE = 'application/mercurial-0.1'
 
-def lookup(web, req):
+def lookup(repo, req):
     try:
-        r = hex(web.repo.lookup(req.form['key'][0]))
+        r = hex(repo.lookup(req.form['key'][0]))
         success = 1
     except Exception,inst:
         r = str(inst)
         success = 0
     resp = "%s %s\n" % (success, r)
     req.respond(HTTP_OK, HGTYPE, length=len(resp))
-    req.write(resp)
+    yield resp
 
-def heads(web, req):
-    resp = " ".join(map(hex, web.repo.heads())) + "\n"
+def heads(repo, req):
+    resp = " ".join(map(hex, repo.heads())) + "\n"
     req.respond(HTTP_OK, HGTYPE, length=len(resp))
-    req.write(resp)
+    yield resp
 
-def branches(web, req):
+def branches(repo, req):
     nodes = []
     if 'nodes' in req.form:
         nodes = map(bin, req.form['nodes'][0].split(" "))
     resp = cStringIO.StringIO()
-    for b in web.repo.branches(nodes):
+    for b in repo.branches(nodes):
         resp.write(" ".join(map(hex, b)) + "\n")
     resp = resp.getvalue()
     req.respond(HTTP_OK, HGTYPE, length=len(resp))
-    req.write(resp)
+    yield resp
 
-def between(web, req):
+def between(repo, req):
     if 'pairs' in req.form:
         pairs = [map(bin, p.split("-"))
                  for p in req.form['pairs'][0].split(" ")]
     resp = cStringIO.StringIO()
-    for b in web.repo.between(pairs):
+    for b in repo.between(pairs):
         resp.write(" ".join(map(hex, b)) + "\n")
     resp = resp.getvalue()
     req.respond(HTTP_OK, HGTYPE, length=len(resp))
-    req.write(resp)
+    yield resp
 
-def changegroup(web, req):
+def changegroup(repo, req):
     req.respond(HTTP_OK, HGTYPE)
     nodes = []
-    if not web.allowpull:
-        return
 
     if 'roots' in req.form:
         nodes = map(bin, req.form['roots'][0].split(" "))
 
     z = zlib.compressobj()
-    f = web.repo.changegroup(nodes, 'serve')
+    f = repo.changegroup(nodes, 'serve')
     while 1:
         chunk = f.read(4096)
         if not chunk:
             break
-        req.write(z.compress(chunk))
+        yield z.compress(chunk)
 
-    req.write(z.flush())
+    yield z.flush()
 
-def changegroupsubset(web, req):
+def changegroupsubset(repo, req):
     req.respond(HTTP_OK, HGTYPE)
     bases = []
     heads = []
-    if not web.allowpull:
-        return
 
     if 'bases' in req.form:
         bases = [bin(x) for x in req.form['bases'][0].split(' ')]
@@ -91,69 +87,42 @@
         heads = [bin(x) for x in req.form['heads'][0].split(' ')]
 
     z = zlib.compressobj()
-    f = web.repo.changegroupsubset(bases, heads, 'serve')
+    f = repo.changegroupsubset(bases, heads, 'serve')
     while 1:
         chunk = f.read(4096)
         if not chunk:
             break
-        req.write(z.compress(chunk))
+        yield z.compress(chunk)
 
-    req.write(z.flush())
+    yield z.flush()
 
-def capabilities(web, req):
-    resp = ' '.join(web.capabilities())
-    req.respond(HTTP_OK, HGTYPE, length=len(resp))
-    req.write(resp)
+def capabilities(repo, req):
+    caps = ['lookup', 'changegroupsubset']
+    if repo.ui.configbool('server', 'uncompressed', untrusted=True):
+        caps.append('stream=%d' % repo.changelog.version)
+    if changegroupmod.bundlepriority:
+        caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
+    rsp = ' '.join(caps)
+    req.respond(HTTP_OK, HGTYPE, length=len(rsp))
+    yield rsp
 
-def unbundle(web, req):
+def unbundle(repo, req):
+
+    proto = req.env.get('wsgi.url_scheme') or 'http'
+    their_heads = req.form['heads'][0].split(' ')
 
-    def bail(response, headers={}):
+    def check_heads():
+        heads = map(hex, repo.heads())
+        return their_heads == [hex('force')] or their_heads == heads
+
+    # fail early if possible
+    if not check_heads():
         length = int(req.env.get('CONTENT_LENGTH', 0))
         for s in util.filechunkiter(req, limit=length):
             # drain incoming bundle, else client will not see
             # response when run outside cgi script
             pass
-
-        status = headers.pop('status', HTTP_OK)
-        req.header(headers.items())
-        req.respond(status, HGTYPE)
-        req.write('0\n')
-        req.write(response)
-
-    # enforce that you can only unbundle with POST requests
-    if req.env['REQUEST_METHOD'] != 'POST':
-        headers = {'status': '405 Method Not Allowed'}
-        bail('unbundle requires POST request\n', headers)
-        return
-
-    # require ssl by default, auth info cannot be sniffed and
-    # replayed
-    ssl_req = web.configbool('web', 'push_ssl', True)
-    if ssl_req:
-        if req.env.get('wsgi.url_scheme') != 'https':
-            bail('ssl required\n')
-            return
-        proto = 'https'
-    else:
-        proto = 'http'
-
-    # do not allow push unless explicitly allowed
-    if not web.check_perm(req, 'push', False):
-        bail('push not authorized\n', headers={'status': '401 Unauthorized'})
-        return
-
-    their_heads = req.form['heads'][0].split(' ')
-
-    def check_heads():
-        heads = map(hex, web.repo.heads())
-        return their_heads == [hex('force')] or their_heads == heads
-
-    # fail early if possible
-    if not check_heads():
-        bail('unsynced changes\n')
-        return
-
-    req.respond(HTTP_OK, HGTYPE)
+        raise ErrorResponse(HTTP_OK, 'unsynced changes')
 
     # do not lock repo until all changegroup data is
     # streamed. save to temporary file.
@@ -166,12 +135,10 @@
             fp.write(s)
 
         try:
-            lock = web.repo.lock()
+            lock = repo.lock()
             try:
                 if not check_heads():
-                    req.write('0\n')
-                    req.write('unsynced changes\n')
-                    return
+                    raise ErrorResponse(HTTP_OK, 'unsynced changes')
 
                 fp.seek(0)
                 header = fp.read(6)
@@ -190,26 +157,24 @@
                     url = 'remote:%s:%s' % (proto,
                                             req.env.get('REMOTE_HOST', ''))
                     try:
-                        ret = web.repo.addchangegroup(gen, 'serve', url)
+                        ret = repo.addchangegroup(gen, 'serve', url)
                     except util.Abort, inst:
                         sys.stdout.write("abort: %s\n" % inst)
                         ret = 0
                 finally:
                     val = sys.stdout.getvalue()
                     sys.stdout, sys.stderr = oldio
-                req.write('%d\n' % ret)
-                req.write(val)
+                req.respond(HTTP_OK, HGTYPE)
+                return '%d\n%s' % (ret, val),
             finally:
                 del lock
         except ValueError, inst:
-            req.write('0\n')
-            req.write(str(inst) + '\n')
+            raise ErrorResponse(HTTP_OK, inst)
         except (OSError, IOError), inst:
-            req.write('0\n')
             filename = getattr(inst, 'filename', '')
             # Don't send our filesystem layout to the client
-            if filename.startswith(web.repo.root):
-                filename = filename[len(web.repo.root)+1:]
+            if filename.startswith(repo.root):
+                filename = filename[len(repo.root)+1:]
             else:
                 filename = ''
             error = getattr(inst, 'strerror', 'Unknown error')
@@ -217,14 +182,15 @@
                 code = HTTP_NOT_FOUND
             else:
                 code = HTTP_SERVER_ERROR
-            req.respond(code)
-            req.write('%s: %s\n' % (error, filename))
+            raise ErrorResponse(code, '%s: %s' % (error, filename))
     finally:
         fp.close()
         os.unlink(tempname)
 
-def stream_out(web, req):
-    if not web.allowpull:
-        return
+def stream_out(repo, req):
     req.respond(HTTP_OK, HGTYPE)
-    streamclone.stream_out(web.repo, req, untrusted=True)
+    try:
+        for chunk in streamclone.stream_out(repo, untrusted=True):
+            yield chunk
+    except streamclone.StreamException, inst:
+        yield str(inst)
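
With hgweb moving to iterable responses, the protocol commands above become generators: each yields wire chunks instead of calling req.write(). A sketch, assuming only a file-like source, of the compress-and-yield pattern used by changegroup() and changegroupsubset():

import zlib
from io import BytesIO

def stream_compressed(fp, chunksize=4096):
    # read fixed-size chunks, compress incrementally, yield each piece
    z = zlib.compressobj()
    while True:
        chunk = fp.read(chunksize)
        if not chunk:
            break
        yield z.compress(chunk)
    yield z.flush()

payload = b'changegroup data ' * 1000
wire = b''.join(stream_compressed(BytesIO(payload)))
assert zlib.decompress(wire) == payload
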
--- a/mercurial/hgweb/request.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/request.py	Wed Sep 17 11:34:37 2008 +0200
@@ -9,6 +9,31 @@
 import socket, cgi, errno
 from common import ErrorResponse, statusmessage
 
+shortcuts = {
+    'cl': [('cmd', ['changelog']), ('rev', None)],
+    'sl': [('cmd', ['shortlog']), ('rev', None)],
+    'cs': [('cmd', ['changeset']), ('node', None)],
+    'f': [('cmd', ['file']), ('filenode', None)],
+    'fl': [('cmd', ['filelog']), ('filenode', None)],
+    'fd': [('cmd', ['filediff']), ('node', None)],
+    'fa': [('cmd', ['annotate']), ('filenode', None)],
+    'mf': [('cmd', ['manifest']), ('manifest', None)],
+    'ca': [('cmd', ['archive']), ('node', None)],
+    'tags': [('cmd', ['tags'])],
+    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
+    'static': [('cmd', ['static']), ('file', None)]
+}
+
+def expand(form):
+    for k in shortcuts.iterkeys():
+        if k in form:
+            for name, value in shortcuts[k]:
+                if value is None:
+                    value = form[k]
+                form[name] = value
+            del form[k]
+    return form
+
 class wsgirequest(object):
     def __init__(self, wsgienv, start_response):
         version = wsgienv['wsgi.version']
@@ -21,7 +46,7 @@
         self.multiprocess = wsgienv['wsgi.multiprocess']
         self.run_once = wsgienv['wsgi.run_once']
         self.env = wsgienv
-        self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
+        self.form = expand(cgi.parse(self.inp, self.env, keep_blank_values=1))
         self._start_response = start_response
         self.server_write = None
         self.headers = []
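
The new shortcuts table lets compact URLs such as ?cs=NODE stand in for the long cmd/node form. A worked example that mirrors expand() on a single shortcut (the table here is trimmed to one entry for brevity):

shortcuts = {'cs': [('cmd', ['changeset']), ('node', None)]}

def expand(form):
    for k in list(shortcuts):
        if k in form:
            for name, value in shortcuts[k]:
                if value is None:
                    # None means: reuse the shortcut's own value
                    value = form[k]
                form[name] = value
            del form[k]
    return form

assert expand({'cs': ['baz']}) == {'cmd': ['changeset'], 'node': ['baz']}
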
--- a/mercurial/hgweb/server.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/server.py	Wed Sep 17 11:34:37 2008 +0200
@@ -122,7 +122,8 @@
         self.saved_headers = []
         self.sent_headers = False
         self.length = None
-        self.server.application(env, self._start_response)
+        for chunk in self.server.application(env, self._start_response):
+            self._write(chunk)
 
     def send_headers(self):
         if not self.saved_status:
@@ -258,7 +259,7 @@
                     from OpenSSL import SSL
                     ctx = SSL.Context(SSL.SSLv23_METHOD)
                 except ImportError:
-                    raise util.Abort("SSL support is unavailable")
+                    raise util.Abort(_("SSL support is unavailable"))
                 ctx.use_privatekey_file(ssl_cert)
                 ctx.use_certificate_file(ssl_cert)
                 sock = socket.socket(self.address_family, self.socket_type)
@@ -268,12 +269,7 @@
 
             self.addr, self.port = self.socket.getsockname()[0:2]
             self.prefix = prefix
-
             self.fqaddr = socket.getfqdn(address)
-            try:
-                socket.getaddrbyhost(self.fqaddr)
-            except:
-                fqaddr = address
 
     class IPv6HTTPServer(MercurialHTTPServer):
         address_family = getattr(socket, 'AF_INET6', None)
--- a/mercurial/hgweb/webcommands.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/webcommands.py	Wed Sep 17 11:34:37 2008 +0200
@@ -5,10 +5,15 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import os, mimetypes
-from mercurial import revlog, util
+import os, mimetypes, re, cgi
+import webutil
+from mercurial import revlog, archival, templatefilters
+from mercurial.node import short, hex, nullid
+from mercurial.util import binary, datestr
 from mercurial.repo import RepoError
-from common import staticfile, ErrorResponse, HTTP_OK, HTTP_NOT_FOUND
+from common import paritygen, staticfile, get_contact, ErrorResponse
+from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from mercurial import graphmod, util
 
 # __all__ is populated with the allowed commands. Be sure to add to it if
 # you're adding a new command, or the new command won't work.
@@ -16,7 +21,7 @@
 __all__ = [
    'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
    'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog',
-   'archive', 'static',
+   'archive', 'static', 'graph',
 ]
 
 def log(web, req, tmpl):
@@ -26,17 +31,17 @@
         return changelog(web, req, tmpl)
 
 def rawfile(web, req, tmpl):
-    path = web.cleanpath(req.form.get('file', [''])[0])
+    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
     if not path:
-        content = web.manifest(tmpl, web.changectx(req), path)
+        content = manifest(web, req, tmpl)
         req.respond(HTTP_OK, web.ctype)
         return content
 
     try:
-        fctx = web.filectx(req)
+        fctx = webutil.filectx(web.repo, req)
     except revlog.LookupError, inst:
         try:
-            content = web.manifest(tmpl, web.changectx(req), path)
+            content = manifest(web, req, tmpl)
             req.respond(HTTP_OK, web.ctype)
             return content
         except ErrorResponse:
@@ -45,76 +50,518 @@
     path = fctx.path()
     text = fctx.data()
     mt = mimetypes.guess_type(path)[0]
-    if mt is None or util.binary(text):
-        mt = mt or 'application/octet-stream'
+    if mt is None:
+        mt = binary(text) and 'application/octet-stream' or 'text/plain'
 
     req.respond(HTTP_OK, mt, path, len(text))
     return [text]
 
+def _filerevision(web, tmpl, fctx):
+    f = fctx.path()
+    text = fctx.data()
+    fl = fctx.filelog()
+    n = fctx.filenode()
+    parity = paritygen(web.stripecount)
+
+    if binary(text):
+        mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
+        text = '(binary:%s)' % mt
+
+    def lines():
+        for lineno, t in enumerate(text.splitlines(1)):
+            yield {"line": t,
+                   "lineid": "l%d" % (lineno + 1),
+                   "linenumber": "% 6d" % (lineno + 1),
+                   "parity": parity.next()}
+
+    return tmpl("filerevision",
+                file=f,
+                path=webutil.up(f),
+                text=lines(),
+                rev=fctx.rev(),
+                node=hex(fctx.node()),
+                author=fctx.user(),
+                date=fctx.date(),
+                desc=fctx.description(),
+                branch=webutil.nodebranchnodefault(fctx),
+                parent=webutil.siblings(fctx.parents()),
+                child=webutil.siblings(fctx.children()),
+                rename=webutil.renamelink(fctx),
+                permissions=fctx.manifest().flags(f))
+
 def file(web, req, tmpl):
-    path = web.cleanpath(req.form.get('file', [''])[0])
+    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
     if not path:
-        return web.manifest(tmpl, web.changectx(req), path)
+        return manifest(web, req, tmpl)
     try:
-        return web.filerevision(tmpl, web.filectx(req))
+        return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
     except revlog.LookupError, inst:
         try:
-            return web.manifest(tmpl, web.changectx(req), path)
+            return manifest(web, req, tmpl)
         except ErrorResponse:
             raise inst
 
+def _search(web, tmpl, query):
+
+    def changelist(**map):
+        cl = web.repo.changelog
+        count = 0
+        qw = query.lower().split()
+
+        def revgen():
+            for i in xrange(len(cl) - 1, 0, -100):
+                l = []
+                for j in xrange(max(0, i - 100), i + 1):
+                    ctx = web.repo[j]
+                    l.append(ctx)
+                l.reverse()
+                for e in l:
+                    yield e
+
+        for ctx in revgen():
+            miss = 0
+            for q in qw:
+                if not (q in ctx.user().lower() or
+                        q in ctx.description().lower() or
+                        q in " ".join(ctx.files()).lower()):
+                    miss = 1
+                    break
+            if miss:
+                continue
+
+            count += 1
+            n = ctx.node()
+            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
+
+            yield tmpl('searchentry',
+                       parity=parity.next(),
+                       author=ctx.user(),
+                       parent=webutil.siblings(ctx.parents()),
+                       child=webutil.siblings(ctx.children()),
+                       changelogtag=showtags,
+                       desc=ctx.description(),
+                       date=ctx.date(),
+                       files=web.listfilediffs(tmpl, ctx.files(), n),
+                       rev=ctx.rev(),
+                       node=hex(n),
+                       tags=webutil.nodetagsdict(web.repo, n),
+                       inbranch=webutil.nodeinbranch(web.repo, ctx),
+                       branches=webutil.nodebranchdict(web.repo, ctx))
+
+            if count >= web.maxchanges:
+                break
+
+    cl = web.repo.changelog
+    parity = paritygen(web.stripecount)
+
+    return tmpl('search',
+                query=query,
+                node=hex(cl.tip()),
+                entries=changelist,
+                archives=web.archivelist("tip"))
+
 def changelog(web, req, tmpl, shortlog = False):
     if 'node' in req.form:
-        ctx = web.changectx(req)
+        ctx = webutil.changectx(web.repo, req)
     else:
         if 'rev' in req.form:
             hi = req.form['rev'][0]
         else:
-            hi = web.repo.changelog.count() - 1
+            hi = len(web.repo) - 1
         try:
-            ctx = web.repo.changectx(hi)
+            ctx = web.repo[hi]
         except RepoError:
-            return web.search(tmpl, hi) # XXX redirect to 404 page?
+            return _search(web, tmpl, hi) # XXX redirect to 404 page?
+
+    def changelist(limit=0, **map):
+        cl = web.repo.changelog
+        l = [] # build a list in forward order for efficiency
+        for i in xrange(start, end):
+            ctx = web.repo[i]
+            n = ctx.node()
+            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
+
+            l.insert(0, {"parity": parity.next(),
+                         "author": ctx.user(),
+                         "parent": webutil.siblings(ctx.parents(), i - 1),
+                         "child": webutil.siblings(ctx.children(), i + 1),
+                         "changelogtag": showtags,
+                         "desc": ctx.description(),
+                         "date": ctx.date(),
+                         "files": web.listfilediffs(tmpl, ctx.files(), n),
+                         "rev": i,
+                         "node": hex(n),
+                         "tags": webutil.nodetagsdict(web.repo, n),
+                         "inbranch": webutil.nodeinbranch(web.repo, ctx),
+                         "branches": webutil.nodebranchdict(web.repo, ctx)
+                        })
 
-    return web.changelog(tmpl, ctx, shortlog = shortlog)
+        if limit > 0:
+            l = l[:limit]
+
+        for e in l:
+            yield e
+
+    maxchanges = shortlog and web.maxshortchanges or web.maxchanges
+    cl = web.repo.changelog
+    count = len(cl)
+    pos = ctx.rev()
+    start = max(0, pos - maxchanges + 1)
+    end = min(count, start + maxchanges)
+    pos = end - 1
+    parity = paritygen(web.stripecount, offset=start-end)
+
+    changenav = webutil.revnavgen(pos, maxchanges, count, web.repo.changectx)
+
+    return tmpl(shortlog and 'shortlog' or 'changelog',
+                changenav=changenav,
+                node=hex(ctx.node()),
+                rev=pos, changesets=count,
+                entries=lambda **x: changelist(limit=0,**x),
+                latestentry=lambda **x: changelist(limit=1,**x),
+                archives=web.archivelist("tip"))
 
 def shortlog(web, req, tmpl):
     return changelog(web, req, tmpl, shortlog = True)
 
 def changeset(web, req, tmpl):
-    return web.changeset(tmpl, web.changectx(req))
+    ctx = webutil.changectx(web.repo, req)
+    n = ctx.node()
+    showtags = webutil.showtag(web.repo, tmpl, 'changesettag', n)
+    parents = ctx.parents()
+    p1 = parents[0].node()
+
+    files = []
+    parity = paritygen(web.stripecount)
+    for f in ctx.files():
+        files.append(tmpl("filenodelink",
+                          node=hex(n), file=f,
+                          parity=parity.next()))
+
+    diffs = web.diff(tmpl, p1, n, None)
+    return tmpl('changeset',
+                diff=diffs,
+                rev=ctx.rev(),
+                node=hex(n),
+                parent=webutil.siblings(parents),
+                child=webutil.siblings(ctx.children()),
+                changesettag=showtags,
+                author=ctx.user(),
+                desc=ctx.description(),
+                date=ctx.date(),
+                files=files,
+                archives=web.archivelist(hex(n)),
+                tags=webutil.nodetagsdict(web.repo, n),
+                branch=webutil.nodebranchnodefault(ctx),
+                inbranch=webutil.nodeinbranch(web.repo, ctx),
+                branches=webutil.nodebranchdict(web.repo, ctx))
 
 rev = changeset
 
 def manifest(web, req, tmpl):
-    return web.manifest(tmpl, web.changectx(req),
-                        web.cleanpath(req.form['path'][0]))
+    ctx = webutil.changectx(web.repo, req)
+    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
+    mf = ctx.manifest()
+    node = ctx.node()
+
+    files = {}
+    parity = paritygen(web.stripecount)
+
+    if path and path[-1] != "/":
+        path += "/"
+    l = len(path)
+    abspath = "/" + path
+
+    for f, n in mf.items():
+        if f[:l] != path:
+            continue
+        remain = f[l:]
+        if "/" in remain:
+            short = remain[:remain.index("/") + 1] # bleah
+            files[short] = (f, None)
+        else:
+            short = os.path.basename(remain)
+            files[short] = (f, n)
+
+    if not files:
+        raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
+
+    def filelist(**map):
+        for f in util.sort(files):
+            full, fnode = files[f]
+            if not fnode:
+                continue
+
+            fctx = ctx.filectx(full)
+            yield {"file": full,
+                   "parity": parity.next(),
+                   "basename": f,
+                   "date": fctx.date(),
+                   "size": fctx.size(),
+                   "permissions": mf.flags(full)}
+
+    def dirlist(**map):
+        for f in util.sort(files):
+            full, fnode = files[f]
+            if fnode:
+                continue
+
+            yield {"parity": parity.next(),
+                   "path": "%s%s" % (abspath, f),
+                   "basename": f[:-1]}
+
+    return tmpl("manifest",
+                rev=ctx.rev(),
+                node=hex(node),
+                path=abspath,
+                up=webutil.up(abspath),
+                upparity=parity.next(),
+                fentries=filelist,
+                dentries=dirlist,
+                archives=web.archivelist(hex(node)),
+                tags=webutil.nodetagsdict(web.repo, node),
+                inbranch=webutil.nodeinbranch(web.repo, ctx),
+                branches=webutil.nodebranchdict(web.repo, ctx))
 
 def tags(web, req, tmpl):
-    return web.tags(tmpl)
+    i = web.repo.tagslist()
+    i.reverse()
+    parity = paritygen(web.stripecount)
+
+    def entries(notip=False,limit=0, **map):
+        count = 0
+        for k, n in i:
+            if notip and k == "tip":
+                continue
+            if limit > 0 and count >= limit:
+                continue
+            count = count + 1
+            yield {"parity": parity.next(),
+                   "tag": k,
+                   "date": web.repo[n].date(),
+                   "node": hex(n)}
+
+    return tmpl("tags",
+                node=hex(web.repo.changelog.tip()),
+                entries=lambda **x: entries(False,0, **x),
+                entriesnotip=lambda **x: entries(True,0, **x),
+                latestentry=lambda **x: entries(True,1, **x))
 
 def summary(web, req, tmpl):
-    return web.summary(tmpl)
+    i = web.repo.tagslist()
+    i.reverse()
+
+    def tagentries(**map):
+        parity = paritygen(web.stripecount)
+        count = 0
+        for k, n in i:
+            if k == "tip": # skip tip
+                continue
+
+            count += 1
+            if count > 10: # limit to 10 tags
+                break
+
+            yield tmpl("tagentry",
+                       parity=parity.next(),
+                       tag=k,
+                       node=hex(n),
+                       date=web.repo[n].date())
+
+    def branches(**map):
+        parity = paritygen(web.stripecount)
+
+        b = web.repo.branchtags()
+        l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()]
+        for r,n,t in util.sort(l):
+            yield {'parity': parity.next(),
+                   'branch': t,
+                   'node': hex(n),
+                   'date': web.repo[n].date()}
+
+    def changelist(**map):
+        parity = paritygen(web.stripecount, offset=start-end)
+        l = [] # build a list in forward order for efficiency
+        for i in xrange(start, end):
+            ctx = web.repo[i]
+            n = ctx.node()
+            hn = hex(n)
+
+            l.insert(0, tmpl(
+               'shortlogentry',
+                parity=parity.next(),
+                author=ctx.user(),
+                desc=ctx.description(),
+                date=ctx.date(),
+                rev=i,
+                node=hn,
+                tags=webutil.nodetagsdict(web.repo, n),
+                inbranch=webutil.nodeinbranch(web.repo, ctx),
+                branches=webutil.nodebranchdict(web.repo, ctx)))
+
+        yield l
+
+    cl = web.repo.changelog
+    count = len(cl)
+    start = max(0, count - web.maxchanges)
+    end = min(count, start + web.maxchanges)
+
+    return tmpl("summary",
+                desc=web.config("web", "description", "unknown"),
+                owner=get_contact(web.config) or "unknown",
+                lastchange=cl.read(cl.tip())[2],
+                tags=tagentries,
+                branches=branches,
+                shortlog=changelist,
+                node=hex(cl.tip()),
+                archives=web.archivelist("tip"))
 
 def filediff(web, req, tmpl):
-    return web.filediff(tmpl, web.filectx(req))
+    fctx = webutil.filectx(web.repo, req)
+    n = fctx.node()
+    path = fctx.path()
+    parents = fctx.parents()
+    p1 = parents and parents[0].node() or nullid
+
+    diffs = web.diff(tmpl, p1, n, [path])
+    return tmpl("filediff",
+                file=path,
+                node=hex(n),
+                rev=fctx.rev(),
+                date=fctx.date(),
+                desc=fctx.description(),
+                author=fctx.user(),
+                rename=webutil.renamelink(fctx),
+                branch=webutil.nodebranchnodefault(fctx),
+                parent=webutil.siblings(parents),
+                child=webutil.siblings(fctx.children()),
+                diff=diffs)
 
 diff = filediff
 
 def annotate(web, req, tmpl):
-    return web.fileannotate(tmpl, web.filectx(req))
+    fctx = webutil.filectx(web.repo, req)
+    f = fctx.path()
+    n = fctx.filenode()
+    fl = fctx.filelog()
+    parity = paritygen(web.stripecount)
+
+    def annotate(**map):
+        last = None
+        if binary(fctx.data()):
+            mt = (mimetypes.guess_type(fctx.path())[0]
+                  or 'application/octet-stream')
+            lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
+                                '(binary:%s)' % mt)])
+        else:
+            lines = enumerate(fctx.annotate(follow=True, linenumber=True))
+        for lineno, ((f, targetline), l) in lines:
+            fnode = f.filenode()
+
+            if last != fnode:
+                last = fnode
+
+            yield {"parity": parity.next(),
+                   "node": hex(f.node()),
+                   "rev": f.rev(),
+                   "author": f.user(),
+                   "desc": f.description(),
+                   "file": f.path(),
+                   "targetline": targetline,
+                   "line": l,
+                   "lineid": "l%d" % (lineno + 1),
+                   "linenumber": "% 6d" % (lineno + 1)}
+
+    return tmpl("fileannotate",
+                file=f,
+                annotate=annotate,
+                path=webutil.up(f),
+                rev=fctx.rev(),
+                node=hex(fctx.node()),
+                author=fctx.user(),
+                date=fctx.date(),
+                desc=fctx.description(),
+                rename=webutil.renamelink(fctx),
+                branch=webutil.nodebranchnodefault(fctx),
+                parent=webutil.siblings(fctx.parents()),
+                child=webutil.siblings(fctx.children()),
+                permissions=fctx.manifest().flags(f))
 
 def filelog(web, req, tmpl):
-    return web.filelog(tmpl, web.filectx(req))
+    fctx = webutil.filectx(web.repo, req)
+    f = fctx.path()
+    fl = fctx.filelog()
+    count = len(fl)
+    pagelen = web.maxshortchanges
+    pos = fctx.filerev()
+    start = max(0, pos - pagelen + 1)
+    end = min(count, start + pagelen)
+    pos = end - 1
+    parity = paritygen(web.stripecount, offset=start-end)
+
+    def entries(limit=0, **map):
+        l = []
+
+        for i in xrange(start, end):
+            ctx = fctx.filectx(i)
+            n = fl.node(i)
+
+            l.insert(0, {"parity": parity.next(),
+                         "filerev": i,
+                         "file": f,
+                         "node": hex(ctx.node()),
+                         "author": ctx.user(),
+                         "date": ctx.date(),
+                         "rename": webutil.renamelink(fctx),
+                         "parent": webutil.siblings(fctx.parents()),
+                         "child": webutil.siblings(fctx.children()),
+                         "desc": ctx.description()})
+
+        if limit > 0:
+            l = l[:limit]
+
+        for e in l:
+            yield e
+
+    nodefunc = lambda x: fctx.filectx(fileid=x)
+    nav = webutil.revnavgen(pos, pagelen, count, nodefunc)
+    return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
+                entries=lambda **x: entries(limit=0, **x),
+                latestentry=lambda **x: entries(limit=1, **x))
+
 
 def archive(web, req, tmpl):
     type_ = req.form.get('type', [None])[0]
     allowed = web.configlist("web", "allow_archive")
-    if (type_ in web.archives and (type_ in allowed or
+    key = req.form['node'][0]
+
+    if type_ not in web.archives:
+        msg = 'Unsupported archive type: %s' % type_
+        raise ErrorResponse(HTTP_NOT_FOUND, msg)
+
+    if not ((type_ in allowed or
         web.configbool("web", "allow" + type_, False))):
-        web.archive(tmpl, req, req.form['node'][0], type_)
-        return []
-    raise ErrorResponse(HTTP_NOT_FOUND, 'unsupported archive type: %s' % type_)
+        msg = 'Archive type not allowed: %s' % type_
+        raise ErrorResponse(HTTP_FORBIDDEN, msg)
+
+    reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
+    cnode = web.repo.lookup(key)
+    arch_version = key
+    if cnode == key or key == 'tip':
+        arch_version = short(cnode)
+    name = "%s-%s" % (reponame, arch_version)
+    mimetype, artype, extension, encoding = web.archive_specs[type_]
+    headers = [
+        ('Content-Type', mimetype),
+        ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
+    ]
+    if encoding:
+        headers.append(('Content-Encoding', encoding))
+    req.header(headers)
+    req.respond(HTTP_OK)
+    archival.archive(web.repo, req, cnode, artype, prefix=name)
+    return []
+
 
 def static(web, req, tmpl):
     fname = req.form['file'][0]
@@ -124,3 +571,39 @@
                         os.path.join(web.templatepath, "static"),
                         untrusted=False)
     return [staticfile(static, fname, req)]
+
+def graph(web, req, tmpl):
+    rev = webutil.changectx(web.repo, req).rev()
+    bg_height = 39
+
+    max_rev = len(web.repo) - 1
+    revcount = min(max_rev, int(req.form.get('revcount', [25])[0]))
+    revnode = web.repo.changelog.node(rev)
+    revnode_hex = hex(revnode)
+    uprev = min(max_rev, rev + revcount)
+    downrev = max(0, rev - revcount)
+    lessrev = max(0, rev - revcount / 2)
+
+    maxchanges = web.maxshortchanges or web.maxchanges
+    count = len(web.repo)
+    changenav = webutil.revnavgen(rev, maxchanges, count, web.repo.changectx)
+
+    tree = list(graphmod.graph(web.repo, rev, downrev))
+    canvasheight = (len(tree) + 1) * bg_height - 27
+
+    data = []
+    for i, (ctx, vtx, edges) in enumerate(tree):
+        node = short(ctx.node())
+        age = templatefilters.age(ctx.date())
+        desc = templatefilters.firstline(ctx.description())
+        desc = cgi.escape(desc)
+        user = cgi.escape(templatefilters.person(ctx.user()))
+        branch = ctx.branch()
+        branch = branch, web.repo.branchtags().get(branch) == ctx.node()
+        data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
+
+    return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
+                lessrev=lessrev, revcountmore=revcount and 2 * revcount or 1,
+                revcountless=revcount / 2, downrev=downrev,
+                canvasheight=canvasheight, bg_height=bg_height,
+                jsdata=data, node=revnode_hex, changenav=changenav)
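
Most of the commands above stripe their output rows with paritygen() from hgweb.common. A simplified, illustrative reimplementation (ignoring the offset argument the real helper accepts) showing the alternating 0/1 stream the templates consume:

def paritygen(stripecount):
    # yield 0 for the first stripecount rows, 1 for the next stripecount, ...
    count, parity = 0, 0
    while True:
        yield parity
        count += 1
        if stripecount and count % stripecount == 0:
            parity = 1 - parity

p = paritygen(2)
assert [next(p) for _ in range(6)] == [0, 0, 1, 1, 0, 0]
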
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/hgweb/webutil.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,140 @@
+# hgweb/webutil.py - utility library for the web interface.
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os
+from mercurial.node import hex, nullid
+from mercurial.repo import RepoError
+from mercurial import util
+
+def up(p):
+    if p[0] != "/":
+        p = "/" + p
+    if p[-1] == "/":
+        p = p[:-1]
+    up = os.path.dirname(p)
+    if up == "/":
+        return "/"
+    return up + "/"
+
+def revnavgen(pos, pagelen, limit, nodefunc):
+    def seq(factor, limit=None):
+        if limit:
+            yield limit
+            if limit >= 20 and limit <= 40:
+                yield 50
+        else:
+            yield 1 * factor
+            yield 3 * factor
+        for f in seq(factor * 10):
+            yield f
+
+    def nav(**map):
+        l = []
+        last = 0
+        for f in seq(1, pagelen):
+            if f < pagelen or f <= last:
+                continue
+            if f > limit:
+                break
+            last = f
+            if pos + f < limit:
+                l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
+            if pos - f >= 0:
+                l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
+
+        try:
+            yield {"label": "(0)", "node": hex(nodefunc('0').node())}
+
+            for label, node in l:
+                yield {"label": label, "node": node}
+
+            yield {"label": "tip", "node": "tip"}
+        except RepoError:
+            pass
+
+    return nav
+
+def siblings(siblings=[], hiderev=None, **args):
+    siblings = [s for s in siblings if s.node() != nullid]
+    if len(siblings) == 1 and siblings[0].rev() == hiderev:
+        return
+    for s in siblings:
+        d = {'node': hex(s.node()), 'rev': s.rev()}
+        if hasattr(s, 'path'):
+            d['file'] = s.path()
+        d.update(args)
+        yield d
+
+def renamelink(fctx):
+    r = fctx.renamed()
+    if r:
+        return [dict(file=r[0], node=hex(r[1]))]
+    return []
+
+def nodetagsdict(repo, node):
+    return [{"name": i} for i in repo.nodetags(node)]
+
+def nodebranchdict(repo, ctx):
+    branches = []
+    branch = ctx.branch()
+    # If this is an empty repo, ctx.node() == nullid,
+    # ctx.branch() == 'default', but branchtags() is
+    # an empty dict. Using dict.get avoids a traceback.
+    if repo.branchtags().get(branch) == ctx.node():
+        branches.append({"name": branch})
+    return branches
+
+def nodeinbranch(repo, ctx):
+    branches = []
+    branch = ctx.branch()
+    if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
+        branches.append({"name": branch})
+    return branches
+
+def nodebranchnodefault(ctx):
+    branches = []
+    branch = ctx.branch()
+    if branch != 'default':
+        branches.append({"name": branch})
+    return branches
+
+def showtag(repo, tmpl, t1, node=nullid, **args):
+    for t in repo.nodetags(node):
+        yield tmpl(t1, tag=t, **args)
+
+def cleanpath(repo, path):
+    path = path.lstrip('/')
+    return util.canonpath(repo.root, '', path)
+
+def changectx(repo, req):
+    changeid = "tip"
+    if 'node' in req.form:
+        changeid = req.form['node'][0]
+    elif 'manifest' in req.form:
+        changeid = req.form['manifest'][0]
+
+    try:
+        ctx = repo[changeid]
+    except RepoError:
+        man = repo.manifest
+        ctx = repo[man.linkrev(man.lookup(changeid))]
+
+    return ctx
+
+def filectx(repo, req):
+    path = cleanpath(repo, req.form['file'][0])
+    if 'node' in req.form:
+        changeid = req.form['node'][0]
+    else:
+        changeid = req.form['filenode'][0]
+    try:
+        fctx = repo[changeid][path]
+    except RepoError:
+        fctx = repo.filectx(path, fileid=changeid)
+
+    return fctx
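
revnavgen() builds the changelog navigation links from the roughly exponential offset sequence produced by seq(). A worked example of the offsets it generates for a page length of 10 in a repository of 10000 revisions (nav() later drops entries that repeat or fall below the page length); seq() is copied from the hunk above:

def seq(factor, limit=None):
    if limit:
        yield limit
        if limit >= 20 and limit <= 40:
            yield 50
    else:
        yield 1 * factor
        yield 3 * factor
    for f in seq(factor * 10):
        yield f

offsets = []
for f in seq(1, 10):       # page length of 10
    if f > 10000:          # stop at the repository's revision count
        break
    offsets.append(f)
assert offsets == [10, 10, 30, 100, 300, 1000, 3000, 10000]
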
--- a/mercurial/hgweb/wsgicgi.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hgweb/wsgicgi.py	Wed Sep 17 11:34:37 2008 +0200
@@ -53,7 +53,7 @@
             try:
                 if headers_sent:
                     # Re-raise original exception if headers sent
-                    raise exc_info[0], exc_info[1], exc_info[2]
+                    raise exc_info[0](exc_info[1], exc_info[2])
             finally:
                 exc_info = None     # avoid dangling circular ref
         elif headers_set:
@@ -62,4 +62,6 @@
         headers_set[:] = [status, response_headers]
         return write
 
-    application(environ, start_response)
+    content = application(environ, start_response)
+    for chunk in content:
+        write(chunk)
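
The gateway change above completes the server side of the contract: the application's return value is an iterable of chunks, and the gateway, not the application, pushes each chunk through write(). A self-contained sketch of that driver loop, collecting output in a list instead of CGI stdout (names here are illustrative, not hgweb code):

def run_app(application, environ):
    out = []
    state = {'status': None, 'headers': None, 'sent': False}

    def write(data):
        # emit status and headers lazily, before the first body chunk
        if not state['sent']:
            out.append('Status: %s\r\n' % state['status'])
            for name, value in state['headers']:
                out.append('%s: %s\r\n' % (name, value))
            out.append('\r\n')
            state['sent'] = True
        out.append(data)

    def start_response(status, headers, exc_info=None):
        state['status'], state['headers'] = status, headers
        return write

    # the gateway drives the iterable the application returns
    for chunk in application(environ, start_response):
        write(chunk)
    return ''.join(out)

def hello(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['hello\n']

assert run_app(hello, {}).endswith('hello\n')
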
--- a/mercurial/hook.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/hook.py	Wed Sep 17 11:34:37 2008 +0200
@@ -96,10 +96,9 @@
         oldstdout = os.dup(sys.__stdout__.fileno())
         os.dup2(sys.__stderr__.fileno(), sys.__stdout__.fileno())
 
-    hooks = [(hname, cmd) for hname, cmd in ui.configitems("hooks")
-             if hname.split(".", 1)[0] == name and cmd]
-    hooks.sort()
-    for hname, cmd in hooks:
+    for hname, cmd in util.sort(ui.configitems('hooks')):
+        if hname.split('.')[0] != name or not cmd:
+            continue
         if callable(cmd):
             r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
         elif cmd.startswith('python:'):
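
The hook loop now leans on util.sort(), assumed here to be a simple copy-into-a-list-and-sort convenience; the following is a sketch under that assumption, not a quote of mercurial/util.py:

def sort(l):
    # copy the iterable into a list, sort it in place, return it
    l = list(l)
    l.sort()
    return l

items = {'commit.zz': 'echo z', 'commit.aa': 'echo a'}.items()
assert sort(items) == [('commit.aa', 'echo a'), ('commit.zz', 'echo z')]
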
--- a/mercurial/httprepo.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/httprepo.py	Wed Sep 17 11:34:37 2008 +0200
@@ -268,6 +268,7 @@
 
         # 1.0 here is the _protocol_ version
         opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
+        opener.addheaders.append(('Accept', 'application/mercurial-0.1'))
         urllib2.install_opener(opener)
 
     def url(self):
@@ -421,16 +422,18 @@
         fp = httpsendfile(tempname, "rb")
         try:
             try:
-                rfp = self.do_cmd(
-                    'unbundle', data=fp,
-                    headers={'Content-Type': 'application/octet-stream'},
-                    heads=' '.join(map(hex, heads)))
+                resp = self.do_read(
+                     'unbundle', data=fp,
+                     headers={'Content-Type': 'application/octet-stream'},
+                     heads=' '.join(map(hex, heads)))
+                resp_code, output = resp.split('\n', 1)
                 try:
-                    ret = int(rfp.readline())
-                    self.ui.write(rfp.read())
-                    return ret
-                finally:
-                    rfp.close()
+                    ret = int(resp_code)
+                except ValueError, err:
+                    raise util.UnexpectedOutput(
+                            _('push failed (unexpected response):'), resp)
+                self.ui.write(output)
+                return ret
             except socket.error, err:
                 if err[0] in (errno.ECONNRESET, errno.EPIPE):
                     raise util.Abort(_('push failed: %s') % err[1])
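
The client side of unbundle now reads the whole response and splits it once: the first line is the integer return code, the remainder is the server's captured output. A worked example of that parsing, with a made-up response string:

resp = '1\nadding changesets\nadding manifests\nadding file changes\n'
resp_code, output = resp.split('\n', 1)
ret = int(resp_code)            # ValueError here means an unexpected response
assert ret == 1
assert output.startswith('adding changesets')
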
--- a/mercurial/keepalive.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/keepalive.py	Wed Sep 17 11:34:37 2008 +0200
@@ -19,6 +19,8 @@
 
 # Modified by Benoit Boissinot:
 #  - fix for digest auth (inspired from urllib2.py @ Python v2.4)
+# Modified by Dirkjan Ochtman:
+#  - import md5 function from a local util module
 
 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
 
@@ -450,7 +452,7 @@
     keepalive_handler.close_all()
 
 def continuity(url):
-    import md5
+    from util import md5
     format = '%25s: %s'
 
     # first fetch the file with the normal http handler
--- a/mercurial/localrepo.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/localrepo.py	Wed Sep 17 11:34:37 2008 +0200
@@ -9,8 +9,10 @@
 from i18n import _
 import repo, changegroup
 import changelog, dirstate, filelog, manifest, context, weakref
-import lock, transaction, stat, errno, ui
+import lock, transaction, stat, errno, ui, store
 import os, revlog, time, util, extensions, hook, inspect
+import match as match_
+import merge as merge_
 
 class localrepository(repo.repository):
     capabilities = util.set(('lookup', 'changegroupsubset'))
@@ -48,41 +50,21 @@
             raise repo.RepoError(_("repository %s already exists") % path)
         else:
             # find requirements
+            requirements = []
             try:
                 requirements = self.opener("requires").read().splitlines()
+                for r in requirements:
+                    if r not in self.supported:
+                        raise repo.RepoError(_("requirement '%s' not supported") % r)
             except IOError, inst:
                 if inst.errno != errno.ENOENT:
                     raise
-                requirements = []
-        # check them
-        for r in requirements:
-            if r not in self.supported:
-                raise repo.RepoError(_("requirement '%s' not supported") % r)
 
-        # setup store
-        if "store" in requirements:
-            self.encodefn = util.encodefilename
-            self.decodefn = util.decodefilename
-            self.spath = os.path.join(self.path, "store")
-        else:
-            self.encodefn = lambda x: x
-            self.decodefn = lambda x: x
-            self.spath = self.path
-
-        try:
-            # files in .hg/ will be created using this mode
-            mode = os.stat(self.spath).st_mode
-            # avoid some useless chmods
-            if (0777 & ~util._umask) == (0777 & mode):
-                mode = None
-        except OSError:
-            mode = None
-
-        self._createmode = mode
-        self.opener.createmode = mode
-        sopener = util.opener(self.spath)
-        sopener.createmode = mode
-        self.sopener = util.encodedopener(sopener, self.encodefn)
+        self.store = store.store(requirements, self.path, util.opener)
+        self.spath = self.store.path
+        self.sopener = self.store.opener
+        self.sjoin = self.store.join
+        self.opener.createmode = self.store.createmode
 
         self.ui = ui.ui(parentui=parentui)
         try:
@@ -114,7 +96,22 @@
             self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
             return self.dirstate
         else:
-            raise AttributeError, name
+            raise AttributeError(name)
+
+    def __getitem__(self, changeid):
+        if changeid == None:
+            return context.workingctx(self)
+        return context.changectx(self, changeid)
+
+    def __nonzero__(self):
+        return True
+
+    def __len__(self):
+        return len(self.changelog)
+
+    def __iter__(self):
+        for i in xrange(len(self)):
+            yield i
 
     def url(self):
         return 'file:' + self.root
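
This hunk gives localrepository a container protocol: repo[changeid] returns a changectx (repo[None] the working context), len(repo) the revision count, and iteration yields revision numbers. A toy stand-in (not Mercurial code) demonstrating the same protocol:

class ToyRepo(object):
    def __init__(self, revs):
        self._revs = revs                  # list of fake changesets
    def __getitem__(self, changeid):
        if changeid is None:
            return 'workingctx'            # repo[None] -> working context
        return self._revs[changeid]        # repo[rev] -> changectx
    def __len__(self):
        return len(self._revs)
    def __iter__(self):
        for i in range(len(self)):
            yield i

repo = ToyRepo(['rev0', 'rev1', 'rev2'])
assert len(repo) == 3 and list(repo) == [0, 1, 2] and repo[1] == 'rev1'
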
@@ -146,7 +143,11 @@
             if prevtags and prevtags[-1] != '\n':
                 fp.write('\n')
             for name in names:
-                fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
+                m = munge and munge(name) or name
+                if self._tagstypecache and name in self._tagstypecache:
+                    old = self.tagscache.get(name, nullid)
+                    fp.write('%s %s\n' % (hex(old), m))
+                fp.write('%s %s\n' % (hex(node), m))
             fp.close()
 
         prevtags = ''
@@ -302,9 +303,8 @@
             n = nh[0]
             if n != nullid:
                 self.tagscache[k] = n
-                self._tagstypecache[k] = tagtypes[k]
+            self._tagstypecache[k] = tagtypes[k]
         self.tagscache['tip'] = self.changelog.tip()
-
         return self.tagscache
 
     def tagtype(self, tagname):
@@ -326,7 +326,7 @@
         last = {}
         ret = []
         for node in heads:
-            c = self.changectx(node)
+            c = self[node]
             rev = c.rev()
             try:
                 fnode = c.filenode('.hgtags')
@@ -347,8 +347,7 @@
             except:
                 r = -2 # sort to the beginning of the list if unknown
             l.append((r, t, n))
-        l.sort()
-        return [(t, n) for r, t, n in l]
+        return [(t, n) for r, t, n in util.sort(l)]
 
     def nodetags(self, node):
         '''return the tags associated with a node'''
@@ -359,7 +358,7 @@
         return self.nodetagscache.get(node, [])
 
     def _branchtags(self, partial, lrev):
-        tiprev = self.changelog.count() - 1
+        tiprev = len(self) - 1
         if lrev != tiprev:
             self._updatebranchcache(partial, lrev+1, tiprev+1)
             self._writebranchcache(partial, self.changelog.tip(), tiprev)
@@ -404,8 +403,7 @@
         try:
             last, lrev = lines.pop(0).split(" ", 1)
             last, lrev = bin(last), int(lrev)
-            if not (lrev < self.changelog.count() and
-                    self.changelog.node(lrev) == last): # sanity check
+            if lrev >= len(self) or self[lrev].node() != last:
                 # invalidate the cache
                 raise ValueError('invalidating branch cache (tip differs)')
             for l in lines:
@@ -432,18 +430,13 @@
 
     def _updatebranchcache(self, partial, start, end):
         for r in xrange(start, end):
-            c = self.changectx(r)
+            c = self[r]
             b = c.branch()
             partial[b] = c.node()
 
     def lookup(self, key):
         if key == '.':
-            key, second = self.dirstate.parents()
-            if key == nullid:
-                raise repo.RepoError(_("no revision checked out"))
-            if second != nullid:
-                self.ui.warn(_("warning: working directory has two parents, "
-                               "tag '.' uses the first\n"))
+            return self.dirstate.parents()[0]
         elif key == 'null':
             return nullid
         n = self.changelog._match(key)
@@ -469,36 +462,23 @@
     def join(self, f):
         return os.path.join(self.path, f)
 
-    def sjoin(self, f):
-        f = self.encodefn(f)
-        return os.path.join(self.spath, f)
-
     def wjoin(self, f):
         return os.path.join(self.root, f)
 
+    def rjoin(self, f):
+        return os.path.join(self.root, util.pconvert(f))
+
     def file(self, f):
         if f[0] == '/':
             f = f[1:]
         return filelog.filelog(self.sopener, f)
 
-    def changectx(self, changeid=None):
-        return context.changectx(self, changeid)
-
-    def workingctx(self):
-        return context.workingctx(self)
+    def changectx(self, changeid):
+        return self[changeid]
 
     def parents(self, changeid=None):
-        '''
-        get list of changectxs for parents of changeid or working directory
-        '''
-        if changeid is None:
-            pl = self.dirstate.parents()
-        else:
-            n = self.changelog.lookup(changeid)
-            pl = self.changelog.parents(n)
-        if pl[1] == nullid:
-            return [self.changectx(pl[0])]
-        return [self.changectx(pl[0]), self.changectx(pl[1])]
+        '''get list of changectxs for parents of changeid'''
+        return self[changeid].parents()
 
     def filectx(self, path, changeid=None, fileid=None):
         """changeid can be a changeset revision, node, or tag.
@@ -594,7 +574,7 @@
         tr = transaction.transaction(self.ui.warn, self.sopener,
                                      self.sjoin("journal"),
                                      aftertrans(renames),
-                                     self._createmode)
+                                     self.store.createmode)
         self._transref = weakref.ref(tr)
         return tr
 
@@ -680,19 +660,20 @@
         self._wlockref = weakref.ref(l)
         return l
 
-    def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
+    def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
         """
         commit an individual file as part of a larger transaction
         """
 
-        t = self.wread(fn)
+        fn = fctx.path()
+        t = fctx.data()
         fl = self.file(fn)
         fp1 = manifest1.get(fn, nullid)
         fp2 = manifest2.get(fn, nullid)
 
         meta = {}
-        cf = self.dirstate.copied(fn)
-        if cf and cf != fn:
+        cp = fctx.renamed()
+        if cp and cp[0] != fn:
             # Mark the new revision of this file as a copy of another
             # file.  This copy data will effectively act as a parent
             # of this new revision.  If this is a merge, the first
@@ -712,6 +693,7 @@
             #    \- 2 --- 4        as the merge base
             #
 
+            cf = cp[0]
             cr = manifest1.get(cf)
             nfp = fp2
 
@@ -725,19 +707,10 @@
             if not cr:
                 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
                               (fn, cf))
-                p1 = self.dirstate.parents()[0]
-                rev = self.changelog.rev(p1)
-                seen = {-1:None}
-                visit = [rev]
-                while visit:
-                    for p in self.changelog.parentrevs(visit.pop(0)):
-                        if p not in seen:
-                            seen[p] = True
-                            visit.append(p)
-                            ctx = self.changectx(p)
-                            if cf in ctx:
-                                cr = ctx[cf].filenode()
-                                break
+                for a in self['.'].ancestors():
+                    if cf in a:
+                        cr = a[cf].filenode()
+                        break
 
             self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
             meta["copy"] = cf
@@ -765,66 +738,85 @@
                            p1=p1, p2=p2, extra=extra, empty_ok=True)
 
     def commit(self, files=None, text="", user=None, date=None,
-               match=util.always, force=False, force_editor=False,
+               match=None, force=False, force_editor=False,
                p1=None, p2=None, extra={}, empty_ok=False):
-        wlock = lock = tr = None
-        valid = 0 # don't save the dirstate if this isn't set
+        wlock = lock = None
         if files:
             files = util.unique(files)
         try:
             wlock = self.wlock()
             lock = self.lock()
-            commit = []
-            remove = []
-            changed = []
             use_dirstate = (p1 is None) # not rawcommit
-            extra = extra.copy()
-
-            if use_dirstate:
-                if files:
-                    for f in files:
-                        s = self.dirstate[f]
-                        if s in 'nma':
-                            commit.append(f)
-                        elif s == 'r':
-                            remove.append(f)
-                        else:
-                            self.ui.warn(_("%s not tracked!\n") % f)
-                else:
-                    changes = self.status(match=match)[:5]
-                    modified, added, removed, deleted, unknown = changes
-                    commit = modified + added
-                    remove = removed
-            else:
-                commit = files
 
             if use_dirstate:
                 p1, p2 = self.dirstate.parents()
                 update_dirstate = True
 
                 if (not force and p2 != nullid and
-                    (files or match != util.always)):
+                    (match and (match.files() or match.anypats()))):
                     raise util.Abort(_('cannot partially commit a merge '
                                        '(do not specify files or patterns)'))
+
+                if files:
+                    modified, removed = [], []
+                    for f in files:
+                        s = self.dirstate[f]
+                        if s in 'nma':
+                            modified.append(f)
+                        elif s == 'r':
+                            removed.append(f)
+                        else:
+                            self.ui.warn(_("%s not tracked!\n") % f)
+                    changes = [modified, [], removed, [], []]
+                else:
+                    changes = self.status(match=match)
             else:
                 p1, p2 = p1, p2 or nullid
                 update_dirstate = (self.dirstate.parents()[0] == p1)
+                changes = [files, [], [], [], []]
 
+            ms = merge_.mergestate(self)
+            for f in changes[0]:
+                if f in ms and ms[f] == 'u':
+                    raise util.Abort(_("unresolved merge conflicts "
+                                                    "(see hg resolve)"))
+            wctx = context.workingctx(self, (p1, p2), text, user, date,
+                                      extra, changes)
+            return self._commitctx(wctx, force, force_editor, empty_ok,
+                                   use_dirstate, update_dirstate)
+        finally:
+            del lock, wlock
+
+    def commitctx(self, ctx):
+        wlock = lock = None
+        try:
+            wlock = self.wlock()
+            lock = self.lock()
+            return self._commitctx(ctx, force=True, force_editor=False,
+                                   empty_ok=True, use_dirstate=False,
+                                   update_dirstate=False)
+        finally:
+            del lock, wlock
+
+    def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
+                  use_dirstate=True, update_dirstate=True):
+        tr = None
+        valid = 0 # don't save the dirstate if this isn't set
+        try:
+            commit = util.sort(wctx.modified() + wctx.added())
+            remove = wctx.removed()
+            extra = wctx.extra().copy()
+            branchname = extra['branch']
+            user = wctx.user()
+            text = wctx.description()
+
+            p1, p2 = [p.node() for p in wctx.parents()]
             c1 = self.changelog.read(p1)
             c2 = self.changelog.read(p2)
             m1 = self.manifest.read(c1[0]).copy()
             m2 = self.manifest.read(c2[0])
 
             if use_dirstate:
-                branchname = self.workingctx().branch()
-                try:
-                    branchname = branchname.decode('UTF-8').encode('UTF-8')
-                except UnicodeDecodeError:
-                    raise util.Abort(_('branch name not in UTF-8!'))
-            else:
-                branchname = ""
-
-            if use_dirstate:
                 oldname = c1[5].get("branch") # stored in UTF-8
                 if (not commit and not remove and not force and p2 == nullid
                     and branchname == oldname):
@@ -842,26 +834,22 @@
 
             # check in files
             new = {}
-            linkrev = self.changelog.count()
-            commit.sort()
-            is_exec = util.execfunc(self.root, m1.execf)
-            is_link = util.linkfunc(self.root, m1.linkf)
+            changed = []
+            linkrev = len(self)
             for f in commit:
                 self.ui.note(f + "\n")
                 try:
-                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
-                    new_exec = is_exec(f)
-                    new_link = is_link(f)
+                    fctx = wctx.filectx(f)
+                    newflags = fctx.flags()
+                    new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
                     if ((not changed or changed[-1] != f) and
                         m2.get(f) != new[f]):
                         # mention the file in the changelog if some
                         # flag changed, even if there was no content
                         # change.
-                        old_exec = m1.execf(f)
-                        old_link = m1.linkf(f)
-                        if old_exec != new_exec or old_link != new_link:
+                        if m1.flags(f) != newflags:
                             changed.append(f)
-                    m1.set(f, new_exec, new_link)
+                    m1.set(f, newflags)
                     if use_dirstate:
                         self.dirstate.normal(f)
 
@@ -874,10 +862,9 @@
 
             # update manifest
             m1.update(new)
-            remove.sort()
             removed = []
 
-            for f in remove:
+            for f in util.sort(remove):
                 if f in m1:
                     del m1[f]
                     removed.append(f)
@@ -887,15 +874,12 @@
                                    (new, removed))
 
             # add changeset
-            new = new.keys()
-            new.sort()
-
-            user = user or self.ui.username()
             if (not empty_ok and not text) or force_editor:
                 edittext = []
                 if text:
                     edittext.append(text)
                 edittext.append("")
+                edittext.append("") # Empty line between message and comments.
                 edittext.append(_("HG: Enter commit message."
                                   "  Lines beginning with 'HG:' are removed."))
                 edittext.append("HG: --")
@@ -915,9 +899,6 @@
                 text = self.ui.edit("\n".join(edittext), user)
                 os.chdir(olddir)
 
-            if branchname:
-                extra["branch"] = branchname
-
             lines = [line.rstrip() for line in text.rstrip().splitlines()]
             while lines and not lines[0]:
                 del lines[0]
@@ -926,7 +907,7 @@
             text = '\n'.join(lines)
 
             n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
-                                   user, date, extra)
+                                   user, wctx.date(), extra)
             self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                       parent2=xp2)
             tr.close()
@@ -946,169 +927,103 @@
         finally:
             if not valid: # don't save our updated dirstate
                 self.dirstate.invalidate()
-            del tr, lock, wlock
+            del tr
 
-    def walk(self, node=None, files=[], match=util.always, badmatch=None):
+    def walk(self, match, node=None):
         '''
         walk recursively through the directory tree or a given
         changeset, finding all files matched by the match
         function
-
-        results are yielded in a tuple (src, filename), where src
-        is one of:
-        'f' the file was found in the directory tree
-        'm' the file was only in the dirstate and not in the tree
-        'b' file was not found and matched badmatch
         '''
+        return self[node].walk(match)
 
-        if node:
-            fdict = dict.fromkeys(files)
-            # for dirstate.walk, files=['.'] means "walk the whole tree".
-            # follow that here, too
-            fdict.pop('.', None)
-            mdict = self.manifest.read(self.changelog.read(node)[0])
-            mfiles = mdict.keys()
-            mfiles.sort()
-            for fn in mfiles:
-                for ffn in fdict:
-                    # match if the file is the exact name or a directory
-                    if ffn == fn or fn.startswith("%s/" % ffn):
-                        del fdict[ffn]
-                        break
-                if match(fn):
-                    yield 'm', fn
-            ffiles = fdict.keys()
-            ffiles.sort()
-            for fn in ffiles:
-                if badmatch and badmatch(fn):
-                    if match(fn):
-                        yield 'b', fn
-                else:
-                    self.ui.warn(_('%s: No such file in rev %s\n')
-                                 % (self.pathto(fn), short(node)))
-        else:
-            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
-                yield src, fn
-
-    def status(self, node1=None, node2=None, files=[], match=util.always,
-               list_ignored=False, list_clean=False, list_unknown=True):
+    def status(self, node1='.', node2=None, match=None,
+               ignored=False, clean=False, unknown=False):
         """return status of files between two nodes or node and working directory
 
         If node1 is None, use the first dirstate parent instead.
         If node2 is None, compare node1 with working directory.
         """
 
-        def fcmp(fn, getnode):
-            t1 = self.wread(fn)
-            return self.file(fn).cmp(getnode(fn), t1)
-
-        def mfmatches(node):
-            change = self.changelog.read(node)
-            mf = self.manifest.read(change[0]).copy()
+        def mfmatches(ctx):
+            mf = ctx.manifest().copy()
             for fn in mf.keys():
                 if not match(fn):
                     del mf[fn]
             return mf
 
-        modified, added, removed, deleted, unknown = [], [], [], [], []
-        ignored, clean = [], []
-
-        compareworking = False
-        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
-            compareworking = True
+        ctx1 = self[node1]
+        ctx2 = self[node2]
+        working = ctx2 == self[None]
+        parentworking = working and ctx1 == self['.']
+        match = match or match_.always(self.root, self.getcwd())
+        listignored, listclean, listunknown = ignored, clean, unknown
 
-        if not compareworking:
-            # read the manifest from node1 before the manifest from node2,
-            # so that we'll hit the manifest cache if we're going through
-            # all the revisions in parent->child order.
-            mf1 = mfmatches(node1)
-
-        # are we comparing the working directory?
-        if not node2:
-            (lookup, modified, added, removed, deleted, unknown,
-             ignored, clean) = self.dirstate.status(files, match,
-                                                    list_ignored, list_clean,
-                                                    list_unknown)
+        if working: # we need to scan the working dir
+            s = self.dirstate.status(match, listignored, listclean, listunknown)
+            cmp, modified, added, removed, deleted, unknown, ignored, clean = s
 
-            # are we comparing working dir against its parent?
-            if compareworking:
-                if lookup:
-                    fixup = []
-                    # do a full compare of any files that might have changed
-                    ctx = self.changectx()
-                    mexec = lambda f: 'x' in ctx.fileflags(f)
-                    mlink = lambda f: 'l' in ctx.fileflags(f)
-                    is_exec = util.execfunc(self.root, mexec)
-                    is_link = util.linkfunc(self.root, mlink)
-                    def flags(f):
-                        return is_link(f) and 'l' or is_exec(f) and 'x' or ''
-                    for f in lookup:
-                        if (f not in ctx or flags(f) != ctx.fileflags(f)
-                            or ctx[f].cmp(self.wread(f))):
-                            modified.append(f)
-                        else:
-                            fixup.append(f)
-                            if list_clean:
-                                clean.append(f)
+            # check for any possibly clean files
+            if parentworking and cmp:
+                fixup = []
+                # do a full compare of any files that might have changed
+                for f in cmp:
+                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
+                        or ctx1[f].cmp(ctx2[f].data())):
+                        modified.append(f)
+                    else:
+                        fixup.append(f)
+
+                if listclean:
+                    clean += fixup
 
-                    # update dirstate for files that are actually clean
-                    if fixup:
-                        wlock = None
+                # update dirstate for files that are actually clean
+                if fixup:
+                    wlock = None
+                    try:
                         try:
-                            try:
-                                wlock = self.wlock(False)
-                            except lock.LockException:
-                                pass
-                            if wlock:
-                                for f in fixup:
-                                    self.dirstate.normal(f)
-                        finally:
-                            del wlock
-            else:
+                            wlock = self.wlock(False)
+                            for f in fixup:
+                                self.dirstate.normal(f)
+                        except lock.LockException:
+                            pass
+                    finally:
+                        del wlock
+
+        if not parentworking:
+            mf1 = mfmatches(ctx1)
+            if working:
                 # we are comparing working dir against non-parent
                 # generate a pseudo-manifest for the working dir
-                # XXX: create it in dirstate.py ?
-                mf2 = mfmatches(self.dirstate.parents()[0])
-                is_exec = util.execfunc(self.root, mf2.execf)
-                is_link = util.linkfunc(self.root, mf2.linkf)
-                for f in lookup + modified + added:
-                    mf2[f] = ""
-                    mf2.set(f, is_exec(f), is_link(f))
+                mf2 = mfmatches(self['.'])
+                for f in cmp + modified + added:
+                    mf2[f] = None
+                    mf2.set(f, ctx2.flags(f))
                 for f in removed:
                     if f in mf2:
                         del mf2[f]
+            else:
+                # we are comparing two revisions
+                deleted, unknown, ignored = [], [], []
+                mf2 = mfmatches(ctx2)
 
-        else:
-            # we are comparing two revisions
-            mf2 = mfmatches(node2)
-
-        if not compareworking:
-            # flush lists from dirstate before comparing manifests
             modified, added, clean = [], [], []
-
-            # make sure to sort the files so we talk to the disk in a
-            # reasonable order
-            mf2keys = mf2.keys()
-            mf2keys.sort()
-            getnode = lambda fn: mf1.get(fn, nullid)
-            for fn in mf2keys:
+            for fn in mf2:
                 if fn in mf1:
                     if (mf1.flags(fn) != mf2.flags(fn) or
                         (mf1[fn] != mf2[fn] and
-                         (mf2[fn] != "" or fcmp(fn, getnode)))):
+                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                         modified.append(fn)
-                    elif list_clean:
+                    elif listclean:
                         clean.append(fn)
                     del mf1[fn]
                 else:
                     added.append(fn)
-
             removed = mf1.keys()
 
-        # sort and return results:
-        for l in modified, added, removed, deleted, unknown, ignored, clean:
-            l.sort()
-        return (modified, added, removed, deleted, unknown, ignored, clean)
+        r = modified, added, removed, deleted, unknown, ignored, clean
+        [l.sort() for l in r]
+        return r
 
     def add(self, list):
         wlock = self.wlock()
@@ -1183,7 +1098,7 @@
             wlock = self.wlock()
             for f in list:
                 if self.dirstate[f] != 'r':
-                    self.ui.warn("%s not removed!\n" % f)
+                    self.ui.warn(_("%s not removed!\n") % f)
                 else:
                     m = f in manifests[0] and manifests[0] or manifests[1]
                     t = self.file(f).read(m[f])
@@ -1213,10 +1128,11 @@
         heads = self.changelog.heads(start)
         # sort the output in rev descending order
         heads = [(-self.changelog.rev(h), h) for h in heads]
-        heads.sort()
-        return [n for (r, n) in heads]
+        return [n for (r, n) in util.sort(heads)]
 
-    def branchheads(self, branch, start=None):
+    def branchheads(self, branch=None, start=None):
+        if branch is None:
+            branch = self[None].branch()
         branches = self.branchtags()
         if branch not in branches:
             return []
@@ -1254,7 +1170,7 @@
             if rev in ancestors:
                 ancestors.update(self.changelog.parentrevs(rev))
                 ancestors.remove(rev)
-            elif self.changectx(rev).branch() == branch:
+            elif self[rev].branch() == branch:
                 heads.append(rev)
                 ancestors.update(self.changelog.parentrevs(rev))
         heads = [self.changelog.node(rev) for rev in heads]
@@ -1669,7 +1585,7 @@
         # Nor do we know which filenodes are missing.
         msng_filenode_set = {}
 
-        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
+        junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
         junk = None
 
         # A changeset always belongs to itself, so the changenode lookup
@@ -1864,12 +1780,10 @@
                     add_extra_nodes(fname,
                                     msng_filenode_set.setdefault(fname, {}))
                     changedfiles[fname] = 1
-            changedfiles = changedfiles.keys()
-            changedfiles.sort()
             # Go through all our files in order sorted by name.
-            for fname in changedfiles:
+            for fname in util.sort(changedfiles):
                 filerevlog = self.file(fname)
-                if filerevlog.count() == 0:
+                if not len(filerevlog):
                     raise util.Abort(_("empty or missing revlog for %s") % fname)
                 # Toss out the filenodes that the recipient isn't really
                 # missing.
@@ -1920,10 +1834,10 @@
         def identity(x):
             return x
 
-        def gennodelst(revlog):
-            for r in xrange(0, revlog.count()):
-                n = revlog.node(r)
-                if revlog.linkrev(n) in revset:
+        def gennodelst(log):
+            for r in log:
+                n = log.node(r)
+                if log.linkrev(n) in revset:
                     yield n
 
         def changed_file_collector(changedfileset):
@@ -1945,17 +1859,15 @@
             for chnk in cl.group(nodes, identity,
                                  changed_file_collector(changedfiles)):
                 yield chnk
-            changedfiles = changedfiles.keys()
-            changedfiles.sort()
 
             mnfst = self.manifest
             nodeiter = gennodelst(mnfst)
             for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                 yield chnk
 
-            for fname in changedfiles:
+            for fname in util.sort(changedfiles):
                 filerevlog = self.file(fname)
-                if filerevlog.count() == 0:
+                if not len(filerevlog):
                     raise util.Abort(_("empty or missing revlog for %s") % fname)
                 nodeiter = gennodelst(filerevlog)
                 nodeiter = list(nodeiter)
@@ -1984,7 +1896,7 @@
         """
         def csmap(x):
             self.ui.debug(_("add changeset %s\n") % short(x))
-            return cl.count()
+            return len(cl)
 
         def revmap(x):
             return cl.rev(x)
@@ -2007,11 +1919,11 @@
             trp = weakref.proxy(tr)
             # pull off the changeset group
             self.ui.status(_("adding changesets\n"))
-            cor = cl.count() - 1
+            cor = len(cl) - 1
             chunkiter = changegroup.chunkiter(source)
-            if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
+            if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
                 raise util.Abort(_("received changelog group is empty"))
-            cnr = cl.count() - 1
+            cnr = len(cl) - 1
             changesets = cnr - cor
 
             # pull off the manifest group
@@ -2031,11 +1943,11 @@
                     break
                 self.ui.debug(_("adding %s revisions\n") % f)
                 fl = self.file(f)
-                o = fl.count()
+                o = len(fl)
                 chunkiter = changegroup.chunkiter(source)
                 if fl.addgroup(chunkiter, revmap, trp) is None:
                     raise util.Abort(_("received file revlog group is empty"))
-                revisions += fl.count() - o
+                revisions += len(fl) - o
                 files += 1
 
             # make changelog see real files again
@@ -2110,7 +2022,7 @@
             except ValueError, TypeError:
                 raise util.UnexpectedOutput(
                     _('Unexpected response from remote server:'), l)
-            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
+            self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
             ofp = self.sopener(name, 'w')
             for chunk in util.filechunkiter(fp, limit=size):
                 ofp.write(chunk)
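
The localrepo changes above move commit, walk and status over to context and match
objects: repo[rev] or repo[node] yields a changectx, repo[None] the working directory
context, and status() now takes a match object and returns seven sorted lists
(modified, added, removed, deleted, unknown, ignored, clean). A minimal sketch of the
new calling convention, assuming a repository opened through the normal hg API; the
path and variable names are illustrative only:

    from mercurial import ui as uimod, hg, match as match_

    u = uimod.ui()
    repo = hg.repository(u, '.')      # repository path is an assumption

    wctx = repo[None]                 # working directory context
    pctx = repo['.']                  # first dirstate parent

    m = match_.always(repo.root, repo.getcwd())
    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(node1='.', node2=None, match=m, clean=True)

The same match object can be handed to repo.walk() or patch.diff(), which is what
allows the old files/match/badmatch keyword combinations to go away.
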
--- a/mercurial/lsprof.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/lsprof.py	Wed Sep 17 11:34:37 2008 +0200
@@ -25,7 +25,7 @@
     def sort(self, crit="inlinetime"):
         """XXX docstring"""
         if crit not in profiler_entry.__dict__:
-            raise ValueError, "Can't sort by %s" % crit
+            raise ValueError("Can't sort by %s" % crit)
         self.data.sort(lambda b, a: cmp(getattr(a, crit),
                                         getattr(b, crit)))
         for e in self.data:
--- a/mercurial/mail.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/mail.py	Wed Sep 17 11:34:37 2008 +0200
@@ -6,7 +6,8 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
-import os, smtplib, util, socket
+import os, smtplib, socket
+import util
 
 def _smtp(ui):
     '''build an smtp connection and return a function to send mail'''
@@ -53,7 +54,7 @@
     cmdline = '%s -f %s %s' % (program, util.email(sender),
                                ' '.join(map(util.email, recipients)))
     ui.note(_('sending mail: %s\n') % cmdline)
-    fp = os.popen(cmdline, 'w')
+    fp = util.popen(cmdline, 'w')
     fp.write(msg)
     ret = fp.close()
     if ret:
--- a/mercurial/manifest.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/manifest.py	Wed Sep 17 11:34:37 2008 +0200
@@ -8,7 +8,7 @@
 from node import bin, hex, nullid
 from revlog import revlog, RevlogError
 from i18n import _
-import array, struct, mdiff
+import array, struct, mdiff, parsers, util
 
 class manifestdict(dict):
     def __init__(self, mapping=None, flags=None):
@@ -18,16 +18,8 @@
         self._flags = flags
     def flags(self, f):
         return self._flags.get(f, "")
-    def execf(self, f):
-        "test for executable in manifest flags"
-        return "x" in self.flags(f)
-    def linkf(self, f):
-        "test for symlink in manifest flags"
-        return "l" in self.flags(f)
-    def set(self, f, execf=False, linkf=False):
-        if linkf: self._flags[f] = "l"
-        elif execf: self._flags[f] = "x"
-        else: self._flags[f] = ""
+    def set(self, f, flags):
+        self._flags[f] = flags
     def copy(self):
         return manifestdict(dict.copy(self), dict.copy(self._flags))
 
@@ -39,14 +31,7 @@
 
     def parse(self, lines):
         mfdict = manifestdict()
-        fdict = mfdict._flags
-        for l in lines.splitlines():
-            f, n = l.split('\0')
-            if len(n) > 40:
-                fdict[f] = n[40:]
-                mfdict[f] = bin(n[:40])
-            else:
-                mfdict[f] = bin(n)
+        parsers.parse_manifest(mfdict, mfdict._flags, lines)
         return mfdict
 
     def readdelta(self, node):
@@ -134,18 +119,16 @@
             return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2]
                             for d in x ])
 
-        def checkforbidden(f):
-            if '\n' in f or '\r' in f:
-                raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))
+        def checkforbidden(l):
+            for f in l:
+                if '\n' in f or '\r' in f:
+                    raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))
 
         # if we're using the listcache, make sure it is valid and
         # parented by the same node we're diffing against
         if not (changed and self.listcache and p1 and self.mapcache[0] == p1):
-            files = map.keys()
-            files.sort()
-
-            for f in files:
-                checkforbidden(f)
+            files = util.sort(map)
+            checkforbidden(files)
 
             # if this is changed to support newlines in filenames,
             # be sure to check the templates/ dir again (especially *-raw.tmpl)
@@ -156,8 +139,7 @@
         else:
             addlist = self.listcache
 
-            for f in changed[0]:
-                checkforbidden(f)
+            checkforbidden(changed[0])
             # combine the changed lists into one list for sorting
             work = [[x, 0] for x in changed[0]]
             work[len(work):] = [[x, 1] for x in changed[1]]
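
With execf/linkf gone, each manifest entry carries a single flags string: '' for a
regular file, 'x' for an executable, 'l' for a symlink. A small hypothetical
illustration (constructing an empty manifestdict this way is an assumption based on
the signature above):

    m = manifestdict()
    m['bin/hg'] = '\0' * 20          # 20-byte filenode (dummy value)
    m.set('bin/hg', 'x')             # the flags string is stored verbatim
    assert m.flags('bin/hg') == 'x'
    assert m.flags('README') == ''   # unlisted files default to no flags
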
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/match.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,47 @@
+import util
+
+class _match(object):
+    def __init__(self, root, cwd, files, mf, ap):
+        self._root = root
+        self._cwd = cwd
+        self._files = files
+        self._fmap = dict.fromkeys(files)
+        self.matchfn = mf
+        self._anypats = ap
+    def __call__(self, fn):
+        return self.matchfn(fn)
+    def __iter__(self):
+        for f in self._files:
+            yield f
+    def bad(self, f, msg):
+        return True
+    def dir(self, f):
+        pass
+    def missing(self, f):
+        pass
+    def exact(self, f):
+        return f in self._fmap
+    def rel(self, f):
+        return util.pathto(self._root, self._cwd, f)
+    def files(self):
+        return self._files
+    def anypats(self):
+        return self._anypats
+
+class always(_match):
+    def __init__(self, root, cwd):
+        _match.__init__(self, root, cwd, [], lambda f: True, False)
+
+class never(_match):
+    def __init__(self, root, cwd):
+        _match.__init__(self, root, cwd, [], lambda f: False, False)
+
+class exact(_match):
+    def __init__(self, root, cwd, files):
+        _match.__init__(self, root, cwd, files, lambda f: f in files, False)
+
+class match(_match):
+    def __init__(self, root, cwd, patterns, include, exclude, default):
+        f, mf, ap = util.matcher(root, cwd, patterns, include, exclude,
+                                 None, default)
+        _match.__init__(self, root, cwd, f, mf, ap)
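
The match classes above give status(), walk() and diff() a single object carrying the
file list, the match function and the pattern flag, instead of separate
files/match/badmatch arguments. A quick sketch of their behaviour; the paths are
illustrative:

    import match as match_

    m = match_.exact('/repo', '/repo', ['a.txt', 'src/b.py'])
    m('a.txt')         # True: matchfn is simple membership
    m('c.txt')         # False
    m.exact('a.txt')   # True: the file was named explicitly
    m.files()          # ['a.txt', 'src/b.py']
    m.anypats()        # False: no patterns involved

    match_.always('/repo', '/repo')('anything')   # True
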
--- a/mercurial/mdiff.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/mdiff.py	Wed Sep 17 11:34:37 2008 +0200
@@ -6,7 +6,7 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
-import bdiff, mpatch, re, struct, util, md5
+import bdiff, mpatch, re, struct, util
 
 def splitnewlines(text):
     '''like str.splitlines, but only split on newlines.'''
@@ -78,10 +78,7 @@
     epoch = util.datestr((0, 0))
 
     if not opts.text and (util.binary(a) or util.binary(b)):
-        def h(v):
-            # md5 is used instead of sha1 because md5 is supposedly faster
-            return md5.new(v).digest()
-        if a and b and len(a) == len(b) and h(a) == h(b):
+        if a and b and len(a) == len(b) and a == b:
             return ""
         l = ['Binary file %s has changed\n' % fn1]
     elif not a:
--- a/mercurial/merge.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/merge.py	Wed Sep 17 11:34:37 2008 +0200
@@ -5,9 +5,70 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from node import nullid, nullrev
+from node import nullid, nullrev, hex, bin
 from i18n import _
-import errno, util, os, filemerge, copies
+import errno, util, os, filemerge, copies, shutil
+
+class mergestate(object):
+    '''track 3-way merge state of individual files'''
+    def __init__(self, repo):
+        self._repo = repo
+        self._read()
+    def reset(self, node):
+        self._state = {}
+        self._local = node
+        shutil.rmtree(self._repo.join("merge"), True)
+    def _read(self):
+        self._state = {}
+        try:
+            localnode = None
+            f = self._repo.opener("merge/state")
+            for i, l in enumerate(f):
+                if i == 0:
+                    localnode = l[:-1]
+                else:
+                    bits = l[:-1].split("\0")
+                    self._state[bits[0]] = bits[1:]
+            self._local = bin(localnode)
+        except IOError, err:
+            if err.errno != errno.ENOENT:
+                raise
+    def _write(self):
+        f = self._repo.opener("merge/state", "w")
+        f.write(hex(self._local) + "\n")
+        for d, v in self._state.items():
+            f.write("\0".join([d] + v) + "\n")
+    def add(self, fcl, fco, fca, fd, flags):
+        hash = util.sha1(fcl.path()).hexdigest()
+        self._repo.opener("merge/" + hash, "w").write(fcl.data())
+        self._state[fd] = ['u', hash, fcl.path(), fca.path(),
+                           hex(fca.filenode()), fco.path(), flags]
+        self._write()
+    def __contains__(self, dfile):
+        return dfile in self._state
+    def __getitem__(self, dfile):
+        return self._state[dfile][0]
+    def __iter__(self):
+        l = self._state.keys()
+        l.sort()
+        for f in l:
+            yield f
+    def mark(self, dfile, state):
+        self._state[dfile][0] = state
+        self._write()
+    def resolve(self, dfile, wctx, octx):
+        if self[dfile] == 'r':
+            return 0
+        state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
+        f = self._repo.opener("merge/" + hash)
+        self._repo.wwrite(dfile, f.read(), flags)
+        fcd = wctx[dfile]
+        fco = octx[ofile]
+        fca = self._repo.filectx(afile, fileid=anode)
+        r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
+        if not r:
+            self.mark(dfile, 'r')
+        return r
 
 def _checkunknown(wctx, mctx):
     "check for collisions between unknown files and files in mctx"
@@ -197,19 +258,44 @@
 
     return action
 
+def actioncmp(a1, a2):
+    m1 = a1[1]
+    m2 = a2[1]
+    if m1 == m2:
+        return cmp(a1, a2)
+    if m1 == 'r':
+        return -1
+    if m2 == 'r':
+        return 1
+    return cmp(a1, a2)
+
 def applyupdates(repo, action, wctx, mctx):
     "apply the merge action list to the working directory"
 
     updated, merged, removed, unresolved = 0, 0, 0, 0
-    action.sort()
-    # prescan for copy/renames
+    ms = mergestate(repo)
+    ms.reset(wctx.parents()[0].node())
+    moves = []
+    action.sort(actioncmp)
+
+    # prescan for merges
     for a in action:
         f, m = a[:2]
         if m == 'm': # merge
             f2, fd, flags, move = a[2:]
-            if f != fd:
-                repo.ui.debug(_("copying %s to %s\n") % (f, fd))
-                repo.wwrite(fd, repo.wread(f), flags)
+            repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
+            fcl = wctx[f]
+            fco = mctx[f2]
+            fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
+            ms.add(fcl, fco, fca, fd, flags)
+            if f != fd and move:
+                moves.append(f)
+
+    # remove renamed files after safely stored
+    for f in moves:
+        if util.lexists(repo.wjoin(f)):
+            repo.ui.debug(_("removing %s\n") % f)
+            os.unlink(repo.wjoin(f))
 
     audit_path = util.path_auditor(repo.root)
 
@@ -229,7 +315,7 @@
             removed += 1
         elif m == "m": # merge
             f2, fd, flags, move = a[2:]
-            r = filemerge.filemerge(repo, f, fd, f2, wctx, mctx)
+            r = ms.resolve(fd, wctx, mctx)
             if r > 0:
                 unresolved += 1
             else:
@@ -261,7 +347,7 @@
             updated += 1
         elif m == "dr": # divergent renames
             fl = a[2]
-            repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
+            repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
             for nf in fl:
                 repo.ui.warn(" %s\n" % nf)
         elif m == "e": # exec
@@ -337,7 +423,7 @@
 
     wlock = repo.wlock()
     try:
-        wc = repo.workingctx()
+        wc = repo[None]
         if node is None:
             # tip of current branch
             try:
@@ -349,7 +435,7 @@
                     raise util.Abort(_("branch %s not found") % wc.branch())
         overwrite = force and not branchmerge
         pl = wc.parents()
-        p1, p2 = pl[0], repo.changectx(node)
+        p1, p2 = pl[0], repo[node]
         pa = p1.ancestor(p2)
         fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
         fastforward = False
@@ -388,7 +474,7 @@
         action = []
         if not force:
             _checkunknown(wc, p2)
-        if not util.checkfolding(repo.path):
+        if not util.checkcase(repo.path):
             _checkcollision(p2)
         action += _forgetremoved(wc, p2, branchmerge)
         action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
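
mergestate persists one record per conflicted file under .hg/merge: the first line of
merge/state is the hex of the local parent, each further line holds the '\0'-joined
fields written by add(), and the pre-merge local content is stashed under
merge/<sha1 of the path>. A sketch of driving it after an update, assuming repo
already exists and the working directory has two parents (i.e. a merge is in
progress):

    from mercurial import merge as merge_

    ms = merge_.mergestate(repo)
    wctx = repo[None]              # working directory context
    octx = wctx.parents()[1]       # the other merge parent (assumption)

    for f in ms:                   # files are yielded in sorted order
        if ms[f] == 'u':           # 'u' = unresolved, 'r' = resolved
            if not ms.resolve(f, wctx, octx):
                repo.ui.status("resolved %s\n" % f)
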
--- a/mercurial/osutil.c	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/osutil.c	Wed Sep 17 11:34:37 2008 +0200
@@ -96,208 +96,124 @@
 	listdir_stat_new,          /* tp_new */
 };
 
-static PyObject *listfiles(PyObject *list, DIR *dir,
-			   int keep_stat, int *need_stat)
-{
-	struct dirent *ent;
-	PyObject *name, *py_kind, *val;
-
-#ifdef DT_REG
-	*need_stat = 0;
-#else
-	*need_stat = 1;
-#endif
-
-	for (ent = readdir(dir); ent; ent = readdir(dir)) {
-		int kind = -1;
-
-		if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, ".."))
-			continue;
-
-#ifdef DT_REG
-		if (!keep_stat)
-			switch (ent->d_type) {
-			case DT_REG: kind = S_IFREG; break;
-			case DT_DIR: kind = S_IFDIR; break;
-			case DT_LNK: kind = S_IFLNK; break;
-			case DT_BLK: kind = S_IFBLK; break;
-			case DT_CHR: kind = S_IFCHR; break;
-			case DT_FIFO: kind = S_IFIFO; break;
-			case DT_SOCK: kind = S_IFSOCK; break;
-			default:
-				*need_stat = 0;
-				break;
-			}
-#endif
-
-		if (kind != -1)
-			py_kind = PyInt_FromLong(kind);
-		else {
-			py_kind = Py_None;
-			Py_INCREF(Py_None);
-		}
-
-		val = PyTuple_New(keep_stat ? 3 : 2);
-		name = PyString_FromString(ent->d_name);
-
-		if (!name || !py_kind || !val) {
-			Py_XDECREF(name);
-			Py_XDECREF(py_kind);
-			Py_XDECREF(val);
-			return PyErr_NoMemory();
-		}
-
-		PyTuple_SET_ITEM(val, 0, name);
-		PyTuple_SET_ITEM(val, 1, py_kind);
-		if (keep_stat) {
-			PyTuple_SET_ITEM(val, 2, Py_None);
-			Py_INCREF(Py_None);
-		}
-
-		PyList_Append(list, val);
-		Py_DECREF(val);
-	}
-
-	return 0;
-}
-
-static PyObject *statfiles(PyObject *list, PyObject *ctor_args, int keep,
-			   char *path, int len, int dfd)
+int entkind(struct dirent *ent)
 {
-	struct stat buf;
-	struct stat *stp = &buf;
-	int kind;
-	int ret;
-	ssize_t i;
-	ssize_t size = PyList_Size(list);
-
-	for (i = 0; i < size; i++) {
-		PyObject *elt = PyList_GetItem(list, i);
-		char *name = PyString_AsString(PyTuple_GET_ITEM(elt, 0));
-		PyObject *py_st = NULL;
-		PyObject *py_kind = PyTuple_GET_ITEM(elt, 1);
-
-		kind = py_kind == Py_None ? -1 : PyInt_AsLong(py_kind);
-		if (kind != -1 && !keep)
-			continue;
-
-		strncpy(path + len + 1, name, PATH_MAX - len);
-		path[PATH_MAX] = 0;
-
-		if (keep) {
-			py_st = PyObject_CallObject(
-				(PyObject *)&listdir_stat_type, ctor_args);
-			if (!py_st)
-				return PyErr_NoMemory();
-			stp = &((struct listdir_stat *)py_st)->st;
-			PyTuple_SET_ITEM(elt, 2, py_st);
-		}
-
-#ifdef AT_SYMLINK_NOFOLLOW
-		ret = fstatat(dfd, name, stp, AT_SYMLINK_NOFOLLOW);
-#else
-		ret = lstat(path, stp);
+#ifdef DT_REG
+	switch (ent->d_type) {
+	case DT_REG: return S_IFREG;
+	case DT_DIR: return S_IFDIR;
+	case DT_LNK: return S_IFLNK;
+	case DT_BLK: return S_IFBLK;
+	case DT_CHR: return S_IFCHR;
+	case DT_FIFO: return S_IFIFO;
+	case DT_SOCK: return S_IFSOCK;
+	}
 #endif
-		if (ret == -1)
-			return PyErr_SetFromErrnoWithFilename(PyExc_OSError,
-							      path);
-
-		if (kind == -1) {
-			if (S_ISREG(stp->st_mode))
-				kind = S_IFREG;
-			else if (S_ISDIR(stp->st_mode))
-				kind = S_IFDIR;
-			else if (S_ISLNK(stp->st_mode))
-				kind = S_IFLNK;
-			else if (S_ISBLK(stp->st_mode))
-				kind = S_IFBLK;
-			else if (S_ISCHR(stp->st_mode))
-				kind = S_IFCHR;
-			else if (S_ISFIFO(stp->st_mode))
-				kind = S_IFIFO;
-			else if (S_ISSOCK(stp->st_mode))
-				kind = S_IFSOCK;
-			else
-				kind = stp->st_mode;
-		}
-
-		if (py_kind == Py_None && kind != -1) {
-			py_kind = PyInt_FromLong(kind);
-			if (!py_kind)
-				return PyErr_NoMemory();
-			Py_XDECREF(Py_None);
-			PyTuple_SET_ITEM(elt, 1, py_kind);
-		}
-	}
-
-	return 0;
+	return -1;
 }
 
 static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs)
 {
-	static char *kwlist[] = { "path", "stat", NULL };
-	DIR *dir = NULL;
-	PyObject *statobj = NULL;
-	PyObject *list = NULL;
-	PyObject *err = NULL;
-	PyObject *ctor_args = NULL;
-	char *path;
-	char full_path[PATH_MAX + 10];
-	int path_len;
-	int need_stat, keep_stat;
-	int dfd;
+	static char *kwlist[] = { "path", "stat", "skip", NULL };
+	PyObject *statflag = NULL, *list, *elem, *stat, *ret = NULL;
+	char fullpath[PATH_MAX + 10], *path, *skip = NULL;
+	int pathlen, keepstat, kind, dfd = -1, err;
+	struct stat st;
+	struct dirent *ent;
+	DIR *dir;
 
-	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|O:listdir", kwlist,
-					 &path, &path_len, &statobj))
-		goto bail;
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|Os:listdir", kwlist,
+					 &path, &pathlen, &statflag, &skip))
+		goto error_parse;
 
-	keep_stat = statobj && PyObject_IsTrue(statobj);
+	if (pathlen >= PATH_MAX)
+		goto error_parse;
+
+	strncpy(fullpath, path, PATH_MAX);
+	fullpath[pathlen] = '/';
+	keepstat = statflag && PyObject_IsTrue(statflag);
 
 #ifdef AT_SYMLINK_NOFOLLOW
 	dfd = open(path, O_RDONLY);
-	if (dfd != -1)
-		dir = fdopendir(dfd);
+	if (dfd == -1) {
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_parse;
+	}
+	dir = fdopendir(dfd);
 #else
 	dir = opendir(path);
-	dfd = -1;
 #endif
 	if (!dir) {
-		err = PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
-		goto bail;
-	}
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_dir;
+	}
 
 	list = PyList_New(0);
-	ctor_args = PyTuple_New(0);
-	if (!list || !ctor_args)
-		goto bail;
+	if (!list)
+		goto error_list;
+
+	while ((ent = readdir(dir))) {
+		if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, ".."))
+			continue;
 
-	strncpy(full_path, path, PATH_MAX);
-	full_path[path_len] = '/';
-
-	err = listfiles(list, dir, keep_stat, &need_stat);
-	if (err)
-		goto bail;
-
-	PyList_Sort(list);
+		kind = entkind(ent);
+		if (kind == -1 || keepstat) {
+#ifdef AT_SYMLINK_NOFOLLOW
+			err = fstatat(dfd, ent->d_name, &st,
+				      AT_SYMLINK_NOFOLLOW);
+#else
+			strncpy(fullpath + pathlen + 1, ent->d_name,
+				PATH_MAX - pathlen);
+			fullpath[PATH_MAX] = 0;
+			err = lstat(fullpath, &st);
+#endif
+			if (err == -1) {
+				strncpy(fullpath + pathlen + 1, ent->d_name,
+					PATH_MAX - pathlen);
+				fullpath[PATH_MAX] = 0;
+				PyErr_SetFromErrnoWithFilename(PyExc_OSError,
+							       fullpath);
+				goto error;
+			}
+			kind = st.st_mode & S_IFMT;
+		}
 
-	if (!keep_stat && !need_stat)
-		goto done;
+		/* quit early? */
+		if (skip && kind == S_IFDIR && !strcmp(ent->d_name, skip)) {
+			ret = PyList_New(0);
+			goto error;
+		}
 
-	err = statfiles(list, ctor_args, keep_stat, full_path, path_len, dfd);
-	if (!err)
-		goto done;
+		if (keepstat) {
+			stat = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
+			if (!stat)
+				goto error;
+			memcpy(&((struct listdir_stat *)stat)->st, &st, sizeof(st));
+			elem = Py_BuildValue("siN", ent->d_name, kind, stat);
+		} else
+			elem = Py_BuildValue("si", ent->d_name, kind);
+		if (!elem)
+			goto error;
 
- bail:
-	Py_XDECREF(list);
+		PyList_Append(list, elem);
+		Py_DECREF(elem);
+	}
+
+	ret = list;
+	Py_INCREF(ret);
 
- done:
-	Py_XDECREF(ctor_args);
-	if (dir)
-		closedir(dir);
-	return err ? err : list;
+error:
+	Py_DECREF(list);
+error_list:
+	closedir(dir);
+error_dir:
+#ifdef AT_SYMLINK_NOFOLLOW
+	close(dfd);
+#endif
+error_parse:
+	return ret;
 }
 
-
 static char osutil_doc[] = "Native operating system services.";
 
 static PyMethodDef methods[] = {
--- a/mercurial/osutil.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/osutil.py	Wed Sep 17 11:34:37 2008 +0200
@@ -10,7 +10,7 @@
     if stat.S_ISSOCK(mode): return stat.S_IFSOCK
     return mode
 
-def listdir(path, stat=False):
+def listdir(path, stat=False, skip=None):
     '''listdir(path, stat=False) -> list_of_tuples
 
     Return a sorted list containing information about the entries
@@ -30,6 +30,8 @@
     names.sort()
     for fn in names:
         st = os.lstat(prefix + fn)
+        # the 'stat' module is shadowed by the parameter, so test the mode bits
+        if fn == skip and (st.st_mode & 0170000) == 0040000:
+            return []
         if stat:
             result.append((fn, _mode_to_kind(st.st_mode), st))
         else:
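
Both listdir implementations now take a skip argument: as soon as a directory entry
with that name is seen, the scan is abandoned and an empty list comes back, so a
caller can cheaply prune directories that contain, say, a nested '.hg'. A
hypothetical call:

    import osutil

    # default stat=False yields (name, kind) pairs; with skip='.hg' the
    # listing collapses to [] once a directory named '.hg' is encountered
    for name, kind in osutil.listdir('subdir', skip='.hg'):
        print name, kind
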
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/parsers.c	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,169 @@
+/*
+ parsers.c - efficient content parsing
+
+ Copyright 2008 Matt Mackall <mpm@selenic.com> and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#include <Python.h>
+#include <ctype.h>
+#include <string.h>
+
+static int hexdigit(char c)
+{
+	if (c >= '0' && c <= '9')
+		return c - '0';
+
+	if (c >= 'A' && c <= 'F')
+		return c - 'A' + 10;
+
+	if (c >= 'a' && c <= 'f')
+		return c - 'a' + 10;
+
+	return -1;
+}
+
+/*
+ * Turn a hex-encoded string into binary.
+ */
+static PyObject *unhexlify(const char *str, int len)
+{
+	PyObject *ret = NULL;
+	const char *c;
+	char *d;
+
+	if (len % 2) {
+		PyErr_SetString(PyExc_ValueError,
+				"input is not even in length");
+		goto bail;
+	}
+
+	ret = PyString_FromStringAndSize(NULL, len / 2);
+	if (!ret)
+		goto bail;
+
+	d = PyString_AsString(ret);
+	if (!d)
+		goto bail;
+
+	for (c = str; c < str + len;) {
+		int hi = hexdigit(*c++);
+		int lo = hexdigit(*c++);
+
+		if (hi == -1 || lo == -1) {
+			PyErr_SetString(PyExc_ValueError,
+					"input contains non-hex character");
+			goto bail;
+		}
+
+		*d++ = (hi << 4) | lo;
+	}
+
+	goto done;
+
+bail:
+	Py_XDECREF(ret);
+	ret = NULL;
+done:
+	return ret;
+}
+
+/*
+ * This code assumes that a manifest is stitched together with newline
+ * ('\n') characters.
+ */
+static PyObject *parse_manifest(PyObject *self, PyObject *args)
+{
+	PyObject *mfdict, *fdict;
+	char *str, *cur, *start, *zero;
+	int len;
+
+	if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest",
+			      &PyDict_Type, &mfdict,
+			      &PyDict_Type, &fdict,
+			      &str, &len))
+		goto quit;
+
+	for (start = cur = str, zero = NULL; cur < str + len; cur++) {
+		PyObject *file = NULL, *node = NULL;
+		PyObject *flags = NULL;
+		int nlen;
+
+		if (!*cur) {
+			zero = cur;
+			continue;
+		}
+		else if (*cur != '\n')
+			continue;
+
+		if (!zero) {
+			PyErr_SetString(PyExc_ValueError,
+					"manifest entry has no separator");
+			goto quit;
+		}
+
+		file = PyString_FromStringAndSize(start, zero - start);
+		if (!file)
+			goto bail;
+
+		nlen = cur - zero - 1;
+
+		node = unhexlify(zero + 1, nlen > 40 ? 40 : nlen);
+		if (!node)
+			goto bail;
+
+		if (nlen > 40) {
+			flags = PyString_FromStringAndSize(zero + 41,
+							   nlen - 40);
+			if (!flags)
+				goto bail;
+
+			if (PyDict_SetItem(fdict, file, flags) == -1)
+				goto bail;
+		}
+
+		if (PyDict_SetItem(mfdict, file, node) == -1)
+			goto bail;
+
+		start = cur + 1;
+		zero = NULL;
+
+		Py_XDECREF(flags);
+		Py_XDECREF(node);
+		Py_XDECREF(file);
+		continue;
+	bail:
+		Py_XDECREF(flags);
+		Py_XDECREF(node);
+		Py_XDECREF(file);
+		goto quit;
+	}
+
+	if (len > 0 && *(cur - 1) != '\n') {
+		PyErr_SetString(PyExc_ValueError,
+				"manifest contains trailing garbage");
+		goto quit;
+	}
+
+	Py_INCREF(Py_None);
+	return Py_None;
+
+quit:
+	return NULL;
+}
+
+static char parsers_doc[] = "Efficient content parsing.";
+
+static PyMethodDef methods[] = {
+	{"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
+	{NULL, NULL}
+};
+
+PyMODINIT_FUNC initparsers(void)
+{
+	Py_InitModule3("parsers", methods, parsers_doc);
+}
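
parse_manifest replaces the splitlines/split loop removed from manifest.py above:
every entry is '<path>\0<40 hex digits>[flags]\n', the node is unhexlified into the
first dict and any trailing flag characters land in the second. A tiny illustration
with made-up values:

    import parsers

    mfdict, flags = {}, {}
    data = "README\0" + "a" * 40 + "\n" + "bin/hg\0" + "b" * 40 + "x\n"
    parsers.parse_manifest(mfdict, flags, data)
    # mfdict == {'README': '\xaa' * 20, 'bin/hg': '\xbb' * 20}
    # flags  == {'bin/hg': 'x'}
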
--- a/mercurial/patch.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/patch.py	Wed Sep 17 11:34:37 2008 +0200
@@ -8,8 +8,8 @@
 
 from i18n import _
 from node import hex, nullid, short
-import base85, cmdutil, mdiff, util, context, revlog, diffhelpers, copies
-import cStringIO, email.Parser, os, popen2, re, sha, errno
+import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
+import cStringIO, email.Parser, os, popen2, re, errno
 import sys, tempfile, zlib
 
 class PatchError(Exception):
@@ -229,7 +229,7 @@
                 return internalpatch(patchname, ui, strip, cwd, files)
             except NoHunks:
                 patcher = util.find_exe('gpatch') or util.find_exe('patch')
-                ui.debug('no valid hunks found; trying with %r instead\n' %
+                ui.debug(_('no valid hunks found; trying with %r instead\n') %
                          patcher)
                 if util.needbinarypatch():
                     args.append('--binary')
@@ -376,15 +376,11 @@
 
         if not self.rej:
             return
-        if self.hunks != 1:
-            hunkstr = "s"
-        else:
-            hunkstr = ""
 
         fname = self.fname + ".rej"
         self.ui.warn(
-            _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
-            (len(self.rej), self.hunks, hunkstr, fname))
+            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
+            (len(self.rej), self.hunks, fname))
         try: os.unlink(fname)
         except:
             pass
@@ -537,11 +533,11 @@
         # if we hit eof before finishing out the hunk, the last line will
         # be zero length.  Lets try to fix it up.
         while len(self.hunk[-1]) == 0:
-                del self.hunk[-1]
-                del self.a[-1]
-                del self.b[-1]
-                self.lena -= 1
-                self.lenb -= 1
+            del self.hunk[-1]
+            del self.a[-1]
+            del self.b[-1]
+            self.lena -= 1
+            self.lenb -= 1
 
     def read_context_hunk(self, lr):
         self.desc = lr.readline()
@@ -1094,8 +1090,7 @@
         repo.copy(src, dst)
     removes = removes.keys()
     if removes:
-        removes.sort()
-        repo.remove(removes, True)
+        repo.remove(util.sort(removes), True)
     for f in patches:
         ctype, gp = patches[f]
         if gp and gp.mode:
@@ -1113,9 +1108,7 @@
     cmdutil.addremove(repo, cfiles)
     files = patches.keys()
     files.extend([r for r in removes if r not in files])
-    files.sort()
-
-    return files
+    return util.sort(files)
 
 def b85diff(to, tn):
     '''print base85-encoded binary diff'''
@@ -1123,7 +1116,7 @@
         if not text:
             return '0' * 40
         l = len(text)
-        s = sha.new('blob %d\0' % l)
+        s = util.sha1('blob %d\0' % l)
         s.update(text)
         return s.hexdigest()
 
@@ -1155,7 +1148,7 @@
     ret.append('\n')
     return ''.join(ret)
 
-def diff(repo, node1=None, node2=None, files=None, match=util.always,
+def diff(repo, node1=None, node2=None, match=None,
          fp=None, changes=None, opts=None):
     '''print diff of changes to files between two nodes, or node and
     working directory.
@@ -1163,6 +1156,9 @@
     if node1 is None, use first dirstate parent instead.
     if node2 is None, compare node1 with working directory.'''
 
+    if not match:
+        match = cmdutil.matchall(repo)
+
     if opts is None:
         opts = mdiff.defaultopts
     if fp is None:
@@ -1171,12 +1167,6 @@
     if not node1:
         node1 = repo.dirstate.parents()[0]
 
-    ccache = {}
-    def getctx(r):
-        if r not in ccache:
-            ccache[r] = context.changectx(repo, r)
-        return ccache[r]
-
     flcache = {}
     def getfilectx(f, ctx):
         flctx = ctx.filectx(f, filelog=flcache.get(f))
@@ -1186,30 +1176,19 @@
 
     # reading the data for node1 early allows it to play nicely
     # with repo.status and the revlog cache.
-    ctx1 = context.changectx(repo, node1)
+    ctx1 = repo[node1]
     # force manifest reading
     man1 = ctx1.manifest()
     date1 = util.datestr(ctx1.date())
 
     if not changes:
-        changes = repo.status(node1, node2, files, match=match)[:5]
-    modified, added, removed, deleted, unknown = changes
+        changes = repo.status(node1, node2, match=match)
+    modified, added, removed = changes[:3]
 
     if not modified and not added and not removed:
         return
 
-    if node2:
-        ctx2 = context.changectx(repo, node2)
-        execf2 = ctx2.manifest().execf
-        linkf2 = ctx2.manifest().linkf
-    else:
-        ctx2 = context.workingctx(repo)
-        execf2 = util.execfunc(repo.root, None)
-        linkf2 = util.linkfunc(repo.root, None)
-        if execf2 is None:
-            mc = ctx2.parents()[0].manifest().copy()
-            execf2 = mc.execf
-            linkf2 = mc.linkf
+    ctx2 = repo[node2]
 
     if repo.ui.quiet:
         r = None
@@ -1218,15 +1197,14 @@
         r = [hexfunc(node) for node in [node1, node2] if node]
 
     if opts.git:
-        copy, diverge = copies.copies(repo, ctx1, ctx2, repo.changectx(nullid))
+        copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
         for k, v in copy.items():
             copy[v] = k
 
-    all = modified + added + removed
-    all.sort()
     gone = {}
+    gitmode = {'l': '120000', 'x': '100755', '': '100644'}
 
-    for f in all:
+    for f in util.sort(modified + added + removed):
         to = None
         tn = None
         dodiff = True
@@ -1237,18 +1215,16 @@
             tn = getfilectx(f, ctx2).data()
         a, b = f, f
         if opts.git:
-            def gitmode(x, l):
-                return l and '120000' or (x and '100755' or '100644')
             def addmodehdr(header, omode, nmode):
                 if omode != nmode:
                     header.append('old mode %s\n' % omode)
                     header.append('new mode %s\n' % nmode)
 
             if f in added:
-                mode = gitmode(execf2(f), linkf2(f))
+                mode = gitmode[ctx2.flags(f)]
                 if f in copy:
                     a = copy[f]
-                    omode = gitmode(man1.execf(a), man1.linkf(a))
+                    omode = gitmode[man1.flags(a)]
                     addmodehdr(header, omode, mode)
                     if a in removed and a not in gone:
                         op = 'rename'
@@ -1267,11 +1243,11 @@
                 if f in copy and copy[f] in added and copy[copy[f]] == f:
                     dodiff = False
                 else:
-                    mode = gitmode(man1.execf(f), man1.linkf(f))
-                    header.append('deleted file mode %s\n' % mode)
+                    header.append('deleted file mode %s\n' %
+                                  gitmode[man1.flags(f)])
             else:
-                omode = gitmode(man1.execf(f), man1.linkf(f))
-                nmode = gitmode(execf2(f), linkf2(f))
+                omode = gitmode[man1.flags(f)]
+                nmode = gitmode[ctx2.flags(f)]
                 addmodehdr(header, omode, nmode)
                 if util.binary(to) or util.binary(tn):
                     dodiff = 'binary'
@@ -1297,7 +1273,7 @@
     revwidth = max([len(str(rev)) for rev in revs])
 
     def single(rev, seqno, fp):
-        ctx = repo.changectx(rev)
+        ctx = repo[rev]
         node = ctx.node()
         parents = [p.node() for p in ctx.parents() if p]
         branch = ctx.branch()
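
patch.diff() now takes a match object and falls back to cmdutil.matchall(repo) when
none is given. A hedged sketch of diffing the working directory against its first
parent, assuming repo already exists; the diffopts values are an assumption:

    import sys
    from mercurial import patch, cmdutil, mdiff

    m = cmdutil.matchall(repo)                 # match every file
    patch.diff(repo, node1=None, node2=None,   # wdir vs. first parent
               match=m, fp=sys.stdout,
               opts=mdiff.diffopts(git=True))  # git-style headers
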
--- a/mercurial/repair.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/repair.py	Wed Sep 17 11:34:37 2008 +0200
@@ -8,6 +8,7 @@
 
 import changegroup, os
 from node import nullrev, short
+from i18n import _
 
 def _bundle(repo, bases, heads, node, suffix, extranodes=None):
     """create a bundle with the specified revisions as a backup"""
@@ -16,15 +17,15 @@
     if not os.path.isdir(backupdir):
         os.mkdir(backupdir)
     name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
-    repo.ui.warn("saving bundle to %s\n" % name)
+    repo.ui.warn(_("saving bundle to %s\n") % name)
     return changegroup.writebundle(cg, name, "HG10BZ")
 
 def _collectfiles(repo, striprev):
     """find out the filelogs affected by the strip"""
     files = {}
 
-    for x in xrange(striprev, repo.changelog.count()):
-        for name in repo.changectx(x).files():
+    for x in xrange(striprev, len(repo)):
+        for name in repo[x].files():
             if name in files:
                 continue
             files[name] = 1
@@ -37,7 +38,7 @@
     """return the nodes that have to be saved before the strip"""
     def collectone(revlog):
         extra = []
-        startrev = count = revlog.count()
+        startrev = count = len(revlog)
         # find the truncation point of the revlog
         for i in xrange(0, count):
             node = revlog.node(i)
@@ -72,7 +73,6 @@
 def strip(ui, repo, node, backup="all"):
     cl = repo.changelog
     # TODO delete the undo files, and handle undo of merge sets
-    pp = cl.parents(node)
     striprev = cl.rev(node)
 
     # Some revisions with rev > striprev may not be descendants of striprev.
@@ -85,7 +85,7 @@
     tostrip = {striprev: 1}
     saveheads = {}
     savebases = []
-    for r in xrange(striprev + 1, cl.count()):
+    for r in xrange(striprev + 1, len(cl)):
         parents = cl.parentrevs(r)
         if parents[0] in tostrip or parents[1] in tostrip:
             # r is a descendant of striprev
@@ -126,7 +126,7 @@
         f.strip(striprev)
 
     if saveheads or extranodes:
-        ui.status("adding branch\n")
+        ui.status(_("adding branch\n"))
         f = open(chgrpfile, "rb")
         gen = changegroup.readbundle(f, chgrpfile)
         repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
--- a/mercurial/repo.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/repo.py	Wed Sep 17 11:34:37 2008 +0200
@@ -40,3 +40,9 @@
 
     def cancopy(self):
         return self.local()
+
+    def rjoin(self, path):
+        url = self.url()
+        if url.endswith('/'):
+            return url + path
+        return url + '/' + path
--- a/mercurial/revlog.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/revlog.py	Wed Sep 17 11:34:37 2008 +0200
@@ -13,13 +13,13 @@
 from node import bin, hex, nullid, nullrev, short
 from i18n import _
 import changegroup, errno, ancestor, mdiff
-import sha, struct, util, zlib
+import struct, util, zlib
 
 _pack = struct.pack
 _unpack = struct.unpack
 _compress = zlib.compress
 _decompress = zlib.decompress
-_sha = sha.new
+_sha = util.sha1
 
 # revlog flags
 REVLOGV0 = 0
@@ -32,13 +32,16 @@
 class RevlogError(Exception):
     pass
 
-class LookupError(RevlogError):
+class LookupError(RevlogError, KeyError):
     def __init__(self, name, index, message):
         self.name = name
         if isinstance(name, str) and len(name) == 20:
             name = short(name)
         RevlogError.__init__(self, _('%s@%s: %s') % (index, name, message))
 
+    def __str__(self):
+        return RevlogError.__str__(self)
+
 def getoffset(q):
     return int(q >> 16)
 
@@ -418,7 +421,7 @@
     A revlog consists of two parts, an index and the revision data.
 
     The index is a file with a fixed record size containing
-    information on each revision, includings its nodeid (hash), the
+    information on each revision, including its nodeid (hash), the
     nodeids of its parents, the position and offset of its data within
     the data file, and the revision it's based on. Finally, each entry
     contains a linkrev entry that can serve as a pointer to external
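
The docstring above describes what each index record carries; the same per-revision information is reachable through the revlog API. A small usage sketch (the revision number and repository path are illustrative, assuming a Python 2 environment with Mercurial importable):

    # Sketch: per-revision index data exposed by the revlog API.
    from mercurial import hg, ui

    cl = hg.repository(ui.ui(), '/path/to/repo').changelog
    r = 5
    node = cl.node(r)                         # nodeid (hash) of the revision
    p1, p2 = cl.parentrevs(r)                 # parent revision numbers
    link = cl.linkrev(node)                   # the linkrev pointer mentioned above
    off, length = cl.start(r), cl.length(r)   # position and size in the data file
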
@@ -512,9 +515,11 @@
 
     def tip(self):
         return self.node(len(self.index) - 2)
-    def count(self):
+    def __len__(self):
         return len(self.index) - 1
-
+    def __iter__(self):
+        for i in xrange(len(self)):
+            yield i
     def rev(self, node):
         try:
             return self.nodemap[node]
@@ -591,6 +596,27 @@
                     visit.append(p)
         return reachable
 
+    def ancestors(self, *revs):
+        'Generate the ancestors of revs using a breadth-first visit'
+        visit = list(revs)
+        seen = util.set([nullrev])
+        while visit:
+            for parent in self.parentrevs(visit.pop(0)):
+                if parent not in seen:
+                    visit.append(parent)
+                    seen.add(parent)
+                    yield parent
+
+    def descendants(self, *revs):
+        'Generate the descendants of revs in topological order'
+        seen = util.set(revs)
+        for i in xrange(min(revs) + 1, len(self)):
+            for x in self.parentrevs(i):
+                if x != nullrev and x in seen:
+                    seen.add(i)
+                    yield i
+                    break
+
     def nodesbetween(self, roots=None, heads=None):
         """Return a tuple containing three elements. Elements 1 and 2 contain
         a final list bases and heads after all the unreachable ones have been
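
The ancestors() and descendants() generators added above walk the graph by revision number: ancestors() does a breadth-first walk towards the roots (excluding the starting revisions and nullrev), while descendants() scans forward and yields, in increasing order, every revision whose parent has already been seen. A minimal usage sketch (illustrative repository path and revision number):

    # Sketch: enumerating ancestors and descendants of revision 10.
    from mercurial import hg, ui

    cl = hg.repository(ui.ui(), '/path/to/repo').changelog
    ancs = list(cl.ancestors(10))      # breadth-first, excludes rev 10 itself
    descs = list(cl.descendants(10))   # increasing revision order, excludes rev 10
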
@@ -617,12 +643,11 @@
             lowestrev = nullrev
         if (lowestrev == nullrev) and (heads is None):
             # We want _all_ the nodes!
-            return ([self.node(r) for r in xrange(0, self.count())],
-                    [nullid], list(self.heads()))
+            return ([self.node(r) for r in self], [nullid], list(self.heads()))
         if heads is None:
             # All nodes are ancestors, so the latest ancestor is the last
             # node.
-            highestrev = self.count() - 1
+            highestrev = len(self) - 1
             # Set ancestors to None to signal that every node is an ancestor.
             ancestors = None
             # Set heads to an empty dictionary for later discovery of heads
@@ -751,15 +776,15 @@
         as if they had no children
         """
         if start is None and stop is None:
-            count = self.count()
+            count = len(self)
             if not count:
                 return [nullid]
             ishead = [1] * (count + 1)
             index = self.index
-            for r in xrange(count):
+            for r in self:
                 e = index[r]
                 ishead[e[5]] = ishead[e[6]] = 0
-            return [self.node(r) for r in xrange(count) if ishead[r]]
+            return [self.node(r) for r in self if ishead[r]]
 
         if start is None:
             start = nullid
@@ -771,7 +796,7 @@
         heads = {startrev: 1}
 
         parentrevs = self.parentrevs
-        for r in xrange(startrev + 1, self.count()):
+        for r in xrange(startrev + 1, len(self)):
             for p in parentrevs(r):
                 if p in reachable:
                     if r not in stoprevs:
@@ -786,7 +811,7 @@
         """find the children of a given node"""
         c = []
         p = self.rev(node)
-        for r in range(p + 1, self.count()):
+        for r in range(p + 1, len(self)):
             prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
             if prevs:
                 for pr in prevs:
@@ -815,8 +840,8 @@
             if str(rev) != id:
                 raise ValueError
             if rev < 0:
-                rev = self.count() + rev
-            if rev < 0 or rev >= self.count():
+                rev = len(self) + rev
+            if rev < 0 or rev >= len(self):
                 raise ValueError
             return self.node(rev)
         except (ValueError, OverflowError):
@@ -917,7 +942,7 @@
                               self.revision(self.node(rev2)))
 
     def revision(self, node):
-        """return an uncompressed revision of a given"""
+        """return an uncompressed revision of a given node"""
         if node == nullid:
             return ""
         if self._cache and self._cache[0] == node:
@@ -979,7 +1004,7 @@
         df = self.opener(self.datafile, 'w')
         try:
             calc = self._io.size
-            for r in xrange(self.count()):
+            for r in self:
                 start = self.start(r) + (r + 1) * calc
                 length = self.length(r)
                 fp.seek(start)
@@ -992,7 +1017,7 @@
         fp = self.opener(self.indexfile, 'w', atomictemp=True)
         self.version &= ~(REVLOGNGINLINEDATA)
         self._inline = False
-        for i in xrange(self.count()):
+        for i in self:
             e = self._io.packentry(self.index[i], self.node, self.version, i)
             fp.write(e)
 
@@ -1028,7 +1053,7 @@
         if node in self.nodemap:
             return node
 
-        curr = self.count()
+        curr = len(self)
         prev = curr - 1
         base = self.base(prev)
         offset = self.end(prev)
@@ -1133,7 +1158,7 @@
 
         yield changegroup.closechunk()
 
-    def addgroup(self, revs, linkmapper, transaction, unique=0):
+    def addgroup(self, revs, linkmapper, transaction):
         """
         add a delta group
 
@@ -1143,7 +1168,7 @@
         """
 
         #track the base of the current delta log
-        r = self.count()
+        r = len(self)
         t = r - 1
         node = None
 
@@ -1170,8 +1195,6 @@
                 link = linkmapper(cs)
                 if node in self.nodemap:
                     # this can happen if two branches make the same change
-                    # if unique:
-                    #    raise RevlogError(_("already have %s") % hex(node[:4]))
                     chain = node
                     continue
                 delta = buffer(chunk, 80)
@@ -1264,13 +1287,13 @@
         trust that the caller has saved the revisions that shouldn't be
         removed and that it'll readd them after this truncation.
         """
-        if self.count() == 0:
+        if len(self) == 0:
             return
 
         if isinstance(self.index, lazyindex):
             self._loadindexmap()
 
-        for rev in xrange(0, self.count()):
+        for rev in self:
             if self.index[rev][4] >= minlink:
                 break
         else:
@@ -1291,15 +1314,15 @@
         # then reset internal state in memory to forget those revisions
         self._cache = None
         self._chunkcache = None
-        for x in xrange(rev, self.count()):
+        for x in xrange(rev, len(self)):
             del self.nodemap[self.node(x)]
 
         del self.index[rev:-1]
 
     def checksize(self):
         expected = 0
-        if self.count():
-            expected = max(0, self.end(self.count() - 1))
+        if len(self):
+            expected = max(0, self.end(len(self) - 1))
 
         try:
             f = self.opener(self.datafile)
@@ -1320,13 +1343,19 @@
             di = actual - (i * s)
             if self._inline:
                 databytes = 0
-                for r in xrange(self.count()):
+                for r in self:
                     databytes += max(0, self.length(r))
                 dd = 0
-                di = actual - self.count() * s - databytes
+                di = actual - len(self) * s - databytes
         except IOError, inst:
             if inst.errno != errno.ENOENT:
                 raise
             di = 0
 
         return (dd, di)
+
+    def files(self):
+        res = [ self.indexfile ]
+        if not self._inline:
+            res.append(self.datafile)
+        return res
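
Most of the churn in revlog.py above replaces the count() method with the standard container protocol: len(revlog) gives the number of revisions and iterating a revlog yields revision numbers. A small sketch of the idiom before and after (the repository path is illustrative):

    # Sketch: revlog sizes and iteration after the count() -> len()/__iter__ change.
    from mercurial import hg, ui

    cl = hg.repository(ui.ui(), '/path/to/repo').changelog
    # old: for r in xrange(cl.count()): ...
    total = len(cl)                    # number of revisions
    nodes = [cl.node(r) for r in cl]   # iteration yields revision numbers
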
--- a/mercurial/sshrepo.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/sshrepo.py	Wed Sep 17 11:34:37 2008 +0200
@@ -42,7 +42,7 @@
             cmd = '%s %s "%s init %s"'
             cmd = cmd % (sshcmd, args, remotecmd, self.path)
 
-            ui.note('running %s\n' % cmd)
+            ui.note(_('running %s\n') % cmd)
             res = util.system(cmd)
             if res != 0:
                 self.raise_(repo.RepoError(_("could not create remote repo")))
@@ -60,7 +60,7 @@
         cmd = cmd % (sshcmd, args, remotecmd, self.path)
 
         cmd = util.quotecommand(cmd)
-        ui.note('running %s\n' % cmd)
+        ui.note(_('running %s\n') % cmd)
         self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
 
         # skip any noise generated by remote shell
--- a/mercurial/sshserver.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/sshserver.py	Wed Sep 17 11:34:37 2008 +0200
@@ -204,4 +204,10 @@
                 os.unlink(tempname)
 
     def do_stream_out(self):
-        streamclone.stream_out(self.repo, self.fout)
+        try:
+            for chunk in streamclone.stream_out(self.repo):
+                self.fout.write(chunk)
+            self.fout.flush()
+        except streamclone.StreamException, inst:
+            self.fout.write(str(inst))
+            self.fout.flush()
--- a/mercurial/statichttprepo.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/statichttprepo.py	Wed Sep 17 11:34:37 2008 +0200
@@ -9,7 +9,7 @@
 
 from i18n import _
 import changelog, httprangereader
-import repo, localrepo, manifest, util
+import repo, localrepo, manifest, util, store
 import urllib, urllib2, errno
 
 class rangereader(httprangereader.httprangereader):
@@ -54,15 +54,12 @@
                 raise repo.RepoError(_("requirement '%s' not supported") % r)
 
         # setup store
-        if "store" in requirements:
-            self.encodefn = util.encodefilename
-            self.decodefn = util.decodefilename
-            self.spath = self.path + "/store"
-        else:
-            self.encodefn = lambda x: x
-            self.decodefn = lambda x: x
-            self.spath = self.path
-        self.sopener = util.encodedopener(opener(self.spath), self.encodefn)
+        def pjoin(a, b):
+            return a + '/' + b
+        self.store = store.store(requirements, self.path, opener, pjoin)
+        self.spath = self.store.path
+        self.sopener = self.store.opener
+        self.sjoin = self.store.join
 
         self.manifest = manifest.manifest(self.sopener)
         self.changelog = changelog.changelog(self.sopener)
@@ -77,6 +74,9 @@
     def local(self):
         return False
 
+    def lock(self, wait=True):
+        raise util.Abort(_('cannot lock static-http repository'))
+
 def instance(ui, path, create):
     if create:
         raise util.Abort(_('cannot create new static-http repository'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/store.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,127 @@
+# store.py - repository store handling for Mercurial
+#
+# Copyright 2008 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os, stat, osutil, util
+
+def _buildencodefun():
+    e = '_'
+    win_reserved = [ord(x) for x in '\\:*?"<>|']
+    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
+    for x in (range(32) + range(126, 256) + win_reserved):
+        cmap[chr(x)] = "~%02x" % x
+    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
+        cmap[chr(x)] = e + chr(x).lower()
+    dmap = {}
+    for k, v in cmap.iteritems():
+        dmap[v] = k
+    def decode(s):
+        i = 0
+        while i < len(s):
+            for l in xrange(1, 4):
+                try:
+                    yield dmap[s[i:i+l]]
+                    i += l
+                    break
+                except KeyError:
+                    pass
+            else:
+                raise KeyError
+    return (lambda s: "".join([cmap[c] for c in s]),
+            lambda s: "".join(list(decode(s))))
+
+encodefilename, decodefilename = _buildencodefun()
+
+def _calcmode(path):
+    try:
+        # files in .hg/ will be created using this mode
+        mode = os.stat(path).st_mode
+        # avoid some useless chmods
+        if (0777 & ~util._umask) == (0777 & mode):
+            mode = None
+    except OSError:
+        mode = None
+    return mode
+
+_data = 'data 00manifest.d 00manifest.i 00changelog.d  00changelog.i'
+
+class basicstore:
+    '''base class for local repository stores'''
+    def __init__(self, path, opener, pathjoiner):
+        self.pathjoiner = pathjoiner
+        self.path = path
+        self.createmode = _calcmode(path)
+        self.opener = opener(self.path)
+        self.opener.createmode = self.createmode
+
+    def join(self, f):
+        return self.pathjoiner(self.path, f)
+
+    def _walk(self, relpath, recurse):
+        '''yields (unencoded, encoded, size)'''
+        path = self.pathjoiner(self.path, relpath)
+        striplen = len(self.path) + len(os.sep)
+        prefix = path[striplen:]
+        l = []
+        if os.path.isdir(path):
+            visit = [path]
+            while visit:
+                p = visit.pop()
+                for f, kind, st in osutil.listdir(p, stat=True):
+                    fp = self.pathjoiner(p, f)
+                    if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
+                        n = util.pconvert(fp[striplen:])
+                        l.append((n, n, st.st_size))
+                    elif kind == stat.S_IFDIR and recurse:
+                        visit.append(fp)
+        return util.sort(l)
+
+    def datafiles(self):
+        return self._walk('data', True)
+
+    def walk(self):
+        '''yields (unencoded, encoded, size)'''
+        # yield data files first
+        for x in self.datafiles():
+            yield x
+        # yield manifest before changelog
+        meta = self._walk('', False)
+        meta.reverse()
+        for x in meta:
+            yield x
+
+    def copylist(self):
+        return ['requires'] + _data.split()
+
+class encodedstore(basicstore):
+    def __init__(self, path, opener, pathjoiner):
+        self.pathjoiner = pathjoiner
+        self.path = self.pathjoiner(path, 'store')
+        self.createmode = _calcmode(self.path)
+        op = opener(self.path)
+        op.createmode = self.createmode
+        self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw)
+
+    def datafiles(self):
+        for a, b, size in self._walk('data', True):
+            try:
+                a = decodefilename(a)
+            except KeyError:
+                a = None
+            yield a, b, size
+
+    def join(self, f):
+        return self.pathjoiner(self.path, encodefilename(f))
+
+    def copylist(self):
+        return (['requires', '00changelog.i'] +
+                [self.pathjoiner('store', f) for f in _data.split()])
+
+def store(requirements, path, opener, pathjoiner=None):
+    pathjoiner = pathjoiner or os.path.join
+    if 'store' in requirements:
+        return encodedstore(path, opener, pathjoiner)
+    return basicstore(path, opener, pathjoiner)
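
The encoder built by _buildencodefun escapes Windows-reserved and non-printable bytes as '~XX' and folds uppercase letters (and the escape character '_') into an '_'-prefixed lowercase form, so encoding and decoding round-trip exactly. A worked example, assuming the module is importable as mercurial.store:

    # Sketch: round-tripping a store path through the filename encoding.
    from mercurial.store import encodefilename, decodefilename

    name = 'data/Foo:Bar_baz.txt.i'
    enc = encodefilename(name)
    # 'F' -> '_f', ':' -> '~3a', 'B' -> '_b', '_' -> '__'
    # enc == 'data/_foo~3a_bar__baz.txt.i'
    assert decodefilename(enc) == name
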
--- a/mercurial/streamclone.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/streamclone.py	Wed Sep 17 11:34:37 2008 +0200
@@ -5,41 +5,20 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import os, osutil, stat, util, lock
+import util, lock
+from i18n import _
+
+class StreamException(Exception):
+    def __init__(self, code):
+        Exception.__init__(self)
+        self.code = code
+    def __str__(self):
+        return '%i\n' % self.code
 
 # if server supports streaming clone, it advertises "stream"
 # capability with value that is version+flags of repo it is serving.
 # client only streams if it can read that repo format.
 
-def walkrepo(root):
-    '''iterate over metadata files in repository.
-    walk in natural (sorted) order.
-    yields 2-tuples: name of .d or .i file, size of file.'''
-
-    strip_count = len(root) + len(os.sep)
-    def walk(path, recurse):
-        for e, kind, st in osutil.listdir(path, stat=True):
-            pe = os.path.join(path, e)
-            if kind == stat.S_IFDIR:
-                if recurse:
-                    for x in walk(pe, True):
-                        yield x
-            else:
-                if kind != stat.S_IFREG or len(e) < 2:
-                    continue
-                sfx = e[-2:]
-                if sfx in ('.d', '.i'):
-                    yield pe[strip_count:], st.st_size
-    # write file data first
-    for x in walk(os.path.join(root, 'data'), True):
-        yield x
-    # write manifest before changelog
-    meta = list(walk(root, False))
-    meta.sort()
-    meta.reverse()
-    for x in meta:
-        yield x
-
 # stream file format is simple.
 #
 # server writes out line that says how many files, how many total
@@ -52,43 +31,35 @@
 #
 #   server writes out raw file data.
 
-def stream_out(repo, fileobj, untrusted=False):
+def stream_out(repo, untrusted=False):
     '''stream out all metadata files in repository.
-    writes to file-like object, must support write() and optional flush().'''
+    Yields chunks of data to be written to the requesting client.'''
 
     if not repo.ui.configbool('server', 'uncompressed', untrusted=untrusted):
-        fileobj.write('1\n')
-        return
-
-    # get consistent snapshot of repo. lock during scan so lock not
-    # needed while we stream, and commits can happen.
-    repolock = None
-    try:
-        try:
-            repolock = repo.lock()
-        except (lock.LockHeld, lock.LockUnavailable), inst:
-            repo.ui.warn('locking the repository failed: %s\n' % (inst,))
-            fileobj.write('2\n')
-            return
+        raise StreamException(1)
 
-        fileobj.write('0\n')
-        repo.ui.debug('scanning\n')
-        entries = []
-        total_bytes = 0
-        for name, size in walkrepo(repo.spath):
-            name = repo.decodefn(util.pconvert(name))
-            entries.append((name, size))
-            total_bytes += size
-    finally:
-        del repolock
+    entries = []
+    total_bytes = 0
+    try:
+        l = None
+        try:
+            repo.ui.debug(_('scanning\n'))
+            # get consistent snapshot of repo, lock during scan
+            l = repo.lock()
+            for name, ename, size in repo.store.walk():
+                entries.append((name, size))
+                total_bytes += size
+        finally:
+            del l
+    except (lock.LockHeld, lock.LockUnavailable), inst:
+        raise StreamException(2)
 
-    repo.ui.debug('%d files, %d bytes to transfer\n' %
+    yield '0\n'
+    repo.ui.debug(_('%d files, %d bytes to transfer\n') %
                   (len(entries), total_bytes))
-    fileobj.write('%d %d\n' % (len(entries), total_bytes))
+    yield '%d %d\n' % (len(entries), total_bytes)
     for name, size in entries:
-        repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
-        fileobj.write('%s\0%d\n' % (name, size))
+        repo.ui.debug(_('sending %s (%d bytes)\n') % (name, size))
+        yield '%s\0%d\n' % (name, size)
         for chunk in util.filechunkiter(repo.sopener(name), limit=size):
-            fileobj.write(chunk)
-    flush = getattr(fileobj, 'flush', None)
-    if flush: flush()
+            yield chunk
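
stream_out is now a generator, so callers such as sshserver simply write the chunks it yields; the wire format described in the comments above is unchanged. The server sends a status line ('0' ok, '1' streaming disabled, '2' lock failure), then '<file count> <total bytes>', then for each file a 'name\0size' line followed by exactly size raw bytes. A minimal client-side parsing sketch, where fp is any hypothetical file-like object wrapping the connection:

    # Sketch: reading the stream-clone wire format.
    def readstream(fp):
        status = int(fp.readline())
        if status != 0:
            raise ValueError('stream unavailable, server sent code %d' % status)
        count, total = map(int, fp.readline().split())
        for _n in xrange(count):
            name, size = fp.readline().rstrip('\n').split('\0')
            yield name, fp.read(int(size))
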
--- a/mercurial/templatefilters.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/templatefilters.py	Wed Sep 17 11:34:37 2008 +0200
@@ -122,6 +122,36 @@
             .replace("'", '&#39;')) # &apos; invalid in HTML
     return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)
 
+_escapes = [
+    ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'),
+    ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'),
+]
+
+def json(obj):
+    if obj is None or obj is False or obj is True:
+        return {None: 'null', False: 'false', True: 'true'}[obj]
+    elif isinstance(obj, int) or isinstance(obj, float):
+        return str(obj)
+    elif isinstance(obj, str):
+        for k, v in _escapes:
+            obj = obj.replace(k, v)
+        return '"%s"' % obj
+    elif isinstance(obj, unicode):
+        return json(obj.encode('utf-8'))
+    elif hasattr(obj, 'keys'):
+        out = []
+        for k, v in obj.iteritems():
+            s = '%s: %s' % (json(k), json(v))
+            out.append(s)
+        return '{' + ', '.join(out) + '}'
+    elif hasattr(obj, '__iter__'):
+        out = []
+        for i in obj:
+            out.append(json(i))
+        return '[' + ', '.join(out) + ']'
+    else:
+        raise TypeError('cannot encode type %s' % obj.__class__.__name__)
+
 filters = {
     "addbreaks": nl2br,
     "basename": os.path.basename,
@@ -150,5 +180,5 @@
     "user": lambda x: util.shortuser(x),
     "stringescape": lambda x: x.encode('string_escape'),
     "xmlescape": xmlescape,
-    }
-
+    "json": json,
+}
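
The new json filter handles the value types the templater produces: None and booleans, numbers, byte and unicode strings (escaped via the _escapes table), mappings, and other iterables, recursing into containers. A few illustrative calls:

    # Sketch: output of the new templatefilters.json encoder.
    from mercurial.templatefilters import json

    json(None)         # -> 'null'
    json(42)           # -> '42'
    json('a "b"')      # -> '"a \"b\""' (quotes backslash-escaped)
    json(['tip', 0])   # -> '["tip", 0]'
    json({'rev': 0})   # -> '{"rev": 0}'
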
--- a/mercurial/templater.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/templater.py	Wed Sep 17 11:34:37 2008 +0200
@@ -81,18 +81,18 @@
     def __contains__(self, key):
         return key in self.cache or key in self.map
 
-    def __call__(self, t, **map):
-        '''perform expansion.
-        t is name of map element to expand.
-        map is added elements to use during expansion.'''
+    def _template(self, t):
+        '''Get the template for the given template name. Use a local cache.'''
         if not t in self.cache:
             try:
                 self.cache[t] = file(self.map[t]).read()
             except IOError, inst:
                 raise IOError(inst.args[0], _('template file %s: %s') %
                               (self.map[t], inst.args[1]))
-        tmpl = self.cache[t]
+        return self.cache[t]
 
+    def _process(self, tmpl, map):
+        '''Render a template. Returns a generator.'''
         while tmpl:
             m = self.template_re.search(tmpl)
             if not m:
@@ -114,18 +114,39 @@
                 v = v(**map)
             if format:
                 if not hasattr(v, '__iter__'):
-                    raise SyntaxError(_("Error expanding '%s%s'")
+                    raise SyntaxError(_("Error expanding '%s%%%s'")
                                       % (key, format))
                 lm = map.copy()
                 for i in v:
                     lm.update(i)
-                    yield self(format, **lm)
+                    t = self._template(format)
+                    yield self._process(t, lm)
             else:
                 if fl:
                     for f in fl.split("|")[1:]:
                         v = self.filters[f](v)
                 yield v
 
+    def __call__(self, t, **map):
+        '''Perform expansion. t is name of map element to expand. map contains
+        added elements for use during expansion. Returns a generator.'''
+        tmpl = self._template(t)
+        iters = [self._process(tmpl, map)]
+        while iters:
+            try:
+                item = iters[0].next()
+            except StopIteration:
+                iters.pop(0)
+                continue
+            if isinstance(item, str):
+                yield item
+            elif item is None:
+                yield ''
+            elif hasattr(item, '__iter__'):
+                iters.insert(0, iter(item))
+            else:
+                yield str(item)
+
 def templatepath(name=None):
     '''return location of template file or directory (if no name).
     returns None if not found.'''
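
The rewritten __call__ above expands templates iteratively: _process yields strings or nested generators, and __call__ keeps a stack of iterators, descending into nested output and flattening it rather than recursing through chained yields. The same flattening idea in isolation (a standalone sketch, not Mercurial's actual classes):

    # Sketch: iterative flattening of nested generator output.
    def flatten(source):
        iters = [iter(source)]
        while iters:
            try:
                item = iters[0].next()        # Python 2 iterator protocol
            except StopIteration:
                iters.pop(0)
                continue
            if isinstance(item, str):
                yield item
            elif item is None:
                yield ''
            elif hasattr(item, '__iter__'):
                iters.insert(0, iter(item))   # descend into nested output
            else:
                yield str(item)

    # ''.join(flatten(['a', ['b', ['c']], None, 1])) == 'abc1'
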
--- a/mercurial/transaction.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/transaction.py	Wed Sep 17 11:34:37 2008 +0200
@@ -96,9 +96,13 @@
     files = {}
     for l in open(file).readlines():
         f, o = l.split('\0')
-        files[f] = o
+        files[f] = int(o)
     for f in files:
         o = files[f]
-        opener(f, "a").truncate(int(o))
+        if o:
+            opener(f, "a").truncate(int(o))
+        else:
+            fn = opener(f).name
+            os.unlink(fn)
     os.unlink(file)
 
--- a/mercurial/ui.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/ui.py	Wed Sep 17 11:34:37 2008 +0200
@@ -312,15 +312,11 @@
         items = self._configitems(section, untrusted=untrusted, abort=True)
         if self.debugflag and not untrusted and self.ucdata:
             uitems = self._configitems(section, untrusted=True, abort=False)
-            keys = uitems.keys()
-            keys.sort()
-            for k in keys:
+            for k in util.sort(uitems):
                 if uitems[k] != items.get(k):
                     self.warn(_("Ignoring untrusted configuration option "
                                 "%s.%s = %s\n") % (section, k, uitems[k]))
-        x = items.items()
-        x.sort()
-        return x
+        return util.sort(items.items())
 
     def walkconfig(self, untrusted=False):
         cdata = self._get_cdata(untrusted)
@@ -335,14 +331,16 @@
 
         Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
         and stop searching if one of these is set.
-        If not found, use ($LOGNAME or $USER or $LNAME or
-        $USERNAME) +"@full.hostname".
+        If not found and ui.askusername is True, ask the user, else use
+        ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
         """
         user = os.environ.get("HGUSER")
         if user is None:
             user = self.config("ui", "username")
         if user is None:
             user = os.environ.get("EMAIL")
+        if user is None and self.configbool("ui", "askusername"):
+            user = self.prompt(_("Enter a commit username:"), default=None)
         if user is None:
             try:
                 user = '%s@%s' % (util.getuser(), socket.getfqdn())
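
username() now also honours a ui.askusername flag: the lookup order is $HGUSER, then ui.username from the configuration, then $EMAIL, then an interactive prompt if ui.askusername is set, and only then the login@hostname fallback. A usage sketch (the configuration values are hypothetical):

    # Sketch: committer name resolution after this change.
    from mercurial import ui

    u = ui.ui()
    u.setconfig('ui', 'askusername', 'true')  # same as "[ui] askusername = True" in an hgrc
    name = u.username()  # prompts only if HGUSER, ui.username and EMAIL are all unset
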
--- a/mercurial/util.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/util.py	Wed Sep 17 11:34:37 2008 +0200
@@ -15,7 +15,9 @@
 from i18n import _
 import cStringIO, errno, getpass, re, shutil, sys, tempfile
 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
-import urlparse
+import imp, urlparse
+
+# Python compatibility
 
 try:
     set = set
@@ -23,6 +25,30 @@
 except NameError:
     from sets import Set as set, ImmutableSet as frozenset
 
+_md5 = None
+def md5(s):
+    global _md5
+    if _md5 is None:
+        try:
+            import hashlib
+            _md5 = hashlib.md5
+        except ImportError:
+            import md5
+            _md5 = md5.md5
+    return _md5(s)
+
+_sha1 = None
+def sha1(s):
+    global _sha1
+    if _sha1 is None:
+        try:
+            import hashlib
+            _sha1 = hashlib.sha1
+        except ImportError:
+            import sha
+            _sha1 = sha.sha
+    return _sha1(s)
+
 try:
     _encoding = os.environ.get("HGENCODING")
     if sys.platform == 'darwin' and not _encoding:
@@ -217,8 +243,8 @@
     return pipefilter(s, cmd)
 
 def binary(s):
-    """return true if a string is binary data using diff's heuristic"""
-    if s and '\0' in s[:4096]:
+    """return true if a string is binary data"""
+    if s and '\0' in s:
         return True
     return False
 
@@ -226,6 +252,12 @@
     """return the uniq elements of iterable g"""
     return dict.fromkeys(g).keys()
 
+def sort(l):
+    if not isinstance(l, list):
+        l = list(l)
+    l.sort()
+    return l
+
 class Abort(Exception):
     """Raised if a command needs to print an error and exit."""
 
@@ -251,12 +283,12 @@
         ret.append(p)
     return ret
 
-def patkind(name, dflt_pat='glob'):
+def patkind(name, default):
     """Split a string into an optional pattern kind prefix and the
     actual pattern."""
     for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
         if name.startswith(prefix + ':'): return name.split(':', 1)
-    return dflt_pat, name
+    return default, name
 
 def globre(pat, head='^', tail='$'):
     "convert a glob pattern into a regexp"
@@ -386,17 +418,7 @@
 
         raise Abort('%s not under root' % myname)
 
-def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
-    return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
-
-def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
-               globbed=False, default=None):
-    default = default or 'relpath'
-    if default == 'relpath' and not globbed:
-        names = expand_glob(names)
-    return _matcher(canonroot, cwd, names, inc, exc, default, src)
-
-def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
+def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
     """build a function to match a set of file patterns
 
     arguments:
@@ -537,13 +559,29 @@
 
 _hgexecutable = None
 
+def main_is_frozen():
+    """return True if we are a frozen executable.
+
+    The code supports py2exe (most common, Windows only) and tools/freeze
+    (portable, not much used).
+    """
+    return (hasattr(sys, "frozen") or # new py2exe
+            hasattr(sys, "importers") or # old py2exe
+            imp.is_frozen("__main__")) # tools/freeze
+
 def hgexecutable():
     """return location of the 'hg' executable.
 
     Defaults to $HG or 'hg' in the search path.
     """
     if _hgexecutable is None:
-        set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
+        hg = os.environ.get('HG')
+        if hg:
+            set_hgexecutable(hg)
+        elif main_is_frozen():
+            set_hgexecutable(sys.executable)
+        else:
+            set_hgexecutable(find_exe('hg', 'hg'))
     return _hgexecutable
 
 def set_hgexecutable(path):
@@ -807,7 +845,7 @@
 
 # File system features
 
-def checkfolding(path):
+def checkcase(path):
     """
     Check whether the given path is on a case-sensitive filesystem
 
@@ -827,6 +865,53 @@
     except:
         return True
 
+_fspathcache = {}
+def fspath(name, root):
+    '''Get name in the case stored in the filesystem
+
+    The name is either relative to root, or it is an absolute path starting
+    with root. Note that this function is unnecessary, and should not be
+    called, for case-sensitive filesystems (simply because it's expensive).
+    '''
+    # If name is absolute, make it relative
+    if name.lower().startswith(root.lower()):
+        l = len(root)
+        if name[l] == os.sep or name[l] == os.altsep:
+            l = l + 1
+        name = name[l:]
+
+    if not os.path.exists(os.path.join(root, name)):
+        return None
+
+    seps = os.sep
+    if os.altsep:
+        seps = seps + os.altsep
+    # Protect backslashes. This gets silly very quickly.
+    seps = seps.replace('\\', '\\\\')
+    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
+    dir = os.path.normcase(os.path.normpath(root))
+    result = []
+    for part, sep in pattern.findall(name):
+        if sep:
+            result.append(sep)
+            continue
+
+        if dir not in _fspathcache:
+            _fspathcache[dir] = os.listdir(dir)
+        contents = _fspathcache[dir]
+
+        lpart = part.lower()
+        for n in contents:
+            if n.lower() == lpart:
+                result.append(n)
+                break
+        else:
+            # Cannot happen, as the file exists!
+            result.append(part)
+        dir = os.path.join(dir, lpart)
+
+    return ''.join(result)
+
 def checkexec(path):
     """
     Check whether the given path is on a filesystem with UNIX-like exec flags
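
fspath() above reconstructs the on-disk spelling of a path on a case-insensitive filesystem by walking it component by component against a cached os.listdir() of each directory. A hedged usage sketch (the file name and repository root are hypothetical):

    # Sketch: recovering the stored case of a user-supplied path.
    from mercurial import util

    # If the working directory contains 'README', a user typing 'readme'
    # on a case-insensitive filesystem gets the canonical spelling back:
    real = util.fspath('readme', '/path/to/repo')   # -> 'README', or None if absent
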
@@ -854,12 +939,6 @@
         return False
     return not (new_file_has_exec or exec_flags_cannot_flip)
 
-def execfunc(path, fallback):
-    '''return an is_exec() function with default to fallback'''
-    if checkexec(path):
-        return lambda x: is_exec(os.path.join(path, x))
-    return fallback
-
 def checklink(path):
     """check whether the given path is on a symlink-capable filesystem"""
     # mktemp is not racy because symlink creation will fail if the
@@ -872,12 +951,6 @@
     except (OSError, AttributeError):
         return False
 
-def linkfunc(path, fallback):
-    '''return an is_link() function with default to fallback'''
-    if checklink(path):
-        return lambda x: os.path.islink(os.path.join(path, x))
-    return fallback
-
 _umask = os.umask(0)
 os.umask(_umask)
 
@@ -1044,12 +1117,12 @@
         # through the current COMSPEC. cmd.exe suppress enclosing quotes.
         return '"' + cmd + '"'
 
-    def popen(command):
+    def popen(command, mode='r'):
         # Work around "popen spawned process may not write to stdout
         # under windows"
         # http://bugs.python.org/issue1366
         command += " 2> %s" % nulldev
-        return os.popen(quotecommand(command))
+        return os.popen(quotecommand(command), mode)
 
     def explain_exit(code):
         return _("exited with status %d") % code, code
@@ -1212,8 +1285,8 @@
     def quotecommand(cmd):
         return cmd
 
-    def popen(command):
-        return os.popen(command)
+    def popen(command, mode='r'):
+        return os.popen(command, mode)
 
     def testpid(pid):
         '''return False if pid dead, True if running or not sure'''
@@ -1274,39 +1347,6 @@
         return name
     return find_in_path(name, os.environ.get('PATH', ''), default=default)
 
-def _buildencodefun():
-    e = '_'
-    win_reserved = [ord(x) for x in '\\:*?"<>|']
-    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
-    for x in (range(32) + range(126, 256) + win_reserved):
-        cmap[chr(x)] = "~%02x" % x
-    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
-        cmap[chr(x)] = e + chr(x).lower()
-    dmap = {}
-    for k, v in cmap.iteritems():
-        dmap[v] = k
-    def decode(s):
-        i = 0
-        while i < len(s):
-            for l in xrange(1, 4):
-                try:
-                    yield dmap[s[i:i+l]]
-                    i += l
-                    break
-                except KeyError:
-                    pass
-            else:
-                raise KeyError
-    return (lambda s: "".join([cmap[c] for c in s]),
-            lambda s: "".join(list(decode(s))))
-
-encodefilename, decodefilename = _buildencodefun()
-
-def encodedopener(openerfn, fn):
-    def o(path, *args, **kw):
-        return openerfn(fn(path), *args, **kw)
-    return o
-
 def mktempcopy(name, emptyok=False, createmode=None):
     """Create a temporary file with the same contents from name
 
--- a/mercurial/verify.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/mercurial/verify.py	Wed Sep 17 11:34:37 2008 +0200
@@ -7,7 +7,7 @@
 
 from node import nullid, short
 from i18n import _
-import revlog
+import revlog, util
 
 def verify(repo):
     lock = repo.lock()
@@ -17,265 +17,219 @@
         del lock
 
 def _verify(repo):
+    mflinkrevs = {}
     filelinkrevs = {}
     filenodes = {}
-    changesets = revisions = files = 0
-    firstbad = [None]
+    revisions = 0
+    badrevs = {}
     errors = [0]
     warnings = [0]
-    neededmanifests = {}
+    ui = repo.ui
+    cl = repo.changelog
+    mf = repo.manifest
 
     def err(linkrev, msg, filename=None):
         if linkrev != None:
-            if firstbad[0] != None:
-                firstbad[0] = min(firstbad[0], linkrev)
-            else:
-                firstbad[0] = linkrev
+            badrevs[linkrev] = True
         else:
-            linkrev = "?"
+            linkrev = '?'
         msg = "%s: %s" % (linkrev, msg)
         if filename:
             msg = "%s@%s" % (filename, msg)
-        repo.ui.warn(" " + msg + "\n")
+        ui.warn(" " + msg + "\n")
         errors[0] += 1
 
+    def exc(linkrev, msg, inst, filename=None):
+        if isinstance(inst, KeyboardInterrupt):
+            ui.warn(_("interrupted"))
+            raise
+        err(linkrev, "%s: %s" % (msg, inst), filename)
+
     def warn(msg):
-        repo.ui.warn(msg + "\n")
+        ui.warn(msg + "\n")
         warnings[0] += 1
 
-    def checksize(obj, name):
+    def checklog(obj, name):
+        if not len(obj) and (havecl or havemf):
+            err(0, _("empty or missing %s") % name)
+            return
+
         d = obj.checksize()
         if d[0]:
             err(None, _("data length off by %d bytes") % d[0], name)
         if d[1]:
             err(None, _("index contains %d extra bytes") % d[1], name)
 
-    def checkversion(obj, name):
         if obj.version != revlog.REVLOGV0:
             if not revlogv1:
                 warn(_("warning: `%s' uses revlog format 1") % name)
         elif revlogv1:
             warn(_("warning: `%s' uses revlog format 0") % name)
 
-    revlogv1 = repo.changelog.version != revlog.REVLOGV0
-    if repo.ui.verbose or not revlogv1:
-        repo.ui.status(_("repository uses revlog format %d\n") %
+    def checkentry(obj, i, node, seen, linkrevs, f):
+        lr = obj.linkrev(node)
+        if lr < 0 or (havecl and lr not in linkrevs):
+            t = "unexpected"
+            if lr < 0 or lr >= len(cl):
+                t = "nonexistent"
+            err(None, _("rev %d point to %s changeset %d") % (i, t, lr), f)
+            if linkrevs:
+                warn(_(" (expected %s)") % " ".join(map(str,linkrevs)))
+            lr = None # can't be trusted
+
+        try:
+            p1, p2 = obj.parents(node)
+            if p1 not in seen and p1 != nullid:
+                err(lr, _("unknown parent 1 %s of %s") %
+                    (short(p1), short(node)), f)
+            if p2 not in seen and p2 != nullid:
+                err(lr, _("unknown parent 2 %s of %s") %
+                    (short(p2), short(node)), f)
+        except Exception, inst:
+            exc(lr, _("checking parents of %s") % short(node), inst, f)
+
+        if node in seen:
+            err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f)
+        seen[n] = i
+        return lr
+
+    revlogv1 = cl.version != revlog.REVLOGV0
+    if ui.verbose or not revlogv1:
+        ui.status(_("repository uses revlog format %d\n") %
                        (revlogv1 and 1 or 0))
 
-    havecl = havemf = 1
-    seen = {}
-    repo.ui.status(_("checking changesets\n"))
-    if repo.changelog.count() == 0 and repo.manifest.count() > 1:
-        havecl = 0
-        err(0, _("empty or missing 00changelog.i"))
-    else:
-        checksize(repo.changelog, "changelog")
-
-    for i in xrange(repo.changelog.count()):
-        changesets += 1
-        n = repo.changelog.node(i)
-        l = repo.changelog.linkrev(n)
-        if l != i:
-            err(i, _("incorrect link (%d) for changeset") %(l))
-        if n in seen:
-            err(i, _("duplicates changeset at revision %d") % seen[n])
-        seen[n] = i
-
-        for p in repo.changelog.parents(n):
-            if p not in repo.changelog.nodemap:
-                err(i, _("changeset has unknown parent %s") % short(p))
-        try:
-            changes = repo.changelog.read(n)
-        except KeyboardInterrupt:
-            repo.ui.warn(_("interrupted"))
-            raise
-        except Exception, inst:
-            err(i, _("unpacking changeset: %s") % inst)
-            continue
+    havecl = len(cl) > 0
+    havemf = len(mf) > 0
 
-        if changes[0] not in neededmanifests:
-            neededmanifests[changes[0]] = i
-
-        for f in changes[3]:
-            filelinkrevs.setdefault(f, []).append(i)
-
+    ui.status(_("checking changesets\n"))
     seen = {}
-    repo.ui.status(_("checking manifests\n"))
-    if repo.changelog.count() > 0 and repo.manifest.count() == 0:
-        havemf = 0
-        err(0, _("empty or missing 00manifest.i"))
-    else:
-        checkversion(repo.manifest, "manifest")
-        checksize(repo.manifest, "manifest")
-
-    for i in xrange(repo.manifest.count()):
-        n = repo.manifest.node(i)
-        l = repo.manifest.linkrev(n)
-
-        if l < 0 or (havecl and l >= repo.changelog.count()):
-            err(None, _("bad link (%d) at manifest revision %d") % (l, i))
-
-        if n in neededmanifests:
-            del neededmanifests[n]
-
-        if n in seen:
-            err(l, _("duplicates manifest from %d") % seen[n])
-
-        seen[n] = l
-
-        for p in repo.manifest.parents(n):
-            if p not in repo.manifest.nodemap:
-                err(l, _("manifest has unknown parent %s") % short(p))
+    checklog(cl, "changelog")
+    for i in repo:
+        n = cl.node(i)
+        checkentry(cl, i, n, seen, [i], "changelog")
 
         try:
-            for f, fn in repo.manifest.readdelta(n).iteritems():
-                fns = filenodes.setdefault(f, {})
-                if fn not in fns:
-                    fns[fn] = n
-        except KeyboardInterrupt:
-            repo.ui.warn(_("interrupted"))
-            raise
+            changes = cl.read(n)
+            mflinkrevs.setdefault(changes[0], []).append(i)
+            for f in changes[3]:
+                filelinkrevs.setdefault(f, []).append(i)
         except Exception, inst:
-            err(l, _("reading manifest delta: %s") % inst)
-            continue
-
-    repo.ui.status(_("crosschecking files in changesets and manifests\n"))
+            exc(i, _("unpacking changeset %s") % short(n), inst)
 
-    if havemf > 0:
-        nm = [(c, m) for m, c in neededmanifests.items()]
-        nm.sort()
-        for c, m in nm:
-            err(c, _("changeset refers to unknown manifest %s") % short(m))
-        del neededmanifests, nm
+    ui.status(_("checking manifests\n"))
+    seen = {}
+    checklog(mf, "manifest")
+    for i in mf:
+        n = mf.node(i)
+        lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
+        if n in mflinkrevs:
+            del mflinkrevs[n]
 
-    if havecl:
-        fl = filenodes.keys()
-        fl.sort()
-        for f in fl:
-            if f not in filelinkrevs:
-                lrs = [repo.manifest.linkrev(n) for n in filenodes[f]]
-                lrs.sort()
-                err(lrs[0], _("in manifest but not in changeset"), f)
-        del fl
+        try:
+            for f, fn in mf.readdelta(n).iteritems():
+                if not f:
+                    err(lr, _("file without name in manifest"))
+                elif f != "/dev/null":
+                    fns = filenodes.setdefault(f, {})
+                    if fn not in fns:
+                        fns[fn] = n
+        except Exception, inst:
+            exc(lr, _("reading manifest delta %s") % short(n), inst)
+
+    ui.status(_("crosschecking files in changesets and manifests\n"))
 
     if havemf:
-        fl = filelinkrevs.keys()
-        fl.sort()
-        for f in fl:
+        for c, m in util.sort([(c, m) for m in mflinkrevs for c in mflinkrevs[m]]):
+            err(c, _("changeset refers to unknown manifest %s") % short(m))
+        del mflinkrevs
+
+        for f in util.sort(filelinkrevs):
             if f not in filenodes:
                 lr = filelinkrevs[f][0]
                 err(lr, _("in changeset but not in manifest"), f)
-        del fl
 
-    repo.ui.status(_("checking files\n"))
-    ff = dict.fromkeys(filenodes.keys() + filelinkrevs.keys()).keys()
-    ff.sort()
-    for f in ff:
-        if f == "/dev/null":
-            continue
-        files += 1
+    if havecl:
+        for f in util.sort(filenodes):
+            if f not in filelinkrevs:
+                try:
+                    lr = min([repo.file(f).linkrev(n) for n in filenodes[f]])
+                except:
+                    lr = None
+                err(lr, _("in manifest but not in changeset"), f)
+
+    ui.status(_("checking files\n"))
+
+    storefiles = {}
+    for f, f2, size in repo.store.datafiles():
         if not f:
-            lr = filelinkrevs[f][0]
-            err(lr, _("file without name in manifest"))
-            continue
+            err(None, _("cannot decode filename '%s'") % f2)
+        elif size > 0:
+            storefiles[f] = True
+
+    files = util.sort(util.unique(filenodes.keys() + filelinkrevs.keys()))
+    for f in files:
         fl = repo.file(f)
-        checkversion(fl, f)
-        checksize(fl, f)
 
-        if fl.count() == 0:
-            err(filelinkrevs[f][0], _("empty or missing revlog"), f)
-            continue
+        for ff in fl.files():
+            try:
+                del storefiles[ff]
+            except KeyError:
+                err(0, _("missing revlog!"), ff)
 
+        checklog(fl, f)
         seen = {}
-        nodes = {nullid: 1}
-        for i in xrange(fl.count()):
+        for i in fl:
             revisions += 1
             n = fl.node(i)
-            flr = fl.linkrev(n)
-
-            if flr < 0 or (havecl and flr not in filelinkrevs.get(f, [])):
-                if flr < 0 or flr >= repo.changelog.count():
-                    err(None, _("rev %d point to nonexistent changeset %d")
-                        % (i, flr), f)
-                else:
-                    err(None, _("rev %d points to unexpected changeset %d")
-                        % (i, flr), f)
-                if f in filelinkrevs:
-                    warn(_(" (expected %s)") % filelinkrevs[f][0])
-                flr = None # can't be trusted
-            else:
-                if havecl:
-                    filelinkrevs[f].remove(flr)
-
-            if n in seen:
-                err(flr, _("duplicate revision %d") % i, f)
+            lr = checkentry(fl, i, n, seen, filelinkrevs.get(f, []), f)
             if f in filenodes:
                 if havemf and n not in filenodes[f]:
-                    err(flr, _("%s not in manifests") % (short(n)), f)
+                    err(lr, _("%s not in manifests") % (short(n)), f)
                 else:
                     del filenodes[f][n]
 
             # verify contents
             try:
                 t = fl.read(n)
-            except KeyboardInterrupt:
-                repo.ui.warn(_("interrupted"))
-                raise
+                rp = fl.renamed(n)
+                if len(t) != fl.size(i):
+                    if not fl._readmeta(n): # ancient copy?
+                        err(lr, _("unpacked size is %s, %s expected") %
+                            (len(t), fl.size(i)), f)
             except Exception, inst:
-                err(flr, _("unpacking %s: %s") % (short(n), inst), f)
-
-            # verify parents
-            try:
-                (p1, p2) = fl.parents(n)
-                if p1 not in nodes:
-                    err(flr, _("unknown parent 1 %s of %s") %
-                        (short(p1), short(n)), f)
-                if p2 not in nodes:
-                    err(flr, _("unknown parent 2 %s of %s") %
-                            (short(p2), short(p1)), f)
-            except KeyboardInterrupt:
-                repo.ui.warn(_("interrupted"))
-                raise
-            except Exception, inst:
-                err(flr, _("checking parents of %s: %s") % (short(n), inst), f)
-            nodes[n] = 1
+                exc(lr, _("unpacking %s") % short(n), inst, f)
 
             # check renames
             try:
-                rp = fl.renamed(n)
                 if rp:
                     fl2 = repo.file(rp[0])
-                    if fl2.count() == 0:
-                        err(flr, _("empty or missing copy source revlog %s:%s")
+                    if not len(fl2):
+                        err(lr, _("empty or missing copy source revlog %s:%s")
                             % (rp[0], short(rp[1])), f)
                     elif rp[1] == nullid:
-                        err(flr, _("copy source revision is nullid %s:%s")
-                            % (rp[0], short(rp[1])), f)
+                        warn(_("warning: %s@%s: copy source revision is nullid %s:%s")
+                            % (f, lr, rp[0], short(rp[1])))
                     else:
                         rev = fl2.rev(rp[1])
-            except KeyboardInterrupt:
-                repo.ui.warn(_("interrupted"))
-                raise
             except Exception, inst:
-                err(flr, _("checking rename of %s: %s") %
-                    (short(n), inst), f)
+                exc(lr, _("checking rename of %s") % short(n), inst, f)
 
         # cross-check
         if f in filenodes:
-            fns = [(repo.manifest.linkrev(filenodes[f][n]), n)
-                   for n in filenodes[f]]
-            fns.sort()
-            for lr, node in fns:
+            fns = [(mf.linkrev(l), n) for n,l in filenodes[f].items()]
+            for lr, node in util.sort(fns):
                 err(lr, _("%s in manifests not found") % short(node), f)
 
-    repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
-                   (files, changesets, revisions))
+    for f in storefiles:
+        warn(_("warning: orphan revlog '%s'") % f)
 
+    ui.status(_("%d files, %d changesets, %d total revisions\n") %
+                   (len(files), len(cl), revisions))
     if warnings[0]:
-        repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
+        ui.warn(_("%d warnings encountered!\n") % warnings[0])
     if errors[0]:
-        repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
-        if firstbad[0]:
-            repo.ui.warn(_("(first damaged changeset appears to be %d)\n")
-                         % firstbad[0])
+        ui.warn(_("%d integrity errors encountered!\n") % errors[0])
+        if badrevs:
+            ui.warn(_("(first damaged changeset appears to be %d)\n")
+                    % min(badrevs))
         return 1
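
The verification pass now funnels changelog, manifest, and filelog entries through a single checkentry() helper: each entry's linkrev is checked against the changelog revisions that claim it, both parents must already have been seen, and duplicates are reported. The whole pass can also be driven programmatically (a sketch; the repository path is illustrative):

    # Sketch: running the integrity check from Python.
    from mercurial import hg, ui, verify

    repo = hg.repository(ui.ui(), '/path/to/repo')
    ret = verify.verify(repo)   # prints progress and problems; returns 1 on integrity errors
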
--- a/setup.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/setup.py	Wed Sep 17 11:34:37 2008 +0200
@@ -7,7 +7,7 @@
 
 import sys
 if not hasattr(sys, 'version_info') or sys.version_info < (2, 3, 0, 'final'):
-    raise SystemExit, "Mercurial requires python 2.3 or later."
+    raise SystemExit("Mercurial requires python 2.3 or later.")
 
 import os
 import shutil
@@ -19,6 +19,9 @@
 import mercurial.version
 
 extra = {}
+scripts = ['hg']
+if os.name == 'nt':
+    scripts.append('contrib/win32/hg.bat')
 
 # simplified version of distutils.ccompiler.CCompiler.has_function
 # that actually removes its temporary files.
@@ -88,13 +91,15 @@
 cmdclass = {'install_data': install_package_data}
 
 ext_modules=[
-    Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
+    Extension('mercurial.base85', ['mercurial/base85.c']),
     Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
-    Extension('mercurial.base85', ['mercurial/base85.c']),
-    Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'])
+    Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
+    Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
+    Extension('mercurial.parsers', ['mercurial/parsers.c']),
     ]
 
-packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert']
+packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
+            'hgext.highlight']
 
 try:
     import posix
@@ -118,7 +123,7 @@
       url='http://selenic.com/mercurial',
       description='Scalable distributed SCM',
       license='GNU GPL',
-      scripts=['hg'],
+      scripts=scripts,
       packages=packages,
       ext_modules=ext_modules,
       data_files=[(os.path.join('mercurial', root),
--- a/templates/changelog.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/changelog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -9,6 +9,7 @@
 
 <div class="buttons">
 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#file/#node|short#{sessionvars%urlparameter}">files</a>
 #archives%archiveentry#
--- a/templates/changeset.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/changeset.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -6,6 +6,7 @@
 <div class="buttons">
 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#file/#node|short#{sessionvars%urlparameter}">files</a>
 <a href="#url#raw-rev/#node|short#">raw</a>
@@ -28,10 +29,12 @@
 </tr>
 <tr>
  <th class="date">date:</th>
- <td class="date">#date|date# (#date|age# ago)</td></tr>
+ <td class="date">#date|date# (#date|age# ago)</td>
+</tr>
 <tr>
  <th class="files">files:</th>
- <td class="files">#files#</td></tr>
+ <td class="files">#files#</td>
+</tr>
 <tr>
  <th class="description">description:</th>
  <td class="description">#desc|strip|escape|addbreaks#</td>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/changeset.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,72 @@
+{header}
+<title>{repo|escape}: {node|short}</title>
+</head>
+<body>
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+ <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
+ <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
+ <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+<ul>
+ <li class="active">changeset</li>
+ <li><a href="{url}raw-rev/{node|short}{sessionvars%urlparameter}">raw</a></li>
+ <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">browse</a></li>
+</ul>
+<ul>
+ {archives%archiveentry}
+</ul>
+</div>
+
+<div class="main">
+
+<h2>{repo|escape}</h2>
+<h3>changeset {rev}:{node|short} {changesettag}</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="description">{desc|strip|escape|addbreaks}</div>
+
+<table id="changesetEntry">
+<tr>
+ <th class="author">author</th>
+ <td class="author">{author|obfuscate}</td>
+</tr>
+<tr>
+ <th class="date">date</th>
+ <td class="date">{date|date} ({date|age} ago)</td></tr>
+<tr>
+ <th class="author">parents</th>
+ <td class="author">{parent%changesetparent}</td>
+</tr>
+<tr>
+ <th class="author">children</th>
+ <td class="author">{child%changesetchild}</td>
+</tr>
+<tr>
+ <th class="files">files</th>
+ <td class="files">{files}</td>
+</tr>
+</table>
+
+<div class="overflow">
+<table class="bigtable">
+<tr>
+ <th class="lineno">line</th>
+ <th class="source">diff</th>
+</tr>
+</table>
+{diff}
+</div>
+
+</div>
+</div>
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/error.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,40 @@
+{header}
+<title>{repo|escape}: error</title>
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}log{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+</div>
+
+<div class="main">
+
+<h2>{repo|escape}</h2>
+<h3>error</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="description">
+<p>
+An error occurred while processing your request:
+</p>
+<p>
+{error|escape}
+</p>
+</div>
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/fileannotate.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,77 @@
+{header}
+<title>{repo|escape}: {file|escape} annotate</title>
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+
+<ul>
+<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
+<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
+</ul>
+<ul>
+<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
+<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
+<li class="active">annotate</li>
+<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
+<li><a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a></li>
+</ul>
+</div>
+
+<div class="main">
+<h2>{repo|escape}</h2>
+<h3>annotate {file|escape} @ {rev}:{node|short}</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="description">{desc|strip|escape|addbreaks}</div>
+
+<table id="changesetEntry">
+<tr>
+ <th class="author">author</th>
+ <td class="author">{author|obfuscate}</td>
+</tr>
+<tr>
+ <th class="date">date</th>
+ <td class="date">{date|date} ({date|age} ago)</td>
+</tr>
+<tr>
+ <th class="author">parents</th>
+ <td class="author">{parent%filerevparent}</td>
+</tr>
+<tr>
+ <th class="author">children</th>
+ <td class="author">{child%filerevchild}</td>
+</tr>
+{changesettag}
+</table>
+
+<br/>
+
+<div class="overflow">
+<table class="bigtable">
+<tr>
+ <th class="annotate">rev</th>
+ <th class="lineno">line</th>
+ <th class="line">source</th>
+</tr>
+{annotate%annotateline}
+</table>
+</div>
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/filediff.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,74 @@
+{header}
+<title>{repo|escape}: {file|escape} diff</title>
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+<ul>
+<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
+<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
+</ul>
+<ul>
+<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
+<li class="active">diff</li>
+<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
+<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
+<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
+</ul>
+</div>
+
+<div class="main">
+<h2>{repo|escape}</h2>
+<h3>diff {file|escape} @ {rev}:{node|short}</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="description">{desc|strip|escape|addbreaks}</div>
+
+<table id="changesetEntry">
+<tr>
+ <th>author</th>
+ <td>{author|obfuscate}</td>
+</tr>
+<tr>
+ <th>date</th>
+ <td>{date|date} ({date|age} ago)</td>
+</tr>
+<tr>
+ <th>parents</th>
+ <td>{parent%filerevparent}</td>
+</tr>
+<tr>
+ <th>children</th>
+ <td>{child%filerevchild}</td>
+</tr>
+{changesettag}
+</table>
+
+<div class="overflow">
+<table class="bigtable">
+<tr>
+ <th class="lineno">line</th>
+ <th class="source">diff</th>
+</tr>
+</table>
+
+{diff}
+</div>
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/filelog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,58 @@
+{header}
+<title>{repo|escape}: {file|escape} history</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log/tip/{file|urlescape}" title="Atom feed for {repo|escape}:{file}">
+<link rel="alternate" type="application/rss+xml"
+   href="{url}rss-log/tip/{file|urlescape}" title="RSS feed for {repo|escape}:{file}">
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+<ul>
+<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
+<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
+</ul>
+<ul>
+<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
+<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
+<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
+<li class="active">file log</li>
+<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
+</ul>
+</div>
+
+<div class="main">
+
+<h2>{repo|escape}</h2>
+<h3>log {file|escape}</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="navigate">{nav%filenaventry}</div>
+
+<table class="bigtable">
+ <tr> 
+  <th class="age">age</th>
+  <th class="author">author</th>
+  <th class="description">description</th>
+ </tr>
+{entries%filelogentry}
+</table>
+
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/filelogentry.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,5 @@
+ <tr class="parity{parity}">
+  <td class="age">{date|age}</td>
+  <td class="author">{author|person}</td>
+  <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape}</a></td>
+ </tr>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/filerevision.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,74 @@
+{header}
+<title>{repo|escape}: {node|short} {file|escape}</title>
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+<ul>
+<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
+<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
+</ul>
+<ul>
+<li class="active">file</li>
+<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
+<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
+<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
+<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
+</ul>
+</div>
+
+<div class="main">
+
+<h2>{repo|escape}</h2>
+<h3>view {file|escape} @ {rev}:{node|short}</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="description">{desc|strip|escape|addbreaks}</div>
+
+<table id="changesetEntry">
+<tr>
+ <th class="author">author</th>
+ <td class="author">{author|obfuscate}</td>
+</tr>
+<tr>
+ <th class="date">date</th>
+ <td class="date">{date|date} ({date|age} ago)</td>
+</tr>
+<tr>
+ <th class="author">parents</th>
+ <td class="author">{parent%filerevparent}</td>
+</tr>
+<tr>
+ <th class="author">children</th>
+ <td class="author">{child%filerevchild}</td>
+</tr>
+{changesettag}
+</table>
+
+<div class="overflow">
+<table class="bigtable">
+<tr>
+ <th class="lineno">line</th>
+ <th class="source">source</th>
+</tr>
+{text%fileline}
+</table>
+</div>
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/footer.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,4 @@
+{motd}
+
+</body>
+</html>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/graph.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,113 @@
+{header}
+<title>{repo|escape}: revision graph</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for {repo|escape}: log">
+<link rel="alternate" type="application/rss+xml"
+   href="{url}rss-log" title="RSS feed for {repo|escape}: log">
+<!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]-->
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
+<li class="active">graph</li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+<ul>
+<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
+<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
+</ul>
+</div>
+
+<div class="main">
+<h2>{repo|escape}</h2>
+<h3>graph</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="navigate">
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a>
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a>
+| {changenav%navgraphentry}
+</div>
+
+<div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div>
+
+<div id="wrapper">
+<ul id="nodebgs"></ul>
+<canvas id="graph" width="224" height="{canvasheight}"></canvas>
+<ul id="graphnodes"></ul>
+</div>
+
+<script type="text/javascript" src="{staticurl}graph.js"></script>
+<script type="text/javascript">
+<!-- hide script content
+
+document.getElementById('noscript').style.display = 'none';
+
+var data = {jsdata|json};
+var graph = new Graph();
+graph.scale({bg_height});
+
+graph.edge = function(x0, y0, x1, y1, color) {
+	
+	this.setColor(color, 0.0, 0.65);
+	this.ctx.beginPath();
+	this.ctx.moveTo(x0, y0);
+	this.ctx.lineTo(x1, y1);
+	this.ctx.stroke();
+	
+}
+
+var revlink = '<li style="_STYLE"><span class="desc">';
+revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
+revlink += '</span><span class="tag">_TAGS</span>';
+revlink += '<span class="info">_DATE ago, by _USER</span></li>';
+
+graph.vertex = function(x, y, color, parity, cur) {
+	
+	this.ctx.beginPath();
+	color = this.setColor(color, 0.25, 0.75);
+	this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
+	this.ctx.fill();
+	
+	var bg = '<li class="bg parity' + parity + '"></li>';
+	var left = (this.columns + 1) * this.bg_height;
+	var nstyle = 'padding-left: ' + left + 'px;';
+	var item = revlink.replace(/_STYLE/, nstyle);
+	item = item.replace(/_PARITY/, 'parity' + parity);
+	item = item.replace(/_NODEID/, cur[0]);
+	item = item.replace(/_NODEID/, cur[0]);
+	item = item.replace(/_DESC/, cur[3]);
+	item = item.replace(/_USER/, cur[4]);
+	item = item.replace(/_DATE/, cur[5]);
+	item = item.replace(/_TAGS/, cur[7].join('&nbsp; '));
+	
+	return [bg, item];
+	
+}
+
+graph.render(data);
+
+// stop hiding script -->
+</script>
+
+<div class="navigate">
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a>
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a>
+| {changenav%navgraphentry}
+</div>
+
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/header.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,7 @@
+<!-- quirksmode -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<link rel="icon" href="{staticurl}hgicon.png" type="image/png">
+<meta name="robots" content="index, nofollow" />
+<link rel="stylesheet" href="{staticurl}style-coal.css" type="text/css" />
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/index.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,26 @@
+{header}
+<title>Mercurial repositories index</title>
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<div class="main">
+<h2>Mercurial Repositories</h2>
+
+<table class="bigtable">
+    <tr>
+        <th><a href="?sort={sort_name}">Name</a></th>
+        <th><a href="?sort={sort_description}">Description</a></th>
+        <th><a href="?sort={sort_contact}">Contact</a></th>
+        <th><a href="?sort={sort_lastchange}">Last change</a></th>
+        <th>&nbsp;</th>
+    </tr>
+    {entries%indexentry}
+</table>
+</div>
+</div>
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/manifest.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,52 @@
+{header}
+<title>{repo|escape}: {node|short} {path|escape}</title>
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+<ul>
+<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
+<li class="active">browse</li>
+</ul>
+<ul>
+{archives%archiveentry}
+</ul>
+</div>
+
+<div class="main">
+
+<h2>{repo|escape}</h2>
+<h3>directory {path|escape} @ {rev}:{node|short} {tags%changelogtag}</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<table class="bigtable">
+<tr>
+  <th class="name">name</th>
+  <th class="size">size</th>
+  <th class="permissions">permissions</th>
+</tr>
+<tr class="fileline parity{upparity}">
+  <td class="name"><a href="{url}file/{node|short}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td>
+  <td class="size"></td>
+  <td class="permissions">drwxr-xr-x</td>
+</tr>
+{dentries%direntry}
+{fentries%fileentry}
+</table>
+</div>
+</div>
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/map	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,71 @@
+default = 'shortlog'
+
+mimetype = 'text/html; charset={encoding}'
+header = header.tmpl
+footer = footer.tmpl
+search = search.tmpl
+
+changelog = shortlog.tmpl
+shortlog = shortlog.tmpl
+shortlogentry = shortlogentry.tmpl
+graph = graph.tmpl
+
+naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
+filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> '
+filenodelink = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> '
+fileellipses = '...'
+changelogentry = shortlogentry.tmpl
+searchentry = shortlogentry.tmpl
+changeset = changeset.tmpl
+manifest = manifest.tmpl
+
+direntry = '<tr class="fileline parity{parity}"><td class="name"><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}"><img src="{staticurl}coal-folder.png"> {basename|escape}/</a></td><td class="size"></td><td class="permissions">drwxr-xr-x</td></tr>'
+fileentry = '<tr class="fileline parity{parity}"><td class="filename"><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l1"><img src="{staticurl}coal-file.png"> {basename|escape}</a></td><td class="size">{size}</td><td class="permissions">{permissions|permissions}</td></tr>'
+
+filerevision = filerevision.tmpl
+fileannotate = fileannotate.tmpl
+filediff = filediff.tmpl
+filelog = filelog.tmpl
+fileline = '<tr class="parity{parity}"><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>'
+filelogentry = filelogentry.tmpl
+
+annotateline = '<tr class="parity{parity}"><td class="annotate"><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}" title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a></td><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>'
+
+diffblock = '<table class="bigtable parity{parity}">{lines}</table>'
+difflineplus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source plusline">{line|escape}</td></tr>'
+difflineminus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source minusline">{line|escape}</td></tr>'
+difflineat = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source atline">{line|escape}</td></tr>'
+diffline = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>'
+
+changelogparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+
+changesetparent = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
+
+filerevparent = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> '
+filerevchild = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> '
+
+filerename = '{file|escape}@'
+filelogrename = '<tr><th>base:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}@{node|short}</a></td></tr>'
+fileannotateparent = '<tr><td class="metatag">parent:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
+changesetchild = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>'
+changelogchild = '<tr><th class="child">child</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+fileannotatechild = '<tr><td class="metatag">child:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+tags = tags.tmpl
+tagentry = '<tr class="tagEntry parity{parity}"><td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a></td><td class="node">{node|short}</td></tr>'
+changelogtag = '<span class="tag">{name|escape}</span> '
+changesettag = '<span class="tag">{tag|escape}</span> '
+filediffparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+filelogparent = '<tr><th>parent {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+filediffchild = '<tr><th class="child">child {rev}:</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+filelogchild = '<tr><th>child {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+indexentry = '<tr class="parity{parity}"><td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td><td>{description}</td><td>{contact|obfuscate}</td><td class="age">{lastchange|age} ago</td><td class="indexlinks">{archives%indexarchiveentry}</td></tr>\n'
+indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}">&nbsp;&darr;{type|escape}</a>'
+index = index.tmpl
+archiveentry = '<li><a href="{url}archive/{node|short}{extension|urlescape}">{type|escape}</a></li>'
+notfound = notfound.tmpl
+error = error.tmpl
+urlparameter = '{separator}{name}={value|urlescape}'
+hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/notfound.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,12 @@
+{header}
+<title>Mercurial repository not found</title>
+</head>
+<body>
+
+<h2>Mercurial repository not found</h2>
+
+The specified repository "{repo|escape}" is unknown, sorry.
+
+Please go back to the main repository list page.
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/search.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,41 @@
+{header}
+<title>{repo|escape}: searching for {query|escape}</title>
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+</div>
+
+<div class="main">
+
+<h2>{repo|escape}</h2>
+<h3>searching for '{query|escape}'</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<table class="bigtable">
+ <tr> 
+  <th class="age">age</th>
+  <th class="author">author</th>
+  <th class="description">description</th>
+ </tr>
+{entries}
+</table>
+
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/shortlog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,55 @@
+{header}
+<title>{repo|escape}: log</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for {repo|escape}">
+<link rel="alternate" type="application/rss+xml"
+   href="{url}rss-log" title="RSS feed for {repo|escape}">
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li class="active">log</li>
+<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
+<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
+</ul>
+<ul>
+<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
+<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
+</ul>
+<ul>
+{archives%archiveentry}
+</ul>
+</div>
+
+<div class="main">
+
+<h2>{repo|escape}</h2>
+<h3>log</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<div class="navigate">rev {rev}: {changenav%navshortentry}</div>
+
+<table class="bigtable">
+ <tr> 
+  <th class="age">age</th>
+  <th class="author">author</th>
+  <th class="description">description</th>
+ </tr>
+{entries%shortlogentry}
+</table>
+
+<div class="navigate">rev {rev}: {changenav%navshortentry}</div>
+</div>
+</div>
+
+{footer}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/shortlogentry.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,5 @@
+ <tr class="parity{parity}">
+  <td class="age">{date|age}</td>
+  <td class="author">{author|person}</td>
+  <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape}</a>{tags%changelogtag}</td>
+ </tr>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/coal/tags.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,42 @@
+{header}
+<title>{repo|escape}: tags</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-tags" title="Atom feed for {repo|escape}: tags">
+<link rel="alternate" type="application/rss+xml"
+   href="{url}rss-tags" title="RSS feed for {repo|escape}: tags">
+</head>
+<body>
+
+<div class="container">
+<div class="menu">
+<div class="logo">
+<a href="http://www.selenic.com/mercurial/">
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
+</div>
+<ul>
+<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
+<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li>
+<li class="active">tags</li>
+</ul>
+</div>
+
+<div class="main">
+<h2>{repo|escape}</h2>
+<h3>tags</h3>
+
+<form class="search" action="{url}log">
+{sessionvars%hiddenformentry}
+<p><input name="rev" id="search1" type="text" size="30"></p>
+</form>
+
+<table class="bigtable">
+<tr>
+ <th>tag</th>
+ <th>node</th>
+</tr>
+{entries%tagentry}
+</table>
+</div>
+</div>
+
+{footer}
--- a/templates/fileannotate.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/fileannotate.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -6,6 +6,7 @@
 <div class="buttons">
 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#rev/#node|short#{sessionvars%urlparameter}">changeset</a>
 <a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">files</a>
@@ -27,10 +28,12 @@
  <td>#author|obfuscate#</td></tr>
 <tr>
  <td class="metatag">date:</td>
- <td>#date|date# (#date|age# ago)</td></tr>
+ <td>#date|date# (#date|age# ago)</td>
+</tr>
 <tr>
  <td class="metatag">permissions:</td>
- <td>#permissions|permissions#</td></tr>
+ <td>#permissions|permissions#</td>
+</tr>
 <tr>
   <td class="metatag">description:</td>
   <td>{desc|strip|escape|addbreaks}</td>
--- a/templates/filediff.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/filediff.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -6,6 +6,7 @@
 <div class="buttons">
 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#rev/#node|short#{sessionvars%urlparameter}">changeset</a>
 <a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>
--- a/templates/filelog.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/filelog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -5,12 +5,12 @@
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-log/tip/#file|urlescape#" title="RSS feed for #repo|escape#:#file#">
 </head>
-</head>
 <body>
 
 <div class="buttons">
 <a href="#url#log{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>
 <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a>
--- a/templates/filerevision.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/filerevision.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -6,6 +6,7 @@
 <div class="buttons">
 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#rev/#node|short#{sessionvars%urlparameter}">changeset</a>
 <a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">files</a>
--- a/templates/gitweb/changelog.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/changelog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -19,7 +19,12 @@
 </form>
 
 <div class="page_nav">
-<a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | changelog | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a>#archives%archiveentry#
+<a href="{url}summary{sessionvars%urlparameter}">summary</a> |
+<a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> |
+changelog |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
+<a href="{url}tags{sessionvars%urlparameter}">tags</a> |
+<a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a>#archives%archiveentry#
 <br/>
 #changenav%naventry#<br/>
 </div>
--- a/templates/gitweb/changeset.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/changeset.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -12,7 +12,7 @@
 </div>
 
 <div class="page_nav">
-<a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a> | changeset | <a href="{url}raw-rev/#node|short#">raw</a> #archives%archiveentry#<br/>
+<a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | <a href="{url}graph{sessionvars%urlparameter}">graph</a> | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a> | changeset | <a href="{url}raw-rev/#node|short#">raw</a> #archives%archiveentry#<br/>
 </div>
 
 <div>
--- a/templates/gitweb/fileannotate.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/fileannotate.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -15,6 +15,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">files</a> |
 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
--- a/templates/gitweb/filediff.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/filediff.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -15,6 +15,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> |
 <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
--- a/templates/gitweb/filelog.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/filelog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -15,6 +15,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">file</a> |
 revisions |
--- a/templates/gitweb/filerevision.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/filerevision.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -15,6 +15,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">files</a> |
 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/gitweb/graph.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,121 @@
+#header#
+<title>#repo|escape#: Graph</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#"/>
+<link rel="alternate" type="application/rss+xml"
+   href="{url}rss-log" title="RSS feed for #repo|escape#"/>
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / graph
+</div>
+
+<form action="{url}log">
+{sessionvars%hiddenformentry}
+<div class="search">
+<input type="text" name="rev"  />
+</div>
+</form>
+<div class="page_nav">
+<a href="{url}summary{sessionvars%urlparameter}">summary</a> |
+<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
+<a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> |
+graph |
+<a href="{url}tags{sessionvars%urlparameter}">tags</a> |
+<a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a>
+<br/>
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a>
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a>
+| #changenav%navgraphentry#<br/>
+</div>
+
+<div class="title">&nbsp;</div>
+
+<div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div>
+
+<div id="wrapper">
+<ul id="nodebgs"></ul>
+<canvas id="graph" width="224" height="#canvasheight#"></canvas>
+<ul id="graphnodes"></ul>
+</div>
+
+<script type="text/javascript" src="#staticurl#graph.js"></script>
+<script type="text/javascript">
+<!-- hide script content
+
+document.getElementById('noscript').style.display = 'none';
+
+var data = {jsdata|json};
+var graph = new Graph();
+graph.scale({bg_height});
+
+graph.edge = function(x0, y0, x1, y1, color) {
+	
+	this.setColor(color, 0.0, 0.65);
+	this.ctx.beginPath();
+	this.ctx.moveTo(x0, y0);
+	this.ctx.lineTo(x1, y1);
+	this.ctx.stroke();
+	
+}
+
+var revlink = '<li style="_STYLE"><span class="desc">';
+revlink += '<a class="list" href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID"><b>_DESC</b></a>';
+revlink += '</span> _TAGS';
+revlink += '<span class="info">_DATE ago, by _USER</span></li>';
+
+graph.vertex = function(x, y, color, parity, cur) {
+	
+	this.ctx.beginPath();
+	color = this.setColor(color, 0.25, 0.75);
+	this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
+	this.ctx.fill();
+	
+	var bg = '<li class="bg parity' + parity + '"></li>';
+	var left = (this.columns + 1) * this.bg_height;
+	var nstyle = 'padding-left: ' + left + 'px;';
+	var item = revlink.replace(/_STYLE/, nstyle);
+	item = item.replace(/_PARITY/, 'parity' + parity);
+	item = item.replace(/_NODEID/, cur[0]);
+	item = item.replace(/_NODEID/, cur[0]);
+	item = item.replace(/_DESC/, cur[3]);
+	item = item.replace(/_USER/, cur[4]);
+	item = item.replace(/_DATE/, cur[5]);
+	
+	var tagspan = '';
+	if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) {
+		tagspan = '<span class="logtags">';
+		if (cur[6][1]) {
+			tagspan += '<span class="branchtag" title="' + cur[6][0] + '">';
+			tagspan += cur[6][0] + '</span> ';
+		} else if (!cur[6][1] && cur[6][0] != 'default') {
+			tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">';
+			tagspan += cur[6][0] + '</span> ';
+		}
+		if (cur[7].length) {
+			for (var t in cur[7]) {
+				var tag = cur[7][t];
+				tagspan += '<span class="tagtag">' + tag + '</span> ';
+			}
+		}
+		tagspan += '</span>';
+	}
+	
+	item = item.replace(/_TAGS/, tagspan);
+	return [bg, item];
+	
+}
+
+graph.render(data);
+
+// stop hiding script -->
+</script>
+
+<div class="page_nav">
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountless}">less</a>
+<a href="{url}graph/{uprev}{sessionvars%urlparameter}?revcount={revcountmore}">more</a>
+| {changenav%navgraphentry}
+</div>
+
+#footer#
--- a/templates/gitweb/manifest.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/manifest.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -15,6 +15,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 files |
 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> #archives%archiveentry#<br/>
--- a/templates/gitweb/map	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/map	Wed Sep 17 11:34:37 2008 +0200
@@ -9,6 +9,7 @@
 notfound = notfound.tmpl
 naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
 navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
 filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
 filedifflink = '<a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#file|escape#</a> '
 filenodelink = '<tr class="parity#parity#"><td><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">#file|escape#</a></td><td></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> | <a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a></td></tr>'
@@ -24,7 +25,7 @@
 filediff = filediff.tmpl
 filelog = filelog.tmpl
 fileline = '<div style="font-family:monospace" class="parity#parity#"><pre><a class="linenr" href="##lineid#" id="#lineid#">#linenumber#</a> #line|escape#</pre></div>'
-annotateline = '<tr style="font-family:monospace" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}#l{targetline}">#author|obfuscate#@#rev#</a></td><td><pre><a class="linenr" href="##lineid#" id="#lineid#">#linenumber#</a></pre></td><td><pre>#line|escape#</pre></td></tr>'
+annotateline = '<tr style="font-family:monospace" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}#l{targetline}" title="{node|short}: {desc|escape|firstline}">#author|user#@#rev#</a></td><td><pre><a class="linenr" href="##lineid#" id="#lineid#">#linenumber#</a></pre></td><td><pre>#line|escape#</pre></td></tr>'
 difflineplus = '<span style="color:#008800;"><a class="linenr" href="##lineid#" id="#lineid#">#linenumber#</a> #line|escape#</span>'
 difflineminus = '<span style="color:#cc0000;"><a class="linenr" href="##lineid#" id="#lineid#">#linenumber#</a> #line|escape#</span>'
 difflineat = '<span style="color:#990099;"><a class="linenr" href="##lineid#" id="#lineid#">#linenumber#</a> #line|escape#</span>'
@@ -50,6 +51,7 @@
 filediffchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
 filelogchild = '<tr><td align="right">child #rev#:&nbsp;</td><td><a href="{url}file{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
 shortlog = shortlog.tmpl
+graph = graph.tmpl
 tagtag = '<span class="tagtag" title="{name}">{name}</span> '
 branchtag = '<span class="branchtag" title="{name}">{name}</span> '
 inbranchtag = '<span class="inbranchtag" title="{name}">{name}</span> '
--- a/templates/gitweb/search.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/search.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -22,6 +22,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a>#archives%archiveentry#
 <br/>
--- a/templates/gitweb/shortlog.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/shortlog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -21,6 +21,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 shortlog |
 <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a>#archives%archiveentry#
 <br/>
--- a/templates/gitweb/summary.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/summary.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -22,6 +22,7 @@
 summary |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a>#archives%archiveentry#
 <br/>
--- a/templates/gitweb/tags.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/gitweb/tags.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -15,6 +15,7 @@
 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
+<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
 tags |
 <a href="{url}file/#node|short#{sessionvars%urlparameter}">files</a>
 <br/>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/graph.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,97 @@
+#header#
+<title>#repo|escape#: graph</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-tags" title="Atom feed for #repo|escape#: tags">
+<link rel="alternate" type="application/rss+xml"
+   href="#url#rss-tags" title="RSS feed for #repo|escape#: tags">
+<!--[if IE]><script type="text/javascript" src="#staticurl#excanvas.js"></script><![endif]-->
+</head>
+<body>
+
+<div class="buttons">
+<a href="#url#log{sessionvars%urlparameter}">changelog</a>
+<a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#tags{sessionvars%urlparameter}">tags</a>
+<a href="#url#file/#node|short#/{sessionvars%urlparameter}">files</a>
+</div>
+
+<h2>graph</h2>
+
+<form action="#url#log">
+{sessionvars%hiddenformentry}
+<p>
+<label for="search1">search:</label>
+<input name="rev" id="search1" type="text" size="30">
+navigate: <small class="navigate">#changenav%navgraphentry#</small>
+</p>
+</form>
+
+<div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div>
+
+<div id="wrapper">
+<ul id="nodebgs"></ul>
+<canvas id="graph" width="224" height="#canvasheight#"></canvas>
+<ul id="graphnodes"></ul>
+</div>
+
+<script type="text/javascript" src="#staticurl#graph.js"></script>
+<script type="text/javascript">
+<!-- hide script content
+
+document.getElementById('noscript').style.display = 'none';
+
+var data = {jsdata|json};
+var graph = new Graph();
+graph.scale({bg_height});
+
+graph.edge = function(x0, y0, x1, y1, color) {
+	
+	this.setColor(color, 0.0, 0.65);
+	this.ctx.beginPath();
+	this.ctx.moveTo(x0, y0);
+	this.ctx.lineTo(x1, y1);
+	this.ctx.stroke();
+	
+}
+
+var revlink = '<li style="_STYLE"><span class="desc">';
+revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
+revlink += '</span><span class="info">_DATE ago, by _USER</span></li>';
+
+graph.vertex = function(x, y, color, parity, cur) {
+	
+	this.ctx.beginPath();
+	color = this.setColor(color, 0.25, 0.75);
+	this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
+	this.ctx.fill();
+	
+	var bg = '<li class="bg parity' + parity + '"></li>';
+	var left = (this.columns + 1) * this.bg_height;
+	var nstyle = 'padding-left: ' + left + 'px;';
+	var item = revlink.replace(/_STYLE/, nstyle);
+	item = item.replace(/_PARITY/, 'parity' + parity);
+	item = item.replace(/_NODEID/, cur[0]);
+	item = item.replace(/_NODEID/, cur[0]);
+	item = item.replace(/_DESC/, cur[3]);
+	item = item.replace(/_USER/, cur[4]);
+	item = item.replace(/_DATE/, cur[5]);
+	
+	return [bg, item];
+	
+}
+
+graph.render(data);
+
+// stop hiding script -->
+</script>
+
+<form action="#url#log">
+{sessionvars%hiddenformentry}
+<p>
+<label for="search1">search:</label>
+<input name="rev" id="search1" type="text" size="30">
+navigate: <small class="navigate">#changenav%navgraphentry#</small>
+</p>
+</form>
+
+#footer#
--- a/templates/index.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/index.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -12,7 +12,7 @@
         <td><a href="?sort=#sort_contact#">Contact</a></td>
         <td><a href="?sort=#sort_lastchange#">Last change</a></td>
         <td>&nbsp;</td>
-    <tr>
+    </tr>
     #entries%indexentry#
 </table>
 
--- a/templates/manifest.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/manifest.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -6,6 +6,7 @@
 <div class="buttons">
 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#rev/#node|short#{sessionvars%urlparameter}">changeset</a>
 #archives%archiveentry#
--- a/templates/map	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/map	Wed Sep 17 11:34:37 2008 +0200
@@ -6,8 +6,10 @@
 changelog = changelog.tmpl
 shortlog = shortlog.tmpl
 shortlogentry = shortlogentry.tmpl
+graph = graph.tmpl
 naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
 navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
 filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
 filedifflink = '<a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#file|escape#</a> '
 filenodelink = '<a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#file|escape#</a> '
@@ -24,7 +26,7 @@
 filelog = filelog.tmpl
 fileline = '<div class="parity#parity#"><a class="lineno" href="##lineid#" id="#lineid#">#linenumber#</a>#line|escape#</div>'
 filelogentry = filelogentry.tmpl
-annotateline = '<tr class="parity#parity#"><td class="annotate"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}#l{targetline}">#author|obfuscate#@#rev#</a></td><td><a class="lineno" href="##lineid#" id="#lineid#">#linenumber#</a></td><td><pre>#line|escape#</pre></td></tr>'
+annotateline = '<tr class="parity#parity#"><td class="annotate"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}#l{targetline}" title="{node|short}: {desc|escape|firstline}">#author|user#@#rev#</a></td><td><a class="lineno" href="##lineid#" id="#lineid#">#linenumber#</a></td><td><pre>#line|escape#</pre></td></tr>'
 difflineplus = '<span class="plusline"><a class="lineno" href="##lineid#" id="#lineid#">#linenumber#</a>#line|escape#</span>'
 difflineminus = '<span class="minusline"><a class="lineno" href="##lineid#" id="#lineid#">#linenumber#</a>#line|escape#</span>'
 difflineat = '<span class="atline"><a class="lineno" href="##lineid#" id="#lineid#">#linenumber#</a>#line|escape#</span>'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/paper/header.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,7 @@
+<!-- quirksmode -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<link rel="icon" href="{staticurl}hgicon.png" type="image/png">
+<meta name="robots" content="index, nofollow" />
+<link rel="stylesheet" href="{staticurl}style-paper.css" type="text/css" />
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/paper/map	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,72 @@
+default = 'shortlog'
+
+mimetype = 'text/html; charset={encoding}'
+header = header.tmpl
+footer = ../coal/footer.tmpl
+search = ../coal/search.tmpl
+
+changelog = ../coal/shortlog.tmpl
+shortlog = ../coal/shortlog.tmpl
+shortlogentry = ../coal/shortlogentry.tmpl
+graph = ../coal/graph.tmpl
+
+naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
+filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
+filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> '
+filenodelink = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> '
+fileellipses = '...'
+changelogentry = ../coal/shortlogentry.tmpl
+searchentry = ../coal/shortlogentry.tmpl
+changeset = ../coal/changeset.tmpl
+manifest = ../coal/manifest.tmpl
+
+direntry = '<tr class="fileline parity{parity}"><td class="name"><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}"><img src="{staticurl}coal-folder.png"> {basename|escape}/</a></td><td class="size"></td><td class="permissions">drwxr-xr-x</td></tr>'
+fileentry = '<tr class="fileline parity{parity}"><td class="filename"><a href="{url}file/{node|short}/{file|urlescape}#l1{sessionvars%urlparameter}"><img src="{staticurl}coal-file.png"> {basename|escape}</a></td><td class="size">{size}</td><td class="permissions">{permissions|permissions}</td></tr>'
+
+filerevision = ../coal/filerevision.tmpl
+fileannotate = ../coal/fileannotate.tmpl
+filediff = ../coal/filediff.tmpl
+filelog = ../coal/filelog.tmpl
+fileline = '<tr class="parity{parity}"><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>'
+filelogentry = ../coal/filelogentry.tmpl
+
+annotateline = '<tr class="parity{parity}"><td class="annotate"><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}" title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a></td><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>'
+
+diffblock = '<table class="bigtable parity{parity}">{lines}</table>'
+difflineplus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source plusline">{line|escape}</td></tr>'
+difflineminus = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source minusline">{line|escape}</td></tr>'
+difflineat = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source atline">{line|escape}</td></tr>'
+diffline = '<tr><td class="lineno"><a href="#{lineid}" id="{lineid}">{linenumber}</a></td><td class="source">{line|escape}</td></tr>'
+
+changelogparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+
+changesetparent = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
+
+filerevparent = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> '
+filerevchild = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> '
+
+filerename = '{file|escape}@'
+filelogrename = '<tr><th>base:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}@{node|short}</a></td></tr>'
+fileannotateparent = '<tr><td class="metatag">parent:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
+changesetchild = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>'
+changelogchild = '<tr><th class="child">child</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+fileannotatechild = '<tr><td class="metatag">child:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+tags = ../coal/tags.tmpl
+tagentry = '<tr class="tagEntry parity{parity}"><td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a></td><td class="node">{node|short}</td></tr>'
+changelogtag = '<tr><th class="tag">tag:</th><td class="tag">{tag|escape}</td></tr>'
+changelogtag = '<span class="tag">{name|escape}</span> '
+changesettag = '<span class="tag">{tag|escape}</span> '
+filediffparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+filelogparent = '<tr><th>parent {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+filediffchild = '<tr><th class="child">child {rev}:</th><td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+filelogchild = '<tr><th>child {rev}:</th><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
+indexentry = '<tr class="parity{parity}"><td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td><td>{description}</td><td>{contact|obfuscate}</td><td class="age">{lastchange|age} ago</td><td class="indexlinks">{archives%indexarchiveentry}</td></tr>\n'
+indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}">&nbsp;&darr;{type|escape}</a>'
+index = ../coal/index.tmpl
+archiveentry = '<li><a href="{url}archive/{node|short}{extension|urlescape}">{type|escape}</a></li>'
+notfound = ../coal/notfound.tmpl
+error = ../coal/error.tmpl
+urlparameter = '{separator}{name}={value|urlescape}'
+hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
--- a/templates/raw/map	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/raw/map	Wed Sep 17 11:34:37 2008 +0200
@@ -13,7 +13,7 @@
 diffblock = '#lines#'
 filediff = filediff.tmpl
 fileannotate = fileannotate.tmpl
-annotateline = '#author#@#rev#: #line#'
+annotateline = '{author|user}@{rev}: {line}'
 manifest = manifest.tmpl
 direntry = 'drwxr-xr-x {basename}\n'
 fileentry = '{permissions|permissions} {size} {basename}\n'
--- a/templates/search.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/search.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -6,6 +6,7 @@
 <div class="buttons">
 <a href="#url#log{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#file/#node|short#{sessionvars%urlparameter}">files</a>
 #archives%archiveentry#
--- a/templates/shortlog.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/shortlog.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -9,6 +9,7 @@
 
 <div class="buttons">
 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
 <a href="#url#file/#node|short#/{sessionvars%urlparameter}">files</a>
 #archives%archiveentry#
Binary file templates/static/background.png has changed
Binary file templates/static/coal-file.png has changed
Binary file templates/static/coal-folder.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/static/excanvas.js	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,19 @@
+if(!window.CanvasRenderingContext2D){(function(){var I=Math,i=I.round,L=I.sin,M=I.cos,m=10,A=m/2,Q={init:function(a){var b=a||document;if(/MSIE/.test(navigator.userAgent)&&!window.opera){var c=this;b.attachEvent("onreadystatechange",function(){c.r(b)})}},r:function(a){if(a.readyState=="complete"){if(!a.namespaces["s"]){a.namespaces.add("g_vml_","urn:schemas-microsoft-com:vml")}var b=a.createStyleSheet();b.cssText="canvas{display:inline-block;overflow:hidden;text-align:left;width:300px;height:150px}g_vml_\\:*{behavior:url(#default#VML)}";
+var c=a.getElementsByTagName("canvas");for(var d=0;d<c.length;d++){if(!c[d].getContext){this.initElement(c[d])}}}},q:function(a){var b=a.outerHTML,c=a.ownerDocument.createElement(b);if(b.slice(-2)!="/>"){var d="/"+a.tagName,e;while((e=a.nextSibling)&&e.tagName!=d){e.removeNode()}if(e){e.removeNode()}}a.parentNode.replaceChild(c,a);return c},initElement:function(a){a=this.q(a);a.getContext=function(){if(this.l){return this.l}return this.l=new K(this)};a.attachEvent("onpropertychange",V);a.attachEvent("onresize",
+W);var b=a.attributes;if(b.width&&b.width.specified){a.style.width=b.width.nodeValue+"px"}else{a.width=a.clientWidth}if(b.height&&b.height.specified){a.style.height=b.height.nodeValue+"px"}else{a.height=a.clientHeight}return a}};function V(a){var b=a.srcElement;switch(a.propertyName){case "width":b.style.width=b.attributes.width.nodeValue+"px";b.getContext().clearRect();break;case "height":b.style.height=b.attributes.height.nodeValue+"px";b.getContext().clearRect();break}}function W(a){var b=a.srcElement;
+if(b.firstChild){b.firstChild.style.width=b.clientWidth+"px";b.firstChild.style.height=b.clientHeight+"px"}}Q.init();var R=[];for(var E=0;E<16;E++){for(var F=0;F<16;F++){R[E*16+F]=E.toString(16)+F.toString(16)}}function J(){return[[1,0,0],[0,1,0],[0,0,1]]}function G(a,b){var c=J();for(var d=0;d<3;d++){for(var e=0;e<3;e++){var g=0;for(var h=0;h<3;h++){g+=a[d][h]*b[h][e]}c[d][e]=g}}return c}function N(a,b){b.fillStyle=a.fillStyle;b.lineCap=a.lineCap;b.lineJoin=a.lineJoin;b.lineWidth=a.lineWidth;b.miterLimit=
+a.miterLimit;b.shadowBlur=a.shadowBlur;b.shadowColor=a.shadowColor;b.shadowOffsetX=a.shadowOffsetX;b.shadowOffsetY=a.shadowOffsetY;b.strokeStyle=a.strokeStyle;b.d=a.d;b.e=a.e}function O(a){var b,c=1;a=String(a);if(a.substring(0,3)=="rgb"){var d=a.indexOf("(",3),e=a.indexOf(")",d+1),g=a.substring(d+1,e).split(",");b="#";for(var h=0;h<3;h++){b+=R[Number(g[h])]}if(g.length==4&&a.substr(3,1)=="a"){c=g[3]}}else{b=a}return[b,c]}function S(a){switch(a){case "butt":return"flat";case "round":return"round";
+case "square":default:return"square"}}function K(a){this.a=J();this.m=[];this.k=[];this.c=[];this.strokeStyle="#000";this.fillStyle="#000";this.lineWidth=1;this.lineJoin="miter";this.lineCap="butt";this.miterLimit=m*1;this.globalAlpha=1;this.canvas=a;var b=a.ownerDocument.createElement("div");b.style.width=a.clientWidth+"px";b.style.height=a.clientHeight+"px";b.style.overflow="hidden";b.style.position="absolute";a.appendChild(b);this.j=b;this.d=1;this.e=1}var j=K.prototype;j.clearRect=function(){this.j.innerHTML=
+"";this.c=[]};j.beginPath=function(){this.c=[]};j.moveTo=function(a,b){this.c.push({type:"moveTo",x:a,y:b});this.f=a;this.g=b};j.lineTo=function(a,b){this.c.push({type:"lineTo",x:a,y:b});this.f=a;this.g=b};j.bezierCurveTo=function(a,b,c,d,e,g){this.c.push({type:"bezierCurveTo",cp1x:a,cp1y:b,cp2x:c,cp2y:d,x:e,y:g});this.f=e;this.g=g};j.quadraticCurveTo=function(a,b,c,d){var e=this.f+0.6666666666666666*(a-this.f),g=this.g+0.6666666666666666*(b-this.g),h=e+(c-this.f)/3,l=g+(d-this.g)/3;this.bezierCurveTo(e,
+g,h,l,c,d)};j.arc=function(a,b,c,d,e,g){c*=m;var h=g?"at":"wa",l=a+M(d)*c-A,n=b+L(d)*c-A,o=a+M(e)*c-A,f=b+L(e)*c-A;if(l==o&&!g){l+=0.125}this.c.push({type:h,x:a,y:b,radius:c,xStart:l,yStart:n,xEnd:o,yEnd:f})};j.rect=function(a,b,c,d){this.moveTo(a,b);this.lineTo(a+c,b);this.lineTo(a+c,b+d);this.lineTo(a,b+d);this.closePath()};j.strokeRect=function(a,b,c,d){this.beginPath();this.moveTo(a,b);this.lineTo(a+c,b);this.lineTo(a+c,b+d);this.lineTo(a,b+d);this.closePath();this.stroke()};j.fillRect=function(a,
+b,c,d){this.beginPath();this.moveTo(a,b);this.lineTo(a+c,b);this.lineTo(a+c,b+d);this.lineTo(a,b+d);this.closePath();this.fill()};j.createLinearGradient=function(a,b,c,d){var e=new H("gradient");return e};j.createRadialGradient=function(a,b,c,d,e,g){var h=new H("gradientradial");h.n=c;h.o=g;h.i.x=a;h.i.y=b;return h};j.drawImage=function(a,b){var c,d,e,g,h,l,n,o,f=a.runtimeStyle.width,k=a.runtimeStyle.height;a.runtimeStyle.width="auto";a.runtimeStyle.height="auto";var q=a.width,r=a.height;a.runtimeStyle.width=
+f;a.runtimeStyle.height=k;if(arguments.length==3){c=arguments[1];d=arguments[2];h=(l=0);n=(e=q);o=(g=r)}else if(arguments.length==5){c=arguments[1];d=arguments[2];e=arguments[3];g=arguments[4];h=(l=0);n=q;o=r}else if(arguments.length==9){h=arguments[1];l=arguments[2];n=arguments[3];o=arguments[4];c=arguments[5];d=arguments[6];e=arguments[7];g=arguments[8]}else{throw"Invalid number of arguments";}var s=this.b(c,d),t=[],v=10,w=10;t.push(" <g_vml_:group",' coordsize="',m*v,",",m*w,'"',' coordorigin="0,0"',
+' style="width:',v,";height:",w,";position:absolute;");if(this.a[0][0]!=1||this.a[0][1]){var x=[];x.push("M11='",this.a[0][0],"',","M12='",this.a[1][0],"',","M21='",this.a[0][1],"',","M22='",this.a[1][1],"',","Dx='",i(s.x/m),"',","Dy='",i(s.y/m),"'");var p=s,y=this.b(c+e,d),z=this.b(c,d+g),B=this.b(c+e,d+g);p.x=Math.max(p.x,y.x,z.x,B.x);p.y=Math.max(p.y,y.y,z.y,B.y);t.push("padding:0 ",i(p.x/m),"px ",i(p.y/m),"px 0;filter:progid:DXImageTransform.Microsoft.Matrix(",x.join(""),", sizingmethod='clip');")}else{t.push("top:",
+i(s.y/m),"px;left:",i(s.x/m),"px;")}t.push(' ">','<g_vml_:image src="',a.src,'"',' style="width:',m*e,";"," height:",m*g,';"',' cropleft="',h/q,'"',' croptop="',l/r,'"',' cropright="',(q-h-n)/q,'"',' cropbottom="',(r-l-o)/r,'"'," />","</g_vml_:group>");this.j.insertAdjacentHTML("BeforeEnd",t.join(""))};j.stroke=function(a){var b=[],c=O(a?this.fillStyle:this.strokeStyle),d=c[0],e=c[1]*this.globalAlpha,g=10,h=10;b.push("<g_vml_:shape",' fillcolor="',d,'"',' filled="',Boolean(a),'"',' style="position:absolute;width:',
+g,";height:",h,';"',' coordorigin="0 0" coordsize="',m*g," ",m*h,'"',' stroked="',!a,'"',' strokeweight="',this.lineWidth,'"',' strokecolor="',d,'"',' path="');var l={x:null,y:null},n={x:null,y:null};for(var o=0;o<this.c.length;o++){var f=this.c[o];if(f.type=="moveTo"){b.push(" m ");var k=this.b(f.x,f.y);b.push(i(k.x),",",i(k.y))}else if(f.type=="lineTo"){b.push(" l ");var k=this.b(f.x,f.y);b.push(i(k.x),",",i(k.y))}else if(f.type=="close"){b.push(" x ")}else if(f.type=="bezierCurveTo"){b.push(" c ");
+var k=this.b(f.x,f.y),q=this.b(f.cp1x,f.cp1y),r=this.b(f.cp2x,f.cp2y);b.push(i(q.x),",",i(q.y),",",i(r.x),",",i(r.y),",",i(k.x),",",i(k.y))}else if(f.type=="at"||f.type=="wa"){b.push(" ",f.type," ");var k=this.b(f.x,f.y),s=this.b(f.xStart,f.yStart),t=this.b(f.xEnd,f.yEnd);b.push(i(k.x-this.d*f.radius),",",i(k.y-this.e*f.radius)," ",i(k.x+this.d*f.radius),",",i(k.y+this.e*f.radius)," ",i(s.x),",",i(s.y)," ",i(t.x),",",i(t.y))}if(k){if(l.x==null||k.x<l.x){l.x=k.x}if(n.x==null||k.x>n.x){n.x=k.x}if(l.y==
+null||k.y<l.y){l.y=k.y}if(n.y==null||k.y>n.y){n.y=k.y}}}b.push(' ">');if(typeof this.fillStyle=="object"){var v={x:"50%",y:"50%"},w=n.x-l.x,x=n.y-l.y,p=w>x?w:x;v.x=i(this.fillStyle.i.x/w*100+50)+"%";v.y=i(this.fillStyle.i.y/x*100+50)+"%";var y=[];if(this.fillStyle.p=="gradientradial"){var z=this.fillStyle.n/p*100,B=this.fillStyle.o/p*100-z}else{var z=0,B=100}var C={offset:null,color:null},D={offset:null,color:null};this.fillStyle.h.sort(function(T,U){return T.offset-U.offset});for(var o=0;o<this.fillStyle.h.length;o++){var u=
+this.fillStyle.h[o];y.push(u.offset*B+z,"% ",u.color,",");if(u.offset>C.offset||C.offset==null){C.offset=u.offset;C.color=u.color}if(u.offset<D.offset||D.offset==null){D.offset=u.offset;D.color=u.color}}y.pop();b.push("<g_vml_:fill",' color="',D.color,'"',' color2="',C.color,'"',' type="',this.fillStyle.p,'"',' focusposition="',v.x,", ",v.y,'"',' colors="',y.join(""),'"',' opacity="',e,'" />')}else if(a){b.push('<g_vml_:fill color="',d,'" opacity="',e,'" />')}else{b.push("<g_vml_:stroke",' opacity="',
+e,'"',' joinstyle="',this.lineJoin,'"',' miterlimit="',this.miterLimit,'"',' endcap="',S(this.lineCap),'"',' weight="',this.lineWidth,'px"',' color="',d,'" />')}b.push("</g_vml_:shape>");this.j.insertAdjacentHTML("beforeEnd",b.join(""));this.c=[]};j.fill=function(){this.stroke(true)};j.closePath=function(){this.c.push({type:"close"})};j.b=function(a,b){return{x:m*(a*this.a[0][0]+b*this.a[1][0]+this.a[2][0])-A,y:m*(a*this.a[0][1]+b*this.a[1][1]+this.a[2][1])-A}};j.save=function(){var a={};N(this,a);
+this.k.push(a);this.m.push(this.a);this.a=G(J(),this.a)};j.restore=function(){N(this.k.pop(),this);this.a=this.m.pop()};j.translate=function(a,b){var c=[[1,0,0],[0,1,0],[a,b,1]];this.a=G(c,this.a)};j.rotate=function(a){var b=M(a),c=L(a),d=[[b,c,0],[-c,b,0],[0,0,1]];this.a=G(d,this.a)};j.scale=function(a,b){this.d*=a;this.e*=b;var c=[[a,0,0],[0,b,0],[0,0,1]];this.a=G(c,this.a)};j.clip=function(){};j.arcTo=function(){};j.createPattern=function(){return new P};function H(a){this.p=a;this.n=0;this.o=
+0;this.h=[];this.i={x:0,y:0}}H.prototype.addColorStop=function(a,b){b=O(b);this.h.push({offset:1-a,color:b})};function P(){}G_vmlCanvasManager=Q;CanvasRenderingContext2D=K;CanvasGradient=H;CanvasPattern=P})()};
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/static/graph.js	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,137 @@
+// graph.js - Rendering of branch DAGs on the client side
+//
+// Copyright 2008 Dirkjan Ochtman <dirkjan AT ochtman DOT nl>
+// Copyright 2006 Alexander Schremmer <alex AT alexanderweb DOT de>
+//
+// derived from code written by Scott James Remnant <scott@ubuntu.com>
+// Copyright 2005 Canonical Ltd.
+//
+// This software may be used and distributed according to the terms
+// of the GNU General Public License, incorporated herein by reference.
+
+var colors = [
+	[ 1.0, 0.0, 0.0 ],
+	[ 1.0, 1.0, 0.0 ],
+	[ 0.0, 1.0, 0.0 ],
+	[ 0.0, 1.0, 1.0 ],
+	[ 0.0, 0.0, 1.0 ],
+	[ 1.0, 0.0, 1.0 ]
+];
+
+function Graph() {
+	
+	this.canvas = document.getElementById('graph');
+	if (navigator.userAgent.indexOf('MSIE') >= 0) this.canvas = window.G_vmlCanvasManager.initElement(this.canvas);
+	this.ctx = this.canvas.getContext('2d');
+	this.ctx.strokeStyle = 'rgb(0, 0, 0)';
+	this.ctx.fillStyle = 'rgb(0, 0, 0)';
+	this.cur = [0, 0];
+	this.line_width = 3;
+	this.bg = [0, 4];
+	this.cell = [2, 0];
+	this.columns = 0;
+	this.revlink = '';
+	
+	this.scale = function(height) {
+		this.bg_height = height;
+		this.box_size = Math.floor(this.bg_height / 1.2);
+		this.cell_height = this.box_size;
+	}
+	
+	function colorPart(num) {
+		num *= 255
+		num = num < 0 ? 0 : num;
+		num = num > 255 ? 255 : num;
+		var digits = Math.round(num).toString(16);
+		if (num < 16) {
+			return '0' + digits;
+		} else {
+			return digits;
+		}
+	}
+
+	this.setColor = function(color, bg, fg) {
+		
+		// Set the colour.
+		//
+		// Picks a distinct colour based on an internal wheel; the bg
+		// parameter provides the value that should be assigned to the 'zero'
+		// colours and the fg parameter provides the multiplier that should be
+		// applied to the foreground colours.
+		
+		color %= colors.length;
+		var red = (colors[color][0] * fg) || bg;
+		var green = (colors[color][1] * fg) || bg;
+		var blue = (colors[color][2] * fg) || bg;
+		red = Math.round(red * 255);
+		green = Math.round(green * 255);
+		blue = Math.round(blue * 255);
+		var s = 'rgb(' + red + ', ' + green + ', ' + blue + ')';
+		this.ctx.strokeStyle = s;
+		this.ctx.fillStyle = s;
+		return s;
+		
+	}
+
+	this.render = function(data) {
+		
+		var backgrounds = '';
+		var nodedata = '';
+		
+		for (var i in data) {
+			
+			var parity = i % 2;
+			this.cell[1] += this.bg_height;
+			this.bg[1] += this.bg_height;
+			
+			var cur = data[i];
+			var node = cur[1];
+			var edges = cur[2];
+			var fold = false;
+			
+			for (var j in edges) {
+				
+				line = edges[j];
+				start = line[0];
+				end = line[1];
+				color = line[2];
+
+				if (end > this.columns || start > this.columns) {
+					this.columns += 1;
+				}
+				
+				if (start == this.columns && start > end) {
+					var fold = true;
+				}
+				
+				x0 = this.cell[0] + this.box_size * start + this.box_size / 2;
+				y0 = this.bg[1] - this.bg_height / 2;
+				x1 = this.cell[0] + this.box_size * end + this.box_size / 2;
+				y1 = this.bg[1] + this.bg_height / 2;
+				
+				this.edge(x0, y0, x1, y1, color);
+				
+			}
+			
+			// Draw the revision node in the right column
+			
+			column = node[0]
+			color = node[1]
+			
+			radius = this.box_size / 8;
+			x = this.cell[0] + this.box_size * column + this.box_size / 2;
+			y = this.bg[1] - this.bg_height / 2;
+			var add = this.vertex(x, y, color, parity, cur);
+			backgrounds += add[0];
+			nodedata += add[1];
+			
+			if (fold) this.columns -= 1;
+			
+		}
+		
+		document.getElementById('nodebgs').innerHTML += backgrounds;
+		document.getElementById('graphnodes').innerHTML += nodedata;
+		
+	}
+
+}
Binary file templates/static/hgicon.png has changed
Binary file templates/static/hglogo.png has changed
--- a/templates/static/highlight.css	Wed Sep 17 11:14:06 2008 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,59 +0,0 @@
-.c { color: #808080 } /* Comment */
-.err { color: #F00000; background-color: #F0A0A0 } /* Error */
-.k { color: #008000; font-weight: bold } /* Keyword */
-.o { color: #303030 } /* Operator */
-.cm { color: #808080 } /* Comment.Multiline */
-.cp { color: #507090 } /* Comment.Preproc */
-.c1 { color: #808080 } /* Comment.Single */
-.cs { color: #cc0000; font-weight: bold } /* Comment.Special */
-.gd { color: #A00000 } /* Generic.Deleted */
-.ge { font-style: italic } /* Generic.Emph */
-.gr { color: #FF0000 } /* Generic.Error */
-.gh { color: #000080; font-weight: bold } /* Generic.Heading */
-.gi { color: #00A000 } /* Generic.Inserted */
-.go { color: #808080 } /* Generic.Output */
-.gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
-.gs { font-weight: bold } /* Generic.Strong */
-.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
-.gt { color: #0040D0 } /* Generic.Traceback */
-.kc { color: #008000; font-weight: bold } /* Keyword.Constant */
-.kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
-.kp { color: #003080; font-weight: bold } /* Keyword.Pseudo */
-.kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
-.kt { color: #303090; font-weight: bold } /* Keyword.Type */
-.m { color: #6000E0; font-weight: bold } /* Literal.Number */
-.s { background-color: #fff0f0 } /* Literal.String */
-.na { color: #0000C0 } /* Name.Attribute */
-.nb { color: #007020 } /* Name.Builtin */
-.nc { color: #B00060; font-weight: bold } /* Name.Class */
-.no { color: #003060; font-weight: bold } /* Name.Constant */
-.nd { color: #505050; font-weight: bold } /* Name.Decorator */
-.ni { color: #800000; font-weight: bold } /* Name.Entity */
-.ne { color: #F00000; font-weight: bold } /* Name.Exception */
-.nf { color: #0060B0; font-weight: bold } /* Name.Function */
-.nl { color: #907000; font-weight: bold } /* Name.Label */
-.nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
-.nt { color: #007000 } /* Name.Tag */
-.nv { color: #906030 } /* Name.Variable */
-.ow { color: #000000; font-weight: bold } /* Operator.Word */
-.w { color: #bbbbbb } /* Text.Whitespace */
-.mf { color: #6000E0; font-weight: bold } /* Literal.Number.Float */
-.mh { color: #005080; font-weight: bold } /* Literal.Number.Hex */
-.mi { color: #0000D0; font-weight: bold } /* Literal.Number.Integer */
-.mo { color: #4000E0; font-weight: bold } /* Literal.Number.Oct */
-.sb { background-color: #fff0f0 } /* Literal.String.Backtick */
-.sc { color: #0040D0 } /* Literal.String.Char */
-.sd { color: #D04020 } /* Literal.String.Doc */
-.s2 { background-color: #fff0f0 } /* Literal.String.Double */
-.se { color: #606060; font-weight: bold; background-color: #fff0f0 } /* Literal.String.Escape */
-.sh { background-color: #fff0f0 } /* Literal.String.Heredoc */
-.si { background-color: #e0e0e0 } /* Literal.String.Interpol */
-.sx { color: #D02000; background-color: #fff0f0 } /* Literal.String.Other */
-.sr { color: #000000; background-color: #fff0ff } /* Literal.String.Regex */
-.s1 { background-color: #fff0f0 } /* Literal.String.Single */
-.ss { color: #A06000 } /* Literal.String.Symbol */
-.bp { color: #007020 } /* Name.Builtin.Pseudo */
-.vc { color: #306090 } /* Name.Variable.Class */
-.vg { color: #d07000; font-weight: bold } /* Name.Variable.Global */
-.vi { color: #3030B0 } /* Name.Variable.Instance */
-.il { color: #0000D0; font-weight: bold } /* Literal.Number.Integer.Long */
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/static/style-coal.css	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,206 @@
+body {
+  margin: 0;
+  padding: 0;
+  background: black url(background.png) repeat-x;
+  font-family: sans-serif;
+}
+
+.container {
+  padding-right: 150px;
+}
+
+.main {
+  position: relative;
+  background: white;
+  padding: 2em;
+  border-right: 15px solid black;
+  border-bottom: 15px solid black;
+}
+
+#.main {
+  width:98%;
+}
+
+.overflow {
+  width: 100%;
+  overflow: auto;
+}
+
+.menu {
+  background: #999;
+  padding: 10px;
+  width: 75px;
+  margin: 0;
+  font-size: 80%;
+  text-align: left;
+  position: fixed;
+  top: 27px;
+  left: auto;
+  right: 27px;
+}
+
+#.menu {
+  position: absolute !important;
+  top:expression(eval(document.body.scrollTop + 27));
+}
+
+.menu ul {
+  list-style: none;
+  padding: 0;
+  margin: 10px 0 0 0;
+}
+
+.menu li {
+  margin-bottom: 3px;
+  padding: 2px 4px;
+  background: white;
+  color: black;
+  font-weight: normal;
+}
+
+.menu li.active {
+  background: black;
+  color: white;
+}
+
+.menu a { color: black; display: block; }
+
+.search {
+  position: absolute;
+  top: .7em;
+  right: 2em;
+}
+
+a { text-decoration:none; }
+.age { white-space:nowrap; }
+.date { white-space:nowrap; }
+.indexlinks { white-space:nowrap; }
+.parity0 { background-color: #f5f5f5; }
+.parity1 { background-color: white; }
+.plusline { color: green; }
+.minusline { color: red; }
+.atline { color: purple; }
+
+.navigate {
+  text-align: right;
+  font-size: 60%;
+  margin: 1em 0 1em 0;
+}
+
+.tag {
+  color: #999;
+  font-size: 70%;
+  font-weight: normal;
+  margin-left: .5em;
+  vertical-align: baseline;
+}
+
+/* Common */
+pre { margin: 0; }
+
+h2 { font-size: 120%; border-bottom: 1px solid #999; }
+h3 {
+  margin-top: -.7em;
+  font-size: 100%;
+}
+
+/* log and tags tables */
+.bigtable {
+  border-bottom: 1px solid #999;
+  border-collapse: collapse;
+  font-size: 90%;
+  width: 100%; 
+  font-weight: normal;
+  text-align: left;
+}
+
+.bigtable td {
+  padding: 1px 4px 1px 4px;
+  vertical-align: top;
+}
+
+.bigtable th { 
+  padding: 1px 4px 1px 4px;
+  border-bottom: 1px solid #999; 
+  font-size: smaller;
+}
+.bigtable tr { border: none; }
+.bigtable .age { width: 6em; }
+.bigtable .author { width: 10em; }
+.bigtable .description { }
+.bigtable .node { width: 5em; font-family: monospace;}
+.bigtable .lineno { width: 2em; text-align: right;}
+.bigtable .lineno a { color: #999; font-size: smaller; font-family: monospace;}
+.bigtable td.source { font-family: monospace; white-space: pre; }
+.bigtable .permissions { width: 8em; text-align: left;}
+.bigtable .size { width: 5em; text-align: right; }
+.bigtable .annotate { text-align: right; }
+.bigtable td.annotate { font-size: smaller; }
+
+.fileline { font-family: monospace; }
+.fileline img { border: 0; }
+
+/* Changeset entry */
+#changesetEntry { 
+  border-collapse: collapse;
+  font-size: 90%;
+  width: 100%;
+  margin-bottom: 1em;
+}
+
+#changesetEntry th { 
+  padding: 1px 4px 1px 4px;
+  width: 4em;
+  text-align: right;
+  font-weight: normal;
+  color: #999;
+  margin-right: .5em;
+  vertical-align: top;
+}
+
+div.description {
+  border-left: 3px solid #999;
+  margin: 1em 0 1em 0;
+  padding: .3em;
+}
+
+div#wrapper {
+	position: relative;
+	border-top: 1px solid black;
+	border-bottom: 1px solid black;
+	margin: 0;
+	padding: 0;
+}
+
+canvas {
+	position: absolute;
+	z-index: 5;
+	top: -0.7em;
+	margin: 0;
+}
+
+ul#graphnodes {
+	position: absolute;
+	z-index: 10;
+	top: -1.0em;
+	list-style: none inside none;
+	padding: 0;
+}
+
+ul#nodebgs {
+	list-style: none inside none;
+	padding: 0;
+	margin: 0;
+	top: -0.7em;
+}
+
+ul#graphnodes li, ul#nodebgs li {
+	height: 39px;
+}
+
+ul#graphnodes li .info {
+	display: block;
+	font-size: 70%;
+	position: relative;
+	top: -3px;
+}
--- a/templates/static/style-gitweb.css	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/static/style-gitweb.css	Wed Sep 17 11:34:37 2008 +0200
@@ -79,3 +79,45 @@
 	background-color: #d5dde6;
 	border-color: #e3ecf4 #9398f4 #9398f4 #e3ecf4;
 }
+
+/* Graph */
+div#wrapper {
+	position: relative;
+	margin: 0;
+	padding: 0;
+	margin-top: 3px;
+}
+
+canvas {
+	position: absolute;
+	z-index: 5;
+	top: -0.9em;
+	margin: 0;
+}
+
+ul#nodebgs {
+	list-style: none inside none;
+	padding: 0;
+	margin: 0;
+	top: -0.7em;
+}
+ 
+ul#graphnodes li, ul#nodebgs li {
+	height: 39px;
+}
+
+ul#graphnodes {
+	position: absolute;
+	z-index: 10;
+	top: -0.8em;
+	list-style: none inside none;
+	padding: 0;
+}
+
+ul#graphnodes li .info {
+	display: block;
+	font-size: 100%;
+	position: relative;
+	top: -3px;
+	font-style: italic;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/static/style-paper.css	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,203 @@
+body {
+  margin: 0;
+  padding: 0;
+  background: white;
+  font-family: sans-serif;
+}
+
+.container {
+  padding-left: 115px;
+}
+
+.main {
+  position: relative;
+  background: white;
+  padding: 2em 2em 2em 0;
+}
+
+#.main {
+  width: 98%;
+}
+
+.overflow {
+  width: 100%;
+  overflow: auto;
+}
+
+.menu {
+  width: 90px;
+  margin: 0;
+  font-size: 80%;
+  text-align: left;
+  position: fixed;
+  top: 20px;
+  left: 20px;
+  right: auto;
+}
+
+#.menu {
+  position: absolute !important;
+  top:expression(eval(document.body.scrollTop + 27));
+}
+
+.menu ul {
+  list-style: none;
+  padding: 0;
+  margin: 10px 0 0 0;
+  border-left: 2px solid #999;
+}
+
+.menu li {
+  margin-bottom: 3px;
+  padding: 2px 4px;
+  background: white;
+  color: black;
+  font-weight: normal;
+}
+
+.menu li.active {
+  font-weight: bold;
+}
+
+.menu a { color: black; display: block; }
+
+.search {
+  position: absolute;
+  top: .7em;
+  right: 2em;
+}
+
+a { text-decoration:none; }
+.age { white-space:nowrap; }
+.date { white-space:nowrap; }
+.indexlinks { white-space:nowrap; }
+.parity0 { background-color: #f5f5f5; }
+.parity1 { background-color: white; }
+.plusline { color: green; }
+.minusline { color: red; }
+.atline { color: purple; }
+
+.navigate {
+  text-align: right;
+  font-size: 60%;
+  margin: 1em 0 1em 0;
+}
+
+.tag {
+  color: #999;
+  font-size: 70%;
+  font-weight: normal;
+  margin-left: .5em;
+  vertical-align: baseline;
+}
+
+/* Common */
+pre { margin: 0; }
+
+h2 { font-size: 120%; border-bottom: 1px solid #999; }
+h3 {
+  margin-top: -.7em;
+  font-size: 100%;
+}
+
+/* log and tags tables */
+.bigtable {
+  border-bottom: 1px solid #999;
+  border-collapse: collapse;
+  font-size: 90%;
+  width: 100%; 
+  font-weight: normal;
+  text-align: left;
+}
+
+.bigtable td {
+  padding: 1px 4px 1px 4px;
+  vertical-align: top;
+}
+
+.bigtable th { 
+  padding: 1px 4px 1px 4px;
+  border-bottom: 1px solid #999; 
+  font-size: smaller;
+}
+.bigtable tr { border: none; }
+.bigtable .age { width: 6em; }
+.bigtable .author { width: 10em; }
+.bigtable .description { }
+.bigtable .node { width: 5em; font-family: monospace;}
+.bigtable .lineno { width: 2em; text-align: right;}
+.bigtable .lineno a { color: #999; font-size: smaller; font-family: monospace;}
+.bigtable td.source { font-family: monospace; white-space: pre; }
+.bigtable .permissions { width: 8em; text-align: left;}
+.bigtable .size { width: 5em; text-align: right; }
+.bigtable .annotate { text-align: right; }
+.bigtable td.annotate { font-size: smaller; }
+
+.fileline { font-family: monospace; }
+.fileline img { border: 0; }
+
+/* Changeset entry */
+#changesetEntry { 
+  border-collapse: collapse;
+  font-size: 90%;
+  width: 100%;
+  margin-bottom: 1em;
+}
+
+#changesetEntry th { 
+  padding: 1px 4px 1px 4px;
+  width: 4em;
+  text-align: right;
+  font-weight: normal;
+  color: #999;
+  margin-right: .5em;
+  vertical-align: top;
+}
+
+div.description {
+  border-left: 2px solid #999;
+  margin: 1em 0 1em 0;
+  padding: .3em;
+}
+
+/* Graph */
+div#wrapper {
+	position: relative;
+	border-top: 1px solid black;
+	border-bottom: 1px solid black;
+	margin: 0;
+	padding: 0;
+}
+
+canvas {
+	position: absolute;
+	z-index: 5;
+	top: -0.7em;
+	margin: 0;
+}
+
+ul#nodebgs {
+	list-style: none inside none;
+	padding: 0;
+	margin: 0;
+	top: -0.7em;
+}
+ 
+ul#graphnodes li, ul#nodebgs li {
+	height: 39px;
+}
+
+ul#graphnodes {
+	position: absolute;
+	z-index: 10;
+	top: -1.0em;
+	list-style: none inside none;
+	padding: 0;
+}
+
+ul#graphnodes li .info {
+	display: block;
+	font-size: 70%;
+	position: relative;
+	top: -3px;
+}
--- a/templates/static/style.css	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/static/style.css	Wed Sep 17 11:34:37 2008 +0200
@@ -64,3 +64,42 @@
 #filediffEntry { }
 #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
 
+/* Graph */
+div#wrapper {
+	position: relative;
+	margin: 0;
+	padding: 0;
+}
+
+canvas {
+	position: absolute;
+	z-index: 5;
+	top: -0.6em;
+	margin: 0;
+}
+
+ul#nodebgs {
+	list-style: none inside none;
+	padding: 0;
+	margin: 0;
+	top: -0.7em;
+}
+ 
+ul#graphnodes li, ul#nodebgs li {
+	height: 39px;
+}
+
+ul#graphnodes {
+	position: absolute;
+	z-index: 10;
+	top: -0.85em;
+	list-style: none inside none;
+	padding: 0;
+}
+
+ul#graphnodes li .info {
+	display: block;
+	font-size: 70%;
+	position: relative;
+	top: -1px;
+}
--- a/templates/tags.tmpl	Wed Sep 17 11:14:06 2008 +0200
+++ b/templates/tags.tmpl	Wed Sep 17 11:34:37 2008 +0200
@@ -10,6 +10,7 @@
 <div class="buttons">
 <a href="#url#log{sessionvars%urlparameter}">changelog</a>
 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
+<a href="#url#graph{sessionvars%urlparameter}">graph</a>
 <a href="#url#file/#node|short#/{sessionvars%urlparameter}">files</a>
 <a type="application/rss+xml" href="#url#rss-tags">rss</a>
 <a type="application/atom+xml" href="#url#atom-tags">atom</a>
--- a/tests/hghave	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/hghave	Wed Sep 17 11:34:37 2008 +0200
@@ -57,6 +57,24 @@
     finally:
         os.remove(path)
 
+def has_icasefs():
+    # Stolen from mercurial.util
+    fd, path = tempfile.mkstemp(prefix=tempprefix)
+    os.close(fd)
+    try:
+        s1 = os.stat(path)
+        d, b = os.path.split(path)
+        p2 = os.path.join(d, b.upper())
+        if path == p2:
+            p2 = os.path.join(d, b.lower())
+        try:
+            s2 = os.stat(p2)
+            return s2 == s1
+        except:
+            return False
+    finally:
+        os.remove(path)
+
 def has_inotify():
     try:
         import hgext.inotify.linux.watcher
@@ -136,6 +154,7 @@
     "fifo": (has_fifo, "named pipes"),
     "git": (has_git, "git command line client"),
     "hotshot": (has_hotshot, "python hotshot module"),
+    "icasefs": (has_icasefs, "case insensitive file system"),
     "inotify": (has_inotify, "inotify extension support"),
     "lsprof": (has_lsprof, "python lsprof module"),
     "mtn": (has_mtn, "monotone client (> 0.31)"),
--- a/tests/md5sum.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/md5sum.py	Wed Sep 17 11:34:37 2008 +0200
@@ -7,7 +7,11 @@
 # GPL-compatible.
 
 import sys
-import md5
+
+try:
+    from hashlib import md5
+except ImportError:
+    from md5 import md5
 
 for filename in sys.argv[1:]:
     try:
@@ -16,7 +20,7 @@
         sys.stderr.write('%s: Can\'t open: %s\n' % (filename, msg))
         sys.exit(1)
 
-    m = md5.new()
+    m = md5()
     try:
         while 1:
             data = fp.read(8192)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-1102	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+rm -rf a
+hg init a
+cd a
+echo a > a
+hg ci -Am0
+hg tag t1 # 1
+hg tag --remove t1 # 2
+
+hg co 1
+hg tag -r0 t1
+hg tags
+
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-1102.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,4 @@
+adding a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+tip                                3:a49829c4fc11
+t1                                 0:f7b1eb17ad24
--- a/tests/test-acl.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-acl.out	Wed Sep 17 11:34:37 2008 +0200
@@ -58,8 +58,6 @@
 adding quux/file.py revisions
 added 3 changesets with 3 changes to 3 files
 calling hook pretxnchangegroup.acl: hgext.acl.hook
-acl: acl.allow not enabled
-acl: acl.deny not enabled
 acl: changes have source "push" - skipping
 updating the branch cache
 rolling back last transaction
--- a/tests/test-add	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-add	Wed Sep 17 11:34:37 2008 +0200
@@ -32,6 +32,7 @@
 echo % should fail
 hg add a
 hg st
+hg resolve -m a
 hg ci -m merge
 
 echo % issue683
--- a/tests/test-add.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-add.out	Wed Sep 17 11:34:37 2008 +0200
@@ -18,9 +18,7 @@
 warning: conflicts during merge.
 merging a failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 2
-  hg merge 1
+use 'hg resolve' to retry unresolved file merges
 M a
 ? a.orig
 % should fail
--- a/tests/test-addremove-similar.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-addremove-similar.out	Wed Sep 17 11:34:37 2008 +0200
@@ -10,8 +10,8 @@
 removing empty-file
 adding large-file
 adding tiny-file
+removing large-file
 adding small-file
-removing large-file
 removing tiny-file
 recording removal of tiny-file as rename to small-file (82% similar)
 % should all fail
--- a/tests/test-addremove.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-addremove.out	Wed Sep 17 11:34:37 2008 +0200
@@ -2,14 +2,16 @@
 adding foo
 dir/bar
 foo
+committed changeset 0:d44511117907
 adding dir/bar_2
 adding foo_2
 dir/bar_2
 foo_2
+committed changeset 1:a85812e0561a
 adding a
 adding c
+removing a
 adding b
+removing c
 adding d
-removing a
-removing c
 recording removal of a as rename to b (100% similar)
--- a/tests/test-archive	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-archive	Wed Sep 17 11:34:37 2008 +0200
@@ -12,10 +12,36 @@
 hg commit -Am 3 -d '1000000000 0'
 echo "[web]" >> .hg/hgrc
 echo "name = test-archive" >> .hg/hgrc
-echo "allow_archive = gz bz2, zip" >> .hg/hgrc
+cp .hg/hgrc .hg/hgrc-base
+
+# check http return codes
+test_archtype() {
+    echo "allow_archive = $1" >> .hg/hgrc
+    hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
+    cat hg.pid >> $DAEMON_PIDS
+    echo % $1 allowed should give 200
+    "$TESTDIR/get-with-headers.py" localhost:$HGPORT "/archive/tip.$2" | head -n 1
+    echo % $3 and $4 disallowed should both give 403
+    "$TESTDIR/get-with-headers.py" localhost:$HGPORT "/archive/tip.$3" | head -n 1
+    "$TESTDIR/get-with-headers.py" localhost:$HGPORT "/archive/tip.$4" | head -n 1
+    kill `cat hg.pid`
+    cat errors.log
+    cp .hg/hgrc-base .hg/hgrc
+}
+
+echo
+test_archtype gz tar.gz tar.bz2 zip
+test_archtype bz2 tar.bz2 zip tar.gz
+test_archtype zip zip tar.gz tar.bz2
+
+echo "allow_archive = gz bz2 zip" >> .hg/hgrc
 hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
 cat hg.pid >> $DAEMON_PIDS
 
+echo % invalid arch type should give 404
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT "/archive/tip.invalid" | head -n 1
+echo
+
 TIP=`hg id -v | cut -f1 -d' '`
 QTIP=`hg id -q`
 cat > getarchive.py <<EOF
@@ -40,10 +66,11 @@
 gzip -dc test-$QTIP.tar.gz | tar tf - | sed "s/$QTIP/TIP/"
 
 cat > md5comp.py <<EOF
-import md5, sys
+from mercurial.util import md5
+import sys
 f1, f2 = sys.argv[1:3]
-h1 = md5.md5(file(f1, 'rb').read()).hexdigest()
-h2 = md5.md5(file(f2, 'rb').read()).hexdigest()
+h1 = md5(file(f1, 'rb').read()).hexdigest()
+h2 = md5(file(f2, 'rb').read()).hexdigest()
 print h1 == h2 or "md5 differ: " + repr((h1, h2))
 EOF
 
--- a/tests/test-archive.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-archive.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,6 +1,25 @@
 adding foo
 adding bar
 adding baz/bletch
+
+% gz allowed should give 200
+200 Script output follows
+% tar.bz2 and zip disallowed should both give 403
+403 Forbidden
+403 Forbidden
+% bz2 allowed should give 200
+200 Script output follows
+% zip and tar.gz disallowed should both give 403
+403 Forbidden
+403 Forbidden
+% zip allowed should give 200
+200 Script output follows
+% tar.gz and tar.bz2 disallowed should both give 403
+403 Forbidden
+403 Forbidden
+% invalid arch type should give 404
+404 Not Found
+
 test-archive-TIP/.hg_archival.txt
 test-archive-TIP/bar
 test-archive-TIP/baz/bletch
--- a/tests/test-branches	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-branches	Wed Sep 17 11:34:37 2008 +0200
@@ -11,6 +11,11 @@
 hg branch a
 hg commit -d '1 0' -u test -m "Adding a branch"
 
+hg branch q
+echo 'aa' >a
+hg branch -C
+hg commit -d '2 0' -u test -m "Adding to a branch"
+
 hg update -C 0
 echo 'b' >b
 hg add b
--- a/tests/test-branches.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-branches.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,21 +1,34 @@
 marked working directory as branch a
+marked working directory as branch q
+reset working directory to branch a
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 marked working directory as branch b
 created new head
-0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-created new head
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
 marked working directory as branch c
 marked working directory as branch a branch name much longer than the default justification used by branches
-a branch name much longer than the default justification used by branches 6:b8cb5af34c4d
-b                              4:22df7444f7c1
-a                              1:dd6b440dd85a
-c                              5:5ca481e59b8c (inactive)
+a branch name much longer than the default justification used by branches 7:10ff5895aa57
+b                              4:aee39cd168d0
+c                              6:589736a22561 (inactive)
+a                              5:d8cbc61dbaa6 (inactive)
 default                        0:19709c5a4e75 (inactive)
 -------
-a branch name much longer than the default justification used by branches 6:b8cb5af34c4d
-b                              4:22df7444f7c1
-a                              1:dd6b440dd85a
+a branch name much longer than the default justification used by branches 7:10ff5895aa57
+b                              4:aee39cd168d0
 --- Branch a
+changeset:   5:d8cbc61dbaa6
+branch:      a
+parent:      2:881fe2b92ad0
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     Adding b branch head 2
+
+changeset:   2:881fe2b92ad0
+branch:      a
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     Adding to a branch
+
 changeset:   1:dd6b440dd85a
 branch:      a
 user:        test
@@ -23,20 +36,13 @@
 summary:     Adding a branch
 
 ---- Branch b
-changeset:   4:22df7444f7c1
-branch:      b
-parent:      2:ac22033332d1
-user:        test
-date:        Thu Jan 01 00:00:04 1970 +0000
-summary:     Adding b branch head 2
-
-changeset:   3:aee39cd168d0
+changeset:   4:aee39cd168d0
 branch:      b
 user:        test
 date:        Thu Jan 01 00:00:03 1970 +0000
 summary:     Adding b branch head 1
 
-changeset:   2:ac22033332d1
+changeset:   3:ac22033332d1
 branch:      b
 parent:      0:19709c5a4e75
 user:        test
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-bundle-type	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+echo % bundle w/o type option
+hg init t1
+hg init t2
+cd t1
+echo blablablablabla > file.txt
+hg ci -Ama
+hg log | grep summary
+hg bundle ../b1 ../t2
+
+cd ../t2
+hg pull ../b1
+hg up
+hg log | grep summary
+cd ..
+
+for t in "None" "bzip2" "gzip"; do
+  echo % test bundle type $t
+  hg init t$t
+  cd t1
+  hg bundle -t $t ../b$t ../t$t
+  head -n 1 ../b$t | cut -b 1-6
+  cd ../t$t
+  hg pull ../b$t
+  hg up
+  hg log | grep summary
+  cd ..
+done
+
+echo % test garbage file
+echo garbage > bgarbage
+hg init tgarbage
+cd tgarbage
+hg pull ../bgarbage
+cd ..
+
+echo % test invalid bundle type
+cd t1
+hg bundle -a -t garbage ../bgarbage
+cd ..
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-bundle-type.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,58 @@
+% bundle w/o type option
+adding file.txt
+summary:     a
+searching for changes
+1 changesets found
+pulling from ../b1
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+summary:     a
+% test bundle type None
+searching for changes
+1 changesets found
+HG10UN
+pulling from ../bNone
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+summary:     a
+% test bundle type bzip2
+searching for changes
+1 changesets found
+HG10BZ
+pulling from ../bbzip2
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+summary:     a
+% test bundle type gzip
+searching for changes
+1 changesets found
+HG10GZ
+pulling from ../bgzip
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+summary:     a
+% test garbage file
+abort: ../bgarbage: not a Mercurial bundle file
+% test invalid bundle type
+1 changesets found
+abort: unknown bundle type specified with --type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-casefolding	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" icasefs || exit 80
+
+echo '% test file addition with bad case'
+hg init repo1
+cd repo1
+echo a > a
+hg add A
+hg st
+hg ci -m adda
+hg manifest
+cd ..
+
+echo '% test case collision on rename (issue 750)'
+hg init repo2
+cd repo2
+echo a > a
+hg --debug ci -Am adda
+hg mv a A
+# 'a' used to be removed under Windows
+test -f a || echo 'a is missing'
+hg st
+cd ..
+
+echo '% test case collision between revisions (issue 912)'
+hg init repo3
+cd repo3
+echo a > a
+hg ci -Am adda
+hg rm a
+hg ci -Am removea
+echo A > A
+hg ci -Am addA
+# Used to fail under case insensitive fs
+hg up -C 0
+hg up -C
+cd ..
+
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-casefolding.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,14 @@
+% test file addition with bad case
+adding a
+A a
+a
+% test case collision on rename (issue 750)
+adding a
+a
+committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9
+A: not overwriting - file exists
+% test case collision between revisions (issue 912)
+adding a
+adding A
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
--- a/tests/test-children.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-children.out	Wed Sep 17 11:34:37 2008 +0200
@@ -5,6 +5,18 @@
 % hg children at revision 3 (tip)
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 % hg children at nullrev (should be 0 and 3)
+changeset:   0:4df8521a7374
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     0
+
+changeset:   3:e2962852269d
+tag:         tip
+parent:      -1:000000000000
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+summary:     3
+
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % hg children at revision 1 (should be 2)
 changeset:   2:8f5eea5023c2
--- a/tests/test-churn	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-churn	Wed Sep 17 11:34:37 2008 +0200
@@ -3,6 +3,8 @@
 echo "[extensions]" >> $HGRCPATH
 echo "churn=" >> $HGRCPATH
 
+COLUMNS=80; export COLUMNS
+
 echo % create test repository
 hg init repo
 cd repo
--- a/tests/test-clone	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-clone	Wed Sep 17 11:34:37 2008 +0200
@@ -1,39 +1,63 @@
 #!/bin/sh
 
+echo
+echo % prepare repo a
 mkdir a
 cd a
 hg init
 echo a > a
 hg add a
 hg commit -m test -d '0 0'
+echo first line > b
+hg add b
+# create a non-inlined filelog
+python -c 'for x in range(10000): print x' >> data1
+for j in 0 1 2 3 4 5 6 7 8 9; do
+    cat data1 >> b
+    hg commit -m test -d '0 0'
+done
+echo % "list files in store/data (should show a 'b.d')"
+for i in .hg/store/data/*; do
+    echo $i
+done
 
-# Default operation
+echo
+echo % default operation
 hg clone . ../b
 cd ../b
 cat a
 hg verify
 
-# No update
+echo
+echo % no update
 hg clone -U . ../c
 cd ../c
 cat a 2>/dev/null || echo "a not present"
 hg verify
 
-# Default destination
+echo
+echo % default destination
 mkdir ../d
 cd ../d
 hg clone ../a
 cd a
 hg cat a
 
-# check that we drop the file:// from the path before
-# writing the .hgrc
+echo
+echo % "check that we drop the file:// from the path before"
+echo % "writing the .hgrc"
 cd ../..
 hg clone file://a e
 grep 'file:' e/.hg/hgrc
 
-# check that path aliases are expanded
+echo
+echo % check that path aliases are expanded
 hg clone -q -U --config 'paths.foobar=a#0' foobar f
 hg -R f showconfig paths.default | sed -e 's,.*/,,'
 
+echo
+echo % use --pull
+hg clone --pull a g
+hg -R g verify
+
 exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-clone-cgi	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,62 @@
+#!/bin/sh
+# This is a test of the wire protocol over CGI-based hgweb.
+
+echo % initialize repository
+hg init test
+cd test
+echo a > a
+hg ci -Ama
+cd ..
+
+cat >hgweb.cgi <<HGWEB
+#!/usr/bin/env python
+#
+# An example CGI script to use hgweb, edit as necessary
+
+import cgitb
+cgitb.enable()
+
+from mercurial import demandimport; demandimport.enable()
+from mercurial.hgweb import hgweb
+from mercurial.hgweb import wsgicgi
+
+application = hgweb("test", "Empty test repository")
+wsgicgi.launch(application)
+HGWEB
+chmod 755 hgweb.cgi
+
+DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT
+GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE
+HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT
+HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET
+HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING
+HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE
+HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL
+HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION
+HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST
+HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE
+HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT
+PATH_INFO="/"; export PATH_INFO
+PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED
+REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR
+REMOTE_PORT="44703"; export REMOTE_PORT
+REQUEST_METHOD="GET"; export REQUEST_METHOD
+REQUEST_URI="/test/"; export REQUEST_URI
+SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME
+SCRIPT_NAME="/test"; export SCRIPT_NAME
+SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI
+SCRIPT_URL="/test/"; export SCRIPT_URL
+SERVER_ADDR="127.0.0.1"; export SERVER_ADDR
+SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN
+SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME
+SERVER_PORT="80"; export SERVER_PORT
+SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL
+SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE
+SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE
+
+echo % try hgweb request
+QUERY_STRING="cmd=changegroup"; export QUERY_STRING
+python hgweb.cgi >page1 2>&1 ; echo $?
+python "$TESTDIR/md5sum.py" page1
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-clone-cgi.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,5 @@
+% initialize repository
+adding a
+% try hgweb request
+0
+54086fe9a47b47d83204f38bda0b90c2  page1
--- a/tests/test-clone.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-clone.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,21 +1,52 @@
+
+% prepare repo a
+% list files in store/data (should show a 'b.d')
+.hg/store/data/a.i
+.hg/store/data/b.d
+.hg/store/data/b.i
+
+% default operation
 updating working directory
-1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 a
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
-1 files, 1 changesets, 1 total revisions
+2 files, 11 changesets, 11 total revisions
+
+% no update
 a not present
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
-1 files, 1 changesets, 1 total revisions
+2 files, 11 changesets, 11 total revisions
+
+% default destination
 destination directory: a
 updating working directory
-1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 a
+
+% check that we drop the file:// from the path before
+% writing the .hgrc
 updating working directory
-1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+% check that path aliases are expanded
 a#0
+
+% use --pull
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 11 changesets with 11 changes to 2 files
+updating working directory
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+2 files, 11 changesets, 11 total revisions
--- a/tests/test-command-template	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-command-template	Wed Sep 17 11:34:37 2008 +0200
@@ -23,7 +23,7 @@
 echo other 4 >> d
 hg add d
 hg commit -m 'new head' -d '1500000 0' -u 'person'
-hg merge -q
+hg merge -q foo
 hg commit -m 'merge' -d '1500001 0' -u 'person'
 # second branch starting at nullrev
 hg update null
--- a/tests/test-commit	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-commit	Wed Sep 17 11:34:37 2008 +0200
@@ -48,6 +48,10 @@
 hg commit -d '0 0' -m commit-16 quux
 echo >> dir/file
 hg -v commit -d '0 0' -m commit-17 dir/file
+# An empty date was interpreted as the epoch origin
+echo foo >> foo
+hg commit -d '' -m commit-no-date
+hg tip --template '{date|isodate}\n' | grep '1970'
 cd ..
 
 echo % partial subdir commit test
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-commit-unresolved	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+
+addcommit () {
+    echo $1 > $1
+    hg add $1
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+commit () {
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+hg init a
+cd a
+addcommit "A" 0
+addcommit "B" 1
+echo "C" >> A
+commit "C" 2
+
+hg update -C 0
+echo "D" >> A
+commit "D" 3
+
+echo
+echo "% Merging a conflict araises"
+hg merge
+
+echo
+echo "% Correct the conflict without marking the file as resolved"
+echo "ABCD" > A
+hg commit -m "Merged"
+
+echo
+echo "% Mark the conflict as resolved and commit"
+hg resolve -m A
+hg commit -m "Merged"
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-commit-unresolved.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,14 @@
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+created new head
+
+% Merging: a conflict arises
+merging A
+warning: conflicts during merge.
+merging A failed!
+1 files updated, 0 files merged, 0 files removed, 1 files unresolved
+use 'hg resolve' to retry unresolved file merges
+
+% Correct the conflict without marking the file as resolved
+abort: unresolved merge conflicts (see hg resolve)
+
+% Mark the conflict as resolved and commit
--- a/tests/test-commit.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-commit.out	Wed Sep 17 11:34:37 2008 +0200
@@ -11,16 +11,19 @@
 abort: file bar not found!
 adding dir/file
 dir/file
+committed changeset 2:d2a76177cb42
 adding dir.file
 abort: no match under directory dir!
 abort: no match under directory .!
 abort: no match under directory ../dir2!
 dir/file
+committed changeset 3:1cd62a2d8db5
 does-not-exist: No such file or directory
 abort: file does-not-exist not found!
 abort: file baz not tracked!
 abort: file quux not tracked!
 dir/file
+committed changeset 4:49176991390e
 % partial subdir commit test
 adding bar/bar
 adding foo/foo
--- a/tests/test-committer.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-committer.out	Wed Sep 17 11:34:37 2008 +0200
@@ -22,7 +22,5 @@
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     commit-1
 
-transaction abort!
-rollback completed
 abort: Please specify a username.
 No username found, using user@host instead
--- a/tests/test-conflict.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-conflict.out	Wed Sep 17 11:34:37 2008 +0200
@@ -4,9 +4,7 @@
 warning: conflicts during merge.
 merging a failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 2
-  hg merge 1
+use 'hg resolve' to retry unresolved file merges
 e7fe8eb3e180+0d24b7662d3e+ tip
 <<<<<<< local
 something else
--- a/tests/test-context.py	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-context.py	Wed Sep 17 11:34:37 2008 +0200
@@ -16,4 +16,4 @@
 repo.add(['foo'])
 repo.commit(text='commit1', date="0 0")
 
-print "workingfilectx.date =", repo.workingctx().filectx('foo').date()
+print "workingfilectx.date =", repo[None]['foo'].date()
--- a/tests/test-convert-baz	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-baz	Wed Sep 17 11:34:37 2008 +0200
@@ -69,4 +69,5 @@
 
 echo % show graph log
 glog -R baz-repo-hg
+hg up -q -R baz-repo-hg
 hg -R baz-repo-hg manifest --debug
--- a/tests/test-convert-clonebranches	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-clonebranches	Wed Sep 17 11:34:37 2008 +0200
@@ -17,7 +17,7 @@
 echo b > b
 hg ci -qAm addb
 hg up -qC
-hg merge
+hg merge default
 hg ci -qm mergeab
 hg tag -ql mergeab
 cd ..
--- a/tests/test-convert-cvs	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-cvs	Wed Sep 17 11:34:37 2008 +0200
@@ -7,6 +7,11 @@
     cvs -f $@
 }
 
+hgcat()
+{
+    hg --cwd src-hg cat -r tip "$1"
+}
+
 echo "[extensions]" >> $HGRCPATH
 echo "convert = " >> $HGRCPATH
 
@@ -45,13 +50,13 @@
 
 echo % convert fresh repo
 hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
-cat src-hg/a
-cat src-hg/b/c
+hgcat a
+hgcat b/c
 
 echo % convert fresh repo with --filemap
 echo include b/c > filemap
 hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
-cat src-hg/b/c
+hgcat b/c
 hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
 
 echo % commit new file revisions
@@ -64,12 +69,12 @@
 
 echo % convert again
 hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
-cat src-hg/a
-cat src-hg/b/c
+hgcat a
+hgcat b/c
 
 echo % convert again with --filemap
 hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
-cat src-hg/b/c
+hgcat b/c
 hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
 
 echo % commit branch
@@ -84,12 +89,12 @@
 
 echo % convert again
 hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
-cat src-hg/a
-cat src-hg/b/c
+hgcat a
+hgcat b/c
 
 echo % convert again with --filemap
 hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
-cat src-hg/b/c
+hgcat b/c
 hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
 
 echo "graphlog = " >> $HGRCPATH
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-cvs-branch	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+# This is http://www.selenic.com/mercurial/bts/issue1148
+
+"$TESTDIR/hghave" cvs || exit 80
+
+cvscall()
+{
+    cvs -f "$@"
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+echo "graphlog = " >> $HGRCPATH
+echo "[convert]" >> $HGRCPATH
+echo "cvsps=builtin" >> $HGRCPATH
+
+echo % create cvs repository
+mkdir cvsrepo
+cd cvsrepo
+export CVSROOT=`pwd`
+export CVS_OPTIONS=-f
+cd ..
+
+cvscall -q -d "$CVSROOT" init
+
+echo % Create a new project
+
+mkdir src
+cd src
+echo "1" > a > b
+cvscall import -m "init" src v0 r0
+cd ..
+cvscall co src
+cd src
+
+echo % Branch the project
+
+cvscall tag -b BRANCH
+cvscall up -r BRANCH
+
+echo % Modify file a, then b, then a 
+
+echo "2" > a
+cvscall ci -m "mod a" | grep '<--' | sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+
+echo "2" > b
+cvscall ci -m "mod b" | grep '<--' | sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+
+echo "3" > a
+cvscall ci -m "mod a again" | grep '<--' | sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+
+echo % Convert
+
+cd ..
+hg convert src | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+
+echo % Check the result
+
+hg -R src-hg glog --template '#rev# (#branches#) #desc# files: #files#\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-cvs-branch.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,53 @@
+% create cvs repository
+% Create a new project
+N src/a
+N src/b
+
+No conflicts created by this import
+
+cvs checkout: Updating src
+U src/a
+U src/b
+% Branch the project
+cvs tag: Tagging .
+T a
+T b
+cvs update: Updating .
+% Modify file a, then b, then a
+cvs commit: Examining .
+checking in src/a,v
+cvs commit: Examining .
+checking in src/b,v
+cvs commit: Examining .
+checking in src/a,v
+% Convert
+assuming destination src-hg
+initializing destination src-hg repository
+using builtin cvsps
+collecting CVS rlog
+7 log entries
+creating changesets
+5 changeset entries
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+4 Initial revision
+3 init
+2 mod a
+1 mod b
+0 mod a again
+updating tags
+% Check the result
+o  5 () update tags files: .hgtags
+|
+| o  4 (BRANCH) mod a again files: a
+| |
+| o  3 (BRANCH) mod b files: b
+| |
+| o  2 (BRANCH) mod a files: a
+| |
+| o  1 (v0) init files:
+|/
+o  0 () Initial revision files: a b
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-cvs-builtincvsps	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,104 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" cvs || exit 80
+
+cvscall()
+{
+    cvs -f "$@"
+}
+
+hgcat()
+{
+    hg --cwd src-hg cat -r tip "$1"
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+echo "graphlog = " >> $HGRCPATH
+echo "[convert]" >> $HGRCPATH
+echo "cvsps=builtin" >> $HGRCPATH
+
+echo % create cvs repository
+mkdir cvsrepo
+cd cvsrepo
+export CVSROOT=`pwd`
+export CVS_OPTIONS=-f
+cd ..
+
+cvscall -q -d "$CVSROOT" init
+
+echo % create source directory
+mkdir src-temp
+cd src-temp
+echo a > a
+mkdir b
+cd b
+echo c > c
+cd ..
+
+echo % import source directory
+cvscall -q import -m import src INITIAL start
+cd ..
+
+echo % checkout source directory
+cvscall -q checkout src
+
+echo % commit a new revision changing b/c
+cd src
+sleep 1
+echo c >> b/c
+cvscall -q commit -mci0 . | grep '<--' |\
+    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+cd ..
+
+echo % convert fresh repo
+hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+hgcat a
+hgcat b/c
+
+echo % convert fresh repo with --filemap
+echo include b/c > filemap
+hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+hgcat b/c
+hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
+
+echo % commit new file revisions
+cd src
+echo a >> a
+echo c >> b/c
+cvscall -q commit -mci1 . | grep '<--' |\
+    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+cd ..
+
+echo % convert again
+hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+hgcat a
+hgcat b/c
+
+echo % convert again with --filemap
+hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+hgcat b/c
+hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
+
+echo % commit branch
+cd src
+cvs -q update -r1.1 b/c
+cvs -q tag -b branch
+cvs -q update -r branch
+echo d >> b/c
+cvs -q commit -mci2 . | grep '<--' |\
+    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
+cd ..
+
+echo % convert again
+hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+hgcat a
+hgcat b/c
+
+echo % convert again with --filemap
+hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
+hgcat b/c
+hg -R src-filemap log --template '#rev# #desc# files: #files#\n'
+
+echo "graphlog = " >> $HGRCPATH
+hg -R src-hg glog --template '#rev# (#branches#) #desc# files: #files#\n'
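A short sketch of the conversion idiom the test above exercises (repository and file names are the ones the test itself uses): hg convert records already-converted revisions in the destination's .hg/shamap, so re-running the same command after further CVS commits imports only the new changesets, and --filemap limits the conversion to the listed paths.

    echo include b/c > filemap
    hg convert --filemap filemap src src-filemap    # initial conversion
    # ...more commits reach the CVS repository...
    hg convert --filemap filemap src src-filemap    # incremental: only new changesets are converted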
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-cvs-builtincvsps.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,138 @@
+% create cvs repository
+% create source directory
+% import source directory
+N src/a
+N src/b/c
+
+No conflicts created by this import
+
+% checkout source directory
+U src/a
+U src/b/c
+% commit a new revision changing b/c
+checking in src/b/c,v
+% convert fresh repo
+initializing destination src-hg repository
+using builtin cvsps
+collecting CVS rlog
+5 log entries
+creating changesets
+3 changeset entries
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+2 Initial revision
+1 import
+0 ci0
+updating tags
+a
+c
+c
+% convert fresh repo with --filemap
+initializing destination src-filemap repository
+using builtin cvsps
+collecting CVS rlog
+5 log entries
+creating changesets
+3 changeset entries
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+2 Initial revision
+1 import
+rolling back last transaction
+0 ci0
+updating tags
+c
+c
+2 update tags files: .hgtags
+1 ci0 files: b/c
+0 Initial revision files: b/c
+% commit new file revisions
+checking in src/a,v
+checking in src/b/c,v
+% convert again
+using builtin cvsps
+collecting CVS rlog
+7 log entries
+creating changesets
+4 changeset entries
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci1
+a
+a
+c
+c
+c
+% convert again with --filemap
+using builtin cvsps
+collecting CVS rlog
+7 log entries
+creating changesets
+4 changeset entries
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci1
+c
+c
+c
+3 ci1 files: b/c
+2 update tags files: .hgtags
+1 ci0 files: b/c
+0 Initial revision files: b/c
+% commit branch
+U b/c
+T a
+T b/c
+checking in src/b/c,v
+% convert again
+using builtin cvsps
+collecting CVS rlog
+8 log entries
+creating changesets
+5 changeset entries
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci2
+a
+c
+d
+% convert again with --filemap
+using builtin cvsps
+collecting CVS rlog
+8 log entries
+creating changesets
+5 changeset entries
+connecting to cvsrepo
+scanning source...
+sorting...
+converting...
+0 ci2
+c
+d
+4 ci2 files: b/c
+3 ci1 files: b/c
+2 update tags files: .hgtags
+1 ci0 files: b/c
+0 Initial revision files: b/c
+o  5 (branch) ci2 files: b/c
+|
+| o  4 () ci1 files: a b/c
+| |
+| o  3 () update tags files: .hgtags
+| |
+| o  2 () ci0 files: b/c
+|/
+| o  1 (INITIAL) import files:
+|/
+o  0 () Initial revision files: a b/c
+
--- a/tests/test-convert-cvs.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-cvs.out	Wed Sep 17 11:34:37 2008 +0200
@@ -79,7 +79,6 @@
 converting...
 0 ci2
 a
-a
 c
 d
 % convert again with --filemap
--- a/tests/test-convert-darcs	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-darcs	Wed Sep 17 11:34:37 2008 +0200
@@ -58,4 +58,5 @@
 # "c" file in p1.1 patch are reverted too.
 # Just to say that manifest not listing "c" here is a bug.
 glog -R darcs-repo-hg
+hg up -q -R darcs-repo-hg
 hg -R darcs-repo-hg manifest --debug
--- a/tests/test-convert-filemap	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-filemap	Wed Sep 17 11:34:37 2008 +0200
@@ -80,6 +80,7 @@
 	echo "include $i" >> "$fmap"
     done
     hg -q convert $opts --filemap "$fmap" --datesort source "$repo"
+    hg up -q -R "$repo"
     glog -R "$repo"
     hg -R "$repo" manifest --debug
 }
@@ -115,6 +116,7 @@
 rename copied copied2
 EOF
 hg -q convert --filemap renames.fmap --datesort source renames.repo
+hg up -q -R renames.repo
 glog -R renames.repo
 hg -R renames.repo manifest --debug
 hg --cwd renames.repo debugrename copied2
--- a/tests/test-convert-filemap.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-filemap.out	Wed Sep 17 11:34:37 2008 +0200
@@ -29,7 +29,7 @@
 copied renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
 
 % foo: skip unwanted merges; use 1st parent in 1st merge, 2nd in 2nd
-o  3 "8: change foo" files: foo
+@  3 "8: change foo" files: foo
 |
 o  2 "6: change foo baz" files: foo
 |
@@ -39,7 +39,7 @@
 
 9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
 % bar: merges are not merges anymore
-o  4 "7: second merge; change bar" files: bar
+@  4 "7: second merge; change bar" files: bar
 |
 o  3 "5: change bar baz quux" files: bar
 |
@@ -51,7 +51,7 @@
 
 9463f52fe115e377cf2878d4fc548117211063f2 644   bar
 % baz: 1st merge is not a merge anymore; 2nd still is
-o    4 "7: second merge; change bar" files: baz
+@    4 "7: second merge; change bar" files: baz
 |\
 | o  3 "6: change foo baz" files: baz
 | |
@@ -63,7 +63,7 @@
 
 94c1be4dfde2ee8d78db8bbfcf81210813307c3d 644   baz
 % foo quux: we add additional merges when they are interesting
-o  8 "8: change foo" files: foo
+@  8 "8: change foo" files: foo
 |
 o    7 "7: second merge; change bar" files:
 |\
@@ -84,14 +84,14 @@
 9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
 bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644   quux
 % bar quux: partial conversion
-o  1 "3: change bar quux" files: bar quux
+@  1 "3: change bar quux" files: bar quux
 |
 o  0 "1: add bar quux; copy foo to copied" files: bar quux
 
 b79105bedc55102f394e90a789c9c380117c1b4a 644   bar
 db0421cc6b685a458c8d86c7d5c004f94429ea23 644   quux
 % bar quux: complete the partial conversion
-o  4 "7: second merge; change bar" files: bar
+@  4 "7: second merge; change bar" files: bar
 |
 o  3 "5: change bar baz quux" files: bar quux
 |
@@ -104,11 +104,11 @@
 9463f52fe115e377cf2878d4fc548117211063f2 644   bar
 bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644   quux
 % foo: partial conversion
-o  0 "0: add foo baz dir/" files: foo
+@  0 "0: add foo baz dir/" files: foo
 
 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644   foo
 % foo: complete the partial conversion
-o  3 "8: change foo" files: foo
+@  3 "8: change foo" files: foo
 |
 o  2 "6: change foo baz" files: foo
 |
@@ -118,12 +118,12 @@
 
 9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
 % copied: copied file; source not included in new repo
-o  0 "1: add bar quux; copy foo to copied" files: copied
+@  0 "1: add bar quux; copy foo to copied" files: copied
 
 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644   copied
 copied not renamed
 % foo copied: copied file; source included in new repo
-o  4 "8: change foo" files: foo
+@  4 "8: change foo" files: foo
 |
 o  3 "6: change foo baz" files: foo
 |
@@ -136,7 +136,7 @@
 6ca237634e1f6bee1b6db94292fb44f092a25842 644   copied
 9a7b52012991e4873687192c3e17e61ba3e837a3 644   foo
 copied renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
-o  4 "8: change foo" files: foo2
+@  4 "8: change foo" files: foo2
 |
 o  3 "6: change foo baz" files: foo2
 |
--- a/tests/test-convert-git	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-git	Wed Sep 17 11:34:37 2008 +0200
@@ -53,7 +53,7 @@
 cd ..
 
 hg convert --datesort git-repo
-
+hg up -q -R git-repo-hg
 hg -R git-repo-hg tip -v
 
 count=10
@@ -117,12 +117,14 @@
 	echo "include $i" >> "$fmap"
     done
     hg -q convert $opts --filemap "$fmap" --datesort git-repo2 "$repo"
+    hg up -q -R "$repo"
     glog -R "$repo"
     hg -R "$repo" manifest --debug
 }
 
 echo '% full conversion'
 hg -q convert --datesort git-repo2 fullrepo
+hg up -q -R fullrepo
 glog -R fullrepo
 hg -R fullrepo manifest --debug
 
--- a/tests/test-convert-git.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-git.out	Wed Sep 17 11:34:37 2008 +0200
@@ -24,7 +24,7 @@
 
 
 % full conversion
-o    9 "Discard change to foo" files: foo
+@    9 "Discard change to foo" files: foo
 |\
 | o  8 "change foo" files: foo
 | |
@@ -49,7 +49,7 @@
 9277c9cc8dd4576fc01a17939b4351e5ada93466 644   foo
 88dfeab657e8cf2cef3dec67b914f49791ae76b1 644   quux
 % foo bar baz: octopus merge
-o    8 "Discard change to foo" files: foo
+@    8 "Discard change to foo" files: foo
 |\
 | o  7 "change foo" files: foo
 | |
@@ -71,7 +71,7 @@
 354ae8da6e890359ef49ade27b68bbc361f3ca88 644   baz
 9277c9cc8dd4576fc01a17939b4351e5ada93466 644   foo
 % foo baz quux: only some parents of an octopus merge; "discard" a head
-o  6 "Discard change to foo" files: foo
+@  6 "Discard change to foo" files: foo
 |
 o  5 "change foo" files: foo
 |
--- a/tests/test-convert-hg-source.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-hg-source.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,9 +1,9 @@
 created new head
-merging baz and foo
+merging baz and foo to baz
 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
-merging foo and baz
+merging foo and baz to baz
 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 created new head
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-startrev	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,61 @@
+#!/bin/sh
+
+echo '[extensions]' >> $HGRCPATH
+echo 'hgext.graphlog =' >> $HGRCPATH
+echo 'hgext.convert =' >> $HGRCPATH
+
+glog()
+{
+    hg -R "$1" glog --template '#rev# "#desc#" files: #files#\n'
+}
+
+hg init source
+cd source
+
+echo a > a
+echo b > b
+hg ci -d '0 0' -qAm '0: add a b'
+echo c > c
+hg ci -d '1 0' -qAm '1: add c'
+hg copy a e
+echo b >> b
+hg ci -d '2 0' -qAm '2: copy e from a, change b'
+hg up -C 0
+echo a >> a
+hg ci -d '3 0' -qAm '3: change a'
+hg merge
+hg copy b d
+hg ci -d '4 0' -qAm '4: merge 2 and 3, copy d from b'
+echo a >> a
+hg ci -d '5 0' -qAm '5: change a'
+cd ..
+
+echo % convert from null revision
+hg convert --config convert.hg.startrev=null source empty
+glog empty
+
+echo % convert from zero revision
+hg convert --config convert.hg.startrev=0 source full
+glog full
+
+echo % convert from merge parent
+hg convert --config convert.hg.startrev=1 source conv1
+glog conv1
+cd conv1
+echo % check copy preservation
+hg log --follow --copies e
+echo % check copy removal on missing parent
+hg log --follow --copies d
+hg cat -r tip a b
+hg -q verify
+cd ..
+
+echo % convert from merge
+hg convert --config convert.hg.startrev=4 source conv4
+glog conv4
+cd conv4
+hg up -C
+hg cat -r tip a b
+hg -q verify
+cd ..
+
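The convert.hg.startrev option driving this test limits a Mercurial source to the named revision and its descendants (see the help text added to tests/test-convert.out later in this changeset). A minimal sketch, reusing this test's repository names:

    hg convert --config convert.hg.startrev=1 source conv1
    hg -R conv1 glog --template '#rev# "#desc#" files: #files#\n'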
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-hg-startrev.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,88 @@
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+merging a and e to e
+2 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% convert from null revision
+initializing destination empty repository
+scanning source...
+sorting...
+converting...
+% convert from zero revision
+initializing destination full repository
+scanning source...
+sorting...
+converting...
+5 0: add a b
+4 1: add c
+3 2: copy e from a, change b
+2 3: change a
+1 4: merge 2 and 3, copy d from b
+0 5: change a
+o  5 "5: change a" files: a
+|
+o    4 "4: merge 2 and 3, copy d from b" files: d e
+|\
+| o  3 "3: change a" files: a
+| |
+o |  2 "2: copy e from a, change b" files: b e
+| |
+o |  1 "1: add c" files: c
+|/
+o  0 "0: add a b" files: a b
+
+% convert from merge parent
+initializing destination conv1 repository
+scanning source...
+sorting...
+converting...
+3 1: add c
+2 2: copy e from a, change b
+1 4: merge 2 and 3, copy d from b
+0 5: change a
+o  3 "5: change a" files: a
+|
+o  2 "4: merge 2 and 3, copy d from b" files: a d e
+|
+o  1 "2: copy e from a, change b" files: b e
+|
+o  0 "1: add c" files: a b c
+
+% check copy preservation
+changeset:   2:cb71f8e79b45
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     4: merge 2 and 3, copy d from b
+
+changeset:   1:3334790240a8
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     2: copy e from a, change b
+
+% check copy removal on missing parent
+changeset:   2:cb71f8e79b45
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     4: merge 2 and 3, copy d from b
+
+a
+a
+a
+b
+b
+% convert from merge
+initializing destination conv4 repository
+scanning source...
+sorting...
+converting...
+1 4: merge 2 and 3, copy d from b
+0 5: change a
+o  1 "5: change a" files: a
+|
+o  0 "4: merge 2 and 3, copy d from b" files: a b c d e
+
+5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+a
+a
+a
+b
+b
--- a/tests/test-convert-svn-sink	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-svn-sink	Wed Sep 17 11:34:37 2008 +0200
@@ -128,6 +128,7 @@
 hg --cwd b up -C 2
 hg --cwd b merge
 hg --cwd b revert -r 2 b
+hg resolve -m b
 hg --cwd b ci -d '5 0' -m 'merge'
 
 hg convert -d svn b
--- a/tests/test-convert-svn-sink.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-svn-sink.out	Wed Sep 17 11:34:37 2008 +0200
@@ -265,9 +265,7 @@
 warning: conflicts during merge.
 merging b failed!
 2 files updated, 0 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 2
-  hg merge 4
+use 'hg resolve' to retry unresolved file merges
 assuming destination b-hg
 initializing svn repo 'b-hg'
 initializing svn wc 'b-hg-wc'
--- a/tests/test-convert-tla	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert-tla	Wed Sep 17 11:34:37 2008 +0200
@@ -69,4 +69,5 @@
 
 echo % show graph log
 glog -R tla-repo-hg
+hg up -q -R tla-repo-hg
 hg -R tla-repo-hg manifest --debug
--- a/tests/test-convert.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-convert.out	Wed Sep 17 11:34:37 2008 +0200
@@ -2,18 +2,18 @@
 
 Convert a foreign SCM repository to a Mercurial one.
 
-    Accepted source formats:
-    - Mercurial
-    - CVS
-    - Darcs
-    - git
-    - Subversion
-    - Monotone
-    - GNU Arch
+    Accepted source formats [identifiers]:
+    - Mercurial [hg]
+    - CVS [cvs]
+    - Darcs [darcs]
+    - git [git]
+    - Subversion [svn]
+    - Monotone [mtn]
+    - GNU Arch [gnuarch]
 
-    Accepted destination formats:
-    - Mercurial
-    - Subversion (history on branches is not preserved)
+    Accepted destination formats [identifiers]:
+    - Mercurial [hg]
+    - Subversion [svn] (history on branches is not preserved)
 
     If no revision is given, all revisions will be converted. Otherwise,
     convert will only import up to the named revision (given in a format
@@ -73,6 +73,52 @@
 
     --config convert.hg.saverev=True          (boolean)
         allow target to preserve source revision ID
+    --config convert.hg.startrev=0            (hg revision identifier)
+        convert start revision and its descendants
+
+    CVS Source
+    ----------
+
+    CVS source will use a sandbox (i.e. a checked-out copy) from CVS
+    to indicate the starting point of what will be converted. Direct
+    access to the repository files is not needed, unless of course
+    the repository is :local:. The conversion uses the top level
+    directory in the sandbox to find the CVS repository, and then uses
+    CVS rlog commands to find files to convert. This means that unless
+    a filemap is given, all files under the starting directory will be
+    converted, and that any directory reorganisation in the CVS
+    sandbox is ignored.
+
+    Because CVS does not have changesets, it is necessary to collect
+    individual commits to CVS and merge them into changesets. CVS source
+    can use the external 'cvsps' program (this is a legacy option and may
+    be removed in future) or use its internal changeset merging code.
+    External cvsps is default, and options may be passed to it by setting
+        --config convert.cvsps='cvsps -A -u --cvs-direct -q'
+    The options shown are the defaults.
+
+    Internal cvsps is selected by setting
+        --config convert.cvsps=builtin
+    and has a few more configurable options:
+        --config convert.cvsps.fuzz=60   (integer)
+            Specify the maximum time (in seconds) that is allowed between
+            commits with identical user and log message in a single
+            changeset. When very large files were checked in as part
+            of a changeset then the default may not be long enough.
+        --config convert.cvsps.mergeto='{{mergetobranch ([-\w]+)}}'
+            Specify a regular expression to which commit log messages are
+            matched. If a match occurs, then the conversion process will
+            insert a dummy revision merging the branch on which this log
+            message occurs to the branch indicated in the regex.
+        --config convert.cvsps.mergefrom='{{mergefrombranch ([-\w]+)}}'
+            Specify a regular expression to which commit log messages are
+            matched. If a match occurs, then the conversion process will
+            add the most recent revision on the branch indicated in the
+            regex as the second parent of the changeset.
+
+    The hgext/convert/cvsps wrapper script allows the builtin changeset
+    merging code to be run without doing a conversion. Its parameters and
+    output are similar to that of cvsps 2.1.
 
     Subversion Source
     -----------------
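The CVS source documentation added above translates directly into hgrc settings. As a rough sketch (not part of this changeset), selecting the builtin changeset code and spelling out the documented defaults for its options can use the same echo-into-$HGRCPATH idiom as the test scripts:

    echo "[convert]"                                      >> $HGRCPATH
    echo "cvsps = builtin"                                >> $HGRCPATH
    echo "cvsps.fuzz = 60"                                >> $HGRCPATH
    echo 'cvsps.mergeto = {{mergetobranch ([-\w]+)}}'     >> $HGRCPATH
    echo 'cvsps.mergefrom = {{mergefrombranch ([-\w]+)}}' >> $HGRCPATH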
--- a/tests/test-copy-move-merge.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-copy-move-merge.out	Wed Sep 17 11:34:37 2008 +0200
@@ -12,16 +12,16 @@
   checking for directory renames
  a: remote moved to c -> m
  a: remote moved to b -> m
-copying a to b
-copying a to c
-picked tool 'internal:merge' for a (binary False symlink False)
-merging a and b
-my a@fb3948d97f07+ other b@40da226db0f0 ancestor a@583c7b748052
+preserving a for resolve of b
+preserving a for resolve of c
+removing a
+picked tool 'internal:merge' for b (binary False symlink False)
+merging a and b to b
+my b@fb3948d97f07+ other b@40da226db0f0 ancestor a@583c7b748052
  premerge successful
-removing a
-picked tool 'internal:merge' for a (binary False symlink False)
-merging a and c
-my a@fb3948d97f07+ other c@40da226db0f0 ancestor a@583c7b748052
+picked tool 'internal:merge' for c (binary False symlink False)
+merging a and c to c
+my c@fb3948d97f07+ other c@40da226db0f0 ancestor a@583c7b748052
  premerge successful
 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
--- a/tests/test-copy.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-copy.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,6 +1,7 @@
 A b
 b
  b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
+committed changeset 1:386a3cc01532710ca78aed9a54fa2f459c04f29c
 we should see two history entries
 changeset:   1:386a3cc01532
 tag:         tip
--- a/tests/test-debugcomplete.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-debugcomplete.out	Wed Sep 17 11:34:37 2008 +0200
@@ -33,6 +33,7 @@
 recover
 remove
 rename
+resolve
 revert
 rollback
 root
@@ -79,6 +80,7 @@
 recover
 remove
 rename
+resolve
 revert
 rollback
 root
--- a/tests/test-dispatch.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-dispatch.out	Wed Sep 17 11:34:37 2008 +0200
@@ -10,7 +10,7 @@
     or tip if no revision is checked out.
 
     Output may be to a file, in which case the name of the file is
-    given using a format string.  The formatting rules are the same as
+    given using a format string. The formatting rules are the same as
     for the export command, with the following additions:
 
     %s   basename of file being printed
--- a/tests/test-double-merge.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-double-merge.out	Wed Sep 17 11:34:37 2008 +0200
@@ -10,10 +10,11 @@
   checking for directory renames
  foo: versions differ -> m
  foo: remote copied to bar -> m
-copying foo to bar
-picked tool 'internal:merge' for foo (binary False symlink False)
-merging foo and bar
-my foo@2092631ce82b+ other bar@7731dad1c2b9 ancestor foo@310fd17130da
+preserving foo for resolve of bar
+preserving foo for resolve of foo
+picked tool 'internal:merge' for bar (binary False symlink False)
+merging foo and bar to bar
+my bar@2092631ce82b+ other bar@7731dad1c2b9 ancestor foo@310fd17130da
  premerge successful
 picked tool 'internal:merge' for foo (binary False symlink False)
 merging foo
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-dumprevlog	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,47 @@
+#!/bin/sh
+
+CONTRIBDIR=$TESTDIR/../contrib
+
+mkdir repo-a
+cd repo-a
+hg init
+
+echo this is file a > a
+hg add a
+hg commit -m first -d '0 0'
+
+echo adding to file a >> a
+hg commit -m second -d '0 0'
+
+echo adding more to file a >> a
+hg commit -m third -d '0 0'
+
+hg verify
+
+echo dumping revlog of file a to stdout:
+python $CONTRIBDIR/dumprevlog .hg/store/data/a.i
+echo dumprevlog done
+
+# dump all revlogs to file repo.dump
+find .hg/store -name "*.i" | sort | xargs python $CONTRIBDIR/dumprevlog > ../repo.dump
+
+cd ..
+
+mkdir repo-b
+cd repo-b
+hg init
+
+echo undumping:
+python $CONTRIBDIR/undumprevlog < ../repo.dump
+echo undumping done
+
+hg verify
+
+cd ..
+
+echo comparing repos:
+hg -R repo-b incoming repo-a
+hg -R repo-a incoming repo-b
+echo comparing done
+
+exit 0
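In the script above, contrib/dumprevlog prints every revision of each revlog (node, linkrev, parents, length and the revision text, as seen in the expected output below), and contrib/undumprevlog rebuilds the revlogs inside a freshly initialized repository. The round trip is then checked by running hg incoming in both directions; a condensed sketch of the same steps:

    find .hg/store -name "*.i" | sort | xargs python $CONTRIBDIR/dumprevlog > ../repo.dump
    python $CONTRIBDIR/undumprevlog < ../repo.dump   # run inside the new, empty repository
    hg -R repo-b incoming repo-a                     # "no changes found" means the repositories match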
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-dumprevlog.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,53 @@
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 3 changesets, 3 total revisions
+dumping revlog of file a to stdout:
+file: .hg/store/data/a.i
+node: 183d2312b35066fb6b3b449b84efc370d50993d0
+linkrev: 0
+parents: 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+length: 15
+-start-
+this is file a
+
+-end-
+node: b1047953b6e6b633c0d8197eaa5116fbdfd3095b
+linkrev: 1
+parents: 183d2312b35066fb6b3b449b84efc370d50993d0 0000000000000000000000000000000000000000
+length: 32
+-start-
+this is file a
+adding to file a
+
+-end-
+node: 8c4fd1f7129b8cdec6c7f58bf48fb5237a4030c1
+linkrev: 2
+parents: b1047953b6e6b633c0d8197eaa5116fbdfd3095b 0000000000000000000000000000000000000000
+length: 54
+-start-
+this is file a
+adding to file a
+adding more to file a
+
+-end-
+dumprevlog done
+undumping:
+.hg/store/00changelog.i
+.hg/store/00manifest.i
+.hg/store/data/a.i
+undumping done
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 3 changesets, 3 total revisions
+comparing repos:
+comparing with repo-a
+searching for changes
+no changes found
+comparing with repo-b
+searching for changes
+no changes found
+comparing done
--- a/tests/test-empty	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-empty	Wed Sep 17 11:34:37 2008 +0200
@@ -1,7 +1,17 @@
 #!/bin/sh
 
-hg init
+hg init a
+cd a
 hg log
 hg grep wah
 hg manifest
 hg verify
+ls .hg
+ls .hg/store
+
+cd ..
+hg clone a b
+cd b
+hg verify
+ls .hg
+ls .hg/store
--- a/tests/test-empty.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-empty.out	Wed Sep 17 11:34:37 2008 +0200
@@ -3,3 +3,19 @@
 crosschecking files in changesets and manifests
 checking files
 0 files, 0 changesets, 0 total revisions
+00changelog.i
+requires
+store
+updating working directory
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+0 files, 0 changesets, 0 total revisions
+00changelog.i
+branch
+dirstate
+hgrc
+requires
+store
--- a/tests/test-extension	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-extension	Wed Sep 17 11:34:37 2008 +0200
@@ -80,3 +80,25 @@
 hg help debugextension
 hg --debug help debugextension
 echo 'debugextension = !' >> $HGRCPATH
+
+echo % issue811
+debugpath=`pwd`/debugissue811.py
+cat > debugissue811.py <<EOF
+'''show all loaded extensions
+'''
+from mercurial import extensions, commands
+
+def debugextensions(ui):
+    "yet another debug command"
+    ui.write("%s\n" % '\n'.join([x for x, y in extensions.extensions()]))
+
+cmdtable = {"debugextensions": (debugextensions, (), "hg debugextensions")}
+commands.norepo += " debugextensions"
+EOF
+echo "debugissue811 = $debugpath" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+echo "hgext.mq=" >> $HGRCPATH
+echo "hgext/mq=" >> $HGRCPATH
+
+echo % show extensions
+hg debugextensions
--- a/tests/test-extension.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-extension.out	Wed Sep 17 11:34:37 2008 +0200
@@ -33,6 +33,13 @@
  debugfoobar:
       yet another debug command
 
+special help topics:
+ dates             Date Formats
+ patterns          File Name Patterns
+ environment, env  Environment Variables
+ revs, revisions   Specifying Single Revisions
+ mrevs, multirevs  Specifying Multiple Revisions
+
 global options:
  -R --repository      repository root directory or symbolic path name
     --cwd             change working directory
@@ -50,3 +57,7 @@
     --profile         print command execution profile
     --version         output version information and exit
  -h --help            display help and exit
+% issue811
+% show extensions
+debugissue811
+mq
--- a/tests/test-fetch	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-fetch	Wed Sep 17 11:34:37 2008 +0200
@@ -7,6 +7,7 @@
 echo "[extensions]" >> $HGRCPATH
 echo "fetch=" >> $HGRCPATH
 
+echo % test fetch with default branches only
 hg init a
 echo a > a/a
 hg --cwd a commit -d '1 0' -Ama
@@ -66,4 +67,93 @@
 echo % should abort, because i is modified
 hg --cwd i fetch ../h
 
+
+echo % test fetch with named branches
+hg init nbase
+echo base > nbase/a
+hg -R nbase ci -d '1 0' -Am base
+hg -R nbase branch a
+echo a > nbase/a
+hg -R nbase ci -d '2 0' -m a
+hg -R nbase up -C 0
+hg -R nbase branch b
+echo b > nbase/b
+hg -R nbase ci -Ad '3 0' -m b
+
+echo
+echo % pull in change on foreign branch
+hg clone nbase n1
+hg clone nbase n2
+hg -R n1 up -C a
+echo aa > n1/a
+hg -R n1 ci -d '4 0' -m a1
+
+hg -R n2 up -C b
+hg -R n2 fetch -d '9 0' -m 'merge' n1
+echo '% parent should be 2 (no automatic update)'
+hg -R n2 parents --template '{rev}\n'
+rm -fr n1 n2
+
+echo
+echo % pull in changes on both foreign and local branches
+hg clone nbase n1
+hg clone nbase n2
+hg -R n1 up -C a
+echo aa > n1/a
+hg -R n1 ci -d '4 0' -m a1
+hg -R n1 up -C b
+echo bb > n1/b
+hg -R n1 ci -d '5 0' -m b1
+
+hg -R n2 up -C b
+hg -R n2 fetch -d '9 0' -m 'merge' n1
+echo '% parent should be 4 (fast forward)'
+hg -R n2 parents --template '{rev}\n'
+rm -fr n1 n2
+
+echo
+echo '% pull changes on foreign (2 new heads) and local (1 new head) branches'
+echo % with a local change
+hg clone nbase n1
+hg clone nbase n2
+hg -R n1 up -C a
+echo a1 > n1/a
+hg -R n1 ci -d '4 0' -m a1
+hg -R n1 up -C b
+echo bb > n1/b
+hg -R n1 ci -d '5 0' -m b1
+hg -R n1 up -C 1
+echo a2 > n1/a
+hg -R n1 ci -d '6 0' -m a2
+
+hg -R n2 up -C b
+echo change >> n2/c
+hg -R n2 ci -Ad '7 0' -m local
+hg -R n2 fetch -d '9 0' -m 'merge' n1
+echo '% parent should be 7 (new merge changeset)'
+hg -R n2 parents --template '{rev}\n'
+rm -fr n1 n2
+
+echo '% pull in changes on foreign (merge of local branch) and local (2 new'
+echo '% heads) with a local change'
+hg clone nbase n1
+hg clone nbase n2
+hg -R n1 up -C a
+hg -R n1 merge b
+hg -R n1 ci -d '4 0' -m merge
+hg -R n1 up -C 2
+echo c > n1/a
+hg -R n1 ci -d '5 0' -m c
+hg -R n1 up -C 2
+echo cc > n1/a
+hg -R n1 ci -d '6 0' -m cc
+
+hg -R n2 up -C b
+echo change >> n2/b
+hg -R n2 ci -Ad '7 0' -m local
+hg -R n2 fetch -d '9 0' -m 'merge' n1
+echo '% parent should be 3 (fetch did not merge anything)'
+hg -R n2 parents --template '{rev}\n'
+rm -fr n1 n2
+
 true
--- a/tests/test-fetch.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-fetch.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,3 +1,4 @@
+% test fetch with default branches only
 adding a
 updating working directory
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -79,3 +80,93 @@
 new changeset 4:55aa4f32ec59 merges remote changes with local
 % should abort, because i is modified
 abort: working directory is missing some files
+% test fetch with named branches
+adding a
+marked working directory as branch a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+marked working directory as branch b
+adding b
+created new head
+
+% pull in change on foreign branch
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from n1
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+% parent should be 2 (no automatic update)
+2
+
+% pull in changes on both foreign and local branches
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from n1
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 2 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% parent should be 4 (fast forward)
+4
+
+% pull changes on foreign (2 new heads) and local (1 new head) branches
+% with a local change
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+created new head
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+adding c
+pulling from n1
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 2 files (+2 heads)
+updating to 5:708c6cce3d26
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+merging with 3:d83427717b1f
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+new changeset 7:48f1a33f52af merges remote changes with local
+% parent should be 7 (new merge changeset)
+7
+% pull in changes on foreign (merge of local branch) and local (2 new
+% heads) with a local change
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from n1
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 2 changes to 1 files (+2 heads)
+not merging with 1 other new branch heads (use "hg heads ." and "hg merge" to merge them)
+% parent should be 3 (fetch did not merge anything)
+3
--- a/tests/test-globalopts.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-globalopts.out	Wed Sep 17 11:34:37 2008 +0200
@@ -183,6 +183,7 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
+ resolve      resolve file merges from a branch merge or update
  revert       restore individual files or dirs to an earlier state
  rollback     roll back the last transaction
  root         print the root (top) of the current working dir
@@ -236,6 +237,7 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
+ resolve      resolve file merges from a branch merge or update
  revert       restore individual files or dirs to an earlier state
  rollback     roll back the last transaction
  root         print the root (top) of the current working dir
--- a/tests/test-help.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-help.out	Wed Sep 17 11:34:37 2008 +0200
@@ -74,6 +74,7 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
+ resolve      resolve file merges from a branch merge or update
  revert       restore individual files or dirs to an earlier state
  rollback     roll back the last transaction
  root         print the root (top) of the current working dir
@@ -123,6 +124,7 @@
  recover      roll back an interrupted transaction
  remove       remove the specified files on the next commit
  rename       rename files; equivalent of copy + remove
+ resolve      resolve file merges from a branch merge or update
  revert       restore individual files or dirs to an earlier state
  rollback     roll back the last transaction
  root         print the root (top) of the current working dir
@@ -199,9 +201,9 @@
 
  -r --rev                  revision
  -a --text                 treat all files as text
- -p --show-function        show which function each change is in
  -g --git                  use git extended diff format
     --nodates              don't include dates in diff headers
+ -p --show-function        show which function each change is in
  -w --ignore-all-space     ignore white space when comparing lines
  -b --ignore-space-change  ignore changes in the amount of white space
  -B --ignore-blank-lines   ignore changes whose lines are all blank
@@ -216,10 +218,10 @@
 
 show changed files in the working directory
 
-    Show status of files in the repository.  If names are given, only
-    files that match are shown.  Files that are clean or ignored or
+    Show status of files in the repository. If names are given, only
+    files that match are shown. Files that are clean or ignored or
     source of a copy/move operation, are not listed unless -c (clean),
-    -i (ignored), -C (copies) or -A is given.  Unless options described
+    -i (ignored), -C (copies) or -A is given. Unless options described
     with "show only ..." are given, the options -mardu are used.
 
     Option -q/--quiet hides untracked (unknown and ignored) files
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgk	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "hgk=" >> $HGRCPATH
+
+hg init repo
+cd repo
+echo a > a
+hg ci -Am adda
+hg debug-cat-file commit 0
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgk.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,9 @@
+adding a
+tree a0c8bcbbb45c
+parent 000000000000
+author test 0 0
+committer test 0 0
+revision 0
+branch default
+
+adda
--- a/tests/test-hgweb-commands	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-hgweb-commands	Wed Sep 17 11:34:37 2008 +0200
@@ -34,21 +34,22 @@
 echo % Overviews
 "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/tags/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
 "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb' | sed "s/[0-9]* years ago/long ago/g"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/?style=gitweb' | sed "s/[0-9]* years/long/g"
 
 echo % capabilities
-"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/capabilities'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'
 echo % heads
-"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/heads'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=heads'
 echo % lookup
-"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/lookup/1'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=lookup&node=1'
 echo % branches
-"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/branches'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=branches'
 echo % changegroup
-"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/changegroup'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=changegroup'
 echo % stream_out
-"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/stream_out'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=stream_out'
 echo % failing unbundle, requires POST request
-"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/unbundle'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=unbundle'
 
 echo % Static files
 "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css'
Binary file tests/test-hgweb-commands.out has changed
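The URL changes in this test follow hgweb dispatching wire-protocol commands through the ?cmd= query parameter rather than dedicated path components such as /capabilities or /heads. A small example in the style of the test itself:

    "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=heads'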
--- a/tests/test-hgweb-no-path-info	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-hgweb-no-path-info	Wed Sep 17 11:34:37 2008 +0200
@@ -41,19 +41,20 @@
 	'SERVER_PROTOCOL': 'HTTP/1.0'
 }
 
+def process(app):
+    content = app(env, startrsp)
+    sys.stdout.write(output.getvalue())
+    sys.stdout.write(''.join(content))
+    print '---- ERRORS'
+    print errors.getvalue()
+
 output = StringIO()
 env['QUERY_STRING'] = 'style=atom'
-hgweb('.', name = 'repo')(env, startrsp)
-print output.getvalue()
-print '---- ERRORS'
-print errors.getvalue()
+process(hgweb('.', name='repo'))
 
 output = StringIO()
 env['QUERY_STRING'] = 'style=raw'
-hgwebdir({'repo': '.'})(env, startrsp)
-print output.getvalue()
-print '---- ERRORS'
-print errors.getvalue()
+process(hgwebdir({'repo': '.'}))
 EOF
 
 python request.py | sed "s/http:\/\/127\.0\.0\.1:[0-9]*\//http:\/\/127.0.0.1\//"
--- a/tests/test-hgweb-no-path-info.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-hgweb-no-path-info.out	Wed Sep 17 11:34:37 2008 +0200
@@ -35,7 +35,6 @@
  </entry>
 
 </feed>
-
 ---- ERRORS
 
 ---- HEADERS
@@ -45,6 +44,5 @@
 
 repo/
 
-
 ---- ERRORS
 
--- a/tests/test-hgweb-no-request-uri	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-hgweb-no-request-uri	Wed Sep 17 11:34:37 2008 +0200
@@ -41,37 +41,33 @@
 	'SERVER_PROTOCOL': 'HTTP/1.0'
 }
 
+def process(app):
+	content = app(env, startrsp)
+	sys.stdout.write(output.getvalue())
+	sys.stdout.write(''.join(content))
+	print '---- ERRORS'
+	print errors.getvalue()
+	
+
 output = StringIO()
 env['PATH_INFO'] = '/'
 env['QUERY_STRING'] = 'style=atom'
-hgweb('.', name = 'repo')(env, startrsp)
-print output.getvalue()
-print '---- ERRORS'
-print errors.getvalue()
+process(hgweb('.', name = 'repo'))
 
 output = StringIO()
 env['PATH_INFO'] = '/file/tip/'
 env['QUERY_STRING'] = 'style=raw'
-hgweb('.', name = 'repo')(env, startrsp)
-print output.getvalue()
-print '---- ERRORS'
-print errors.getvalue()
+process(hgweb('.', name = 'repo'))
 
 output = StringIO()
 env['PATH_INFO'] = '/'
 env['QUERY_STRING'] = 'style=raw'
-hgwebdir({'repo': '.'})(env, startrsp)
-print output.getvalue()
-print '---- ERRORS'
-print errors.getvalue()
+process(hgwebdir({'repo': '.'}))
 
 output = StringIO()
 env['PATH_INFO'] = '/repo/file/tip/'
 env['QUERY_STRING'] = 'style=raw'
-hgwebdir({'repo': '.'})(env, startrsp)
-print output.getvalue()
-print '---- ERRORS'
-print errors.getvalue()
+process(hgwebdir({'repo': '.'}))
 EOF
 
 python request.py | sed "s/http:\/\/127\.0\.0\.1:[0-9]*\//http:\/\/127.0.0.1\//"
--- a/tests/test-hgweb-no-request-uri.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-hgweb-no-request-uri.out	Wed Sep 17 11:34:37 2008 +0200
@@ -35,6 +35,24 @@
  </entry>
 
 </feed>
+---- ERRORS
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('Content-Type', 'text/plain; charset=ascii')]
+
+-rw-r--r-- 4 bar
+
+
+---- ERRORS
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('Content-Type', 'text/plain; charset=ascii')]
+
+/repo/
 
 ---- ERRORS
 
@@ -46,27 +64,5 @@
 -rw-r--r-- 4 bar
 
 
-
 ---- ERRORS
 
----- HEADERS
-200 Script output follows
----- DATA
-[('Content-Type', 'text/plain; charset=ascii')]
-
-/repo/
-
-
----- ERRORS
-
----- HEADERS
-200 Script output follows
----- DATA
-[('Content-Type', 'text/plain; charset=ascii')]
-
--rw-r--r-- 4 bar
-
-
-
----- ERRORS
-
--- a/tests/test-hgweb.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-hgweb.out	Wed Sep 17 11:34:37 2008 +0200
@@ -186,4 +186,46 @@
 	background-color: #d5dde6;
 	border-color: #e3ecf4 #9398f4 #9398f4 #e3ecf4;
 }
+
+/* Graph */
+div#wrapper {
+	position: relative;
+	margin: 0;
+	padding: 0;
+	margin-top: 3px;
+}
+
+canvas {
+	position: absolute;
+	z-index: 5;
+	top: -0.9em;
+	margin: 0;
+}
+
+ul#nodebgs {
+	list-style: none inside none;
+	padding: 0;
+	margin: 0;
+	top: -0.7em;
+}
+ 
+ul#graphnodes li, ul#nodebgs li {
+	height: 39px;
+}
+
+ul#graphnodes {
+	position: absolute;
+	z-index: 10;
+	top: -0.8em;
+	list-style: none inside none;
+	padding: 0;
+}
+
+ul#graphnodes li .info {
+	display: block;
+	font-size: 100%;
+	position: relative;
+	top: -3px;
+	font-style: italic;
+}
 % errors
--- a/tests/test-highlight	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-highlight	Wed Sep 17 11:34:37 2008 +0200
@@ -5,6 +5,8 @@
 cat <<EOF >> $HGRCPATH
 [extensions]
 hgext.highlight =
+[web]
+pygments_style = friendly
 EOF
 
 hg init test
@@ -16,14 +18,65 @@
 hg serve -p $HGPORT -d -n test --pid-file=hg.pid -A access.log -E errors.log
 cat hg.pid >> $DAEMON_PIDS
 
-echo % hgweb filerevision
+echo % hgweb filerevision, html
 ("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/get-with-headers.py') \
     | sed "s/[0-9]* years ago/long ago/g" | sed "s/class=\"k\"/class=\"kn\"/g"
 
-echo % hgweb fileannotate
+echo % hgweb fileannotate, html
 ("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/annotate/tip/get-with-headers.py') \
     | sed "s/[0-9]* years ago/long ago/g" | sed "s/class=\"k\"/class=\"kn\"/g"
 
+echo % hgweb fileannotate, raw
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/annotate/tip/get-with-headers.py?style=raw') \
+    | sed "s/test@//" > a
+
+echo "200 Script output follows" > b
+echo "" >> b
+echo "" >> b
+hg annotate "get-with-headers.py" >> b
+echo "" >> b
+echo "" >> b
+echo "" >> b
+echo "" >> b
+
+diff -u b a
+
+echo
+echo % hgweb filerevision, raw
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/get-with-headers.py?style=raw') \
+    > a
+
+echo "200 Script output follows" > b
+echo "" >> b
+hg cat get-with-headers.py >> b
+
+diff -u b a
+
+echo
+echo % hgweb highlightcss friendly
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/highlightcss' > out
+head -n 4 out
+rm out
+
+echo % errors encountered
+cat errors.log
+kill `cat hg.pid`
+
+# Change the pygments style
+cat > .hg/hgrc <<EOF
+[web]
+pygments_style = fruity
+EOF
+
+echo % hg serve again
+hg serve -p $HGPORT -d -n test --pid-file=hg.pid -A access.log -E errors.log
+cat hg.pid >> $DAEMON_PIDS
+
+echo % hgweb highlightcss fruity
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/highlightcss' > out
+head -n 4 out
+rm out
+
 echo % errors encountered
 cat errors.log
 
--- a/tests/test-highlight.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-highlight.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,6 +1,6 @@
 adding get-with-headers.py
 % hg serve
-% hgweb filerevision
+% hgweb filerevision, html
 200 Script output follows
 
 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
@@ -10,7 +10,7 @@
 <meta name="robots" content="index, nofollow" />
 <link rel="stylesheet" href="/static/style.css" type="text/css" />
 
-<link rel="stylesheet" href="/static/highlight.css" type="text/css" />
+<link rel="stylesheet" href="/highlightcss" type="text/css" />
 <title>test:get-with-headers.py</title>
 </head>
 <body>
@@ -18,6 +18,7 @@
 <div class="buttons">
 <a href="/log/0">changelog</a>
 <a href="/shortlog/0">shortlog</a>
+<a href="/graph">graph</a>
 <a href="/tags">tags</a>
 <a href="/rev/79ee608ca36d">changeset</a>
 <a href="/file/79ee608ca36d/">files</a>
@@ -62,7 +63,7 @@
 </body>
 </html>
 
-% hgweb fileannotate
+% hgweb fileannotate, html
 200 Script output follows
 
 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
@@ -72,7 +73,7 @@
 <meta name="robots" content="index, nofollow" />
 <link rel="stylesheet" href="/static/style.css" type="text/css" />
 
-<link rel="stylesheet" href="/static/highlight.css" type="text/css" />
+<link rel="stylesheet" href="/highlightcss" type="text/css" />
 <title>test: get-with-headers.py annotate</title>
 </head>
 <body>
@@ -80,6 +81,7 @@
 <div class="buttons">
 <a href="/log/0">changelog</a>
 <a href="/shortlog/0">shortlog</a>
+<a href="/graph">graph</a>
 <a href="/tags">tags</a>
 <a href="/rev/79ee608ca36d">changeset</a>
 <a href="/file/79ee608ca36d/">files</a>
@@ -101,10 +103,12 @@
  <td>&#116;&#101;&#115;&#116;</td></tr>
 <tr>
  <td class="metatag">date:</td>
- <td>Thu Jan 01 00:00:00 1970 +0000 (long ago)</td></tr>
+ <td>Thu Jan 01 00:00:00 1970 +0000 (long ago)</td>
+</tr>
 <tr>
  <td class="metatag">permissions:</td>
- <td>-rwxr-xr-x</td></tr>
+ <td>-rwxr-xr-x</td>
+</tr>
 <tr>
   <td class="metatag">description:</td>
   <td>a</td>
@@ -114,7 +118,7 @@
 <br/>
 
 <table cellspacing="0" cellpadding="0">
-<tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l1">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l1" id="l1">     1</a></td><td><pre><span class="c">#!/usr/bin/env python</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l2">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l2" id="l2">     2</a></td><td><pre></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l3">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l3" id="l3">     3</a></td><td><pre><span class="n">__doc__</span> <span class="o">=</span> <span class="s">&quot;&quot;&quot;This does HTTP get requests given a host:port and path and returns</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l4">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l4" id="l4">     4</a></td><td><pre><span class="s">a subset of the headers plus the body of the result.&quot;&quot;&quot;</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l5">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l5" id="l5">     5</a></td><td><pre></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l6">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l6" id="l6">     6</a></td><td><pre><span class="kn">import</span> <span class="nn">httplib</span><span class="o">,</span> <span class="nn">sys</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l7">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l7" id="l7">     7</a></td><td><pre><span class="n">headers</span> <span class="o">=</span> <span class="p">[</span><span class="n">h</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span> <span class="kn">for</span> <span class="n">h</span> <span class="ow">in</span> <span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mf">3</span><span class="p">:]]</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l8">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l8" id="l8">     8</a></td><td><pre><span class="n">conn</span> <span class="o">=</span> <span class="n">httplib</span><span class="o">.</span><span class="n">HTTPConnection</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mf">1</span><span class="p">])</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l9">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l9" id="l9">     9</a></td><td><pre><span class="n">conn</span><span class="o">.</span><span class="n">request</span><span class="p">(</span><span class="s">&quot;GET&quot;</span><span class="p">,</span> <span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mf">2</span><span class="p">])</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l10">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l10" 
id="l10">    10</a></td><td><pre><span class="n">response</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">getresponse</span><span class="p">()</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l11">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l11" id="l11">    11</a></td><td><pre><span class="kn">print</span> <span class="n">response</span><span class="o">.</span><span class="n">status</span><span class="p">,</span> <span class="n">response</span><span class="o">.</span><span class="n">reason</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l12">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l12" id="l12">    12</a></td><td><pre><span class="kn">for</span> <span class="n">h</span> <span class="ow">in</span> <span class="n">headers</span><span class="p">:</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l13">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l13" id="l13">    13</a></td><td><pre>    <span class="kn">if</span> <span class="n">response</span><span class="o">.</span><span class="n">getheader</span><span class="p">(</span><span class="n">h</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span><span class="p">:</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l14">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l14" id="l14">    14</a></td><td><pre>        <span class="kn">print</span> <span class="s">&quot;</span><span class="si">%s</span><span class="s">: </span><span class="si">%s</span><span class="s">&quot;</span> <span class="o">%</span> <span class="p">(</span><span class="n">h</span><span class="p">,</span> <span class="n">response</span><span class="o">.</span><span class="n">getheader</span><span class="p">(</span><span class="n">h</span><span class="p">))</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l15">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l15" id="l15">    15</a></td><td><pre><span class="kn">print</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l16">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l16" id="l16">    16</a></td><td><pre><span class="n">sys</span><span class="o">.</span><span class="n">stdout</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">response</span><span class="o">.</span><span class="n">read</span><span class="p">())</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l17">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l17" id="l17">    17</a></td><td><pre></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l18">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l18" id="l18">    18</a></td><td><pre><span class="kn">if</span> <span class="mf">200</span> <span class="o">&lt;=</span> <span class="n">response</span><span class="o">.</span><span class="n">status</span> <span 
class="o">&lt;=</span> <span class="mf">299</span><span class="p">:</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l19">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l19" id="l19">    19</a></td><td><pre>    <span class="n">sys</span><span class="o">.</span><span class="n">exit</span><span class="p">(</span><span class="mf">0</span><span class="p">)</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l20">&#116;&#101;&#115;&#116;@0</a></td><td><a class="lineno" href="#l20" id="l20">    20</a></td><td><pre><span class="n">sys</span><span class="o">.</span><span class="n">exit</span><span class="p">(</span><span class="mf">1</span><span class="p">)</span></pre></td></tr>
+<tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l1" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l1" id="l1">     1</a></td><td><pre><span class="c">#!/usr/bin/env python</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l2" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l2" id="l2">     2</a></td><td><pre></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l3" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l3" id="l3">     3</a></td><td><pre><span class="n">__doc__</span> <span class="o">=</span> <span class="s">&quot;&quot;&quot;This does HTTP get requests given a host:port and path and returns</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l4" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l4" id="l4">     4</a></td><td><pre><span class="s">a subset of the headers plus the body of the result.&quot;&quot;&quot;</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l5" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l5" id="l5">     5</a></td><td><pre></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l6" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l6" id="l6">     6</a></td><td><pre><span class="kn">import</span> <span class="nn">httplib</span><span class="o">,</span> <span class="nn">sys</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l7" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l7" id="l7">     7</a></td><td><pre><span class="n">headers</span> <span class="o">=</span> <span class="p">[</span><span class="n">h</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span> <span class="kn">for</span> <span class="n">h</span> <span class="ow">in</span> <span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mf">3</span><span class="p">:]]</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l8" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l8" id="l8">     8</a></td><td><pre><span class="n">conn</span> <span class="o">=</span> <span class="n">httplib</span><span class="o">.</span><span class="n">HTTPConnection</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mf">1</span><span class="p">])</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l9" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l9" id="l9">     9</a></td><td><pre><span class="n">conn</span><span class="o">.</span><span class="n">request</span><span class="p">(</span><span class="s">&quot;GET&quot;</span><span class="p">,</span> <span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mf">2</span><span class="p">])</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l10" title="79ee608ca36d: 
a">test@0</a></td><td><a class="lineno" href="#l10" id="l10">    10</a></td><td><pre><span class="n">response</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">getresponse</span><span class="p">()</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l11" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l11" id="l11">    11</a></td><td><pre><span class="kn">print</span> <span class="n">response</span><span class="o">.</span><span class="n">status</span><span class="p">,</span> <span class="n">response</span><span class="o">.</span><span class="n">reason</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l12" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l12" id="l12">    12</a></td><td><pre><span class="kn">for</span> <span class="n">h</span> <span class="ow">in</span> <span class="n">headers</span><span class="p">:</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l13" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l13" id="l13">    13</a></td><td><pre>    <span class="kn">if</span> <span class="n">response</span><span class="o">.</span><span class="n">getheader</span><span class="p">(</span><span class="n">h</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span><span class="p">:</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l14" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l14" id="l14">    14</a></td><td><pre>        <span class="kn">print</span> <span class="s">&quot;</span><span class="si">%s</span><span class="s">: </span><span class="si">%s</span><span class="s">&quot;</span> <span class="o">%</span> <span class="p">(</span><span class="n">h</span><span class="p">,</span> <span class="n">response</span><span class="o">.</span><span class="n">getheader</span><span class="p">(</span><span class="n">h</span><span class="p">))</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l15" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l15" id="l15">    15</a></td><td><pre><span class="kn">print</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l16" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l16" id="l16">    16</a></td><td><pre><span class="n">sys</span><span class="o">.</span><span class="n">stdout</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">response</span><span class="o">.</span><span class="n">read</span><span class="p">())</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l17" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l17" id="l17">    17</a></td><td><pre></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l18" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l18" id="l18">    18</a></td><td><pre><span class="kn">if</span> <span class="mf">200</span> <span class="o">&lt;=</span> <span 
class="n">response</span><span class="o">.</span><span class="n">status</span> <span class="o">&lt;=</span> <span class="mf">299</span><span class="p">:</span></pre></td></tr><tr class="parity0"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l19" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l19" id="l19">    19</a></td><td><pre>    <span class="n">sys</span><span class="o">.</span><span class="n">exit</span><span class="p">(</span><span class="mf">0</span><span class="p">)</span></pre></td></tr><tr class="parity1"><td class="annotate"><a href="/annotate/79ee608ca36d/get-with-headers.py#l20" title="79ee608ca36d: a">test@0</a></td><td><a class="lineno" href="#l20" id="l20">    20</a></td><td><pre><span class="n">sys</span><span class="o">.</span><span class="n">exit</span><span class="p">(</span><span class="mf">1</span><span class="p">)</span></pre></td></tr>
 </table>
 
 
@@ -126,4 +130,20 @@
 </body>
 </html>
 
+% hgweb fileannotate, raw
+
+% hgweb filerevision, raw
+
+% hgweb highlightcss friendly
+200 Script output follows
+
+/* pygments_style = friendly */
+
 % errors encountered
+% hg serve again
+% hgweb highlightcss fruity
+200 Script output follows
+
+/* pygments_style = fruity */
+
+% errors encountered
--- a/tests/test-hook.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-hook.out	Wed Sep 17 11:34:37 2008 +0200
@@ -150,4 +150,5 @@
 foo
 calling hook commit.auto: <function autohook>
 Automatically installed hook
+committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
 hooks.commit.auto=<function autohook>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue1089	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+hg init a
+cd a
+mkdir a
+echo a > a/b
+hg ci -Am m
+hg rm a
+hg ci -m m a
+
+mkdir a b
+echo a > a/b
+hg ci -Am m
+hg rm a
+cd b
+# relative delete
+hg ci -m m ../a
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-issue1089.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,4 @@
+adding a/b
+removing a/b
+adding a/b
+removing a/b
--- a/tests/test-issue1175.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-issue1175.out	Wed Sep 17 11:34:37 2008 +0200
@@ -8,6 +8,7 @@
 b
  b: searching for copy revision for a
  b: copy a:b80de5d138758541c5f05265ad144ab9fa86d1db
+committed changeset 5:755e75751bf67eb4378bca61987df035d90a7a06
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
--- a/tests/test-issue612.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-issue612.out	Wed Sep 17 11:34:37 2008 +0200
@@ -3,7 +3,7 @@
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 created new head
 ? src/a.o
-merging src/a.c and source/a.c
+merging src/a.c and source/a.c to source/a.c
 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 M source/a.c
--- a/tests/test-issue672.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-issue672.out	Wed Sep 17 11:34:37 2008 +0200
@@ -30,8 +30,9 @@
    1a -> 1 *
   checking for directory renames
  1a: local moved to 1 -> m
+preserving 1a for resolve of 1a
 picked tool 'internal:merge' for 1a (binary False symlink False)
-merging 1a and 1
+merging 1a and 1 to 1a
 my 1a@ac7575e3c052+ other 1@746e9549ea96 ancestor 1@81f4b099af3d
  premerge successful
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -47,11 +48,11 @@
    1a -> 1 *
   checking for directory renames
  1: remote moved to 1a -> m
-copying 1 to 1a
-picked tool 'internal:merge' for 1 (binary False symlink False)
-merging 1 and 1a
-my 1@746e9549ea96+ other 1a@ac7575e3c052 ancestor 1@81f4b099af3d
+preserving 1 for resolve of 1a
+removing 1
+picked tool 'internal:merge' for 1a (binary False symlink False)
+merging 1 and 1a to 1a
+my 1a@746e9549ea96+ other 1a@ac7575e3c052 ancestor 1@81f4b099af3d
  premerge successful
-removing 1
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
--- a/tests/test-keyword	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-keyword	Wed Sep 17 11:34:37 2008 +0200
@@ -208,12 +208,16 @@
 rm log
 echo % status
 hg status
+echo % verify
+hg verify
 
 echo % cat
 cat a b
 echo % hg cat
 hg cat sym a b
 echo
+echo % annotate
+hg annotate a
 
 echo % remove
 hg debugrebuildstate
@@ -281,6 +285,47 @@
 echo % kwexpand nonexistent
 hg kwexpand nonexistent 2>&1 | sed 's/nonexistent:.*/nonexistent:/'
 
+echo % hg serve
+hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
+cat hg.pid >> $DAEMON_PIDS
+echo % expansion
+echo % hgweb file
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/a/?style=raw')
+echo % no expansion
+echo % hgweb annotate
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/annotate/tip/a/?style=raw')
+echo % hgweb changeset
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/rev/tip/?style=raw')
+echo % hgweb filediff
+("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/diff/bb948857c743/a?style=raw')
+echo % errors encountered
+cat errors.log
+
+echo % merge/resolve
+echo '$Id$' > m
+hg add m
+hg commit -m 4kw 
+echo foo >> m
+hg commit -m 5foo
+echo % simplemerge
+hg update 4
+echo foo >> m
+hg commit -m 6foo
+hg merge
+hg commit -m simplemerge
+cat m
+echo % conflict
+hg update 4
+echo bar >> m
+hg commit -m 8bar
+hg merge
+echo % keyword stays outside conflict zone
+cat m
+echo % resolve to local
+HGMERGE=internal:local hg resolve
+hg commit -m localresolve
+cat m
+
 echo % switch off expansion
 echo % kwshrink with unknown file u
 cp a u
@@ -296,13 +341,3 @@
 echo % hg cat
 hg cat sym a b
 echo
-
-echo % hg serve
-hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
-cat hg.pid >> $DAEMON_PIDS
-echo % hgweb changeset
-("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/rev/tip/?style=raw')
-echo % hgweb filediff
-("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/diff/bb948857c743/a?style=raw')
-echo % errors encountered
-cat errors.log
--- a/tests/test-keyword.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-keyword.out	Wed Sep 17 11:34:37 2008 +0200
@@ -117,6 +117,7 @@
 b
 overwriting a expanding keywords
 running hook commit.test: cp a hooktest
+committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9
 % status
 ? hooktest
 % identify
@@ -223,6 +224,7 @@
 c
  c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292
 overwriting c expanding keywords
+committed changeset 2:e22d299ac0c2bd8897b3df5114374b9e4d4ca62f
 % cat a c
 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
 do not process $Id:
@@ -280,7 +282,14 @@
 % commit
 a
 overwriting a expanding keywords
+committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
 % status
+% verify
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+3 files, 3 changesets, 4 total revisions
 % cat
 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
 do not process $Id:
@@ -294,7 +303,13 @@
 $Xinfo: User Name <user@example.com>: firstline $
 ignore $Id$
 a
+% annotate
+1: expand $Id$
+1: do not process $Id:
+1: xxx $
+2: $Xinfo$
 % remove
+committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012
 % status
 % rollback
 rolling back last transaction
@@ -326,6 +341,7 @@
 % commit rejecttest
 a
 overwriting a expanding keywords
+committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082
 % export
 % import
 applying ../rejecttest.diff
@@ -345,10 +361,11 @@
 % kwexpand a
 overwriting a expanding keywords
 % kwexpand x/a should abort
-abort: outstanding uncommitted changes in given files
+abort: outstanding uncommitted changes
 x/a
  x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e
 overwriting x/a expanding keywords
+committed changeset 3:cfa68229c1167443337266ebac453c73b1d5d16e
 % cat a
 expand $Id: x/a cfa68229c116 Thu, 01 Jan 1970 00:00:03 +0000 user $
 do not process $Id:
@@ -363,37 +380,28 @@
 $Xinfo$
 % kwexpand nonexistent
 nonexistent:
-% switch off expansion
-% kwshrink with unknown file u
-overwriting a shrinking keywords
-overwriting x/a shrinking keywords
-% cat
-expand $Id$
-do not process $Id:
-xxx $
-$Xinfo$
-ignore $Id$
-% hg cat
+% hg serve
+% expansion
+% hgweb file
+200 Script output follows
+
 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
 do not process $Id:
 xxx $
 $Xinfo: User Name <user@example.com>: firstline $
-ignore $Id$
-a
-% cat
-expand $Id$
-do not process $Id:
-xxx $
-$Xinfo$
-ignore $Id$
-% hg cat
-expand $Id$
-do not process $Id:
-xxx $
-$Xinfo$
-ignore $Id$
-a
-% hg serve
+% no expansion
+% hgweb annotate
+200 Script output follows
+
+
+user@1: expand $Id$
+user@1: do not process $Id:
+user@1: xxx $
+user@2: $Xinfo$
+
+
+
+
 % hgweb changeset
 200 Script output follows
 
@@ -429,3 +437,60 @@
 
 
 % errors encountered
+% merge/resolve
+% simplemerge
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+$Id: m 8731e1dadc99 Thu, 01 Jan 1970 00:00:00 +0000 test $
+foo
+% conflict
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+merging m
+warning: conflicts during merge.
+merging m failed!
+0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+use 'hg resolve' to retry unresolved file merges
+% keyword stays outside conflict zone
+$Id$
+<<<<<<< local
+bar
+=======
+foo
+>>>>>>> other
+% resolve to local
+$Id: m 43dfd2854b5b Thu, 01 Jan 1970 00:00:00 +0000 test $
+bar
+% switch off expansion
+% kwshrink with unknown file u
+overwriting a shrinking keywords
+overwriting m shrinking keywords
+overwriting x/a shrinking keywords
+% cat
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+ignore $Id$
+% hg cat
+expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
+do not process $Id:
+xxx $
+$Xinfo: User Name <user@example.com>: firstline $
+ignore $Id$
+a
+% cat
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+ignore $Id$
+% hg cat
+expand $Id$
+do not process $Id:
+xxx $
+$Xinfo$
+ignore $Id$
+a
--- a/tests/test-log.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-log.out	Wed Sep 17 11:34:37 2008 +0200
@@ -150,7 +150,6 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 % log -r . with two parents
-warning: working directory has two parents, tag '.' uses the first
 changeset:   3:e62f78d544b4
 parent:      1:3d5bf5654eda
 user:        test
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mactext	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+cat > unix2mac.py <<EOF
+import sys
+
+for path in sys.argv[1:]:
+    data = file(path, 'rb').read()
+    data = data.replace('\n', '\r')
+    file(path, 'wb').write(data)
+EOF
+
+cat > print.py <<EOF
+import sys
+print(sys.stdin.read().replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>'))
+EOF
+
+hg init
+echo '[hooks]' >> .hg/hgrc
+echo 'pretxncommit.cr = python:hgext.win32text.forbidcr' >> .hg/hgrc
+echo 'pretxnchangegroup.cr = python:hgext.win32text.forbidcr' >> .hg/hgrc
+cat .hg/hgrc
+echo
+
+echo hello > f
+hg add f
+hg ci -m 1 -d'0 0'
+echo
+
+python unix2mac.py f
+hg ci -m 2 -d'0 0'
+hg cat f | python print.py
+cat f | python print.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-mactext.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,12 @@
+[hooks]
+pretxncommit.cr = python:hgext.win32text.forbidcr
+pretxnchangegroup.cr = python:hgext.win32text.forbidcr
+
+
+Attempt to commit or push text file(s) using CR line endings
+in dea860dc51ec: f
+transaction abort!
+rollback completed
+abort: pretxncommit.cr hook failed
+hello<LF>
+hello<CR>
--- a/tests/test-manifest	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-manifest	Wed Sep 17 11:34:37 2008 +0200
@@ -14,6 +14,10 @@
 hg init
 hg -q pull "$TESTDIR/test-manifest.hg"
 
+echo % should be empty
+hg manifest
+
+hg co
 hg manifest
 hg manifest -v
 hg manifest --debug
--- a/tests/test-manifest.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-manifest.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,3 +1,5 @@
+% should be empty
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
 a
 b/a
 l
--- a/tests/test-merge-commit.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge-commit.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,6 +1,6 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 created new head
-merging bar and foo
+merging bar and foo to bar
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 % contents of bar should be line0 line1 line2
@@ -27,6 +27,7 @@
  ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 7d3b554bfdf1
   searching for copies back to rev 1
  bar: versions differ -> m
+preserving bar for resolve of bar
 picked tool 'internal:merge' for bar (binary False symlink False)
 merging bar
 my bar@2d2f9a22c82b+ other bar@7d3b554bfdf1 ancestor bar@0a3ab4856510
@@ -49,7 +50,7 @@
 adding file changes
 added 3 changesets with 3 changes to 2 files (+1 heads)
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-merging foo and bar
+merging foo and bar to bar
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 % contents of bar should be line0 line1 line2
@@ -76,6 +77,7 @@
  ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 96ab80c60897
   searching for copies back to rev 1
  bar: versions differ -> m
+preserving bar for resolve of bar
 picked tool 'internal:merge' for bar (binary False symlink False)
 merging bar
 my bar@2d2f9a22c82b+ other bar@96ab80c60897 ancestor bar@0a3ab4856510
--- a/tests/test-merge-default.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge-default.out	Wed Sep 17 11:34:37 2008 +0200
@@ -5,10 +5,10 @@
 created new head
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % should fail because not at a head
-abort: repo has 3 heads - please merge with an explicit rev
+abort: branch 'default' has 3 heads - please merge with an explicit rev
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % should fail because > 2 heads
-abort: repo has 3 heads - please merge with an explicit rev
+abort: branch 'default' has 3 heads - please merge with an explicit rev
 % should succeed
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
--- a/tests/test-merge-internal-tools-pattern.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge-internal-tools-pattern.out	Wed Sep 17 11:34:37 2008 +0200
@@ -9,9 +9,7 @@
 created new head
 # merge using internal:fail tool
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 2
-  hg merge 1
+use 'hg resolve' to retry unresolved file merges
 line 1
 line 2
 third line
--- a/tests/test-merge-local.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge-local.out	Wed Sep 17 11:34:37 2008 +0200
@@ -21,10 +21,7 @@
 merging zzz2_merge_bad
 merging zzz2_merge_bad failed!
 3 files updated, 1 files merged, 2 files removed, 1 files unresolved
-There are unresolved merges with locally modified files.
-You can finish the partial merge using:
-  hg update 0
-  hg update 1
+use 'hg resolve' to retry unresolved file merges
 2 files updated, 0 files merged, 3 files removed, 0 files unresolved
 --- a/zzz1_merge_ok
 +++ b/zzz1_merge_ok
@@ -42,10 +39,7 @@
 warning: conflicts during merge.
 merging zzz2_merge_bad failed!
 3 files updated, 1 files merged, 2 files removed, 1 files unresolved
-There are unresolved merges with locally modified files.
-You can finish the partial merge using:
-  hg update 0
-  hg update 1
+use 'hg resolve' to retry unresolved file merges
 2 files updated, 0 files merged, 3 files removed, 0 files unresolved
 --- a/zzz1_merge_ok
 +++ b/zzz1_merge_ok
--- a/tests/test-merge-remove.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge-remove.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,5 +1,5 @@
 created new head
-merging foo1 and foo
+merging foo1 and foo to foo1
 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 n   0         -2 bar
@@ -13,7 +13,6 @@
 copy: foo -> foo1
 R bar
 R foo1
-  foo
 % readding foo1 and bar
 adding bar
 adding foo1
@@ -24,7 +23,6 @@
 M foo1
   foo
 % reverting foo1 and bar
-warning: working directory has two parents, tag '.' uses the first
 saving current version of bar as bar.orig
 reverting bar
 saving current version of foo1 as foo1.orig
--- a/tests/test-merge-revert2.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge-revert2.out	Wed Sep 17 11:34:37 2008 +0200
@@ -13,10 +13,7 @@
 warning: conflicts during merge.
 merging file1 failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges with locally modified files.
-You can redo the full merge using:
-  hg update 0
-  hg update 1
+use 'hg resolve' to retry unresolved file merges
 diff -r f248da0d4c3e file1
 --- a/file1
 +++ b/file1
--- a/tests/test-merge10.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge10.out	Wed Sep 17 11:34:37 2008 +0200
@@ -8,7 +8,7 @@
 added 1 changesets with 1 changes to 1 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-merging testdir/subdir/a and testdir/a
+merging testdir/subdir/a and testdir/a to testdir/subdir/a
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 M testdir/subdir/a
--- a/tests/test-merge7	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge7	Wed Sep 17 11:34:37 2008 +0200
@@ -43,6 +43,7 @@
 three
 EOF
 rm -f *.orig
+hg resolve -m test.txt
 hg commit -m "Merge 1" -d "1000000 0"
 
 # change test-a again
--- a/tests/test-merge7.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge7.out	Wed Sep 17 11:34:37 2008 +0200
@@ -11,9 +11,7 @@
 warning: conflicts during merge.
 merging test.txt failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 1
-  hg merge 2
+use 'hg resolve' to retry unresolved file merges
 pulling from ../test-a
 searching for changes
 adding changesets
@@ -26,15 +24,14 @@
  ancestor faaea63e63a9 local 451c744aabcc+ remote a070d41e8360
   searching for copies back to rev 1
  test.txt: versions differ -> m
+preserving test.txt for resolve of test.txt
 picked tool 'internal:merge' for test.txt (binary False symlink False)
 merging test.txt
 my test.txt@451c744aabcc+ other test.txt@a070d41e8360 ancestor test.txt@faaea63e63a9
 warning: conflicts during merge.
 merging test.txt failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 3
-  hg merge 4
+use 'hg resolve' to retry unresolved file merges
 one
 <<<<<<< local
 two-point-five
--- a/tests/test-merge9	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge9	Wed Sep 17 11:34:37 2008 +0200
@@ -23,9 +23,31 @@
 
 # test with the rename on the remote side
 HGMERGE=false hg merge
+hg resolve -l
 
 # test with the rename on the local side
 hg up -C 1
 HGMERGE=false hg merge
 
+echo % show unresolved
+hg resolve -l
+
+echo % unmark baz
+hg resolve -u baz
+
+echo % show
+hg resolve -l
+
+echo % re-resolve baz
+hg resolve baz
+
+echo % after
+hg resolve -l
+
+echo % resolve all
+hg resolve
+
+echo % after
+hg resolve -l
+
 true
--- a/tests/test-merge9.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-merge9.out	Wed Sep 17 11:34:37 2008 +0200
@@ -5,16 +5,33 @@
 created new head
 merging bar
 merging bar failed!
-merging foo and baz
+merging foo and baz to baz
 1 files updated, 1 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 2
-  hg merge 1
+use 'hg resolve' to retry unresolved file merges
+U bar
+R baz
 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
 merging bar
 merging bar failed!
-merging baz and foo
+merging baz and foo to baz
 1 files updated, 1 files merged, 0 files removed, 1 files unresolved
-There are unresolved merges, you can redo the full merge using:
-  hg update -C 1
-  hg merge 2
+use 'hg resolve' to retry unresolved file merges
+% show unresolved
+U bar
+R baz
+% unmark baz
+% show
+U bar
+U baz
+% re-resolve baz
+merging baz and foo to baz
+% after
+U bar
+R baz
+% resolve all
+merging bar
+warning: conflicts during merge.
+merging bar failed!
+% after
+U bar
+R baz
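
The hunks above replace the old "redo the full merge" hint with the new 'hg resolve' workflow. A minimal sketch of that workflow in a throwaway repository (file and commit names here are illustrative, not taken from the tests):

#!/bin/sh
# Illustrative only: reproduce an unresolved merge and walk the resolve states.
hg init resolve-demo; cd resolve-demo
echo base > f;  hg ci -Am base          # rev 0
echo local > f; hg ci -m local          # rev 1
hg up -C 0
echo other > f; hg ci -m other          # rev 2, a second head
HGMERGE=false hg merge 1                # merge fails, f is left unresolved
hg resolve -l                           # lists "U f"
hg resolve f                            # re-run the merge for f only
hg resolve -u f                         # or mark it unresolved again
hg resolve -m f                         # or mark it resolved by hand
hg ci -m merge                          # commit succeeds once no file is listed as U
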
--- a/tests/test-mq	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq	Wed Sep 17 11:34:37 2008 +0200
@@ -266,6 +266,14 @@
 hg strip tip 2>&1 | sed 's/\(saving bundle to \).*/\1/'
 hg unbundle .hg/strip-backup/*
 
+echo % strip with local changes, should complain
+hg up
+echo y>y
+hg add y
+hg strip tip | sed 's/\(saving bundle to \).*/\1/'
+echo % --force strip with local changes
+hg strip -f tip 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
 echo '% cd b; hg qrefresh'
 hg init refresh
 cd refresh
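
The new test-mq hunk above checks that strip refuses to run over uncommitted changes unless forced. Sketched on its own (assuming the mq extension is enabled, as in the test):

hg strip tip       # aborts with "local changes found" if the working directory is dirty
hg strip -f tip    # --force discards those local changes and strips anyway
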
--- a/tests/test-mq-missingfiles.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq-missingfiles.out	Wed Sep 17 11:34:37 2008 +0200
@@ -30,7 +30,7 @@
 % push git patch with missing target
 applying changeb
 unable to find 'b' for patching
-1 out of 1 hunk FAILED -- saving rejects to file b.rej
+1 out of 1 hunks FAILED -- saving rejects to file b.rej
 patch failed, unable to continue (try -v)
 b: No such file or directory
 b not tracked!
--- a/tests/test-mq-qdelete	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq-qdelete	Wed Sep 17 11:34:37 2008 +0200
@@ -9,9 +9,9 @@
 echo 'base' > base
 hg ci -Ambase -d '1 0'
 
-hg qnew a
-hg qnew b
-hg qnew c
+hg qnew -d '1 0' a
+hg qnew -d '1 0' b
+hg qnew -d '1 0' c
 
 hg qdel
 
@@ -35,3 +35,33 @@
 hg qdel -r qbase:e
 hg qapplied
 hg log --template '{rev} {desc}\n'
+
+cd ..
+hg init b
+cd b
+
+echo 'base' > base
+hg ci -Ambase -d '1 0'
+
+hg qfinish
+hg qfinish -a
+
+hg qnew -d '1 0' a
+hg qnew -d '1 0' b
+hg qnew c # XXX fails to apply with /usr/bin/patch if we put a date
+
+hg qfinish 0
+hg qfinish b
+
+hg qpop
+hg qfinish -a c
+hg qpush
+
+hg qfinish qbase:b
+hg qapplied
+hg log --template '{rev} {desc}\n'
+
+hg qfinish -a c
+hg qapplied
+hg log --template '{rev} {desc}\n'
+ls .hg/patches
--- a/tests/test-mq-qdelete.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq-qdelete.out	Wed Sep 17 11:34:37 2008 +0200
@@ -22,3 +22,23 @@
 2 [mq]: d
 1 [mq]: a
 0 base
+adding base
+abort: no revisions specified
+no patches applied
+abort: revision 0 is not managed
+abort: cannot delete revision 2 above applied patches
+Now at: b
+abort: unknown revision 'c'!
+applying c
+Now at: c
+c
+3 imported patch c
+2 [mq]: b
+1 [mq]: a
+0 base
+3 imported patch c
+2 [mq]: b
+1 [mq]: a
+0 base
+series
+status
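
qfinish is the new mq command exercised above: it moves applied patches out of the patch queue and into permanent repository history. A minimal sketch (revision names illustrative):

hg qfinish -a                        # finish every applied patch
hg qfinish qbase:qtip                # or finish an explicit range of applied patches
hg qapplied                          # the finished patches no longer appear here
hg log --template '{rev} {desc}\n'   # ...but their changesets remain in the repository
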
--- a/tests/test-mq-qdiff	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq-qdiff	Wed Sep 17 11:34:37 2008 +0200
@@ -25,3 +25,35 @@
 echo % qdiff dirname
 hg qdiff . | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
                  -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/"
+
+echo % qdiff filename
+hg qdiff --nodates base
+
+echo % revert
+hg revert -a
+
+echo % qpop
+hg qpop
+
+echo % qdelete mqbase
+hg qdelete mqbase
+
+echo % commit 2
+printf '1\n2\n3\n4\nhello world\ngoodbye world\n7\n8\n9\n' > lines
+hg ci -Amlines -d '2 0'
+
+echo % qnew 2
+hg qnew -mmqbase2 mqbase2
+printf '\n\n1\n2\n3\n4\nhello  world\n     goodbye world\n7\n8\n9\n' > lines
+
+echo % qdiff -U 1
+hg qdiff --nodates -U 1
+
+echo % qdiff -b
+hg qdiff --nodates -b
+
+echo % qdiff -U 1 -B
+hg qdiff --nodates -U 1 -B
+
+echo % qdiff -w
+hg qdiff --nodates -w
--- a/tests/test-mq-qdiff.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq-qdiff.out	Wed Sep 17 11:34:37 2008 +0200
@@ -17,3 +17,71 @@
 @@ -1,1 +1,1 @@
 -base
 +patched
+% qdiff filename
+diff -r 67e992f2c4f3 base
+--- a/base
++++ b/base
+@@ -1,1 +1,1 @@
+-base
++patched
+% revert
+% qpop
+Patch queue now empty
+% qdelete mqbase
+% commit 2
+adding lines
+% qnew 2
+% qdiff -U 1
+diff -r 35fb829491c1 lines
+--- a/lines
++++ b/lines
+@@ -1,1 +1,3 @@
++
++
+ 1
+@@ -4,4 +6,4 @@
+ 4
+-hello world
+-goodbye world
++hello  world
++     goodbye world
+ 7
+% qdiff -b
+diff -r 35fb829491c1 lines
+--- a/lines
++++ b/lines
+@@ -1,9 +1,11 @@
++
++
+ 1
+ 2
+ 3
+ 4
+-hello world
+-goodbye world
++hello  world
++     goodbye world
+ 7
+ 8
+ 9
+% qdiff -U 1 -B
+diff -r 35fb829491c1 lines
+--- a/lines
++++ b/lines
+@@ -4,4 +6,4 @@
+ 4
+-hello world
+-goodbye world
++hello  world
++     goodbye world
+ 7
+% qdiff -w
+diff -r 35fb829491c1 lines
+--- a/lines
++++ b/lines
+@@ -1,3 +1,5 @@
++
++
+ 1
+ 2
+ 3
--- a/tests/test-mq-qimport	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq-qimport	Wed Sep 17 11:34:37 2008 +0200
@@ -21,9 +21,13 @@
 echo "[diff]" >> $HGRCPATH
 echo "git=1" >> $HGRCPATH
 
-echo % build diff with CRLF
 hg init repo
 cd repo
+
+echo % qimport non-existing-file
+hg qimport non-existing-file
+
+echo % build diff with CRLF
 python ../writelines.py b 5 'a\n' 5 'a\r\n'
 hg ci -Am addb
 python ../writelines.py b 2 'a\n' 10 'b\n' 2 'a\r\n'
--- a/tests/test-mq-qimport.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq-qimport.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,3 +1,5 @@
+% qimport non-existing-file
+abort: unable to read non-existing-file
 % build diff with CRLF
 adding b
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-mq.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-mq.out	Wed Sep 17 11:34:37 2008 +0200
@@ -29,6 +29,7 @@
  qcommit      commit changes in the queue repository
  qdelete      remove patches from queue
  qdiff        diff of the current patch and subsequent modifications
+ qfinish      move applied patches into repository history
  qfold        fold the named patches into the current patch
  qgoto        push or pop patches until named patch is at top of stack
  qguard       set or print guards for a patch
@@ -96,6 +97,7 @@
 A somefile
 % qnew with uncommitted changes and missing file (issue 803)
 someotherfile: No such file or directory
+someotherfile: No such file or directory
 A somefile
 issue803.patch
 Patch queue now empty
@@ -250,6 +252,12 @@
 adding file changes
 added 1 changesets with 1 changes to 1 files
 (run 'hg update' to get a working copy)
+% strip with local changes, should complain
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+abort: local changes found
+% --force strip with local changes
+0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+saving bundle to 
 % cd b; hg qrefresh
 adding a
 foo
@@ -279,7 +287,7 @@
 applying foo
 applying bar
 file foo already exists
-1 out of 1 hunk FAILED -- saving rejects to file foo.rej
+1 out of 1 hunks FAILED -- saving rejects to file foo.rej
 patch failed, unable to continue (try -v)
 patch failed, rejects left in working dir
 Errors during apply, please fix and refresh bar
--- a/tests/test-newbranch	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-newbranch	Wed Sep 17 11:34:37 2008 +0200
@@ -24,7 +24,7 @@
 echo bleah > a
 hg ci -m "modify a branch" -d "1000000 0"
 
-hg merge
+hg merge default
 hg branch
 hg ci -m "merge" -d "1000000 0"
 hg log
@@ -73,4 +73,38 @@
 hg parents
 hg manifest
 
-exit 0
+echo % test merging, add 3 default heads and one test head
+cd ..
+hg init merges
+cd merges
+echo a > a
+hg ci -Ama
+
+echo b > b
+hg ci -Amb
+
+hg up 0
+echo c > c
+hg ci -Amc
+
+hg up 0
+echo d > d
+hg ci -Amd
+
+hg up 0
+hg branch test
+echo e >> e
+hg ci -Ame
+
+hg log
+
+echo % implicit merge with test branch as parent
+hg merge
+hg up -C default
+echo % implicit merge with default branch as parent
+hg merge
+echo % 3 branch heads, explicit merge required
+hg merge 2
+hg ci -m merge
+echo % 2 branch heads, implicit merge works
+hg merge
--- a/tests/test-newbranch.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-newbranch.out	Wed Sep 17 11:34:37 2008 +0200
@@ -116,3 +116,57 @@
 
 a
 ff
+% test merging, add 3 default heads and one test head
+adding a
+adding b
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+adding c
+created new head
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+adding d
+created new head
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+marked working directory as branch test
+adding e
+created new head
+changeset:   4:3a1e01ed1df4
+branch:      test
+tag:         tip
+parent:      0:cb9a9f314b8b
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     e
+
+changeset:   3:980f7dc84c29
+parent:      0:cb9a9f314b8b
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     d
+
+changeset:   2:d36c0562f908
+parent:      0:cb9a9f314b8b
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     c
+
+changeset:   1:d2ae7f538514
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     b
+
+changeset:   0:cb9a9f314b8b
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     a
+
+% implicit merge with test branch as parent
+abort: branch 'test' has one head - please merge with an explicit rev
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% implicit merge with default branch as parent
+abort: branch 'default' has 3 heads - please merge with an explicit rev
+% 3 branch heads, explicit merge required
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+% 2 branch heads, implicit merge works
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
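
The output above reflects the new per-branch head check: merge now counts heads on the current named branch rather than in the whole repository. Condensed (revision numbers refer to the repository built by the test):

hg merge        # aborts: branch 'default' has 3 heads - please merge with an explicit rev
hg merge 2      # an explicit revision is required while three heads exist
hg ci -m merge
hg merge        # with two heads left, the implicit merge succeeds
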
--- a/tests/test-parseindex	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-parseindex	Wed Sep 17 11:34:37 2008 +0200
@@ -44,8 +44,8 @@
     return wrapper
 
 cl = changelog.changelog(opener('.hg/store'))
-print cl.count(), 'revisions:'
-for r in xrange(cl.count()):
+print len(cl), 'revisions:'
+for r in cl:
     print short(cl.node(r))
 EOF
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-pull-http	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+cp "$TESTDIR"/printenv.py .
+
+hg init test
+cd test
+echo a > a
+hg ci -Ama -d '0 0'
+
+cd ..
+hg clone test test2
+cd test2
+echo a >> a
+hg ci -mb -d '0 0'
+
+echo % expect error, cloning not allowed
+echo '[web]' > .hg/hgrc
+echo 'allowpull = false' >> .hg/hgrc
+hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
+cat hg.pid >> $DAEMON_PIDS
+hg clone http://localhost:$HGPORT/ test3 | sed -e 's,:[0-9][0-9]*/,/,'
+kill `cat hg.pid`
+echo % serve errors
+cat errors.log
+
+req() {
+	hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
+	cat hg.pid >> $DAEMON_PIDS
+	hg --cwd ../test pull http://localhost:$HGPORT/ | sed -e 's,:[0-9][0-9]*/,/,'
+	kill `cat hg.pid`
+	echo % serve errors
+	cat errors.log
+}
+
+echo % expect error, pulling not allowed
+req
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-pull-http.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,12 @@
+adding a
+updating working directory
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% expect error, cloning not allowed
+abort: error: 
+requesting all changes
+% serve errors
+% expect error, pulling not allowed
+abort: error: 
+pulling from http://localhost/
+searching for changes
+% serve errors
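
test-pull-http covers the server-side allowpull switch. A minimal sketch of the configuration it exercises (port and file names illustrative):

cat >> .hg/hgrc <<EOF
[web]
allowpull = false
EOF
hg serve -p 8000 -d --pid-file=hg.pid -E errors.log
# clients attempting to clone or pull from this server now get "abort: error:"
kill `cat hg.pid`
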
--- a/tests/test-purge	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-purge	Wed Sep 17 11:34:37 2008 +0200
@@ -83,23 +83,15 @@
 rm r1
 # hide error messages to avoid changing the output when the text changes
 hg purge -p 2> /dev/null
-if [ $? -ne 0 ]; then
-    echo "refused to run"
-fi
-if [ -f untracked_file ]; then
-    echo "untracked_file still around"
-fi
-hg purge -p --force
+hg st
+
+hg purge -p
 hg purge -v 2> /dev/null
-if [ $? -ne 0 ]; then
-    echo "refused to run"
-fi
-if [ -f untracked_file ]; then
-    echo "untracked_file still around"
-fi
-hg purge -v --force
+hg st
+
+hg purge -v
 hg revert --all --quiet
-ls
+hg st -a
 
 echo '% tracked file in ignored directory (issue621)'
 echo directory >> .hgignore
--- a/tests/test-purge.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-purge.out	Wed Sep 17 11:34:37 2008 +0200
@@ -51,14 +51,12 @@
 directory
 r1
 % abort with missing files until we support name mangling filesystems
-refused to run
-untracked_file still around
 untracked_file
-refused to run
-untracked_file still around
+! r1
+? untracked_file
+untracked_file
 Removing file untracked_file
-directory
-r1
+! r1
 % tracked file in ignored directory (issue621)
 untracked_file
 Removing file untracked_file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-abort	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+cleanoutput () {
+    sed -e 's/\(Rebase status stored to\).*/\1/'  \
+        -e 's/\(Rebase status restored from\).*/\1/' \
+        -e 's/\(saving bundle to \).*/\1/'
+}
+
+hg init a
+cd a
+echo 'c1' >common
+hg add common
+hg commit -d '0 0' -u test -m "C1"
+
+echo 'c2' >>common
+hg commit -d '1 0' -u test -m "C2"
+
+echo 'c3' >>common
+hg commit -d '2 0' -u test -m "C3"
+
+hg update -C 1
+echo 'l1' >>extra
+hg add extra
+hg commit -d '3 0' -u test -m "L1"
+
+sed -e 's/c2/l2/' common > common.new
+mv common.new common
+hg commit -d '4 0' -u test -m "L2"
+
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo '% Conflicting rebase'
+hg rebase -s 3 -d 2 2>&1 | cleanoutput
+
+echo
+echo '% Abort'
+hg rebase --abort 2>&1 | cleanoutput
+
+hg glog  --template '{rev}: {desc}\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-abort.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,31 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+@  4: L2
+|
+o  3: L1
+|
+| o  2: C3
+|/
+o  1: C2
+|
+o  0: C1
+
+
+% Conflicting rebase
+merging common
+warning: conflicts during merge.
+merging common failed!
+abort: fix unresolved conflicts with hg resolve then run hg rebase --continue
+
+% Abort
+rebase aborted
+@  4: L2
+|
+o  3: L1
+|
+| o  2: C3
+|/
+o  1: C2
+|
+o  0: C1
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-collapse	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,91 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+BASE=`pwd`
+
+addcommit () {
+    echo $1 > $1
+    hg add $1
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+commit () {
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+createrepo () {
+    cd $BASE
+    rm -rf a
+    hg init a
+    cd a
+    addcommit "A" 0
+    addcommit "B" 1
+    addcommit "C" 2
+    addcommit "D" 3
+
+    hg update -C 0
+    addcommit "E" 4
+
+    hg update -C 0
+    addcommit "F" 5
+
+    hg merge -r 4
+    commit "G" 6
+
+    hg update -C 5
+    addcommit "H" 7
+}
+
+createrepo > /dev/null 2>&1
+hg glog  --template '{rev}: {desc}\n'
+echo '% Rebasing'
+hg up -C 3
+hg rebase --collapse 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+createrepo > /dev/null 2>&1
+echo '% Rebasing'
+hg rebase --base 6 --collapse 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+createrepocomplex () {
+    cd $BASE
+    rm -rf a
+    hg init a
+    cd a
+    addcommit "A" 0
+    addcommit "B" 1
+
+    hg up 0
+    addcommit "C" 2
+    hg merge
+    commit "D" 3
+
+    hg up 1
+    addcommit "E" 4
+
+    addcommit "F" 5
+
+    hg merge
+    commit "G" 6
+
+    hg up 0
+    addcommit "H" 7
+}
+
+createrepocomplex > /dev/null 2>&1
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo '% Rebase and collapse - more than one external (fail)'
+hg rebase -s 2 --collapse
+
+echo
+echo '% Rebase and collapse'
+hg rebase -s 4 --collapse 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-collapse.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,104 @@
+@  7: H
+|
+| o  6: G
+|/|
+o |  5: F
+| |
+| o  4: E
+|/
+| o  3: D
+| |
+| o  2: C
+| |
+| o  1: B
+|/
+o  0: A
+
+% Rebasing
+3 files updated, 0 files merged, 2 files removed, 0 files unresolved
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 6 changes to 6 files (+1 heads)
+rebase completed
+@  5: Collapsed revision
+|  * B
+|  * C
+|  * D
+o  4: H
+|
+| o  3: G
+|/|
+o |  2: F
+| |
+| o  1: E
+|/
+o  0: A
+
+% Rebasing
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+1 heads)
+rebase completed
+@  6: Collapsed revision
+|  * E
+|  * G
+o  5: H
+|
+o  4: F
+|
+| o  3: D
+| |
+| o  2: C
+| |
+| o  1: B
+|/
+o  0: A
+
+@  7: H
+|
+| o    6: G
+| |\
+| | o  5: F
+| | |
+| | o  4: E
+| | |
+| o |  3: D
+| |\|
+| o |  2: C
+|/ /
+| o  1: B
+|/
+o  0: A
+
+
+% Rebase and collapse - more than one external (fail)
+abort: unable to collapse, there is more than one external parent
+
+% Rebase and collapse
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 3 changes to 3 files
+rebase completed
+@    5: Collapsed revision
+|\   * E
+| |  * F
+| |  * G
+| o  4: H
+| |
+o |    3: D
+|\ \
+| o |  2: C
+| |/
+o /  1: B
+|/
+o  0: A
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-conflicts	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+cleanoutput () {
+    sed -e 's/\(Rebase status stored to\).*/\1/'  \
+        -e 's/\(Rebase status restored from\).*/\1/' \
+        -e 's/\(saving bundle to \).*/\1/'
+}
+
+hg init a
+cd a
+echo 'c1' >common
+hg add common
+hg commit -d '0 0' -u test -m "C1"
+
+echo 'c2' >>common
+hg commit -d '1 0' -u test -m "C2"
+
+echo 'c3' >>common
+hg commit -d '2 0' -u test -m "C3"
+
+hg update -C 1
+echo 'l1' >>extra
+hg add extra
+hg commit -d '3 0' -u test -m "L1"
+
+sed -e 's/c2/l2/' common > common.new
+mv common.new common
+hg commit -d '4 0' -u test -m "L2"
+
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo '% Try to call --continue'
+hg rebase --continue
+
+echo
+echo '% Conflicting rebase'
+hg rebase -s 3 -d 2
+
+echo
+echo '% Try to continue without solving the conflict'
+hg rebase --continue 
+
+echo
+echo '% Conclude rebase'
+echo 'solved merge' >common
+hg resolve -m common
+hg rebase --continue 2>&1 | cleanoutput
+
+
+hg glog  --template '{rev}: {desc}\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-conflicts.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,43 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+@  4: L2
+|
+o  3: L1
+|
+| o  2: C3
+|/
+o  1: C2
+|
+o  0: C1
+
+
+% Try to call --continue
+abort: no rebase in progress
+
+% Conflicting rebase
+merging common
+warning: conflicts during merge.
+merging common failed!
+abort: fix unresolved conflicts with hg resolve then run hg rebase --continue
+
+% Try to continue without solving the conflict
+abort: unresolved merge conflicts (see hg resolve)
+
+% Conclude rebase
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 2 files
+rebase completed
+@  4: L2
+|
+o  3: L1
+|
+o  2: C3
+|
+o  1: C2
+|
+o  0: C1
+
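
The expected output above shows the interrupt-and-continue cycle: when a rebase hits a conflict it stops, the conflict is fixed and marked resolved, and the rebase is resumed. A condensed sketch (assuming the rebase extension is enabled and the repository built by the test):

hg rebase -s 3 -d 2          # stops: fix unresolved conflicts with hg resolve ...
echo 'solved merge' > common # fix the conflicted file by hand
hg resolve -m common         # mark the conflict as settled
hg rebase --continue         # finish moving the remaining revisions
# or give up instead:
# hg rebase --abort
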
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-interruptions	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,82 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+BASE=`pwd`
+
+addcommit () {
+    echo $1 > $1
+    hg add $1
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+commit () {
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+createrepo () {
+    cd $BASE
+    rm -rf a
+    hg init a
+    cd a
+    addcommit "A" 0
+    addcommit "B" 1
+    echo "C" >> A
+    commit "C" 2
+
+    hg update -C 0
+    echo "D" >> A
+    commit "D" 3
+    addcommit "E" 4
+}
+
+echo "% Changes during an interruption - continue"
+createrepo > /dev/null 2>&1
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo "% - Rebasing B onto E"
+hg rebase -s 1 -d 4 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+echo
+echo "% - Force a commit on C during the interruption"
+hg update -C 2
+addcommit "Extra" 5
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo "% - Resume the rebasing"
+hg rebase --continue
+
+echo
+echo "% - Solve the conflict and go on"
+echo 'conflict solved' > A
+rm A.orig
+hg resolve -m A
+hg rebase --continue
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo
+echo "% Changes during an interruption - abort"
+createrepo > /dev/null 2>&1
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo "% Rebasing B onto E"
+hg rebase -s 1 -d 4 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+echo
+echo "% Force a commit on B' during the interruption"
+hg update -C 5
+addcommit "Extra" 6
+hg glog  --template '{rev}: {desc}\n'
+
+echo
+echo "% Abort the rebasing"
+hg rebase --abort
+hg glog  --template '{rev}: {desc}\n'
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-interruptions.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,113 @@
+% Changes during an interruption - continue
+@  4: E
+|
+o  3: D
+|
+| o  2: C
+| |
+| o  1: B
+|/
+o  0: A
+
+
+% - Rebasing B onto E
+merging A
+warning: conflicts during merge.
+merging A failed!
+abort: fix unresolved conflicts with hg resolve then run hg rebase --continue
+
+% - Force a commit on C during the interruption
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+@  6: Extra
+|
+| o  5: B
+| |
+| o  4: E
+| |
+| o  3: D
+| |
+o |  2: C
+| |
+o |  1: B
+|/
+o  0: A
+
+
+% - Resume the rebasing
+merging A
+warning: conflicts during merge.
+merging A failed!
+abort: fix unresolved conflicts with hg resolve then run hg rebase --continue
+
+% - Solve the conflict and go on
+warning: new changesets detected on source branch, not stripping
+rebase completed
+@  7: C
+|
+| o  6: Extra
+| |
+o |  5: B
+| |
+o |  4: E
+| |
+o |  3: D
+| |
+| o  2: C
+| |
+| o  1: B
+|/
+o  0: A
+
+
+
+% Changes during an interruption - abort
+@  4: E
+|
+o  3: D
+|
+| o  2: C
+| |
+| o  1: B
+|/
+o  0: A
+
+
+% Rebasing B onto E
+merging A
+warning: conflicts during merge.
+merging A failed!
+abort: fix unresolved conflicts with hg resolve then run hg rebase --continue
+
+% Force a commit on B' during the interruption
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+@  6: Extra
+|
+o  5: B
+|
+o  4: E
+|
+o  3: D
+|
+| o  2: C
+| |
+| o  1: B
+|/
+o  0: A
+
+
+% Abort the rebasing
+warning: new changesets detected on target branch, not stripping
+@  6: Extra
+|
+o  5: B
+|
+o  4: E
+|
+o  3: D
+|
+| o  2: C
+| |
+| o  1: B
+|/
+o  0: A
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-issue-noparam-single-rev	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,49 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+addcommit () {
+    echo $1 > $1
+    hg add $1
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+hg init a
+cd a
+addcommit "c1" 0
+addcommit "c2" 1
+
+addcommit "l1" 2
+
+hg update -C 1
+addcommit "r1" 3
+addcommit "r2" 4
+hg glog --template '{rev}:{desc}\n'
+
+echo
+echo '% Rebase with no arguments - single revision in source branch'
+hg update -C 2
+hg rebase 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog --template '{rev}:{desc}\n'
+
+cd ..
+rm -rf a
+hg init a
+cd a
+addcommit "c1" 0
+addcommit "c2" 1
+
+addcommit "l1" 2
+addcommit "l2" 3
+
+hg update -C 1
+addcommit "r1" 4
+hg glog --template '{rev}:{desc}\n'
+
+echo
+echo '% Rebase with no arguments - single revision in target branch'
+hg update -C 3
+hg rebase 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog --template '{rev}:{desc}\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-issue-noparam-single-rev.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,64 @@
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+created new head
+@  4:r2
+|
+o  3:r1
+|
+| o  2:l1
+|/
+o  1:c2
+|
+o  0:c1
+
+
+% Rebase with no arguments - single revision in source branch
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files
+rebase completed
+@  4:l1
+|
+o  3:r2
+|
+o  2:r1
+|
+o  1:c2
+|
+o  0:c1
+
+0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+created new head
+@  4:r1
+|
+| o  3:l2
+| |
+| o  2:l1
+|/
+o  1:c2
+|
+o  0:c1
+
+
+% Rebase with no arguments - single revision in target branch
+2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files
+rebase completed
+@  4:l2
+|
+o  3:l1
+|
+o  2:r1
+|
+o  1:c2
+|
+o  0:c1
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-mq	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,76 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+
+filterpatch()
+{
+    sed -e "s/^\(# Date\).*/\1/" \
+        -e "s/^\(# Node ID\).*/\1/" \
+        -e "s/^\(# Parent\).*/\1/" \
+        -e "s/^\(diff -r \)\([a-f0-9]* \)\(-r \)\([a-f0-9]* \)/\1x \3y /" \
+        -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" \
+        -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/"
+}
+
+hg init a
+cd a
+hg qinit -c # This must work even with a managed mq queue
+
+echo 'c1' > f
+hg add f
+hg commit -d '0 0' -u test -m "C1"
+
+echo 'r1' > f
+hg commit -d '2 0' -u test -m "R1"
+
+hg up 0
+hg qnew f.patch
+echo 'mq1' > f
+hg qref -m 'P0'
+
+hg qnew f2.patch
+echo 'mq2' > f
+hg qref -m 'P1'
+hg glog  --template '{rev} {desc} tags: {tags}\n'
+
+echo
+echo '% Rebase - try to rebase on an applied mq patch'
+hg rebase -s 1 -d 3
+
+echo
+echo '% Rebase - generate a conflict'
+hg rebase -s 2 -d 1
+
+echo
+echo '% Fix the 1st conflict'
+echo 'mq1r1' > f
+hg resolve -m f
+hg rebase -c 2>&1 | sed -e 's/\(saving bundle to \).*/\1/'
+
+echo
+echo '% Fix the 2nd conflict'
+echo 'mq1r1mq2' > f
+hg resolve -m f
+hg rebase -c 2>&1 | sed -e 's/\(saving bundle to \).*/\1/'
+
+hg glog  --template '{rev} {desc} tags: {tags}\n'
+
+echo
+echo '% Update to qbase'
+hg up qbase
+echo '% f correctly reflects the merge result'
+cat f
+echo '% And the patch is correct'
+cat .hg/patches/f.patch | filterpatch
+
+echo
+echo '% Update to qtip'
+hg up qtip
+echo '% f correctly reflects the merge result'
+cat f
+echo '% And the patch is correct'
+cat .hg/patches/f2.patch | filterpatch
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-mq-skip	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,44 @@
+#!/bin/sh
+# This emulates the effects of an hg pull --rebase in which the remote repo 
+# already has one local mq patch
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+
+hg init a
+cd a
+hg qinit -c # This must work even with a managed mq queue
+
+echo 'c1' > c1
+hg add c1
+hg commit -d '0 0' -u test -m "C1"
+
+echo 'r1' > r1
+hg add r1
+hg commit -d '1 0' -u test -m "R1"
+
+hg up 0
+hg qnew p0.patch
+echo 'p0' > p0
+hg add p0
+hg qref -m 'P0'
+
+hg qnew p1.patch
+echo 'p1' > p1
+hg add p1
+hg qref -m 'P1'
+hg export qtip > p1.patch 
+
+echo
+echo '% "Mainstream" import p1.patch'
+hg up -C 1
+hg import p1.patch
+rm p1.patch
+
+echo
+echo '% Rebase'
+hg up -C qtip
+hg rebase  2>&1 | sed -e 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev} {desc} tags: {tags}\n'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-mq-skip.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,24 @@
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+% "Mainstream" import p1.patch
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+applying p1.patch
+
+% Rebase
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+nothing changed
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 2 files
+rebase completed
+@  3 P0 tags: p0.patch qtip tip qbase
+|
+o  2 P1 tags: qparent
+|
+o  1 R1 tags:
+|
+o  0 C1 tags:
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-mq.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,79 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+@  3 P1 tags: qtip tip f2.patch
+|
+o  2 P0 tags: f.patch qbase
+|
+| o  1 R1 tags:
+|/
+o  0 C1 tags: qparent
+
+
+% Rebase - try to rebase on an applied mq patch
+abort: cannot rebase onto an applied mq patch
+
+% Rebase - generate a conflict
+merging f
+warning: conflicts during merge.
+merging f failed!
+abort: fix unresolved conflicts with hg resolve then run hg rebase --continue
+
+% Fix the 1st conflict
+merging f
+warning: conflicts during merge.
+merging f failed!
+abort: fix unresolved conflicts with hg resolve then run hg rebase --continue
+
+% Fix the 2nd conflict
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+rebase completed
+@  3 P1 tags: qtip tip f2.patch
+|
+o  2 P0 tags: f.patch qbase
+|
+o  1 R1 tags: qparent
+|
+o  0 C1 tags:
+
+
+% Update to qbase
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% f correctly reflects the merge result
+mq1r1
+% And the patch is correct
+# HG changeset patch
+# User test
+# Date
+# Node ID
+# Parent
+P0
+
+diff -r x -r y f
+--- a/f
++++ b/f
+@@ -1,1 +1,1 @@
+-r1
++mq1r1
+
+% Update to qtip
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% f correctly reflects the merge result
+mq1r1mq2
+% And the patch is correct
+# HG changeset patch
+# User test
+# Date
+# Node ID
+# Parent
+P1
+
+diff -r x -r y f
+--- a/f
++++ b/f
+@@ -1,1 +1,1 @@
+-mq1r1
++mq1r1mq2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-parameters	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,105 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+addcommit () {
+    echo $1 > $1
+    hg add $1
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+commit () {
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+createrepo () {
+    hg init a
+    cd a
+    addcommit "c1" 0
+    addcommit "c2" 1
+    addcommit "c3" 2
+
+    hg update -C 1
+    addcommit "l1" 3
+    addcommit "l2" 4
+    addcommit "l3" 5
+
+    hg update -C 2
+    addcommit "r1" 6
+    addcommit "r2" 7
+}
+
+createrepo > /dev/null 2>&1
+echo "% These fail"
+echo
+echo "% Use continue and abort"
+hg rebase --continue --abort
+
+echo
+echo "% Use continue and collapse"
+hg rebase --continue --collapse
+
+echo
+echo "% Use continue/abort and dest/source"
+hg rebase --continue --dest 4
+
+echo
+echo "% Use source and base"
+hg rebase --base 5 --source 4
+
+echo
+echo "% Rebase with no arguments - from current"
+hg rebase
+
+echo
+echo "% Rebase with no arguments - from the current branch"
+hg update 6
+hg rebase
+
+echo "% ----------"
+echo "% These work"
+echo
+echo "% Rebase with no arguments (from 3 onto 7)"
+hg update -C 5
+hg rebase 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+createrepo > /dev/null 2>&1
+echo
+echo "% Rebase with base == '.' => same as no arguments (from 3 onto 7)"
+hg update -C 5
+hg rebase --base . 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+createrepo > /dev/null 2>&1
+echo
+echo "% Rebase with dest == `hg branch` => same as no arguments (from 3 onto 7)"
+hg update -C 5
+hg rebase --dest `hg branch` 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+createrepo > /dev/null 2>&1
+echo
+echo "% Specify only source (from 4 onto 7)"
+hg rebase --source 4 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+createrepo > /dev/null 2>&1
+echo
+echo "% Specify only dest (from 3 onto 6)"
+hg update -C 5
+hg rebase --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+createrepo > /dev/null 2>&1
+echo
+echo "% Specify only base (from 3 onto 7)"
+hg rebase --base 5 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+createrepo > /dev/null 2>&1
+echo
+echo "% Specify source and dest (from 4 onto 6)"
+hg rebase --source 4 --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+createrepo > /dev/null 2>&1
+echo
+echo "% Specify base and dest (from 3 onto 6)"
+hg rebase --base 4 --dest 6 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-parameters.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,194 @@
+% These fail
+
+% Use continue and abort
+hg rebase: cannot use both abort and continue
+hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep]
+
+move changeset (and descendants) to a different branch
+
+    Rebase uses repeated merging to graft changesets from one part of history
+    onto another. This can be useful for linearizing local changes relative to
+    a master development tree.
+
+    If a rebase is interrupted to manually resolve a merge, it can be continued
+    with --continue or aborted with --abort.
+
+options:
+
+    --keep      keep original revisions
+ -s --source    rebase from a given revision
+ -b --base      rebase from the base of a given revision
+ -d --dest      rebase onto a given revision
+    --collapse  collapse the rebased revisions
+ -c --continue  continue an interrupted rebase
+ -a --abort     abort an interrupted rebase
+    --style     display using template map file
+    --template  display with template
+
+use "hg -v help rebase" to show global options
+
+% Use continue and collapse
+hg rebase: cannot use collapse with continue or abort
+hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep]
+
+move changeset (and descendants) to a different branch
+
+    Rebase uses repeated merging to graft changesets from one part of history
+    onto another. This can be useful for linearizing local changes relative to
+    a master development tree.
+
+    If a rebase is interrupted to manually resolve a merge, it can be continued
+    with --continue or aborted with --abort.
+
+options:
+
+    --keep      keep original revisions
+ -s --source    rebase from a given revision
+ -b --base      rebase from the base of a given revision
+ -d --dest      rebase onto a given revision
+    --collapse  collapse the rebased revisions
+ -c --continue  continue an interrupted rebase
+ -a --abort     abort an interrupted rebase
+    --style     display using template map file
+    --template  display with template
+
+use "hg -v help rebase" to show global options
+
+% Use continue/abort and dest/source
+hg rebase: abort and continue do not allow specifying revisions
+hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep]
+
+move changeset (and descendants) to a different branch
+
+    Rebase uses repeated merging to graft changesets from one part of history
+    onto another. This can be useful for linearizing local changes relative to
+    a master development tree.
+
+    If a rebase is interrupted to manually resolve a merge, it can be continued
+    with --continue or aborted with --abort.
+
+options:
+
+    --keep      keep original revisions
+ -s --source    rebase from a given revision
+ -b --base      rebase from the base of a given revision
+ -d --dest      rebase onto a given revision
+    --collapse  collapse the rebased revisions
+ -c --continue  continue an interrupted rebase
+ -a --abort     abort an interrupted rebase
+    --style     display using template map file
+    --template  display with template
+
+use "hg -v help rebase" to show global options
+
+% Use source and base
+hg rebase: cannot specify both a revision and a base
+hg rebase [-s rev | -b rev] [-d rev] [--collapse] | [-c] | [-a] | [--keep]
+
+move changeset (and descendants) to a different branch
+
+    Rebase uses repeated merging to graft changesets from one part of history
+    onto another. This can be useful for linearizing local changes relative to
+    a master development tree.
+
+    If a rebase is interrupted to manually resolve a merge, it can be continued
+    with --continue or aborted with --abort.
+
+options:
+
+    --keep      keep original revisions
+ -s --source    rebase from a given revision
+ -b --base      rebase from the base of a given revision
+ -d --dest      rebase onto a given revision
+    --collapse  collapse the rebased revisions
+ -c --continue  continue an interrupted rebase
+ -a --abort     abort an interrupted rebase
+    --style     display using template map file
+    --template  display with template
+
+use "hg -v help rebase" to show global options
+
+% Rebase with no arguments - from current
+nothing to rebase
+
+% Rebase with no arguments - from the current branch
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+nothing to rebase
+% ----------
+% These work
+
+% Rebase with no arguments (from 3 onto 7)
+3 files updated, 0 files merged, 2 files removed, 0 files unresolved
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 5 files
+rebase completed
+
+% Rebase with base == '.' => same as no arguments (from 3 onto 7)
+3 files updated, 0 files merged, 3 files removed, 0 files unresolved
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 5 files
+rebase completed
+
+% Rebase with dest == default => same as no arguments (from 3 onto 7)
+3 files updated, 0 files merged, 3 files removed, 0 files unresolved
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 5 files
+rebase completed
+
+% Specify only source (from 4 onto 7)
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 4 changes to 4 files (-1 heads)
+rebase completed
+
+% Specify only dest (from 3 onto 6)
+3 files updated, 0 files merged, 3 files removed, 0 files unresolved
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 5 files (+1 heads)
+rebase completed
+
+% Specify only base (from 3 onto 7)
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 5 files
+rebase completed
+
+% Specify source and dest (from 4 onto 6)
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 4 changes to 4 files
+rebase completed
+
+% Specify base and dest (from 3 onto 6)
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 5 files (+1 heads)
+rebase completed
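
The option table repeated in the help output above corresponds to a small set of everyday invocations. A minimal sketch, assuming a history like the one in these tests where revision 3 starts the local branch and revision 7 is the tip of the destination branch; the revision numbers are illustrative only:

    hg rebase                      # no arguments: rebase the current branch onto the destination tip
    hg rebase --source 4 --dest 7  # move revision 4 and its descendants onto revision 7
    hg rebase --base 3 --dest 7    # move the whole branch containing revision 3
    hg rebase --continue           # resume after a conflicted merge has been resolved by hand
    hg rebase --abort              # give up and restore the pre-rebase state
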
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-pull	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,50 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+BASE=`pwd`
+
+addcommit () {
+    echo $1 > $1
+    hg add $1
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+commit () {
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+cd $BASE
+rm -rf a
+hg init a
+cd a
+addcommit "C1" 0
+addcommit "C2" 1
+
+cd ..
+hg clone a b
+cd b
+addcommit "L1" 2
+
+cd ../a
+addcommit "R1" 3
+
+cd ../b
+echo
+echo "% Now b has one revision to be pulled from a"
+hg pull --rebase 2>&1 | sed -e 's/\(saving bundle to \).*/\1/' \
+                -e 's/\(pulling from \).*/\1/'
+
+hg glog --template '{rev}:{desc}\n'
+
+echo
+echo "% Re-run pull --rebase"
+hg pull --rebase 2>&1 | sed 's/\(pulling from \).*/\1/'
+
+echo
+echo "% Invoke pull --rebase with --update"
+hg pull --rebase --update
+
+exit 0
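
The script drives pull's --rebase flag end to end. As a rough sketch, not the extension's exact code path, a successful run of the first pull above behaves like the two-step sequence below; the final step of the script then confirms that combining --rebase with --update is rejected outright.

    hg pull       # fetch R1 from repository a, creating a second head
    hg rebase     # graft the local L1 changeset onto the newly pulled head
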
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-pull.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,34 @@
+updating working directory
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+% Now b has one revision to be pulled from a
+pulling from 
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 2 files
+rebase completed
+@  3:L1
+|
+o  2:R1
+|
+o  1:C2
+|
+o  0:C1
+
+
+% Re-run pull --rebase
+pulling from 
+searching for changes
+no changes found
+
+% Invoke pull --rebase with --update
+abort: --update and --rebase are not compatible
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-scenario-global	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,82 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "graphlog=" >> $HGRCPATH
+echo "rebase=" >> $HGRCPATH
+
+BASE=`pwd`
+
+addcommit () {
+    echo $1 > $1
+    hg add $1
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+commit () {
+    hg commit -d "${2} 0" -u test -m $1
+}
+
+createrepo () {
+    cd $BASE
+    rm -rf a
+    hg init a
+    cd a
+    addcommit "A" 0
+    addcommit "B" 1
+
+    hg update -C 0
+    addcommit "C" 2
+
+    hg update -C 0
+    addcommit "D" 3
+
+    hg merge -r 2
+    commit "E" 4
+
+    hg update -C 3
+    addcommit "F" 5
+}
+
+createrepo > /dev/null 2>&1
+hg glog  --template '{rev}: {desc}\n'
+
+echo '% Rebasing'
+echo '% B onto F - simple rebase'
+hg rebase -s 1 -d 5 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+createrepo > /dev/null 2>&1
+echo '% B onto D - intermediate point'
+hg rebase -s 1 -d 3 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+createrepo > /dev/null 2>&1
+echo '% C onto F - skip of E'
+hg rebase -s 2 -d 5 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+createrepo > /dev/null 2>&1
+echo '% D onto C - rebase of a branching point (skip E)'
+hg rebase -s 3 -d 2 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+createrepo > /dev/null 2>&1
+echo '% E onto F - merged revision having a parent in ancestors of target'
+hg rebase -s 4 -d 5 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+createrepo > /dev/null 2>&1
+echo '% D onto B - E maintains C as parent'
+hg rebase -s 3 -d 1 2>&1 | sed 's/\(saving bundle to \).*/\1/'
+hg glog  --template '{rev}: {desc}\n'
+
+echo '% These will fail'
+createrepo > /dev/null 2>&1
+echo '% E onto D - rebase onto an ancestor'
+hg rebase -s 4 -d 3
+echo '% D onto E - rebase onto a descendant'
+hg rebase -s 3 -d 4
+echo '% E onto B - merge revision with both parents not in ancestors of target'
+hg rebase -s 4 -d 1
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-scenario-global.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,138 @@
+@  5: F
+|
+| o  4: E
+|/|
+o |  3: D
+| |
+| o  2: C
+|/
+| o  1: B
+|/
+o  0: A
+
+% Rebasing
+% B onto F - simple rebase
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 4 changes to 4 files (+1 heads)
+rebase completed
+@  5: B
+|
+o  4: F
+|
+| o  3: E
+|/|
+o |  2: D
+| |
+| o  1: C
+|/
+o  0: A
+
+% B onto D - intermediate point
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 4 changes to 4 files (+2 heads)
+rebase completed
+@  5: B
+|
+| o  4: F
+|/
+| o  3: E
+|/|
+o |  2: D
+| |
+| o  1: C
+|/
+o  0: A
+
+% C onto F - skip of E
+nothing changed
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+1 heads)
+rebase completed
+@  4: C
+|
+o  3: F
+|
+o  2: D
+|
+| o  1: B
+|/
+o  0: A
+
+% D onto C - rebase of a branching point (skip E)
+nothing changed
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 2 files
+rebase completed
+@  4: F
+|
+o  3: D
+|
+o  2: C
+|
+| o  1: B
+|/
+o  0: A
+
+% E onto F - merged revision having a parent in ancestors of target
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 1 changes to 1 files (-1 heads)
+rebase completed
+@    5: E
+|\
+| o  4: F
+| |
+| o  3: D
+| |
+o |  2: C
+|/
+| o  1: B
+|/
+o  0: A
+
+% D onto B - E maintains C as parent
+saving bundle to 
+adding branch
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 2 changes to 2 files
+rebase completed
+@  5: F
+|
+| o  4: E
+|/|
+o |  3: D
+| |
+| o  2: C
+| |
+o |  1: B
+|/
+o  0: A
+
+% These will fail
+% E onto D - rebase onto an ancestor
+abort: cannot rebase a descendant
+% D onto E - rebase onto a descendant
+abort: cannot rebase an ancestor
+% E onto B - merge revision with both parents not in ancestors of target
+abort: cannot use revision 4 as base, result would have 3 parents
--- a/tests/test-remove.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-remove.out	Wed Sep 17 11:34:37 2008 +0200
@@ -70,15 +70,15 @@
 adding test/bar
 adding test/foo
 % dir, options none
+removing test/bar
 removing test/foo
-removing test/bar
 R test/bar
 R test/foo
 ./foo
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % dir, options -f
+removing test/bar
 removing test/foo
-removing test/bar
 R test/bar
 R test/foo
 ./foo
@@ -91,8 +91,8 @@
 ./test/foo
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % dir, options -Af
+removing test/bar
 removing test/foo
-removing test/bar
 R test/bar
 R test/foo
 ./foo
--- a/tests/test-rename-merge1.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-rename-merge1.out	Wed Sep 17 11:34:37 2008 +0200
@@ -19,12 +19,12 @@
  a2: divergent renames -> dr
  a: remote moved to b -> m
  b2: remote created -> g
-copying a to b
-picked tool 'internal:merge' for a (binary False symlink False)
-merging a and b
-my a@f26ec4fc3fa3+ other b@8e765a822af2 ancestor a@af1939970a1c
+preserving a for resolve of b
+removing a
+picked tool 'internal:merge' for b (binary False symlink False)
+merging a and b to b
+my b@f26ec4fc3fa3+ other b@8e765a822af2 ancestor a@af1939970a1c
  premerge successful
-removing a
 warning: detected divergent renames of a2 to:
  c2
  b2
--- a/tests/test-rename-merge2.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-rename-merge2.out	Wed Sep 17 11:34:37 2008 +0200
@@ -13,10 +13,11 @@
   checking for directory renames
  rev: versions differ -> m
  a: remote copied to b -> m
-copying a to b
-picked tool 'python ../merge' for a (binary False symlink False)
-merging a and b
-my a@e300d1c794ec+ other b@735846fee2d7 ancestor a@924404dff337
+preserving a for resolve of b
+preserving rev for resolve of rev
+picked tool 'python ../merge' for b (binary False symlink False)
+merging a and b to b
+my b@e300d1c794ec+ other b@735846fee2d7 ancestor a@924404dff337
  premerge successful
 picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
@@ -45,9 +46,11 @@
  a: remote is newer -> g
  b: local copied to a -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 getting a
 picked tool 'python ../merge' for b (binary False symlink False)
-merging b and a
+merging b and a to b
 my b@ac809aeed39a+ other a@f4db7e329e71 ancestor a@924404dff337
  premerge successful
 picked tool 'python ../merge' for rev (binary False symlink False)
@@ -76,12 +79,13 @@
   checking for directory renames
  rev: versions differ -> m
  a: remote moved to b -> m
-copying a to b
-picked tool 'python ../merge' for a (binary False symlink False)
-merging a and b
-my a@e300d1c794ec+ other b@e03727d2d66b ancestor a@924404dff337
+preserving a for resolve of b
+preserving rev for resolve of rev
+removing a
+picked tool 'python ../merge' for b (binary False symlink False)
+merging a and b to b
+my b@e300d1c794ec+ other b@e03727d2d66b ancestor a@924404dff337
  premerge successful
-removing a
 picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@e300d1c794ec+ other rev@e03727d2d66b ancestor rev@924404dff337
@@ -107,8 +111,10 @@
   checking for directory renames
  b: local moved to a -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 picked tool 'python ../merge' for b (binary False symlink False)
-merging b and a
+merging b and a to b
 my b@ecf3cb2a4219+ other a@f4db7e329e71 ancestor a@924404dff337
  premerge successful
 picked tool 'python ../merge' for rev (binary False symlink False)
@@ -136,6 +142,7 @@
   checking for directory renames
  rev: versions differ -> m
  b: remote created -> g
+preserving rev for resolve of rev
 getting b
 picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
@@ -161,6 +168,7 @@
    b -> a 
   checking for directory renames
  rev: versions differ -> m
+preserving rev for resolve of rev
 picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ac809aeed39a+ other rev@97c705ade336 ancestor rev@924404dff337
@@ -187,6 +195,7 @@
  a: other deleted -> r
  rev: versions differ -> m
  b: remote created -> g
+preserving rev for resolve of rev
 removing a
 getting b
 picked tool 'python ../merge' for rev (binary False symlink False)
@@ -212,6 +221,7 @@
    b -> a 
   checking for directory renames
  rev: versions differ -> m
+preserving rev for resolve of rev
 picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@ecf3cb2a4219+ other rev@97c705ade336 ancestor rev@924404dff337
@@ -231,6 +241,8 @@
   searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@ec03c2ca8642+ other b@79cc6877a3b7 ancestor a@924404dff337
@@ -262,6 +274,7 @@
  a: divergent renames -> dr
  rev: versions differ -> m
  c: remote created -> g
+preserving rev for resolve of rev
 warning: detected divergent renames of a to:
  b
  c
@@ -286,6 +299,8 @@
   searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@ac809aeed39a+ other b@af30c7647fc7 ancestor b@000000000000
@@ -310,6 +325,8 @@
  a: other deleted -> r
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 removing a
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
@@ -334,6 +351,8 @@
  a: remote is newer -> g
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 getting a
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
@@ -359,6 +378,8 @@
  a: other deleted -> r
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 removing a
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
@@ -383,6 +404,8 @@
  a: remote is newer -> g
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 getting a
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
@@ -407,6 +430,8 @@
   searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@0b76e65c8289+ other b@735846fee2d7 ancestor b@000000000000
@@ -431,6 +456,8 @@
  b: versions differ -> m
  rev: versions differ -> m
  a: prompt recreating -> g
+preserving b for resolve of b
+preserving rev for resolve of rev
 getting a
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
@@ -455,6 +482,8 @@
   searching for copies back to rev 1
  b: versions differ -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 picked tool 'python ../merge' for b (binary False symlink False)
 merging b
 my b@0b76e65c8289+ other b@e03727d2d66b ancestor b@000000000000
@@ -483,11 +512,12 @@
   checking for directory renames
  rev: versions differ -> m
  a: remote moved to b -> m
-copying a to b
-picked tool 'python ../merge' for a (binary False symlink False)
-merging a and b
-my a@e300d1c794ec+ other b@79cc6877a3b7 ancestor a@924404dff337
+preserving a for resolve of b
+preserving rev for resolve of rev
 removing a
+picked tool 'python ../merge' for b (binary False symlink False)
+merging a and b to b
+my b@e300d1c794ec+ other b@79cc6877a3b7 ancestor a@924404dff337
 picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
 my rev@e300d1c794ec+ other rev@79cc6877a3b7 ancestor rev@924404dff337
@@ -513,8 +543,10 @@
   checking for directory renames
  b: local moved to a -> m
  rev: versions differ -> m
+preserving b for resolve of b
+preserving rev for resolve of rev
 picked tool 'python ../merge' for b (binary False symlink False)
-merging b and a
+merging b and a to b
 my b@ec03c2ca8642+ other a@f4db7e329e71 ancestor a@924404dff337
 picked tool 'python ../merge' for rev (binary False symlink False)
 merging rev
@@ -544,8 +576,10 @@
  b: local moved to a -> m
  rev: versions differ -> m
  c: remote created -> g
+preserving b for resolve of b
+preserving rev for resolve of rev
 picked tool 'python ../merge' for b (binary False symlink False)
-merging b and a
+merging b and a to b
 my b@ecf3cb2a4219+ other a@2b958612230f ancestor a@924404dff337
  premerge successful
 getting c
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-ancestry.py	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,74 @@
+import os
+from mercurial import hg, ui, merge
+from hgext import graphlog
+
+u = ui.ui()
+
+repo = hg.repository(u, 'test1', create=1)
+os.chdir('test1')
+
+def commit(text, time):
+    repo.commit(text=text, date="%d 0" % time)
+
+def addcommit(name, time):
+    f = file(name, 'w')
+    f.write('%s\n' % name)
+    f.close()
+    repo.add([name])
+    commit(name, time)
+
+def update(rev):
+    merge.update(repo, rev, False, True, False)
+
+def merge_(rev):
+    merge.update(repo, rev, True, False, False)
+
+if __name__ == '__main__':
+    addcommit("A", 0)
+    addcommit("B", 1)
+
+    update(0)
+    addcommit("C", 2)
+
+    merge_(1)
+    commit("D", 3)
+
+    update(2)
+    addcommit("E", 4)
+    addcommit("F", 5)
+
+    update(3)
+    addcommit("G", 6)
+
+    merge_(5)
+    commit("H", 7)
+
+    update(5)
+    addcommit("I", 8)
+
+    # Ancestors
+    print 'Ancestors of 5'
+    for r in repo.changelog.ancestors(5):
+        print r,
+
+    print '\nAncestors of 6 and 5'
+    for r in repo.changelog.ancestors(6, 5):
+        print r,
+
+    print '\nAncestors of 5 and 4'
+    for r in repo.changelog.ancestors(5, 4):
+        print r,
+
+    # Descendants
+    print '\n\nDescendants of 5'
+    for r in repo.changelog.descendants(5):
+        print r,
+
+    print '\nDescendants of 5 and 3'
+    for r in repo.changelog.descendants(5, 3):
+        print r,
+
+    print '\nDescendants of 5 and 4'
+    for r in repo.changelog.descendants(5, 4):
+        print r,
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-ancestry.py.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,13 @@
+Ancestors of 5
+4 2 0 
+Ancestors of 6 and 5
+3 4 2 1 0 
+Ancestors of 5 and 4
+4 2 0 
+
+Descendants of 5
+7 8 
+Descendants of 5 and 3
+6 7 8 
+Descendants of 5 and 4
+5 7 8
--- a/tests/test-serve	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-serve	Wed Sep 17 11:34:37 2008 +0200
@@ -2,9 +2,11 @@
 
 hgserve()
 {
-    hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v $@ \
+    hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -E errors.log -v $@ \
         | sed -e 's/:[0-9][0-9]*//g' -e 's/http:\/\/[^/]*\//http:\/\/localhost\//'
     cat hg.pid >> "$DAEMON_PIDS"
+    echo % errors
+    cat errors.log
     sleep 1
     kill `cat hg.pid`
     sleep 1
@@ -17,11 +19,13 @@
 echo 'accesslog = access.log' >> .hg/hgrc
 
 echo % Without -v
-hg serve -a localhost -p $HGPORT -d --pid-file=hg.pid
+hg serve -a localhost -p $HGPORT -d --pid-file=hg.pid -E errors.log
 cat hg.pid >> "$DAEMON_PIDS"
 if [ -f access.log ]; then
     echo 'access log created - .hg/hgrc respected'
 fi
+echo % errors
+cat errors.log
 
 echo % With -v
 hgserve
--- a/tests/test-serve.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-serve.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,12 +1,18 @@
 % Without -v
 access log created - .hg/hgrc respected
+% errors
 % With -v
-listening at http://localhost/ (127.0.0.1)
+listening at http://localhost/ (bound to 127.0.0.1)
+% errors
 % With --prefix foo
-listening at http://localhost/foo/ (127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1)
+% errors
 % With --prefix /foo
-listening at http://localhost/foo/ (127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1)
+% errors
 % With --prefix foo/
-listening at http://localhost/foo/ (127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1)
+% errors
 % With --prefix /foo/
-listening at http://localhost/foo/ (127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1)
+% errors
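
Both the hgserve helper and the bare invocation in this test now pass -E so that any server-side traceback lands in a file the test can print. A minimal sketch of the same pattern outside the harness, with the port and file names chosen arbitrarily here:

    hg serve -a localhost -p 8000 -d --pid-file=hg.pid -E errors.log
    cat errors.log        # expected to be empty when the daemon started cleanly
    kill `cat hg.pid`     # stop the daemon again
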
--- a/tests/test-static-http	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-static-http	Wed Sep 17 11:34:37 2008 +0200
@@ -49,6 +49,12 @@
 echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc
 http_proxy= hg pull | sed -e 's,:[0-9][0-9]*/,/,'
 
+echo '% trying to push'
+hg update
+echo more foo >> bar
+hg commit -m"test" -d "100000000 0"
+http_proxy= hg push | sed -e 's,:[0-9][0-9]*/,/,'
+
 echo '% test with "/" URI (issue 747)'
 cd ..
 hg init
--- a/tests/test-static-http.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-static-http.out	Wed Sep 17 11:34:37 2008 +0200
@@ -29,6 +29,10 @@
 adding file changes
 added 1 changesets with 1 changes to 1 files
 (run 'hg update' to get a working copy)
+% trying to push
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+abort: cannot lock static-http repository
+pushing to static-http://localhost/remote
 % test with "/" URI (issue 747)
 requesting all changes
 adding changesets
--- a/tests/test-tag.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-tag.out	Wed Sep 17 11:34:37 2008 +0200
@@ -36,7 +36,9 @@
 868cc8fbb43b754ad09fa109885d243fc49adae7 gawk
 868cc8fbb43b754ad09fa109885d243fc49adae7 gorp
 3807bcf62c5614cb6c16436b514d7764ca5f1631 gack
+3807bcf62c5614cb6c16436b514d7764ca5f1631 gack
 0000000000000000000000000000000000000000 gack
+868cc8fbb43b754ad09fa109885d243fc49adae7 gorp
 0000000000000000000000000000000000000000 gorp
 3ecf002a1c572a2f3bb4e665417e60fca65bbd42 bleah1
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
--- a/tests/test-tags.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-tags.out	Wed Sep 17 11:34:37 2008 +0200
@@ -45,16 +45,16 @@
 created new head
 tip                                4:36195b728445
 bar                                1:b204a97e6e8d
-changeset:   5:57e1983b4a60
+changeset:   5:1f98c77278de
 tag:         tip
 user:        test
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     Removed tag bar
 
-tip                                5:57e1983b4a60
+tip                                5:1f98c77278de
 % remove nonexistent tag
 abort: tag 'foobar' does not exist
-changeset:   5:57e1983b4a60
+changeset:   5:1f98c77278de
 tag:         tip
 user:        test
 date:        Mon Jan 12 13:46:40 1970 +0000
--- a/tests/test-transplant	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-transplant	Wed Sep 17 11:34:37 2008 +0200
@@ -96,3 +96,21 @@
 hg transplant --continue
 hg transplant 1:3
 hg locate
+cd ..
+
+# Test transplant --merge (issue 1111)
+echo % test transplant merge
+hg init t1111
+cd t1111
+echo a > a
+hg ci -Am adda
+echo b >> a
+hg ci -m appendb
+echo c >> a
+hg ci -m appendc
+hg up -C 0
+echo d >> a
+hg ci -m appendd
+echo % transplant
+hg transplant -m 1
+cd ..
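
The new block exercises transplant's --merge option from issue 1111. A minimal sketch of the call it makes, assuming a repository whose revision 1 branched off before the current working-directory parent:

    hg transplant --merge 1    # bring revision 1 across as a merge instead of a copied patch
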
--- a/tests/test-transplant.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-transplant.out	Wed Sep 17 11:34:37 2008 +0200
@@ -110,14 +110,14 @@
 applying a1e30dd1b8e7
 patching file foo
 Hunk #1 FAILED at 0
-1 out of 1 hunk FAILED -- saving rejects to file foo.rej
+1 out of 1 hunks FAILED -- saving rejects to file foo.rej
 patch failed to apply
 abort: Fix up the merge and run hg transplant --continue
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 applying a1e30dd1b8e7
 patching file foo
 Hunk #1 FAILED at 0
-1 out of 1 hunk FAILED -- saving rejects to file foo.rej
+1 out of 1 hunks FAILED -- saving rejects to file foo.rej
 patch failed to apply
 abort: Fix up the merge and run hg transplant --continue
 a1e30dd1b8e7 transplanted as f1563cf27039
@@ -129,3 +129,10 @@
 added
 bar
 foo
+% test transplant merge
+adding a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+% transplant
+applying 42dc4432fd35
+1:42dc4432fd35 merged at a9f4acbac129
--- a/tests/test-up-local-change.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-up-local-change.out	Wed Sep 17 11:34:37 2008 +0200
@@ -23,6 +23,7 @@
    b
  a: versions differ -> m
  b: remote created -> g
+preserving a for resolve of a
 picked tool 'true' for a (binary False symlink False)
 merging a
 my a@33aaa84a386b+ other a@802f095af299 ancestor a@33aaa84a386b
@@ -60,6 +61,7 @@
    b
  a: versions differ -> m
  b: remote created -> g
+preserving a for resolve of a
 picked tool 'true' for a (binary False symlink False)
 merging a
 my a@33aaa84a386b+ other a@802f095af299 ancestor a@33aaa84a386b
@@ -113,6 +115,8 @@
   searching for copies back to rev 1
  a: versions differ -> m
  b: versions differ -> m
+preserving a for resolve of a
+preserving b for resolve of b
 picked tool 'true' for a (binary False symlink False)
 merging a
 my a@802f095af299+ other a@030602aee63d ancestor a@33aaa84a386b
--- a/tests/test-update-reverse.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-update-reverse.out	Wed Sep 17 11:34:37 2008 +0200
@@ -46,9 +46,9 @@
  side2: remote deleted -> r
  side1: remote deleted -> r
  main: remote created -> g
-getting main
 removing side1
 removing side2
+getting main
 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
 Should only show a main
 a
--- a/tests/test-username-newline.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-username-newline.out	Wed Sep 17 11:34:37 2008 +0200
@@ -1,10 +1,6 @@
 adding a
-transaction abort!
-rollback completed
 abort: username 'foo\nbar1' contains a newline
 
-transaction abort!
-rollback completed
 abort: username 'foo\nbar2' contains a newline
 
 transaction abort!
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-verify	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+echo % prepare repo
+hg init
+echo "some text" > FOO.txt
+echo "another text" > bar.txt
+echo "more text" > QUICK.txt
+hg add
+hg ci -d '0 0' -mtest1
+
+echo
+echo % verify
+hg verify
+
+echo
+echo % introduce some bugs in repo
+cd .hg/store/data
+mv _f_o_o.txt.i X_f_o_o.txt.i
+mv bar.txt.i xbar.txt.i
+rm _q_u_i_c_k.txt.i
+
+echo
+echo % verify
+hg verify
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-verify.out	Wed Sep 17 11:34:37 2008 +0200
@@ -0,0 +1,34 @@
+% prepare repo
+adding FOO.txt
+adding QUICK.txt
+adding bar.txt
+
+% verify
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+3 files, 1 changesets, 3 total revisions
+
+% introduce some bugs in repo
+
+% verify
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+ ?: cannot decode filename 'data/X_f_o_o.txt.i'
+ data/FOO.txt.i@0: missing revlog!
+ 0: empty or missing FOO.txt
+ FOO.txt@0: f62022d3d590 in manifests not found
+ data/QUICK.txt.i@0: missing revlog!
+ 0: empty or missing QUICK.txt
+ QUICK.txt@0: 88b857db8eba in manifests not found
+ data/bar.txt.i@0: missing revlog!
+ 0: empty or missing bar.txt
+ bar.txt@0: 256559129457 in manifests not found
+warning: orphan revlog 'data/xbar.txt.i'
+3 files, 1 changesets, 0 total revisions
+1 warnings encountered!
+10 integrity errors encountered!
+(first damaged changeset appears to be 0)
--- a/tests/test-walk.out	Wed Sep 17 11:14:06 2008 +0200
+++ b/tests/test-walk.out	Wed Sep 17 11:34:37 2008 +0200
@@ -275,10 +275,10 @@
 fifo: unsupported file type (type is fifo)
 
 hg debugwalk fenugreek
-m  fenugreek  fenugreek  exact
+f  fenugreek  fenugreek  exact
 
 hg debugwalk fenugreek
-m  fenugreek  fenugreek  exact
+f  fenugreek  fenugreek  exact
 
 hg debugwalk new
 f  new  new  exact