changeset 16871:d566aa319d5f

merge with stable
author Matt Mackall <mpm@selenic.com>
date Mon, 04 Jun 2012 17:57:57 -0500
parents 54b03085065f (diff) 00e1e40d709c (current diff)
children 40d930848fd0
files mercurial/commands.py
diffstat 195 files changed, 5150 insertions(+), 4381 deletions(-)
--- a/contrib/check-code.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/contrib/check-code.py	Mon Jun 04 17:57:57 2012 -0500
@@ -45,12 +45,10 @@
   [
     (r'pushd|popd', "don't use 'pushd' or 'popd', use 'cd'"),
     (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
-    (r'^function', "don't use 'function', use old style"),
     (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
     (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
     (r'echo.*\\n', "don't use 'echo \\n', use printf"),
     (r'echo -n', "don't use 'echo -n', use printf"),
-    (r'^diff.*-\w*N', "don't use 'diff -N'"),
     (r'(^| )wc[^|]*$\n(?!.*\(re\))', "filter wc output"),
     (r'head -c', "don't use 'head -c', use 'dd'"),
     (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
@@ -62,10 +60,8 @@
     (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
      "use egrep for extended grep syntax"),
     (r'/bin/', "don't use explicit paths for tools"),
-    (r'\$PWD', "don't use $PWD, use `pwd`"),
     (r'[^\n]\Z', "no trailing newline"),
     (r'export.*=', "don't export and assign at once"),
-    (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
     (r'^source\b', "don't use 'source', use '.'"),
     (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
     (r'ls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
@@ -79,7 +75,12 @@
     (r'^( *)\t', "don't use tabs to indent"),
   ],
   # warnings
-  []
+  [
+    (r'^function', "don't use 'function', use old style"),
+    (r'^diff.*-\w*N', "don't use 'diff -N'"),
+    (r'\$PWD', "don't use $PWD, use `pwd`"),
+    (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
+  ]
 ]
 
 testfilters = [
@@ -91,7 +92,8 @@
 utestpats = [
   [
     (r'^(\S|  $ ).*(\S[ \t]+|^[ \t]+)\n', "trailing whitespace on non-output"),
-    (uprefix + r'.*\|\s*sed', "use regex test output patterns instead of sed"),
+    (uprefix + r'.*\|\s*sed[^|>\n]*\n',
+     "use regex test output patterns instead of sed"),
     (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
     (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
     (uprefix + r'.*\|\| echo.*(fail|error)',
@@ -106,9 +108,9 @@
 for i in [0, 1]:
     for p, m in testpats[i]:
         if p.startswith(r'^'):
-            p = r"^  \$ (%s)" % p[1:]
+            p = r"^  [$>] (%s)" % p[1:]
         else:
-            p = r"^  \$ .*(%s)" % p
+            p = r"^  [$>] .*(%s)" % p
         utestpats[i].append((p, m))
 
 utestfilters = [
@@ -133,11 +135,12 @@
     (r'^\s+\w+=\w+[^,)\n]$', "missing whitespace in assignment"),
     (r'(\s+)try:\n((?:\n|\1\s.*\n)+?)\1except.*?:\n'
      r'((?:\n|\1\s.*\n)+?)\1finally:', 'no try/except/finally in Py2.4'),
-    (r'.{85}', "line too long"),
+    (r'.{81}', "line too long"),
     (r' x+[xo][\'"]\n\s+[\'"]x', 'string join across lines with no space'),
     (r'[^\n]\Z', "no trailing newline"),
     (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
-#    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', "don't use underbars in identifiers"),
+#    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
+#     "don't use underbars in identifiers"),
     (r'^\s+(self\.)?[A-za-z][a-z0-9]+[A-Z]\w* = ',
      "don't use camelcase in identifiers"),
     (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
@@ -199,11 +202,11 @@
      "always assign an opened file to a variable, and close it afterwards"),
     (r'(?i)descendent', "the proper spelling is descendAnt"),
     (r'\.debug\(\_', "don't mark debug messages for translation"),
+    (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
+    (r'^\s*except\s*:', "warning: naked except clause", r'#.*re-raises'),
   ],
   # warnings
   [
-    (r'.{81}', "warning: line over 80 characters"),
-    (r'^\s*except:$', "warning: naked except clause"),
     (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
      "warning: unwrapped ui message"),
   ]
@@ -222,7 +225,7 @@
     (r'^  ', "don't use spaces to indent"),
     (r'\S\t', "don't use tabs except for indent"),
     (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
-    (r'.{85}', "line too long"),
+    (r'.{81}', "line too long"),
     (r'(while|if|do|for)\(', "use space after while/if/do/for"),
     (r'return\(', "return is not a function"),
     (r' ;', "no space before ;"),
@@ -352,7 +355,13 @@
 
         prelines = None
         errors = []
-        for p, msg in pats:
+        for pat in pats:
+            if len(pat) == 3:
+                p, msg, ignore = pat
+            else:
+                p, msg = pat
+                ignore = None
+
             # fix-up regexes for multiline searches
             po = p
             # \s doesn't match \n
@@ -383,6 +392,8 @@
                         print "Skipping %s for %s:%s (check-code -ignore)" % (
                             name, f, n)
                     continue
+                elif ignore and re.search(ignore, l, re.MULTILINE):
+                    continue
                 bd = ""
                 if blame:
                     bd = 'working directory'
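
The check-code.py hunks above also extend the pattern tables: a pattern tuple may now carry an optional third element, an "ignore" regex that suppresses the report on lines it matches (used by the new naked-except warning via '#.*re-raises'). A minimal standalone sketch of that convention, assuming illustrative pattern entries rather than the real tables:

import re

# Pattern tuples: (regex, message) or (regex, message, ignore-regex).
pats = [
    (r'^\s*except\s*:', "warning: naked except clause", r'#.*re-raises'),
    (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
]

def check(line):
    for pat in pats:
        if len(pat) == 3:
            p, msg, ignore = pat
        else:
            p, msg = pat
            ignore = None
        if re.search(p, line) and not (ignore and re.search(ignore, line)):
            print(msg)

check("except: # re-raises")   # silenced by the ignore regex
check("except:")               # reported
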
--- a/contrib/debugcmdserver.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/contrib/debugcmdserver.py	Mon Jun 04 17:57:57 2012 -0500
@@ -24,7 +24,7 @@
 def read(size):
     data = sys.stdin.read(size)
     if not data:
-        raise EOFError()
+        raise EOFError
     sys.stdout.write(data)
     sys.stdout.flush()
     return data
--- a/contrib/hg-ssh	Mon Jun 04 17:22:09 2012 -0500
+++ b/contrib/hg-ssh	Mon Jun 04 17:57:57 2012 -0500
@@ -24,6 +24,9 @@
 
 You can use pattern matching of your normal shell, e.g.:
 command="cd repos && hg-ssh user/thomas/* projects/{mercurial,foo}"
+
+You can also add a --read-only flag to allow read-only access to a key, e.g.:
+command="hg-ssh --read-only repos/*"
 """
 
 # enable importing on demand to reduce startup time
@@ -33,25 +36,51 @@
 
 import sys, os, shlex
 
-cwd = os.getcwd()
-allowed_paths = [os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
-                 for path in sys.argv[1:]]
-orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?')
-try:
-    cmdargv = shlex.split(orig_cmd)
-except ValueError, e:
-    sys.stderr.write('Illegal command "%s": %s\n' % (orig_cmd, e))
-    sys.exit(255)
+def main():
+    cwd = os.getcwd()
+    readonly = False
+    args = sys.argv[1:]
+    while len(args):
+        if args[0] == '--read-only':
+            readonly = True
+            args.pop(0)
+        else:
+            break
+    allowed_paths = [os.path.normpath(os.path.join(cwd,
+                                                   os.path.expanduser(path)))
+                     for path in args]
+    orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?')
+    try:
+        cmdargv = shlex.split(orig_cmd)
+    except ValueError, e:
+        sys.stderr.write('Illegal command "%s": %s\n' % (orig_cmd, e))
+        sys.exit(255)
 
-if cmdargv[:2] == ['hg', '-R'] and cmdargv[3:] == ['serve', '--stdio']:
-    path = cmdargv[2]
-    repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
-    if repo in allowed_paths:
-        dispatch.dispatch(dispatch.request(['-R', repo, 'serve', '--stdio']))
+    if cmdargv[:2] == ['hg', '-R'] and cmdargv[3:] == ['serve', '--stdio']:
+        path = cmdargv[2]
+        repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
+        if repo in allowed_paths:
+            cmd = ['-R', repo, 'serve', '--stdio']
+            if readonly:
+                cmd += [
+                    '--config',
+                    'hooks.prechangegroup.hg-ssh=python:__main__.rejectpush',
+                    '--config',
+                    'hooks.prepushkey.hg-ssh=python:__main__.rejectpush'
+                    ]
+            dispatch.dispatch(dispatch.request(cmd))
+        else:
+            sys.stderr.write('Illegal repository "%s"\n' % repo)
+            sys.exit(255)
     else:
-        sys.stderr.write('Illegal repository "%s"\n' % repo)
+        sys.stderr.write('Illegal command "%s"\n' % orig_cmd)
         sys.exit(255)
-else:
-    sys.stderr.write('Illegal command "%s"\n' % orig_cmd)
-    sys.exit(255)
 
+def rejectpush(ui, **kwargs):
+    ui.warn("Permission denied\n")
+    # mercurial hooks use unix process conventions for hook return values
+    # so a truthy return means failure
+    return True
+
+if __name__ == '__main__':
+    main()
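
The rejectpush() helper above follows Mercurial's hook convention: a Python hook returns a falsy value on success and a truthy value to signal failure, which makes the server refuse the incoming operation. A rough sketch of that convention in isolation, where runhook() and FakeUI are illustrative stand-ins rather than Mercurial APIs:

import sys

def rejectpush(ui, **kwargs):
    ui.warn("Permission denied\n")
    return True                 # truthy return value means the hook failed

class FakeUI(object):
    def warn(self, msg):
        sys.stderr.write(msg)

def runhook(hook, ui):
    # a failing prechangegroup/prepushkey hook blocks the incoming push
    if hook(ui):
        sys.exit(255)

runhook(rejectpush, FakeUI())
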
--- a/contrib/perf.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/contrib/perf.py	Mon Jun 04 17:57:57 2012 -0500
@@ -33,20 +33,34 @@
     try:
         m = scmutil.match(repo[None], pats, {})
         timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
-    except:
+    except Exception:
         try:
             m = scmutil.match(repo[None], pats, {})
             timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
-        except:
+        except Exception:
             timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
 
 def perfstatus(ui, repo, *pats):
     #m = match.always(repo.root, repo.getcwd())
-    #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, False))))
+    #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
+    #                                                False))))
     timer(lambda: sum(map(len, repo.status())))
 
+def clearcaches(cl):
+    # behave somewhat consistently across internal API changes
+    if util.safehasattr(cl, 'clearcaches'):
+        cl.clearcaches()
+    elif util.safehasattr(cl, '_nodecache'):
+        from mercurial.node import nullid, nullrev
+        cl._nodecache = {nullid: nullrev}
+        cl._nodepos = None
+
 def perfheads(ui, repo):
-    timer(lambda: len(repo.changelog.headrevs()))
+    cl = repo.changelog
+    def d():
+        len(cl.headrevs())
+        clearcaches(cl)
+    timer(d)
 
 def perftags(ui, repo):
     import mercurial.changelog, mercurial.manifest
@@ -57,6 +71,13 @@
         return len(repo.tags())
     timer(t)
 
+def perfancestors(ui, repo):
+    heads = repo.changelog.headrevs()
+    def d():
+        for a in repo.changelog.ancestors(heads):
+            pass
+    timer(d)
+
 def perfdirstate(ui, repo):
     "a" in repo.dirstate
     def d():
@@ -71,6 +92,14 @@
         del repo.dirstate._dirs
     timer(d)
 
+def perfdirstatewrite(ui, repo):
+    ds = repo.dirstate
+    "a" in ds
+    def d():
+        ds._dirty = True
+        ds.write()
+    timer(d)
+
 def perfmanifest(ui, repo):
     def d():
         t = repo.manifest.tip()
@@ -111,6 +140,10 @@
 def perflookup(ui, repo, rev):
     timer(lambda: len(repo.lookup(rev)))
 
+def perfrevrange(ui, repo, *specs):
+    revrange = scmutil.revrange
+    timer(lambda: len(revrange(repo, specs)))
+
 def perfnodelookup(ui, repo, rev):
     import mercurial.revlog
     mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
@@ -125,20 +158,9 @@
     mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
     n = repo[rev].node()
     cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
-    # behave somewhat consistently across internal API changes
-    if util.safehasattr(cl, 'clearcaches'):
-        clearcaches = cl.clearcaches
-    elif util.safehasattr(cl, '_nodecache'):
-        from mercurial.node import nullid, nullrev
-        def clearcaches():
-            cl._nodecache = {nullid: nullrev}
-            cl._nodepos = None
-    else:
-        def clearcaches():
-            pass
     def d():
         cl.rev(n)
-        clearcaches()
+        clearcaches(cl)
     timer(d)
 
 def perflog(ui, repo, **opts):
@@ -205,6 +227,7 @@
     'perffncacheload': (perffncacheload, []),
     'perffncachewrite': (perffncachewrite, []),
     'perflookup': (perflookup, []),
+    'perfrevrange': (perfrevrange, []),
     'perfnodelookup': (perfnodelookup, []),
     'perfparents': (perfparents, []),
     'perfstartup': (perfstartup, []),
@@ -215,8 +238,10 @@
     'perfindex': (perfindex, []),
     'perfheads': (perfheads, []),
     'perftags': (perftags, []),
+    'perfancestors': (perfancestors, []),
     'perfdirstate': (perfdirstate, []),
     'perfdirstatedirs': (perfdirstate, []),
+    'perfdirstatewrite': (perfdirstatewrite, []),
     'perflog': (perflog,
                 [('', 'rename', False, 'ask log to follow renames')]),
     'perftemplating': (perftemplating, []),
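
The perf.py changes above factor cache clearing into a shared clearcaches(cl) helper so that each timed run of perfheads or perfnodelookup starts from a cold cache. A rough, self-contained sketch of that measure-then-reset pattern, with illustrative names instead of the real timer() and changelog objects:

import time

def clearcaches(obj):
    # prefer a dedicated cache-clearing API when present, otherwise
    # fall back to resetting a known attribute (illustrative only)
    if hasattr(obj, 'clearcaches'):
        obj.clearcaches()
    elif hasattr(obj, '_cache'):
        obj._cache = {}

def timer(func, runs=3):
    best = None
    for _ in range(runs):
        start = time.time()
        func()
        elapsed = time.time() - start
        best = elapsed if best is None else min(best, elapsed)
    print('best of %d runs: %f seconds' % (runs, best))

class FakeChangelog(object):
    def __init__(self):
        self._cache = {}
    def headrevs(self):
        return self._cache.setdefault('heads', list(range(1000)))

cl = FakeChangelog()
def d():
    len(cl.headrevs())
    clearcaches(cl)   # reset so the next run does not hit the warm cache
timer(d)
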
--- a/contrib/setup3k.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/contrib/setup3k.py	Mon Jun 04 17:57:57 2012 -0500
@@ -26,22 +26,22 @@
 try:
     import hashlib
     sha = hashlib.sha1()
-except:
+except ImportError:
     try:
         import sha
-    except:
+    except ImportError:
         raise SystemExit(
             "Couldn't import standard hashlib (incomplete Python install).")
 
 try:
     import zlib
-except:
+except ImportError:
     raise SystemExit(
         "Couldn't import standard zlib (incomplete Python install).")
 
 try:
     import bz2
-except:
+except ImportError:
     raise SystemExit(
         "Couldn't import standard bz2 (incomplete Python install).")
 
@@ -84,7 +84,7 @@
             os.dup2(devnull.fileno(), sys.stderr.fileno())
             objects = cc.compile([fname], output_dir=tmpdir)
             cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
-        except:
+        except Exception:
             return False
         return True
     finally:
--- a/contrib/shrink-revlog.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/contrib/shrink-revlog.py	Mon Jun 04 17:57:57 2012 -0500
@@ -240,7 +240,7 @@
             writerevs(ui, r1, r2, order, tr)
             report(ui, r1, r2)
             tr.close()
-        except:
+        except: # re-raises
             # Abort transaction first, so we truncate the files before
             # deleting them.
             tr.abort()
--- a/doc/gendoc.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/doc/gendoc.py	Mon Jun 04 17:57:57 2012 -0500
@@ -102,9 +102,9 @@
         ui.write("\n")
 
     section(ui, _("Extensions"))
-    ui.write(_("This section contains help for extensions that are distributed "
-               "together with Mercurial. Help for other extensions is available "
-               "in the help system."))
+    ui.write(_("This section contains help for extensions that are "
+               "distributed together with Mercurial. Help for other "
+               "extensions is available in the help system."))
     ui.write("\n\n"
              ".. contents::\n"
              "   :class: htmlonly\n"
--- a/doc/hgmanpage.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/doc/hgmanpage.py	Mon Jun 04 17:57:57 2012 -0500
@@ -582,7 +582,7 @@
                                     self._docinfo[name],
                                     self.defs['indent'][1],
                                     self.defs['indent'][1]))
-            elif not name in skip:
+            elif name not in skip:
                 if name in self._docinfo_names:
                     label = self._docinfo_names[name]
                 else:
--- a/hgext/acl.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/acl.py	Mon Jun 04 17:57:57 2012 -0500
@@ -152,6 +152,8 @@
 from mercurial import util, match
 import getpass, urllib
 
+testedwith = 'internal'
+
 def _getusers(ui, group):
 
     # First, try to use group definition from section [acl.groups]
@@ -172,7 +174,7 @@
         return True
 
     for ug in usersorgroups.replace(',', ' ').split():
-        if user == ug or ug.find('@') == 0 and user in _getusers(ui, ug[1:]):
+        if user == ug or ug.startswith('@') and user in _getusers(ui, ug[1:]):
             return True
 
     return False
@@ -188,15 +190,20 @@
     ui.debug('acl: %s enabled, %d entries for user %s\n' %
              (key, len(pats), user))
 
+    # Branch-based ACL
     if not repo:
         if pats:
-            return lambda b: '*' in pats or b in pats
-        return lambda b: False
+            # If there's an asterisk (meaning "any branch"), always return True;
+            # Otherwise, test if b is in pats
+            if '*' in pats:
+                return util.always
+            return lambda b: b in pats
+        return util.never
 
+    # Path-based ACL
     if pats:
         return match.match(repo.root, '', pats)
-    return match.exact(repo.root, '', [])
-
+    return util.never
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
--- a/hgext/bugzilla.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/bugzilla.py	Mon Jun 04 17:57:57 2012 -0500
@@ -282,6 +282,8 @@
 from mercurial import cmdutil, mail, templater, util
 import re, time, urlparse, xmlrpclib
 
+testedwith = 'internal'
+
 class bzaccess(object):
     '''Base class for access to Bugzilla.'''
 
@@ -416,7 +418,8 @@
         for id in bugs.keys():
             self.ui.status(_('  bug %s\n') % id)
             cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
-            bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
+            bzdir = self.ui.config('bugzilla', 'bzdir',
+                                   '/var/www/html/bugzilla')
             try:
                 # Backwards-compatible with old notify string, which
                 # took one string. This will throw with a new format
@@ -468,8 +471,8 @@
                 userid = self.get_user_id(defaultuser)
                 user = defaultuser
             except KeyError:
-                raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
-                                 (user, defaultuser))
+                raise util.Abort(_('cannot find bugzilla user id for %s or %s')
+                                 % (user, defaultuser))
         return (user, userid)
 
     def updatebug(self, bugid, newstate, text, committer):
@@ -909,4 +912,3 @@
             bz.notify(bugs, util.email(ctx.user()))
     except Exception, e:
         raise util.Abort(_('Bugzilla error: %s') % e)
-
--- a/hgext/children.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/children.py	Mon Jun 04 17:57:57 2012 -0500
@@ -8,12 +8,17 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-'''command to display child changesets'''
+'''command to display child changesets (DEPRECATED)
+
+This extension is deprecated. You should use :hg:`log -r
+"children(REV)"` instead.
+'''
 
 from mercurial import cmdutil
 from mercurial.commands import templateopts
 from mercurial.i18n import _
 
+testedwith = 'internal'
 
 def children(ui, repo, file_=None, **opts):
     """show the children of the given or working directory revision
--- a/hgext/churn.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/churn.py	Mon Jun 04 17:57:57 2012 -0500
@@ -13,6 +13,8 @@
 import os
 import time, datetime
 
+testedwith = 'internal'
+
 def maketemplater(ui, repo, tmpl):
     tmpl = templater.parsestring(tmpl, quoted=False)
     try:
--- a/hgext/color.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/color.py	Mon Jun 04 17:57:57 2012 -0500
@@ -105,6 +105,8 @@
 from mercurial import commands, dispatch, extensions, ui as uimod, util
 from mercurial.i18n import _
 
+testedwith = 'internal'
+
 # start and stop parameters for effects
 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
             'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
--- a/hgext/convert/__init__.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/__init__.py	Mon Jun 04 17:57:57 2012 -0500
@@ -13,6 +13,8 @@
 from mercurial import commands, templatekw
 from mercurial.i18n import _
 
+testedwith = 'internal'
+
 # Commands definition was moved elsewhere to ease demandload job.
 
 def convert(ui, src, dest=None, revmapfile=None, **opts):
@@ -328,7 +330,8 @@
           ('', 'root', '', _('specify cvsroot')),
           # Options specific to builtin cvsps
           ('', 'parents', '', _('show parent changesets')),
-          ('', 'ancestors', '', _('show current changeset in ancestor branches')),
+          ('', 'ancestors', '',
+           _('show current changeset in ancestor branches')),
           # Options that are ignored for compatibility with cvsps-2.1
           ('A', 'cvs-direct', None, _('ignored for compatibility')),
          ],
--- a/hgext/convert/bzr.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/bzr.py	Mon Jun 04 17:57:57 2012 -0500
@@ -72,7 +72,7 @@
                 self.ui.warn(_('warning: lightweight checkouts may cause '
                                'conversion failures, try with a regular '
                                'branch instead.\n'))
-        except:
+        except Exception:
             self.ui.note(_('bzr source type could not be determined\n'))
 
     def before(self):
--- a/hgext/convert/common.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/common.py	Mon Jun 04 17:57:57 2012 -0500
@@ -76,7 +76,7 @@
 
     def getheads(self):
         """Return a list of this repository's heads"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def getfile(self, name, rev):
         """Return a pair (data, mode) where data is the file content
@@ -84,7 +84,7 @@
         identifier returned by a previous call to getchanges(). Raise
         IOError to indicate that name was deleted in rev.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def getchanges(self, version):
         """Returns a tuple of (files, copies).
@@ -95,18 +95,18 @@
 
         copies is a dictionary of dest: source
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def getcommit(self, version):
         """Return the commit object for version"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def gettags(self):
         """Return the tags as a dictionary of name: revision
 
         Tag names must be UTF-8 strings.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def recode(self, s, encoding=None):
         if not encoding:
@@ -116,10 +116,10 @@
             return s.encode("utf-8")
         try:
             return s.decode(encoding).encode("utf-8")
-        except:
+        except UnicodeError:
             try:
                 return s.decode("latin-1").encode("utf-8")
-            except:
+            except UnicodeError:
                 return s.decode(encoding, "replace").encode("utf-8")
 
     def getchangedfiles(self, rev, i):
@@ -133,7 +133,7 @@
 
         This function is only needed to support --filemap
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def converted(self, rev, sinkrev):
         '''Notify the source that a revision has been converted.'''
@@ -175,13 +175,13 @@
 
     def getheads(self):
         """Return a list of this repository's heads"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def revmapfile(self):
         """Path to a file that will contain lines
         source_rev_id sink_rev_id
         mapping equivalent revision identifiers for each system."""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def authorfile(self):
         """Path to a file that will contain lines
@@ -203,7 +203,7 @@
         a particular revision (or even what that revision would be)
         before it receives the file data.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def puttags(self, tags):
         """Put tags into sink.
@@ -212,7 +212,7 @@
         Return a pair (tag_revision, tag_parent_revision), or (None, None)
         if nothing was changed.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def setbranch(self, branch, pbranches):
         """Set the current branch name. Called before the first putcommit
@@ -247,7 +247,7 @@
 
     def hascommit(self, rev):
         """Return True if the sink contains rev"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
 class commandline(object):
     def __init__(self, ui, command):
@@ -333,7 +333,7 @@
         argmax = 4096
         try:
             argmax = os.sysconf("SC_ARG_MAX")
-        except:
+        except (AttributeError, ValueError):
             pass
 
         # Windows shells impose their own limits on command line length,
--- a/hgext/convert/convcmd.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/convcmd.py	Mon Jun 04 17:57:57 2012 -0500
@@ -190,7 +190,7 @@
                 children.setdefault(n, [])
                 hasparent = False
                 for p in parents[n]:
-                    if not p in self.map:
+                    if p not in self.map:
                         visit.append(p)
                         hasparent = True
                     children.setdefault(p, []).append(n)
@@ -462,7 +462,7 @@
     if not revmapfile:
         try:
             revmapfile = destc.revmapfile()
-        except:
+        except Exception:
             revmapfile = os.path.join(destc, "map")
 
     c = converter(ui, srcc, destc, revmapfile, opts)
--- a/hgext/convert/cvs.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/cvs.py	Mon Jun 04 17:57:57 2012 -0500
@@ -121,12 +121,13 @@
                         pf = open(cvspass)
                         for line in pf.read().splitlines():
                             part1, part2 = line.split(' ', 1)
+                            # /1 :pserver:user@example.com:2401/cvsroot/foo
+                            # Ah<Z
                             if part1 == '/1':
-                                # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
                                 part1, part2 = part2.split(' ', 1)
                                 format = format1
+                            # :pserver:user@example.com:/cvsroot/foo Ah<Z
                             else:
-                                # :pserver:user@example.com:/cvsroot/foo Ah<Z
                                 format = format0
                             if part1 == format:
                                 passw = part2
--- a/hgext/convert/cvsps.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/cvsps.py	Mon Jun 04 17:57:57 2012 -0500
@@ -336,7 +336,8 @@
                 else:
                     myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                     branches = [b for b in branchmap if branchmap[b] == myrev]
-                    assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
+                    assert len(branches) == 1, ('unknown branch: %s'
+                                                % e.mergepoint)
                     e.mergepoint = branches[0]
             else:
                 e.mergepoint = None
@@ -705,11 +706,11 @@
         if mergeto:
             m = mergeto.search(c.comment)
             if m:
-                try:
+                if m.groups():
                     m = m.group(1)
                     if m == 'HEAD':
                         m = None
-                except:
+                else:
                     m = None   # if no group found then merge to HEAD
                 if m in branches and c.branch != m:
                     # insert empty changeset for merge
--- a/hgext/convert/git.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/git.py	Mon Jun 04 17:57:57 2012 -0500
@@ -69,7 +69,7 @@
 
     def catfile(self, rev, type):
         if rev == hex(nullid):
-            raise IOError()
+            raise IOError
         data, ret = self.gitread("git cat-file %s %s" % (type, rev))
         if ret:
             raise util.Abort(_('cannot read %r object at %s') % (type, rev))
@@ -181,8 +181,8 @@
                 m, f = l[:-1].split("\t")
                 changes.append(f)
         else:
-            fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
-                             % (version, version, i + 1))
+            fh = self.gitopen('git diff-tree --name-only --root -r %s '
+                              '"%s^%s" --' % (version, version, i + 1))
             changes = [f.rstrip('\n') for f in fh]
         if fh.close():
             raise util.Abort(_('cannot read changes in %s') % version)
@@ -211,7 +211,7 @@
                         continue
                     name = '%s%s' % (reftype, name[prefixlen:])
                     bookmarks[name] = rev
-            except:
+            except Exception:
                 pass
 
         return bookmarks
--- a/hgext/convert/hg.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/hg.py	Mon Jun 04 17:57:57 2012 -0500
@@ -95,7 +95,7 @@
             self.after()
             try:
                 self.repo = hg.repository(self.ui, branchpath)
-            except:
+            except Exception:
                 self.repo = hg.repository(self.ui, branchpath, create=True)
             self.before()
 
@@ -105,7 +105,7 @@
         for b in pbranches:
             try:
                 self.repo.lookup(b[0])
-            except:
+            except Exception:
                 missings.setdefault(b[1], []).append(b[0])
 
         if missings:
@@ -192,7 +192,7 @@
 
         try:
             oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
-        except:
+        except Exception:
             oldlines = []
 
         newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
@@ -224,7 +224,7 @@
             bookmarks.write(self.repo)
 
     def hascommit(self, rev):
-        if not rev in self.repo and self.clonebranches:
+        if rev not in self.repo and self.clonebranches:
             raise util.Abort(_('revision %s not found in destination '
                                'repository (lookups with clonebranches=true '
                                'are not implemented)') % rev)
@@ -241,7 +241,7 @@
             # try to provoke an exception if this isn't really a hg
             # repo, but some other bogus compatible-looking url
             if not self.repo.local():
-                raise error.RepoError()
+                raise error.RepoError
         except error.RepoError:
             ui.traceback()
             raise NoRepo(_("%s is not a local Mercurial repository") % path)
@@ -259,7 +259,7 @@
                                  % startnode)
             startrev = self.repo.changelog.rev(startnode)
             children = {startnode: 1}
-            for rev in self.repo.changelog.descendants(startrev):
+            for rev in self.repo.changelog.descendants([startrev]):
                 children[self.repo.changelog.node(rev)] = 1
             self.keep = children.__contains__
         else:
@@ -294,7 +294,8 @@
         if not parents:
             files = sorted(ctx.manifest())
             # getcopies() is not needed for roots, but it is a simple way to
-            # detect missing revlogs and abort on errors or populate self.ignored
+            # detect missing revlogs and abort on errors or populate
+            # self.ignored
             self.getcopies(ctx, parents, files)
             return [(f, rev) for f in files if f not in self.ignored], {}
         if self._changescache and self._changescache[0] == rev:
--- a/hgext/convert/monotone.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/monotone.py	Mon Jun 04 17:57:57 2012 -0500
@@ -30,7 +30,7 @@
                 f = file(path, 'rb')
                 header = f.read(16)
                 f.close()
-            except:
+            except IOError:
                 header = ''
             if header != 'SQLite format 3\x00':
                 raise norepo
@@ -283,11 +283,11 @@
 
     def getfile(self, name, rev):
         if not self.mtnisfile(name, rev):
-            raise IOError() # file was deleted or renamed
+            raise IOError # file was deleted or renamed
         try:
             data = self.mtnrun("get_file_of", name, r=rev)
-        except:
-            raise IOError() # file was deleted or renamed
+        except Exception:
+            raise IOError # file was deleted or renamed
         self.mtnloadmanifest(rev)
         node, attr = self.files.get(name, (None, ""))
         return data, attr
@@ -317,7 +317,7 @@
     def getchangedfiles(self, rev, i):
         # This function is only needed to support --filemap
         # ... and we don't support that
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def before(self):
         # Check if we have a new enough version to use automate stdio
--- a/hgext/convert/subversion.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/convert/subversion.py	Mon Jun 04 17:57:57 2012 -0500
@@ -85,8 +85,8 @@
         self.copyfrom_rev = p.copyfrom_rev
         self.action = p.action
 
-def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
-                    strict_node_history=False):
+def get_log_child(fp, url, paths, start, end, limit=0,
+                  discover_changed_paths=True, strict_node_history=False):
     protocol = -1
     def receiver(orig_paths, revnum, author, date, message, pool):
         if orig_paths is not None:
@@ -139,7 +139,7 @@
                                    ' hg executable is in PATH'))
             try:
                 orig_paths, revnum, author, date, message = entry
-            except:
+            except (TypeError, ValueError):
                 if entry is None:
                     break
                 raise util.Abort(_("log stream exception '%s'") % entry)
@@ -176,7 +176,7 @@
                       'know better.\n'))
             return True
         data = inst.fp.read()
-    except:
+    except Exception:
         # Could be urllib2.URLError if the URL is invalid or anything else.
         return False
     return '<m:human-readable errcode="160013">' in data
@@ -276,7 +276,8 @@
             except ValueError:
                 raise util.Abort(_('svn: revision %s is not an integer') % rev)
 
-        self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
+        self.trunkname = self.ui.config('convert', 'svn.trunk',
+                                        'trunk').strip('/')
         self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
         try:
             self.startrev = int(self.startrev)
@@ -862,13 +863,14 @@
                     pass
         except SubversionException, (inst, num):
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
-                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
+                raise util.Abort(_('svn: branch has no revision %s')
+                                 % to_revnum)
             raise
 
     def getfile(self, file, rev):
         # TODO: ra.get_file transmits the whole file instead of diffs.
         if file in self.removed:
-            raise IOError()
+            raise IOError
         mode = ''
         try:
             new_module, revnum = revsplit(rev)[1:]
@@ -889,7 +891,7 @@
             notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
             if e.apr_err in notfound: # File not found
-                raise IOError()
+                raise IOError
             raise
         if mode == 'l':
             link_prefix = "link "
@@ -949,8 +951,8 @@
             if not p.startswith('/'):
                 p = self.module + '/' + p
             relpaths.append(p.strip('/'))
-        args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
-                strict_node_history]
+        args = [self.baseurl, relpaths, start, end, limit,
+                discover_changed_paths, strict_node_history]
         arg = encodeargs(args)
         hgexe = util.hgexecutable()
         cmd = '%s debugsvnlog' % util.shellquote(hgexe)
--- a/hgext/eol.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/eol.py	Mon Jun 04 17:57:57 2012 -0500
@@ -94,6 +94,8 @@
 from mercurial import util, config, extensions, match, error
 import re, os
 
+testedwith = 'internal'
+
 # Matches a lone LF, i.e., one that is not part of CRLF.
 singlelf = re.compile('(^|[^\r])\n')
 # Matches a single EOL which can either be a CRLF where repeated CR
@@ -111,7 +113,8 @@
         return s
     if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
         return s
-    if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
+    if (ui.configbool('eol', 'fix-trailing-newline', False)
+        and s and s[-1] != '\n'):
         s = s + '\n'
     return eolre.sub('\n', s)
 
@@ -121,7 +124,8 @@
         return s
     if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
         return s
-    if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
+    if (ui.configbool('eol', 'fix-trailing-newline', False)
+        and s and s[-1] != '\n'):
         s = s + '\n'
     return eolre.sub('\r\n', s)
 
--- a/hgext/extdiff.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/extdiff.py	Mon Jun 04 17:57:57 2012 -0500
@@ -66,6 +66,8 @@
 from mercurial import scmutil, scmutil, util, commands, encoding
 import os, shlex, shutil, tempfile, re
 
+testedwith = 'internal'
+
 def snapshot(ui, repo, files, node, tmproot):
     '''snapshot files as of some revision
     if not using snapshot, -I/-X does not work and recursive diff
@@ -88,7 +90,7 @@
     ctx = repo[node]
     for fn in files:
         wfn = util.pconvert(fn)
-        if not wfn in ctx:
+        if wfn not in ctx:
             # File doesn't exist; could be a bogus modify
             continue
         ui.note('  %s\n' % wfn)
--- a/hgext/fetch.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/fetch.py	Mon Jun 04 17:57:57 2012 -0500
@@ -5,13 +5,15 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-'''pull, update and merge in one command'''
+'''pull, update and merge in one command (DEPRECATED)'''
 
 from mercurial.i18n import _
 from mercurial.node import nullid, short
 from mercurial import commands, cmdutil, hg, util, error
 from mercurial.lock import release
 
+testedwith = 'internal'
+
 def fetch(ui, repo, source='default', **opts):
     '''pull changes from a remote repository, merge new changes if needed.
 
@@ -38,7 +40,10 @@
 
     parent, p2 = repo.dirstate.parents()
     branch = repo.dirstate.branch()
-    branchnode = repo.branchtags().get(branch)
+    try:
+        branchnode = repo.branchtip(branch)
+    except error.RepoLookupError:
+        branchnode = None
     if parent != branchnode:
         raise util.Abort(_('working dir not at branch tip '
                            '(use "hg update" to check out branch tip)'))
--- a/hgext/gpg.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/gpg.py	Mon Jun 04 17:57:57 2012 -0500
@@ -12,6 +12,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 class gpg(object):
     def __init__(self, path, key=None):
@@ -43,7 +44,7 @@
                 try:
                     if f:
                         os.unlink(f)
-                except:
+                except OSError:
                     pass
         keys = []
         key, fingerprint = None, None
@@ -286,4 +287,3 @@
         return "%s\n" % hgnode.hex(node)
     else:
         raise util.Abort(_("unknown signature version"))
-
--- a/hgext/graphlog.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/graphlog.py	Mon Jun 04 17:57:57 2012 -0500
@@ -20,6 +20,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 ASCIIDATA = 'ASC'
 
@@ -391,15 +392,41 @@
     return expr, filematcher
 
 def getlogrevs(repo, pats, opts):
-    """Return (revs, expr, filematcher) where revs is a list of
+    """Return (revs, expr, filematcher) where revs is an iterable of
     revision numbers, expr is a revset string built from log options
     and file patterns or None, and used to filter 'revs'. If --stat or
     --patch are not passed filematcher is None. Otherwise it is a
     callable taking a revision number and returning a match object
     filtering the files to be detailed when displaying the revision.
     """
+    def increasingrevs(repo, revs, matcher):
+        # The sorted input rev sequence is chopped in sub-sequences
+        # which are sorted in ascending order and passed to the
+        # matcher. The filtered revs are sorted again as they were in
+        # the original sub-sequence. This achieves several things:
+        #
+        # - getlogrevs() now returns a generator whose behaviour is
+        #   adapted to log needs. First results come fast, last ones
+        #   are batched for performance.
+        #
+        # - revset matchers often operate faster on revisions in
+        #   changelog order, because most filters deal with the
+        #   changelog.
+        #
+        # - revset matchers can reorder revisions. "A or B" typically
+        #   returns the revision matching A then the revision
+        #   matching B. We want to hide this internal implementation
+        #   detail from the caller, and sorting the filtered revisions
+        #   again achieves this.
+        for i, window in cmdutil.increasingwindows(0, len(revs), windowsize=1):
+            orevs = revs[i:i + window]
+            nrevs = set(matcher(repo, sorted(orevs)))
+            for rev in orevs:
+                if rev in nrevs:
+                    yield rev
+
     if not len(repo):
-        return [], None, None
+        return iter([]), None, None
     # Default --rev value depends on --follow but --follow behaviour
     # depends on revisions resolved from --rev...
     follow = opts.get('follow') or opts.get('follow_first')
@@ -411,18 +438,17 @@
         else:
             revs = range(len(repo) - 1, -1, -1)
     if not revs:
-        return [], None, None
+        return iter([]), None, None
     expr, filematcher = _makelogrevset(repo, pats, opts, revs)
     if expr:
-        # Evaluate revisions in changelog order for performance
-        # reasons but preserve the original sequence order in the
-        # filtered result.
-        matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
-        revs = [r for r in revs if r in matched]
+        matcher = revset.match(repo.ui, expr)
+        revs = increasingrevs(repo, revs, matcher)
     if not opts.get('hidden'):
         # --hidden is still experimental and not worth a dedicated revset
         # yet. Fortunately, filtering revision number is fast.
-        revs = [r for r in revs if r not in repo.changelog.hiddenrevs]
+        revs = (r for r in revs if r not in repo.changelog.hiddenrevs)
+    else:
+        revs = iter(revs)
     return revs, expr, filematcher
 
 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
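
The getlogrevs() rewrite above streams revisions through windows, filtering each window with the revset matcher in ascending order while preserving the caller's original ordering, so the first log results appear quickly. A simplified standalone sketch of that idea, using a fixed window size and a plain predicate instead of the real cmdutil.increasingwindows() and revset machinery:

def windowed_filter(items, predicate, windowsize=8):
    # filter items window by window, keeping the caller's original order
    for start in range(0, len(items), windowsize):
        window = items[start:start + windowsize]
        keep = set(x for x in sorted(window) if predicate(x))
        for x in window:
            if x in keep:
                yield x

# e.g. filter a descending revision list while yielding results incrementally
print(list(windowed_filter(list(range(20, 0, -1)), lambda r: r % 3 == 0)))
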
--- a/hgext/hgcia.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/hgcia.py	Mon Jun 04 17:57:57 2012 -0500
@@ -46,17 +46,15 @@
 from mercurial import cmdutil, patch, templater, util, mail
 import email.Parser
 
-import xmlrpclib
+import socket, xmlrpclib
 from xml.sax import saxutils
+testedwith = 'internal'
 
 socket_timeout = 30 # seconds
-try:
+if util.safehasattr(socket, 'setdefaulttimeout'):
     # set a timeout for the socket so you don't have to wait so looooong
     # when cia.vc is having problems. requires python >= 2.3:
-    import socket
     socket.setdefaulttimeout(socket_timeout)
-except:
-    pass
 
 HGCIA_VERSION = '0.1'
 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
--- a/hgext/hgk.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/hgk.py	Mon Jun 04 17:57:57 2012 -0500
@@ -39,6 +39,8 @@
 from mercurial.node import nullid, nullrev, short
 from mercurial.i18n import _
 
+testedwith = 'internal'
+
 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
     """diff trees from two commits"""
     def __difftree(repo, node1, node2, files=[]):
@@ -95,7 +97,8 @@
     nlprefix = '\n' + prefix
     if ctx is None:
         ctx = repo[n]
-    ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
+    # use ctx.node() instead ??
+    ui.write("tree %s\n" % short(ctx.changeset()[0]))
     for p in ctx.parents():
         ui.write("parent %s\n" % p)
 
@@ -113,7 +116,8 @@
     ui.write("branch %s\n\n" % ctx.branch())
 
     if prefix != "":
-        ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
+        ui.write("%s%s\n" % (prefix,
+                             description.replace('\n', nlprefix).strip()))
     else:
         ui.write(description + "\n")
     if prefix:
--- a/hgext/highlight/__init__.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/highlight/__init__.py	Mon Jun 04 17:57:57 2012 -0500
@@ -24,6 +24,7 @@
 import highlight
 from mercurial.hgweb import webcommands, webutil, common
 from mercurial import extensions, encoding
+testedwith = 'internal'
 
 def filerevision_highlight(orig, web, tmpl, fctx):
     mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
@@ -51,11 +52,13 @@
     pg_style = web.config('web', 'pygments_style', 'colorful')
     fmter = highlight.HtmlFormatter(style = pg_style)
     req.respond(common.HTTP_OK, 'text/css')
-    return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
+    return ['/* pygments_style = %s */\n\n' % pg_style,
+            fmter.get_style_defs('')]
 
 def extsetup():
     # monkeypatch in the new version
-    extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
+    extensions.wrapfunction(webcommands, '_filerevision',
+                            filerevision_highlight)
     extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
     webcommands.highlightcss = generate_css
     webcommands.__all__.append('highlightcss')
--- a/hgext/inotify/__init__.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/inotify/__init__.py	Mon Jun 04 17:57:57 2012 -0500
@@ -15,6 +15,8 @@
 import server
 from client import client, QueryFailed
 
+testedwith = 'internal'
+
 def serve(ui, repo, **opts):
     '''start an inotify server for this repository'''
     server.start(ui, repo.dirstate, repo.root, opts)
@@ -46,7 +48,8 @@
             files = match.files()
             if '.' in files:
                 files = []
-            if self._inotifyon and not ignored and not subrepos and not self._dirty:
+            if (self._inotifyon and not ignored and not subrepos and
+                not self._dirty):
                 cli = client(ui, repo)
                 try:
                     result = cli.statusquery(files, match, False,
--- a/hgext/inotify/server.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/inotify/server.py	Mon Jun 04 17:57:57 2012 -0500
@@ -355,7 +355,7 @@
                 except (OSError, socket.error), inst:
                     try:
                         os.unlink(self.realsockpath)
-                    except:
+                    except OSError:
                         pass
                     os.rmdir(tempdir)
                     if inst.errno == errno.EEXIST:
@@ -416,7 +416,7 @@
                 # try to send back our version to the client
                 # this way, the client too is informed of the mismatch
                 sock.sendall(chr(common.version))
-            except:
+            except socket.error:
                 pass
             return
 
--- a/hgext/interhg.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/interhg.py	Mon Jun 04 17:57:57 2012 -0500
@@ -28,6 +28,8 @@
 from mercurial import templatefilters, extensions
 from mercurial.i18n import _
 
+testedwith = 'internal'
+
 interhg_table = []
 
 def uisetup(ui):
--- a/hgext/keyword.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/keyword.py	Mon Jun 04 17:57:57 2012 -0500
@@ -1,6 +1,6 @@
 # keyword.py - $Keyword$ expansion for Mercurial
 #
-# Copyright 2007-2010 Christian Ebert <blacktrash@gmx.net>
+# Copyright 2007-2012 Christian Ebert <blacktrash@gmx.net>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
@@ -92,6 +92,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 # hg commands that do not act on keywords
 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
@@ -187,7 +188,7 @@
         self.repo = repo
         self.match = match.match(repo.root, '', [], inc, exc)
         self.restrict = kwtools['hgcmd'] in restricted.split()
-        self.record = False
+        self.postcommit = False
 
         kwmaps = self.ui.configitems('keywordmaps')
         if kwmaps: # override default templates
@@ -238,11 +239,11 @@
     def iskwfile(self, cand, ctx):
         '''Returns subset of candidates which are configured for keyword
         expansion but are not symbolic links.'''
-        return [f for f in cand if self.match(f) and not 'l' in ctx.flags(f)]
+        return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
 
     def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
         '''Overwrites selected files expanding/shrinking keywords.'''
-        if self.restrict or lookup or self.record: # exclude kw_copy
+        if self.restrict or lookup or self.postcommit: # exclude kw_copy
             candidates = self.iskwfile(candidates, ctx)
         if not candidates:
             return
@@ -279,7 +280,7 @@
                 fp.close()
                 if kwcmd:
                     self.repo.dirstate.normal(f)
-                elif self.record:
+                elif self.postcommit:
                     self.repo.dirstate.normallookup(f)
 
     def shrink(self, fname, text):
@@ -441,7 +442,7 @@
         if name.split('.', 1)[0].find('commit') > -1:
             repo.ui.setconfig('hooks', name, '')
     msg = _('hg keyword configuration and expansion example')
-    ui.note("hg ci -m '%s'\n" % msg)
+    ui.note("hg ci -m '%s'\n" % msg) # check-code-ignore
     repo.commit(text=msg)
     ui.status(_('\n\tkeywords expanded\n'))
     ui.write(repo.wread(fn))
@@ -582,7 +583,7 @@
         def kwcommitctx(self, ctx, error=False):
             n = super(kwrepo, self).commitctx(ctx, error)
             # no lock needed, only called from repo.commit() which already locks
-            if not kwt.record:
+            if not kwt.postcommit:
                 restrict = kwt.restrict
                 kwt.restrict = True
                 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
@@ -624,6 +625,21 @@
         kwt.match = util.never
         return orig(web, req, tmpl)
 
+    def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
+        '''Wraps cmdutil.amend expanding keywords after amend.'''
+        wlock = repo.wlock()
+        try:
+            kwt.postcommit = True
+            newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
+            if newid != old.node():
+                ctx = repo[newid]
+                kwt.restrict = True
+                kwt.overwrite(ctx, ctx.files(), False, True)
+                kwt.restrict = False
+            return newid
+        finally:
+            wlock.release()
+
     def kw_copy(orig, ui, repo, pats, opts, rename=False):
         '''Wraps cmdutil.copy so that copy/rename destinations do not
         contain expanded keywords.
@@ -634,25 +650,29 @@
         For the latter we have to follow the symlink to find out whether its
         target is configured for expansion and we therefore must unexpand the
         keywords in the destination.'''
-        orig(ui, repo, pats, opts, rename)
-        if opts.get('dry_run'):
-            return
-        wctx = repo[None]
-        cwd = repo.getcwd()
+        wlock = repo.wlock()
+        try:
+            orig(ui, repo, pats, opts, rename)
+            if opts.get('dry_run'):
+                return
+            wctx = repo[None]
+            cwd = repo.getcwd()
 
-        def haskwsource(dest):
-            '''Returns true if dest is a regular file and configured for
-            expansion or a symlink which points to a file configured for
-            expansion. '''
-            source = repo.dirstate.copied(dest)
-            if 'l' in wctx.flags(source):
-                source = scmutil.canonpath(repo.root, cwd,
-                                           os.path.realpath(source))
-            return kwt.match(source)
+            def haskwsource(dest):
+                '''Returns true if dest is a regular file and configured for
+                expansion or a symlink which points to a file configured for
+                expansion. '''
+                source = repo.dirstate.copied(dest)
+                if 'l' in wctx.flags(source):
+                    source = scmutil.canonpath(repo.root, cwd,
+                                               os.path.realpath(source))
+                return kwt.match(source)
 
-        candidates = [f for f in repo.dirstate.copies() if
-                      not 'l' in wctx.flags(f) and haskwsource(f)]
-        kwt.overwrite(wctx, candidates, False, False)
+            candidates = [f for f in repo.dirstate.copies() if
+                          'l' not in wctx.flags(f) and haskwsource(f)]
+            kwt.overwrite(wctx, candidates, False, False)
+        finally:
+            wlock.release()
 
     def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
         '''Wraps record.dorecord expanding keywords after recording.'''
@@ -660,7 +680,7 @@
         try:
             # record returns 0 even when nothing has changed
             # therefore compare nodes before and after
-            kwt.record = True
+            kwt.postcommit = True
             ctx = repo['.']
             wstatus = repo[None].status()
             ret = orig(ui, repo, commitfunc, *pats, **opts)
@@ -680,7 +700,7 @@
         # not make sense
         if (fctx._filerev is None and
             (self._repo._encodefilterpats or
-             kwt.match(fctx.path()) and not 'l' in fctx.flags() or
+             kwt.match(fctx.path()) and 'l' not in fctx.flags() or
              self.size() - 4 == fctx.size()) or
             self.size() == fctx.size()):
             return self._filelog.cmp(self._filenode, fctx.data())
@@ -689,6 +709,7 @@
     extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
     extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
     extensions.wrapfunction(patch, 'diff', kw_diff)
+    extensions.wrapfunction(cmdutil, 'amend', kw_amend)
     extensions.wrapfunction(cmdutil, 'copy', kw_copy)
     for c in 'annotate changeset rev filediff diff'.split():
         extensions.wrapfunction(webcommands, c, kwweb_skip)
--- a/hgext/largefiles/lfcommands.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/largefiles/lfcommands.py	Mon Jun 04 17:57:57 2012 -0500
@@ -11,7 +11,8 @@
 import os
 import shutil
 
-from mercurial import util, match as match_, hg, node, context, error, cmdutil
+from mercurial import util, match as match_, hg, node, context, error, \
+    cmdutil, scmutil
 from mercurial.i18n import _
 from mercurial.lock import release
 
@@ -133,7 +134,7 @@
             try:
                 fctx = ctx.filectx(lfutil.standin(f))
             except error.LookupError:
-                raise IOError()
+                raise IOError
             renamed = fctx.renamed()
             if renamed:
                 renamed = lfutil.splitstandin(renamed[0])
@@ -233,7 +234,7 @@
             try:
                 fctx = ctx.filectx(srcfname)
             except error.LookupError:
-                raise IOError()
+                raise IOError
             renamed = fctx.renamed()
             if renamed:
                 # standin is always a largefile because largefile-ness
@@ -282,7 +283,7 @@
     try:
         fctx = ctx.filectx(f)
     except error.LookupError:
-        raise IOError()
+        raise IOError
     renamed = fctx.renamed()
     if renamed:
         renamed = renamed[0]
@@ -368,7 +369,7 @@
     store = basestore._openstore(repo)
     return store.verify(revs, contents=contents)
 
-def cachelfiles(ui, repo, node):
+def cachelfiles(ui, repo, node, filelist=None):
     '''cachelfiles ensures that all largefiles needed by the specified revision
     are present in the repository's largefile cache.
 
@@ -376,6 +377,8 @@
     by this operation; missing is the list of files that were needed but could
     not be found.'''
     lfiles = lfutil.listlfiles(repo, node)
+    if filelist:
+        lfiles = set(lfiles) & set(filelist)
     toget = []
 
     for lfile in lfiles:
@@ -404,6 +407,23 @@
 
     return ([], [])
 
+def downloadlfiles(ui, repo, rev=None):
+    matchfn = scmutil.match(repo[None],
+                            [repo.wjoin(lfutil.shortname)], {})
+    def prepare(ctx, fns):
+        pass
+    totalsuccess = 0
+    totalmissing = 0
+    for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
+                                      prepare):
+        success, missing = cachelfiles(ui, repo, ctx.node())
+        totalsuccess += len(success)
+        totalmissing += len(missing)
+    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
+    if totalmissing > 0:
+        ui.status(_("%d largefiles failed to download\n") % totalmissing)
+    return totalsuccess, totalmissing
+
 def updatelfiles(ui, repo, filelist=None, printmessage=True):
     wlock = repo.wlock()
     try:
@@ -417,7 +437,7 @@
         if printmessage and lfiles:
             ui.status(_('getting changed largefiles\n'))
             printed = True
-            cachelfiles(ui, repo, '.')
+            cachelfiles(ui, repo, '.', lfiles)
 
         updated, removed = 0, 0
         for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
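
The cachelfiles() change above narrows its work when a file list is passed: only largefiles that are both needed by the revision and named in the list are fetched, and downloadlfiles() then just sums the cached/missing counts returned for every changeset visited by walkchangerevs(). A standalone sketch of the filtering (the function name is illustrative, not part of the extension):

    def restrictlfiles(needed, requested=None):
        # fetch only largefiles both needed by the revision and requested;
        # with no request list, fetch everything the revision needs
        if requested:
            return set(needed) & set(requested)
        return set(needed)

    restrictlfiles(['big.bin', 'huge.iso'], ['huge.iso', 'other.dat'])
    # -> set(['huge.iso'])
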
--- a/hgext/largefiles/overrides.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/largefiles/overrides.py	Mon Jun 04 17:57:57 2012 -0500
@@ -169,6 +169,8 @@
         # function handle this.
         if not getattr(repo, "_isaddremove", False):
             lfutil.reporemove(repo, remove, unlink=True)
+        else:
+            lfutil.reporemove(repo, remove, unlink=False)
     finally:
         wlock.release()
 
@@ -651,6 +653,7 @@
 # take some extra care so that the largefiles are correctly updated in the
 # working copy
 def overridepull(orig, ui, repo, source=None, **opts):
+    revsprepull = len(repo)
     if opts.get('rebase', False):
         repo._isrebasing = True
         try:
@@ -660,7 +663,6 @@
                           'the update flag\n')
             del opts['rebase']
             cmdutil.bailifchanged(repo)
-            revsprepull = len(repo)
             origpostincoming = commands.postincoming
             def _dummy(*args, **kwargs):
                 pass
@@ -695,8 +697,35 @@
             (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
             numcached += len(cached)
         ui.status(_("%d largefiles cached\n") % numcached)
+    if opts.get('all_largefiles'):
+        revspostpull = len(repo)
+        revs = []
+        for rev in xrange(revsprepull + 1, revspostpull):
+            revs.append(repo[rev].rev())
+        lfcommands.downloadlfiles(ui, repo, revs)
     return result
 
+def overrideclone(orig, ui, source, dest=None, **opts):
+    if dest is None:
+        dest = hg.defaultdest(source)
+    if opts.get('all_largefiles') and not hg.islocal(dest):
+        raise util.Abort(_('--all-largefiles is incompatible with '
+                           'non-local destination %s') % dest)
+    result = hg.clone(ui, opts, source, dest,
+                      pull=opts.get('pull'),
+                      stream=opts.get('uncompressed'),
+                      rev=opts.get('rev'),
+                      update=True, # required for successful walkchangerevs
+                      branch=opts.get('branch'))
+    if result is None:
+        return True
+    if opts.get('all_largefiles'):
+        sourcerepo, destrepo = result
+        success, missing = lfcommands.downloadlfiles(ui, destrepo, None)
+        return missing != 0
+    return result is None
+
 def overriderebase(orig, ui, repo, **opts):
     repo._isrebasing = True
     try:
@@ -782,6 +811,47 @@
 
     archiver.done()
 
+def hgsubrepoarchive(orig, repo, ui, archiver, prefix):
+    rev = repo._state[1]
+    ctx = repo._repo[rev]
+
+    lfcommands.cachelfiles(ui, repo._repo, ctx.node())
+
+    def write(name, mode, islink, getdata):
+        if lfutil.isstandin(name):
+            return
+        data = getdata()
+
+        archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
+
+    for f in ctx:
+        ff = ctx.flags(f)
+        getdata = ctx[f].data
+        if lfutil.isstandin(f):
+            path = lfutil.findfile(repo._repo, getdata().strip())
+            if path is None:
+                raise util.Abort(
+                    _('largefile %s not found in repo store or system cache')
+                    % lfutil.splitstandin(f))
+            f = lfutil.splitstandin(f)
+
+            def getdatafn():
+                fd = None
+                try:
+                    fd = open(os.path.join(prefix, path), 'rb')
+                    return fd.read()
+                finally:
+                    if fd:
+                        fd.close()
+
+            getdata = getdatafn
+
+        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
+
+    for subpath in ctx.substate:
+        sub = ctx.sub(subpath)
+        sub.archive(repo.ui, archiver, prefix)
+
 # If a largefile is modified, the change is not reflected in its
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
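
hgsubrepoarchive() above defers reading each largefile until the archiver asks for its data by swapping a small closure in for ctx[f].data. A reduced sketch of that deferred-reader pattern (path handling simplified, names hypothetical):

    def makegetdata(path):
        # return a zero-argument reader; the file is opened only when the
        # archiver actually calls it, and is always closed afterwards
        def getdatafn():
            fd = None
            try:
                fd = open(path, 'rb')
                return fd.read()
            finally:
                if fd:
                    fd.close()
        return getdatafn

Building a fresh closure per file keeps at most one largefile open at a time while the archive is written.
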
--- a/hgext/largefiles/reposetup.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/largefiles/reposetup.py	Mon Jun 04 17:57:57 2012 -0500
@@ -340,8 +340,9 @@
                                     lfdirstate.normal(lfile)
                     for lfile in lfdirstate:
                         if lfile in modifiedfiles:
-                            if not os.path.exists(
-                                    repo.wjoin(lfutil.standin(lfile))):
+                            if (not os.path.exists(repo.wjoin(
+                               lfutil.standin(lfile)))) or \
+                               (not os.path.exists(repo.wjoin(lfile))):
                                 lfdirstate.drop(lfile)
 
                     result = orig(text=text, user=user, date=date, match=match,
--- a/hgext/largefiles/uisetup.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/largefiles/uisetup.py	Mon Jun 04 17:57:57 2012 -0500
@@ -70,6 +70,15 @@
                                    overrides.overrideupdate)
     entry = extensions.wrapcommand(commands.table, 'pull',
                                    overrides.overridepull)
+    pullopt = [('', 'all-largefiles', None,
+                 _('download all pulled versions of largefiles'))]
+    entry[1].extend(pullopt)
+    entry = extensions.wrapcommand(commands.table, 'clone',
+                                   overrides.overrideclone)
+    cloneopt = [('', 'all-largefiles', None,
+                 _('download all versions of all largefiles'))]
+
+    entry[1].extend(cloneopt)
     entry = extensions.wrapcommand(commands.table, 'cat',
                                    overrides.overridecat)
     entry = extensions.wrapfunction(merge, '_checkunknownfile',
@@ -100,6 +109,7 @@
     extensions.wrapfunction(hg, 'merge', overrides.hgmerge)
 
     extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
+    extensions.wrapfunction(hgsubrepo, 'archive', overrides.hgsubrepoarchive)
     extensions.wrapfunction(cmdutil, 'bailifchanged',
                             overrides.overridebailifchanged)
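
The uisetup() hunk relies on extensions.wrapcommand() returning the command-table entry, whose option list (entry[1]) can then be extended with new flags such as --all-largefiles. A minimal sketch of that wrapping pattern for a hypothetical extension (the wrapper body and its message are illustrative only):

    from mercurial import commands, extensions
    from mercurial.i18n import _

    def mypull(orig, ui, repo, source=None, **opts):
        # run the wrapped command first, then act on the extra flag
        result = orig(ui, repo, source, **opts)
        if opts.get('all_largefiles'):
            ui.note(_('fetching every pulled largefile revision\n'))
        return result

    def uisetup(ui):
        entry = extensions.wrapcommand(commands.table, 'pull', mypull)
        entry[1].append(('', 'all-largefiles', None,
                         _('download all pulled versions of largefiles')))
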
 
--- a/hgext/largefiles/wirestore.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/largefiles/wirestore.py	Mon Jun 04 17:57:57 2012 -0500
@@ -14,7 +14,7 @@
         if not cap:
             raise lfutil.storeprotonotcapable([])
         storetypes = cap.split(',')
-        if not 'serve' in storetypes:
+        if 'serve' not in storetypes:
             raise lfutil.storeprotonotcapable(storetypes)
         self.remote = remote
         super(wirestore, self).__init__(ui, repo, remote.url())
--- a/hgext/mq.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/mq.py	Mon Jun 04 17:57:57 2012 -0500
@@ -46,6 +46,17 @@
 
 You will by default be managing a patch queue named "patches". You can
 create other, independent patch queues with the :hg:`qqueue` command.
+
+If the working directory contains uncommitted files, qpush, qpop and
+qgoto abort immediately. If -f/--force is used, the changes are
+discarded. Setting::
+
+  [mq]
+  keepchanges = True
+
+makes them behave as if --keep-changes were passed, and non-conflicting
+local changes will be tolerated and preserved. If incompatible options
+such as -f/--force or --exact are passed, this setting is ignored.
 '''
 
 from mercurial.i18n import _
@@ -62,6 +73,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 # Patch names looks like unix-file names.
 # They must be joinable with queue directory and result in the patch path.
@@ -280,6 +292,9 @@
         if phase is not None:
             repo.ui.restoreconfig(backup)
 
+class AbortNoCleanup(error.Abort):
+    pass
+
 class queue(object):
     def __init__(self, ui, path, patchdir=None):
         self.basepath = path
@@ -308,7 +323,7 @@
         try:
             gitmode = ui.configbool('mq', 'git', None)
             if gitmode is None:
-                raise error.ConfigError()
+                raise error.ConfigError
             self.gitmode = gitmode and 'yes' or 'no'
         except error.ConfigError:
             self.gitmode = ui.config('mq', 'git', 'auto').lower()
@@ -599,7 +614,7 @@
             raise util.Abort(_("repo commit failed"))
         try:
             ph = patchheader(mergeq.join(patch), self.plainmode)
-        except:
+        except Exception:
             raise util.Abort(_("unable to read %s") % patch)
 
         diffopts = self.patchopts(diffopts, patch)
@@ -681,7 +696,7 @@
 
     def apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
-              tobackup=None):
+              tobackup=None, keepchanges=False):
         wlock = lock = tr = None
         try:
             wlock = repo.wlock()
@@ -690,11 +705,15 @@
             try:
                 ret = self._apply(repo, series, list, update_status,
                                   strict, patchdir, merge, all_files=all_files,
-                                  tobackup=tobackup)
+                                  tobackup=tobackup, keepchanges=keepchanges)
                 tr.close()
                 self.savedirty()
                 return ret
-            except:
+            except AbortNoCleanup:
+                tr.close()
+                self.savedirty()
+                return 2, repo.dirstate.p1()
+            except: # re-raises
                 try:
                     tr.abort()
                 finally:
@@ -708,7 +727,7 @@
 
     def _apply(self, repo, series, list=False, update_status=True,
                strict=False, patchdir=None, merge=None, all_files=None,
-               tobackup=None):
+               tobackup=None, keepchanges=False):
         """returns (error, hash)
 
         error = 1 for unable to read, 2 for patch failed, 3 for patch
@@ -749,6 +768,9 @@
                 if tobackup:
                     touched = patchmod.changedfiles(self.ui, repo, pf)
                     touched = set(touched) & tobackup
+                    if touched and keepchanges:
+                        raise AbortNoCleanup(
+                            _("local changes found, refresh first"))
                     self.backup(repo, touched, copy=True)
                     tobackup = tobackup - touched
                 (patcherr, files, fuzz) = self.patch(repo, pf)
@@ -862,7 +884,7 @@
     def finish(self, repo, revs):
         # Manually trigger phase computation to ensure phasedefaults is
         # executed before we remove the patches.
-        repo._phaserev
+        repo._phasecache
         patches = self._revpatches(repo, sorted(revs))
         qfinished = self._cleanup(patches, len(patches))
         if qfinished and repo.ui.configbool('mq', 'secret', False):
@@ -959,6 +981,10 @@
             else:
                 raise util.Abort(_('patch "%s" already exists') % name)
 
+    def checkkeepchanges(self, keepchanges, force):
+        if force and keepchanges:
+            raise util.Abort(_('cannot use both --force and --keep-changes'))
+
     def new(self, repo, patchfn, *pats, **opts):
         """options:
            msg: a string or a no-argument function returning a string
@@ -1052,14 +1078,14 @@
                     r = self.qrepo()
                     if r:
                         r[None].add([patchfn])
-                except:
+                except: # re-raises
                     repo.rollback()
                     raise
             except Exception:
                 patchpath = self.join(patchfn)
                 try:
                     os.unlink(patchpath)
-                except:
+                except OSError:
                     self.ui.warn(_('error unlinking %s\n') % patchpath)
                 raise
             self.removeundo(repo)
@@ -1156,8 +1182,10 @@
                                 return self.series[i + off]
         raise util.Abort(_("patch %s not in series") % patch)
 
-    def push(self, repo, patch=None, force=False, list=False,
-             mergeq=None, all=False, move=False, exact=False, nobackup=False):
+    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
+             all=False, move=False, exact=False, nobackup=False,
+             keepchanges=False):
+        self.checkkeepchanges(keepchanges, force)
         diffopts = self.diffopts()
         wlock = repo.wlock()
         try:
@@ -1212,14 +1240,19 @@
             if start == len(self.series):
                 self.ui.warn(_('patch series already fully applied\n'))
                 return 1
-            if not force:
+            if not force and not keepchanges:
                 self.checklocalchanges(repo, refresh=self.applied)
 
             if exact:
+                if keepchanges:
+                    raise util.Abort(
+                        _("cannot use --exact and --keep-changes together"))
                 if move:
-                    raise util.Abort(_("cannot use --exact and --move together"))
+                    raise util.Abort(_('cannot use --exact and --move '
+                                       'together'))
                 if self.applied:
-                    raise util.Abort(_("cannot push --exact with applied patches"))
+                    raise util.Abort(_('cannot push --exact with applied '
+                                       'patches'))
                 root = self.series[start]
                 target = patchheader(self.join(root), self.plainmode).parent
                 if not target:
@@ -1257,9 +1290,12 @@
                 end = self.series.index(patch, start) + 1
 
             tobackup = set()
-            if not nobackup and force:
+            if (not nobackup and force) or keepchanges:
                 m, a, r, d = self.checklocalchanges(repo, force=True)
-                tobackup.update(m + a)
+                if keepchanges:
+                    tobackup.update(m + a + r + d)
+                else:
+                    tobackup.update(m + a)
 
             s = self.series[start:end]
             all_files = set()
@@ -1268,8 +1304,8 @@
                     ret = self.mergepatch(repo, mergeq, s, diffopts)
                 else:
                     ret = self.apply(repo, s, list, all_files=all_files,
-                                     tobackup=tobackup)
-            except:
+                                     tobackup=tobackup, keepchanges=keepchanges)
+            except: # re-raises
                 self.ui.warn(_('cleaning up working directory...'))
                 node = repo.dirstate.p1()
                 hg.revert(repo, node, None)
@@ -1299,7 +1335,8 @@
             wlock.release()
 
     def pop(self, repo, patch=None, force=False, update=True, all=False,
-            nobackup=False):
+            nobackup=False, keepchanges=False):
+        self.checkkeepchanges(keepchanges, force)
         wlock = repo.wlock()
         try:
             if patch:
@@ -1346,9 +1383,13 @@
 
             tobackup = set()
             if update:
-                m, a, r, d = self.checklocalchanges(repo, force=force)
-                if not nobackup and force:
-                    tobackup.update(m + a)
+                m, a, r, d = self.checklocalchanges(
+                    repo, force=force or keepchanges)
+                if force:
+                    if not nobackup:
+                        tobackup.update(m + a)
+                elif keepchanges:
+                    tobackup.update(m + a + r + d)
 
             self.applieddirty = True
             end = len(self.applied)
@@ -1379,8 +1420,10 @@
                 if d:
                     raise util.Abort(_("deletions found between repo revs"))
 
-                # backup local changes in --force case
-                self.backup(repo, set(a + m + r) & tobackup)
+                tobackup = set(a + m + r) & tobackup
+                if keepchanges and tobackup:
+                    self.localchangesfound()
+                self.backup(repo, tobackup)
 
                 for f in a:
                     try:
@@ -1589,7 +1632,7 @@
                 self.applieddirty = True
                 self.strip(repo, [top], update=False,
                            backup='strip')
-            except:
+            except: # re-raises
                 repo.dirstate.invalidate()
                 raise
 
@@ -1603,7 +1646,7 @@
                 # only write patch after a successful commit
                 patchf.close()
                 self.applied.append(statusentry(n, patchfn))
-            except:
+            except: # re-raises
                 ctx = repo[cparents[0]]
                 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                 self.savedirty()
@@ -1959,6 +2002,14 @@
         self.removeundo(repo)
         return imported
 
+def fixkeepchangesopts(ui, opts):
+    if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
+        or opts.get('exact')):
+        return opts
+    opts = dict(opts)
+    opts['keep_changes'] = True
+    return opts
+
 @command("qdelete|qremove|qrm",
          [('k', 'keep', None, _('keep patch file')),
           ('r', 'rev', [],
@@ -2140,7 +2191,8 @@
 
 @command("qclone",
          [('', 'pull', None, _('use pull protocol to copy metadata')),
-          ('U', 'noupdate', None, _('do not update the new working directories')),
+          ('U', 'noupdate', None,
+           _('do not update the new working directories')),
           ('', 'uncompressed', None,
            _('use uncompressed transfer (fast over LAN)')),
           ('p', 'patches', '',
@@ -2242,7 +2294,8 @@
     """print the entire series file
 
     Returns 0 on success."""
-    repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
+    repo.mq.qseries(repo, missing=opts.get('missing'),
+                    summary=opts.get('summary'))
     return 0
 
 @command("qtop", seriesopts, _('hg qtop [-s]'))
@@ -2463,7 +2516,8 @@
         if p in patches or p == parent:
             ui.warn(_('Skipping already folded patch %s\n') % p)
         if q.isapplied(p):
-            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
+            raise util.Abort(_('qfold cannot fold already applied patch %s')
+                             % p)
         patches.append(p)
 
     for p in patches:
@@ -2497,20 +2551,26 @@
         wlock.release()
 
 @command("qgoto",
-         [('f', 'force', None, _('overwrite any local changes')),
+         [('', 'keep-changes', None,
+           _('tolerate non-conflicting local changes')),
+          ('f', 'force', None, _('overwrite any local changes')),
           ('', 'no-backup', None, _('do not save backup copies of files'))],
          _('hg qgoto [OPTION]... PATCH'))
 def goto(ui, repo, patch, **opts):
     '''push or pop patches until named patch is at top of stack
 
     Returns 0 on success.'''
+    opts = fixkeepchangesopts(ui, opts)
     q = repo.mq
     patch = q.lookup(patch)
     nobackup = opts.get('no_backup')
+    keepchanges = opts.get('keep_changes')
     if q.isapplied(patch):
-        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup)
+        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
+                    keepchanges=keepchanges)
     else:
-        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup)
+        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
+                     keepchanges=keepchanges)
     q.savedirty()
     return ret
 
@@ -2566,7 +2626,8 @@
     args = list(args)
     if opts.get('list'):
         if args or opts.get('none'):
-            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
+            raise util.Abort(_('cannot mix -l/--list with options or '
+                               'arguments'))
         for i in xrange(len(q.series)):
             status(i)
         return
@@ -2630,8 +2691,11 @@
     return newpath
 
 @command("^qpush",
-         [('f', 'force', None, _('apply on top of local changes')),
-          ('e', 'exact', None, _('apply the target patch to its recorded parent')),
+         [('', 'keep-changes', None,
+           _('tolerate non-conflicting local changes')),
+          ('f', 'force', None, _('apply on top of local changes')),
+          ('e', 'exact', None,
+           _('apply the target patch to its recorded parent')),
           ('l', 'list', None, _('list patch name in commit text')),
           ('a', 'all', None, _('apply all patches')),
           ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
@@ -2644,14 +2708,17 @@
 def push(ui, repo, patch=None, **opts):
     """push the next patch onto the stack
 
-    When -f/--force is applied, all local changes in patched files
-    will be lost.
+    By default, abort if the working directory contains uncommitted
+    changes. With --keep-changes, abort only if the uncommitted files
+    overlap with patched files. With -f/--force, backup and patch over
+    uncommitted changes.
 
     Return 0 on success.
     """
     q = repo.mq
     mergeq = None
 
+    opts = fixkeepchangesopts(ui, opts)
     if opts.get('merge'):
         if opts.get('name'):
             newpath = repo.join(opts.get('name'))
@@ -2664,25 +2731,34 @@
         ui.warn(_("merging with queue at: %s\n") % mergeq.path)
     ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                  mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
-                 exact=opts.get('exact'), nobackup=opts.get('no_backup'))
+                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
+                 keepchanges=opts.get('keep_changes'))
     return ret
 
 @command("^qpop",
          [('a', 'all', None, _('pop all patches')),
           ('n', 'name', '',
            _('queue name to pop (DEPRECATED)'), _('NAME')),
+          ('', 'keep-changes', None,
+           _('tolerate non-conflicting local changes')),
           ('f', 'force', None, _('forget any local changes to patched files')),
           ('', 'no-backup', None, _('do not save backup copies of files'))],
          _('hg qpop [-a] [-f] [PATCH | INDEX]'))
 def pop(ui, repo, patch=None, **opts):
     """pop the current patch off the stack
 
-    By default, pops off the top of the patch stack. If given a patch
-    name, keeps popping off patches until the named patch is at the
-    top of the stack.
+    Without argument, pops off the top of the patch stack. If given a
+    patch name, keeps popping off patches until the named patch is at
+    the top of the stack.
+
+    By default, abort if the working directory contains uncommitted
+    changes. With --keep-changes, abort only if the uncommitted files
+    overlap with patched files. With -f/--force, backup and discard
+    changes made to such files.
 
     Return 0 on success.
     """
+    opts = fixkeepchangesopts(ui, opts)
     localupdate = True
     if opts.get('name'):
         q = queue(ui, repo.path, repo.join(opts.get('name')))
@@ -2691,7 +2767,8 @@
     else:
         q = repo.mq
     ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
-                all=opts.get('all'), nobackup=opts.get('no_backup'))
+                all=opts.get('all'), nobackup=opts.get('no_backup'),
+                keepchanges=opts.get('keep_changes'))
     q.savedirty()
     return ret
 
@@ -2819,8 +2896,10 @@
           ('', 'no-backup', None, _('no backups')),
           ('', 'nobackup', None, _('no backups (DEPRECATED)')),
           ('n', '', None, _('ignored  (DEPRECATED)')),
-          ('k', 'keep', None, _("do not modify working copy during strip"))],
-          _('hg strip [-k] [-f] [-n] REV...'))
+          ('k', 'keep', None, _("do not modify working copy during strip")),
+          ('B', 'bookmark', '', _("remove revs only reachable from given"
+                                  " bookmark"))],
+          _('hg strip [-k] [-f] [-n] [-B bookmark] REV...'))
 def strip(ui, repo, *revs, **opts):
     """strip changesets and all their descendants from the repository
 
@@ -2855,10 +2934,36 @@
     cl = repo.changelog
     revs = list(revs) + opts.get('rev')
     revs = set(scmutil.revrange(repo, revs))
+
+    if opts.get('bookmark'):
+        mark = opts.get('bookmark')
+        marks = repo._bookmarks
+        if mark not in marks:
+            raise util.Abort(_("bookmark '%s' not found") % mark)
+
+        # If the requested bookmark is not the only one pointing to a
+        # revision, we only have to delete the bookmark and not strip
+        # anything. revsets cannot detect that case.
+        uniquebm = True
+        for m, n in marks.iteritems():
+            if m != mark and n == repo[mark].node():
+                uniquebm = False
+                break
+        if uniquebm:
+            rsrevs = repo.revs("ancestors(bookmark(%s)) - "
+                               "ancestors(head() and not bookmark(%s)) - "
+                               "ancestors(bookmark() and not bookmark(%s))",
+                               mark, mark, mark)
+            revs.update(set(rsrevs))
+        if not revs:
+            del marks[mark]
+            repo._writebookmarks(marks)
+            ui.write(_("bookmark '%s' deleted\n") % mark)
+
     if not revs:
         raise util.Abort(_('empty revision set'))
 
-    descendants = set(cl.descendants(*revs))
+    descendants = set(cl.descendants(revs))
     strippedrevs = revs.union(descendants)
     roots = revs.difference(descendants)
 
@@ -2900,8 +3005,14 @@
         finally:
             wlock.release()
 
+    if opts.get('bookmark'):
+        del marks[mark]
+        repo._writebookmarks(marks)
+        ui.write(_("bookmark '%s' deleted\n") % mark)
+
     repo.mq.strip(repo, revs, backup=backup, update=update,
                   force=opts.get('force'))
+
     return 0
 
 @command("qselect",
@@ -3309,8 +3420,8 @@
             tags = result[0]
             for patch in mqtags:
                 if patch[1] in tags:
-                    self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
-                                 % patch[1])
+                    self.ui.warn(_('Tag %s overrides mq patch of the same '
+                                   'name\n') % patch[1])
                 else:
                     tags[patch[1]] = patch[0]
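
Most of the mq changes hang off the new keepchanges machinery: fixkeepchangesopts() lets the mq.keepchanges setting imply --keep-changes unless -f/--force or --exact was given explicitly, and push()/pop() then widen the backup set to modified, added, removed and deleted files. A standalone sketch of the option fixup, with the config lookup replaced by a plain boolean for illustration:

    def fixkeepchangesopts(configured, opts):
        # the config only kicks in when neither --force nor --exact was given
        if not configured or opts.get('force') or opts.get('exact'):
            return opts
        opts = dict(opts)              # do not mutate the caller's options
        opts['keep_changes'] = True
        return opts

    fixkeepchangesopts(True, {'force': True})   # returned unchanged
    fixkeepchangesopts(True, {})                # {'keep_changes': True}
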
 
--- a/hgext/notify.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/notify.py	Mon Jun 04 17:57:57 2012 -0500
@@ -128,6 +128,8 @@
 from mercurial import patch, cmdutil, templater, util, mail
 import email.Parser, email.Errors, fnmatch, socket, time
 
+testedwith = 'internal'
+
 # template for single changeset can include email headers.
 single_template = '''
 Subject: changeset in {webroot}: {desc|firstline|strip}
@@ -353,8 +355,8 @@
                     author = repo[rev].user()
             else:
                 data += ui.popbuffer()
-                ui.note(_('notify: suppressing notification for merge %d:%s\n') %
-                        (rev, repo[rev].hex()[:12]))
+                ui.note(_('notify: suppressing notification for merge %d:%s\n')
+                        % (rev, repo[rev].hex()[:12]))
                 ui.pushbuffer()
         if count:
             n.diff(ctx, repo['tip'])
--- a/hgext/pager.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/pager.py	Mon Jun 04 17:57:57 2012 -0500
@@ -51,7 +51,9 @@
 from mercurial import commands, dispatch, util, extensions
 from mercurial.i18n import _
 
-def _runpager(p):
+testedwith = 'internal'
+
+def _runpager(ui, p):
     pager = subprocess.Popen(p, shell=True, bufsize=-1,
                              close_fds=util.closefds, stdin=subprocess.PIPE,
                              stdout=sys.stdout, stderr=sys.stderr)
@@ -59,7 +61,7 @@
     stdout = os.dup(sys.stdout.fileno())
     stderr = os.dup(sys.stderr.fileno())
     os.dup2(pager.stdin.fileno(), sys.stdout.fileno())
-    if util.isatty(sys.stderr):
+    if ui._isatty(sys.stderr):
         os.dup2(pager.stdin.fileno(), sys.stderr.fileno())
 
     @atexit.register
@@ -70,7 +72,7 @@
         pager.wait()
 
 def uisetup(ui):
-    if ui.plain() or '--debugger' in sys.argv or not util.isatty(sys.stdout):
+    if '--debugger' in sys.argv or not ui.formatted():
         return
 
     def pagecmd(orig, ui, options, cmd, cmdfunc):
@@ -87,7 +89,7 @@
                 ui.setconfig('ui', 'interactive', False)
                 if util.safehasattr(signal, "SIGPIPE"):
                     signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-                _runpager(p)
+                _runpager(ui, p)
         return orig(ui, options, cmd, cmdfunc)
 
     extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
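
The pager hunks route terminal detection through ui._isatty() and ui.formatted() instead of util.isatty(), but the redirection itself is unchanged: duplicate the real stdout, point the process's stdout at the pager's stdin, and restore it at exit. A minimal sketch of that file-descriptor shuffle (assuming a POSIX system and a shell-resolvable pager such as 'less'):

    import atexit, os, subprocess, sys

    def runpager(cmd):
        pager = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                 stdin=subprocess.PIPE, stdout=sys.stdout)
        stdout = os.dup(sys.stdout.fileno())        # remember the real stdout
        os.dup2(pager.stdin.fileno(), sys.stdout.fileno())

        @atexit.register
        def killpager():
            pager.stdin.close()                     # send EOF to the pager
            os.dup2(stdout, sys.stdout.fileno())    # put stdout back
            pager.wait()
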
--- a/hgext/patchbomb.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/patchbomb.py	Mon Jun 04 17:57:57 2012 -0500
@@ -55,6 +55,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 def prompt(ui, prompt, default=None, rest=':'):
     if default:
@@ -109,7 +110,8 @@
         msg = email.MIMEMultipart.MIMEMultipart()
         if body:
             msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
-        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
+        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
+                               opts.get('test'))
         binnode = bin(node)
         # if node is mq patch, it will have the patch file's name as a tag
         if not patchname:
@@ -119,7 +121,8 @@
                 patchname = patchtags[0]
             elif total > 1:
                 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
-                                                  binnode, seqno=idx, total=total)
+                                                 binnode, seqno=idx,
+                                                 total=total)
             else:
                 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
         disposition = 'inline'
@@ -302,7 +305,7 @@
         finally:
             try:
                 os.unlink(tmpfn)
-            except:
+            except OSError:
                 pass
             os.rmdir(tmpdir)
 
--- a/hgext/progress.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/progress.py	Mon Jun 04 17:57:57 2012 -0500
@@ -38,14 +38,14 @@
 import sys
 import time
 
-from mercurial import util
 from mercurial.i18n import _
+testedwith = 'internal'
 
 def spacejoin(*args):
     return ' '.join(s for s in args if s)
 
 def shouldprint(ui):
-    return util.isatty(sys.stderr) or ui.configbool('progress', 'assume-tty')
+    return ui._isatty(sys.stderr) or ui.configbool('progress', 'assume-tty')
 
 def fmtremaining(seconds):
     if seconds < 60:
@@ -237,7 +237,7 @@
             # truncate the list of topics assuming all topics within
             # this one are also closed
             if topic in self.topics:
-              self.topics = self.topics[:self.topics.index(topic)]
+                self.topics = self.topics[:self.topics.index(topic)]
         else:
             if topic not in self.topics:
                 self.starttimes[topic] = now
--- a/hgext/purge.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/purge.py	Mon Jun 04 17:57:57 2012 -0500
@@ -30,6 +30,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 @command('purge|clean',
     [('a', 'abort-on-err', None, _('abort if an error occurs')),
--- a/hgext/rebase.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/rebase.py	Mon Jun 04 17:57:57 2012 -0500
@@ -26,6 +26,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 @command('rebase',
     [('s', 'source', '',
@@ -182,7 +183,7 @@
                 branch = repo[None].branch()
                 dest = repo[branch]
             else:
-                dest = repo[destf]
+                dest = scmutil.revsingle(repo, destf)
 
             if revf:
                 rebaseset = repo.revs('%lr', revf)
@@ -201,7 +202,7 @@
                 root = None
 
             if not rebaseset:
-                repo.ui.debug('base is ancestor of destination')
+                repo.ui.debug('base is ancestor of destination\n')
                 result = None
             elif not keepf and list(repo.revs('first(children(%ld) - %ld)',
                                               rebaseset, rebaseset)):
@@ -214,7 +215,7 @@
                                  % repo[root],
                                  hint=_('see hg help phases for details'))
             else:
-                result = buildstate(repo, dest, rebaseset, detachf)
+                result = buildstate(repo, dest, rebaseset, detachf, collapsef)
 
             if not result:
                 # Empty state built, nothing to rebase
@@ -223,7 +224,7 @@
             else:
                 originalwd, target, state = result
                 if collapsef:
-                    targetancestors = set(repo.changelog.ancestors(target))
+                    targetancestors = set(repo.changelog.ancestors([target]))
                     targetancestors.add(target)
                     external = checkexternal(repo, state, targetancestors)
 
@@ -242,7 +243,7 @@
 
         # Rebase
         if not targetancestors:
-            targetancestors = set(repo.changelog.ancestors(target))
+            targetancestors = set(repo.changelog.ancestors([target]))
             targetancestors.add(target)
 
         # Keep track of the current bookmarks in order to reset them later
@@ -265,7 +266,7 @@
                 else:
                     try:
                         ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
-                        stats = rebasenode(repo, rev, p1, state)
+                        stats = rebasenode(repo, rev, p1, state, collapsef)
                         if stats and stats[3] > 0:
                             raise util.Abort(_('unresolved conflicts (see hg '
                                         'resolve, then hg rebase --continue)'))
@@ -320,7 +321,7 @@
             # Remove no more useful revisions
             rebased = [rev for rev in state if state[rev] != nullmerge]
             if rebased:
-                if set(repo.changelog.descendants(min(rebased))) - set(state):
+                if set(repo.changelog.descendants([min(rebased)])) - set(state):
                     ui.warn(_("warning: new changesets detected "
                               "on source branch, not stripping\n"))
                 else:
@@ -383,7 +384,7 @@
         repo.dirstate.invalidate()
         raise
 
-def rebasenode(repo, rev, p1, state):
+def rebasenode(repo, rev, p1, state, collapse):
     'Rebase a single revision'
     # Merge phase
     # Update to target and merge it with local
@@ -397,7 +398,9 @@
     base = None
     if repo[rev].rev() != repo[min(state)].rev():
         base = repo[rev].p1().node()
-    return merge.update(repo, rev, True, True, False, base)
+    # When collapsing in-place, the parent is the common ancestor, we
+    # have to allow merging with it.
+    return merge.update(repo, rev, True, True, False, base, collapse)
 
 def defineparents(repo, rev, target, state, targetancestors):
     'Return the new parent relationship of the revision that will be rebased'
@@ -572,7 +575,7 @@
 
     descendants = set()
     if dstates:
-        descendants = set(repo.changelog.descendants(*dstates))
+        descendants = set(repo.changelog.descendants(dstates))
     if descendants - set(dstates):
         repo.ui.warn(_("warning: new changesets detected on target branch, "
                        "can't abort\n"))
@@ -589,7 +592,7 @@
         repo.ui.warn(_('rebase aborted\n'))
         return 0
 
-def buildstate(repo, dest, rebaseset, detach):
+def buildstate(repo, dest, rebaseset, detach, collapse):
     '''Define which revisions are going to be rebased and where
 
     repo: repo
@@ -617,9 +620,9 @@
         raise util.Abort(_('source is ancestor of destination'))
     if commonbase == dest:
         samebranch = root.branch() == dest.branch()
-        if samebranch and root in dest.children():
-           repo.ui.debug('source is a child of destination')
-           return None
+        if not collapse and samebranch and root in dest.children():
+            repo.ui.debug('source is a child of destination\n')
+            return None
         # rebase on ancestor, force detach
         detach = True
     if detach:
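
Several hunks above follow the changelog API change where ancestors() and descendants() now take a list of revisions rather than varargs (ancestors([target]), descendants(dstates)). As a rough sketch of the semantics those callers rely on, not the revlog implementation, an ancestors-of-a-set walk over a parents function looks like this:

    def ancestors(parentrevs, revs):
        # collect every ancestor of any rev in revs, excluding revs themselves
        seen = set()
        pending = list(revs)
        while pending:
            for p in parentrevs(pending.pop()):
                if p != -1 and p not in seen:       # -1 is the null revision
                    seen.add(p)
                    pending.append(p)
        return seen
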
--- a/hgext/record.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/record.py	Mon Jun 04 17:57:57 2012 -0500
@@ -14,6 +14,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
 
@@ -516,10 +517,11 @@
                                '(use "hg commit" instead)'))
 
         changes = repo.status(match=match)[:3]
-        diffopts = mdiff.diffopts(git=True, nodates=True,
-                                  ignorews=opts.get('ignore_all_space'),
-                                  ignorewsamount=opts.get('ignore_space_change'),
-                                  ignoreblanklines=opts.get('ignore_blank_lines'))
+        diffopts = mdiff.diffopts(
+            git=True, nodates=True,
+            ignorews=opts.get('ignore_all_space'),
+            ignorewsamount=opts.get('ignore_space_change'),
+            ignoreblanklines=opts.get('ignore_blank_lines'))
         chunks = patch.diff(repo, changes=changes, opts=diffopts)
         fp = cStringIO.StringIO()
         fp.write(''.join(chunks))
--- a/hgext/relink.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/relink.py	Mon Jun 04 17:57:57 2012 -0500
@@ -11,6 +11,8 @@
 from mercurial.i18n import _
 import os, stat
 
+testedwith = 'internal'
+
 def relink(ui, repo, origin=None, **opts):
     """recreate hardlinks between two repositories
 
@@ -79,7 +81,7 @@
         dirnames.sort()
         relpath = dirpath[len(src) + seplen:]
         for filename in sorted(filenames):
-            if not filename[-2:] in ('.d', '.i'):
+            if filename[-2:] not in ('.d', '.i'):
                 continue
             st = os.stat(os.path.join(dirpath, filename))
             if not stat.S_ISREG(st.st_mode):
--- a/hgext/schemes.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/schemes.py	Mon Jun 04 17:57:57 2012 -0500
@@ -44,6 +44,8 @@
 from mercurial import extensions, hg, templater, util
 from mercurial.i18n import _
 
+testedwith = 'internal'
+
 
 class ShortRepository(object):
     def __init__(self, url, scheme, templater):
--- a/hgext/share.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/share.py	Mon Jun 04 17:57:57 2012 -0500
@@ -8,6 +8,8 @@
 from mercurial.i18n import _
 from mercurial import hg, commands, util
 
+testedwith = 'internal'
+
 def share(ui, source, dest=None, noupdate=False):
     """create a new shared repository
 
--- a/hgext/transplant.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/transplant.py	Mon Jun 04 17:57:57 2012 -0500
@@ -25,6 +25,7 @@
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
+testedwith = 'internal'
 
 class transplantentry(object):
     def __init__(self, lnode, rnode):
@@ -124,7 +125,7 @@
                     continue
 
                 parents = source.changelog.parents(node)
-                if not opts.get('filter'):
+                if not (opts.get('filter') or opts.get('log')):
                     # If the changeset parent is the same as the
                     # wdir's parent, just pull it.
                     if parents[0] == p1:
--- a/hgext/win32mbcs.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/win32mbcs.py	Mon Jun 04 17:57:57 2012 -0500
@@ -48,6 +48,7 @@
 import os, sys
 from mercurial.i18n import _
 from mercurial import util, encoding
+testedwith = 'internal'
 
 _encoding = None                                # see extsetup
 
@@ -164,4 +165,3 @@
         if '--debug' in sys.argv:
             ui.write("[win32mbcs] activated with encoding: %s\n"
                      % _encoding)
-
--- a/hgext/win32text.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/win32text.py	Mon Jun 04 17:57:57 2012 -0500
@@ -46,6 +46,8 @@
 from mercurial import util
 import re
 
+testedwith = 'internal'
+
 # regexp for single LF without CR preceding.
 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
 
--- a/hgext/zeroconf/__init__.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/hgext/zeroconf/__init__.py	Mon Jun 04 17:57:57 2012 -0500
@@ -32,6 +32,8 @@
 from mercurial.hgweb import hgweb_mod
 from mercurial.hgweb import hgwebdir_mod
 
+testedwith = 'internal'
+
 # publish
 
 server = None
@@ -44,7 +46,7 @@
         s.connect(('1.0.0.1', 0))
         ip = s.getsockname()[0]
         return ip
-    except:
+    except socket.error:
         pass
 
     # Generic method, sometimes gives useless results
@@ -61,7 +63,7 @@
         s.connect(('1.0.0.1', 1))
         ip = s.getsockname()[0]
         return ip
-    except:
+    except socket.error:
         pass
 
     return dumbip
@@ -119,7 +121,8 @@
             name = os.path.basename(repo)
             path = (prefix + repo).strip('/')
             desc = u.config('web', 'description', name)
-            publish(name, desc, path, util.getport(u.config("web", "port", 8000)))
+            publish(name, desc, path,
+                    util.getport(u.config("web", "port", 8000)))
 
 # listen
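
The zeroconf hunks narrow the bare except clauses to socket.error around the address-detection trick: "connecting" a UDP socket sends no packets but lets getsockname() report which local address the kernel would use. The detection in isolation:

    import socket

    def getip(dumbip='127.0.0.1'):
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.connect(('1.0.0.1', 1))       # no packet leaves the machine
            return s.getsockname()[0]
        except socket.error:
            return dumbip
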
 
--- a/mercurial/base85.c	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/base85.c	Mon Jun 04 17:57:57 2012 -0500
@@ -9,6 +9,7 @@
  Largely based on git's implementation
 */
 
+#define PY_SSIZE_T_CLEAN
 #include <Python.h>
 
 #include "util.h"
@@ -33,7 +34,7 @@
 	const unsigned char *text;
 	PyObject *out;
 	char *dst;
-	int len, olen, i;
+	Py_ssize_t len, olen, i;
 	unsigned int acc, val, ch;
 	int pad = 0;
 
@@ -81,7 +82,8 @@
 	PyObject *out;
 	const char *text;
 	char *dst;
-	int len, i, j, olen, c, cap;
+	Py_ssize_t len, i, j, olen, cap;
+	int c;
 	unsigned int acc;
 
 	if (!PyArg_ParseTuple(args, "s#", &text, &len))
@@ -109,7 +111,8 @@
 			if (c < 0)
 				return PyErr_Format(
 					PyExc_ValueError,
-					"bad base85 character at position %d", i);
+					"bad base85 character at position %d",
+					(int)i);
 			acc = acc * 85 + c;
 		}
 		if (i++ < len)
@@ -118,13 +121,15 @@
 			if (c < 0)
 				return PyErr_Format(
 					PyExc_ValueError,
-					"bad base85 character at position %d", i);
+					"bad base85 character at position %d",
+					(int)i);
 			/* overflow detection: 0xffffffff == "|NsC0",
 			 * "|NsC" == 0x03030303 */
 			if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c)
 				return PyErr_Format(
 					PyExc_ValueError,
-					"bad base85 sequence at position %d", i);
+					"bad base85 sequence at position %d",
+					(int)i);
 			acc += c;
 		}
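
The base85.c hunks switch the buffer lengths to Py_ssize_t (with PY_SSIZE_T_CLEAN) so the 's#' argument parsing stays correct for large inputs, and cast the positions in the error messages explicitly; the Python-visible interface is unchanged. A small round-trip check of that interface, assuming it is run from a built Mercurial tree:

    from mercurial import base85

    data = 'binary\0payload'
    encoded = base85.b85encode(data, True)      # pad to a 4-byte boundary
    assert base85.b85decode(encoded).startswith(data)
    # malformed input raises ValueError, matching the "bad base85 character"
    # and "bad base85 sequence" paths touched above
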
 
--- a/mercurial/bdiff.c	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/bdiff.c	Mon Jun 04 17:57:57 2012 -0500
@@ -9,6 +9,7 @@
  Based roughly on Python difflib
 */
 
+#define PY_SSIZE_T_CLEAN
 #include <Python.h>
 #include <stdlib.h>
 #include <string.h>
@@ -17,7 +18,8 @@
 #include "util.h"
 
 struct line {
-	int hash, len, n, e;
+	int hash, n, e;
+	Py_ssize_t len;
 	const char *l;
 };
 
@@ -31,7 +33,7 @@
 	struct hunk *next;
 };
 
-static int splitlines(const char *a, int len, struct line **lr)
+static int splitlines(const char *a, Py_ssize_t len, struct line **lr)
 {
 	unsigned hash;
 	int i;
@@ -338,7 +340,8 @@
 	PyObject *result = NULL;
 	struct line *al, *bl;
 	struct hunk l, *h;
-	int an, bn, len = 0, la, lb, count;
+	int an, bn, count;
+	Py_ssize_t len = 0, la, lb;
 	PyThreadState *_save;
 
 	if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
@@ -378,9 +381,18 @@
 	for (h = l.next; h; h = h->next) {
 		if (h->a1 != la || h->b1 != lb) {
 			len = bl[h->b1].l - bl[lb].l;
-			putbe32(al[la].l - al->l, rb);
-			putbe32(al[h->a1].l - al->l, rb + 4);
-			putbe32(len, rb + 8);
+
+#define checkputbe32(__x, __c) \
+	if (__x > UINT_MAX) { \
+		PyErr_SetString(PyExc_ValueError, \
+		                "bdiff: value too large for putbe32"); \
+		goto nomem; \
+	} \
+	putbe32((uint32_t)(__x), __c);
+
+			checkputbe32(al[la].l - al->l, rb);
+			checkputbe32(al[h->a1].l - al->l, rb + 4);
+			checkputbe32(len, rb + 8);
 			memcpy(rb + 12, bl[lb].l, len);
 			rb += 12 + len;
 		}
@@ -407,7 +419,7 @@
 	PyObject *s, *result = NULL;
 	char allws, c;
 	const char *r;
-	int i, rlen, wlen = 0;
+	Py_ssize_t i, rlen, wlen = 0;
 	char *w;
 
 	if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
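
The new checkputbe32 macro guards the widened Py_ssize_t offsets before narrowing them into the 32-bit big-endian fields of the binary diff opcodes. The same invariant is easy to state in Python with struct; this is only an illustration, not code from the tree:

    import struct

    def checkputbe32(value):
        # the opcode fields are fixed width: refuse anything that cannot fit
        if value > 0xffffffff:
            raise ValueError("bdiff: value too large for putbe32")
        return struct.pack(">I", value)         # 4 bytes, big-endian

    checkputbe32(12)            # -> '\x00\x00\x00\x0c'
    # checkputbe32(2 ** 40)     # raises ValueError
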
--- a/mercurial/bookmarks.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/bookmarks.py	Mon Jun 04 17:57:57 2012 -0500
@@ -7,7 +7,7 @@
 
 from mercurial.i18n import _
 from mercurial.node import hex
-from mercurial import encoding, util
+from mercurial import encoding, error, util
 import errno, os
 
 def valid(mark):
@@ -140,8 +140,8 @@
 
 def updatecurrentbookmark(repo, oldnode, curbranch):
     try:
-        return update(repo, oldnode, repo.branchtags()[curbranch])
-    except KeyError:
+        return update(repo, oldnode, repo.branchtip(curbranch))
+    except error.RepoLookupError:
         if curbranch == "default": # no default branch!
             return update(repo, oldnode, repo.lookup("tip"))
         else:
@@ -150,13 +150,20 @@
 def update(repo, parents, node):
     marks = repo._bookmarks
     update = False
-    mark = repo._bookmarkcurrent
-    if mark and marks[mark] in parents:
-        old = repo[marks[mark]]
-        new = repo[node]
-        if new in old.descendants():
-            marks[mark] = new.node()
-            update = True
+    cur = repo._bookmarkcurrent
+    if not cur:
+        return False
+
+    toupdate = [b for b in marks if b.split('@', 1)[0] == cur.split('@', 1)[0]]
+    for mark in toupdate:
+        if mark and marks[mark] in parents:
+            old = repo[marks[mark]]
+            new = repo[node]
+            if new in old.descendants() and mark == cur:
+                marks[cur] = new.node()
+                update = True
+            if mark != cur:
+                del marks[mark]
     if update:
         repo._writebookmarks(marks)
     return update
@@ -221,6 +228,11 @@
                     repo._bookmarks[n] = cr.node()
                     changed = True
                     ui.warn(_("divergent bookmark %s stored as %s\n") % (k, n))
+        elif rb[k] in repo:
+            # add remote bookmarks for changes we already have
+            repo._bookmarks[k] = repo[rb[k]].node()
+            changed = True
+            ui.status(_("adding remote bookmark %s\n") % k)
 
     if changed:
         write(repo)
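
The rewritten update() treats bookmarks as variants of one another when the part before the first '@' matches, so stale divergent copies (for example work@default) are dropped once the current bookmark moves forward. The matching itself is just a name split; a tiny sketch:

    def variants(marks, current):
        # bookmarks sharing the prefix before '@' are divergent copies
        base = current.split('@', 1)[0]
        return [b for b in marks if b.split('@', 1)[0] == base]

    variants(['work', 'work@default', 'review'], 'work')
    # -> ['work', 'work@default']
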
--- a/mercurial/bundlerepo.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/bundlerepo.py	Mon Jun 04 17:57:57 2012 -0500
@@ -54,7 +54,7 @@
                 continue
 
             for p in (p1, p2):
-                if not p in self.nodemap:
+                if p not in self.nodemap:
                     raise error.LookupError(p, self.indexfile,
                                             _("unknown parent"))
             # start, size, full unc. size, base (unused), link, p1, p2, node
@@ -323,13 +323,16 @@
 
     Returns a tuple (local, csets, cleanupfn):
 
-    "local" is a local repo from which to obtain the actual incoming changesets; it
-      is a bundlerepo for the obtained bundle when the original "other" is remote.
+    "local" is a local repo from which to obtain the actual incoming
+      changesets; it is a bundlerepo for the obtained bundle when the
+      original "other" is remote.
     "csets" lists the incoming changeset node ids.
-    "cleanupfn" must be called without arguments when you're done processing the
-      changes; it closes both the original "other" and the one returned here.
+    "cleanupfn" must be called without arguments when you're done processing
+      the changes; it closes both the original "other" and the one returned
+      here.
     '''
-    tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force)
+    tmp = discovery.findcommonincoming(repo, other, heads=onlyheads,
+                                       force=force)
     common, incoming, rheads = tmp
     if not incoming:
         try:
--- a/mercurial/cmdutil.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/cmdutil.py	Mon Jun 04 17:57:57 2012 -0500
@@ -958,6 +958,20 @@
 
     raise util.Abort(_("revision matching date not found"))
 
+def increasingwindows(start, end, windowsize=8, sizelimit=512):
+    if start < end:
+        while start < end:
+            yield start, min(windowsize, end - start)
+            start += windowsize
+            if windowsize < sizelimit:
+                windowsize *= 2
+    else:
+        while start > end:
+            yield start, min(windowsize, start - end - 1)
+            start -= windowsize
+            if windowsize < sizelimit:
+                windowsize *= 2
+
 def walkchangerevs(repo, match, opts, prepare):
     '''Iterate over files and the revs in which they changed.
 
@@ -973,20 +987,6 @@
     yielding each context, the iterator will first call the prepare
     function on each context in the window in forward order.'''
 
-    def increasing_windows(start, end, windowsize=8, sizelimit=512):
-        if start < end:
-            while start < end:
-                yield start, min(windowsize, end - start)
-                start += windowsize
-                if windowsize < sizelimit:
-                    windowsize *= 2
-        else:
-            while start > end:
-                yield start, min(windowsize, start - end - 1)
-                start -= windowsize
-                if windowsize < sizelimit:
-                    windowsize *= 2
-
     follow = opts.get('follow') or opts.get('follow_first')
 
     if not len(repo):
@@ -1176,7 +1176,7 @@
             def want(rev):
                 return rev in wanted
 
-        for i, window in increasing_windows(0, len(revs)):
+        for i, window in increasingwindows(0, len(revs)):
             nrevs = [rev for rev in revs[i:i + window] if want(rev)]
             for rev in sorted(nrevs):
                 fns = fncache.get(rev)
@@ -1345,7 +1345,7 @@
                     a = ctx.filectx(f)
                     if f in base.manifest():
                         b = base.filectx(f)
-                        return (a.data() == b.data()
+                        return (not a.cmp(b)
                                 and a.flags() == b.flags())
                     else:
                         return False
@@ -1363,7 +1363,7 @@
                                               copied=copied.get(path))
                     return mctx
                 except KeyError:
-                    raise IOError()
+                    raise IOError
         else:
             ui.note(_('copying changeset %s to %s\n') % (old, base))
 
@@ -1372,7 +1372,7 @@
                 try:
                     return old.filectx(path)
                 except KeyError:
-                    raise IOError()
+                    raise IOError
 
             # See if we got a message from -m or -l, if not, open the editor
             # with the message of the changeset to amend
@@ -1489,7 +1489,7 @@
         def badfn(path, msg):
             if path in names:
                 return
-            if path in repo[node].substate:
+            if path in ctx.substate:
                 return
             path_ = path + '/'
             for f in names:
@@ -1497,14 +1497,14 @@
                     return
             ui.warn("%s: %s\n" % (m.rel(path), msg))
 
-        m = scmutil.match(repo[node], pats, opts)
+        m = scmutil.match(ctx, pats, opts)
         m.bad = badfn
-        for abs in repo[node].walk(m):
+        for abs in ctx.walk(m):
             if abs not in names:
                 names[abs] = m.rel(abs), m.exact(abs)
 
         # get the list of subrepos that must be reverted
-        targetsubs = [s for s in repo[node].substate if m(s)]
+        targetsubs = [s for s in ctx.substate if m(s)]
         m = scmutil.matchfiles(repo, names)
         changes = repo.status(match=m)[:4]
         modified, added, removed, deleted = map(set, changes)
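
increasingwindows() is hoisted out of walkchangerevs() above; it yields (start, size) windows whose size doubles up to sizelimit, walking forward or backward depending on the start/end order. Copied here verbatim so its output can be shown standalone:

    def increasingwindows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end - start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start - end - 1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    list(increasingwindows(0, 30))      # -> [(0, 8), (8, 16), (24, 6)]
    list(increasingwindows(10, 0))      # -> [(10, 8), (2, 1)]
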
--- a/mercurial/commands.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/commands.py	Mon Jun 04 17:57:57 2012 -0500
@@ -520,10 +520,12 @@
     revision as good or bad without checking it out first.
 
     If you supply a command, it will be used for automatic bisection.
-    Its exit status will be used to mark revisions as good or bad:
-    status 0 means good, 125 means to skip the revision, 127
-    (command not found) will abort the bisection, and any other
-    non-zero exit status means the revision is bad.
+    The environment variable HG_NODE will contain the ID of the
+    changeset being tested. The exit status of the command will be
+    used to mark revisions as good or bad: status 0 means good, 125
+    means to skip the revision, 127 (command not found) will abort the
+    bisection, and any other non-zero exit status means the revision
+    is bad.
 
     .. container:: verbose
 
@@ -563,6 +565,11 @@
 
           hg log -r "bisect(pruned)"
 
+      - see the changeset currently being bisected (especially useful
+        if running with -U/--noupdate)::
+
+          hg log -r "bisect(current)"
+
       - see all changesets that took part in the current bisection::
 
           hg log -r "bisect(range)"
@@ -647,10 +654,22 @@
     if command:
         changesets = 1
         try:
+            node = state['current'][0]
+        except LookupError:
+            if noupdate:
+                raise util.Abort(_('current bisect revision is unknown - '
+                                   'start a new bisect to fix'))
+            node, p2 = repo.dirstate.parents()
+            if p2 != nullid:
+                raise util.Abort(_('current bisect revision is a merge'))
+        try:
             while changesets:
                 # update state
+                state['current'] = [node]
                 hbisect.save_state(repo, state)
-                status = util.system(command, out=ui.fout)
+                status = util.system(command,
+                                     environ={'HG_NODE': hex(node)},
+                                     out=ui.fout)
                 if status == 125:
                     transition = "skip"
                 elif status == 0:
@@ -662,7 +681,7 @@
                     raise util.Abort(_("%s killed") % command)
                 else:
                     transition = "bad"
-                ctx = scmutil.revsingle(repo, rev)
+                ctx = scmutil.revsingle(repo, rev, node)
                 rev = None # clear for future iterations
                 state[transition].append(ctx.node())
                 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
@@ -670,9 +689,12 @@
                 # bisect
                 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                 # update to next check
-                cmdutil.bailifchanged(repo)
-                hg.clean(repo, nodes[0], show_stats=False)
+                node = nodes[0]
+                if not noupdate:
+                    cmdutil.bailifchanged(repo)
+                    hg.clean(repo, node, show_stats=False)
         finally:
+            state['current'] = [node]
             hbisect.save_state(repo, state)
         print_result(nodes, good)
         return
@@ -704,6 +726,8 @@
             if extendnode is not None:
                 ui.write(_("Extending search to changeset %d:%s\n"
                          % (extendnode.rev(), extendnode)))
+                state['current'] = [extendnode.node()]
+                hbisect.save_state(repo, state)
                 if noupdate:
                     return
                 cmdutil.bailifchanged(repo)
@@ -723,6 +747,8 @@
         ui.write(_("Testing changeset %d:%s "
                    "(%d changesets remaining, ~%d tests)\n")
                  % (rev, short(node), changesets, tests))
+        state['current'] = [node]
+        hbisect.save_state(repo, state)
         if not noupdate:
             cmdutil.bailifchanged(repo)
             return hg.clean(repo, node)
@@ -801,7 +827,7 @@
         if mark in marks and not force:
             raise util.Abort(_("bookmark '%s' already exists "
                                "(use -f to force)") % mark)
-        if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
+        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
             and not force):
             raise util.Abort(
                 _("a bookmark cannot have the name of an existing branch"))
@@ -877,7 +903,7 @@
             repo.dirstate.setbranch(label)
             ui.status(_('reset working directory to branch %s\n') % label)
         elif label:
-            if not opts.get('force') and label in repo.branchtags():
+            if not opts.get('force') and label in repo.branchmap():
                 if label not in [p.branch() for p in repo.parents()]:
                     raise util.Abort(_('a branch of the same name already'
                                        ' exists'),
@@ -910,37 +936,45 @@
     """
 
     hexfunc = ui.debugflag and hex or short
-    activebranches = [repo[n].branch() for n in repo.heads()]
-    def testactive(tag, node):
-        realhead = tag in activebranches
-        open = node in repo.branchheads(tag, closed=False)
-        return realhead and open
-    branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
-                          for tag, node in repo.branchtags().items()],
-                      reverse=True)
-
-    for isactive, node, tag in branches:
+
+    activebranches = set([repo[n].branch() for n in repo.heads()])
+    branches = []
+    for tag, heads in repo.branchmap().iteritems():
+        for h in reversed(heads):
+            ctx = repo[h]
+            isopen = not ctx.closesbranch()
+            if isopen:
+                tip = ctx
+                break
+        else:
+            tip = repo[heads[-1]]
+        isactive = tag in activebranches and isopen
+        branches.append((tip, isactive, isopen))
+    branches.sort(key=lambda i: (i[1], i[0].rev(), i[0].branch(), i[2]),
+                  reverse=True)
+
+    for ctx, isactive, isopen in branches:
         if (not active) or isactive:
+            if isactive:
+                label = 'branches.active'
+                notice = ''
+            elif not isopen:
+                if not closed:
+                    continue
+                label = 'branches.closed'
+                notice = _(' (closed)')
+            else:
+                label = 'branches.inactive'
+                notice = _(' (inactive)')
+            if ctx.branch() == repo.dirstate.branch():
+                label = 'branches.current'
+            rev = str(ctx.rev()).rjust(31 - encoding.colwidth(ctx.branch()))
+            rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
+                           'log.changeset')
+            tag = ui.label(ctx.branch(), label)
             if ui.quiet:
                 ui.write("%s\n" % tag)
             else:
-                hn = repo.lookup(node)
-                if isactive:
-                    label = 'branches.active'
-                    notice = ''
-                elif hn not in repo.branchheads(tag, closed=False):
-                    if not closed:
-                        continue
-                    label = 'branches.closed'
-                    notice = _(' (closed)')
-                else:
-                    label = 'branches.inactive'
-                    notice = _(' (inactive)')
-                if tag == repo.dirstate.branch():
-                    label = 'branches.current'
-                rev = str(node).rjust(31 - encoding.colwidth(tag))
-                rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
-                tag = ui.label(tag, label)
                 ui.write("%s %s%s\n" % (tag, rev, notice))
 
 @command('bundle',
@@ -1324,7 +1358,7 @@
 
     if not opts.get('close_branch'):
         for r in parents:
-            if r.extra().get('close') and r.branch() == branch:
+            if r.closesbranch() and r.branch() == branch:
                 ui.status(_('reopening closed branch head %d\n') % r)
 
     if ui.debugflag:
@@ -1662,7 +1696,8 @@
         revs = set((int(r) for r in revs))
         def events():
             for r in rlog:
-                yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
+                yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
+                                        if p != -1)))
                 if r in revs:
                     yield 'l', (r, "r%i" % r)
     elif repo:
@@ -1681,7 +1716,8 @@
                     if newb != b:
                         yield 'a', newb
                         b = newb
-                yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
+                yield 'n', (r, list(set(p for p in cl.parentrevs(r)
+                                        if p != -1)))
                 if tags:
                     ls = labels.get(r)
                     if ls:
@@ -1739,7 +1775,8 @@
     _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
 def debugdiscovery(ui, repo, remoteurl="default", **opts):
     """runs the changeset discovery protocol in isolation"""
-    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
+    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
+                                      opts.get('branch'))
     remote = hg.peer(repo, opts, remoteurl)
     ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
 
@@ -1749,7 +1786,8 @@
     def doit(localheads, remoteheads):
         if opts.get('old'):
             if localheads:
-                raise util.Abort('cannot use localheads with old style discovery')
+                raise util.Abort('cannot use localheads with old style '
+                                 'discovery')
             common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                 force=True)
             common = set(common)
@@ -1876,7 +1914,8 @@
                  " nodeid       p1           p2\n")
     elif format == 1:
         ui.write("   rev flag   offset   length"
-                 "     size " + basehdr + "   link     p1     p2       nodeid\n")
+                 "     size " + basehdr + "   link     p1     p2"
+                 "       nodeid\n")
 
     for i in r:
         node = r.node(i)
@@ -1887,7 +1926,7 @@
         if format == 0:
             try:
                 pp = r.parents(node)
-            except:
+            except Exception:
                 pp = [nullid, nullid]
             ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                     i, r.start(i), r.length(i), base, r.linkrev(i),
@@ -2001,8 +2040,8 @@
 def debugknown(ui, repopath, *ids, **opts):
     """test whether node ids are known to a repo
 
-    Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
-    indicating unknown/known.
+    Every ID must be a full-length hex node id string. Returns a list of 0s
+    and 1s indicating unknown/known.
     """
     repo = hg.peer(ui, opts, repopath)
     if not repo.capable('known'):
@@ -2234,13 +2273,17 @@
         fmt2 = pcfmtstr(numdeltas, 4)
         ui.write('deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas))
         if numprev > 0:
-            ui.write('    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev))
-            ui.write('    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev))
-            ui.write('    other            : ' + fmt2 % pcfmt(numoprev, numprev))
+            ui.write('    where prev = p1  : ' + fmt2 % pcfmt(nump1prev,
+                                                              numprev))
+            ui.write('    where prev = p2  : ' + fmt2 % pcfmt(nump2prev,
+                                                              numprev))
+            ui.write('    other            : ' + fmt2 % pcfmt(numoprev,
+                                                              numprev))
         if gdelta:
             ui.write('deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas))
             ui.write('deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas))
-            ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
+            ui.write('deltas against other : ' + fmt % pcfmt(numother,
+                                                             numdeltas))
 
 @command('debugrevspec', [], ('REVSPEC'))
 def debugrevspec(ui, repo, expr):
@@ -2556,6 +2599,7 @@
     'graft',
     [('c', 'continue', False, _('resume interrupted graft')),
      ('e', 'edit', False, _('invoke editor on commit messages')),
+     ('', 'log', None, _('append graft info to log message')),
      ('D', 'currentdate', False,
       _('record the current date as commit date')),
      ('U', 'currentuser', False,
@@ -2574,6 +2618,11 @@
     Changesets that are ancestors of the current revision, that have
     already been grafted, or that are merges will be skipped.
 
+    If --log is specified, log messages will have a comment appended
+    of the form::
+
+      (grafted from CHANGESETHASH)
+
     If a graft merge results in conflicts, the graft process is
     interrupted so that the current merge can be manually resolved.
     Once all conflicts are addressed, the graft process can be
@@ -2723,8 +2772,13 @@
             date = ctx.date()
             if opts.get('date'):
                 date = opts['date']
-            repo.commit(text=ctx.description(), user=user,
+            message = ctx.description()
+            if opts.get('log'):
+                message += '\n(grafted from %s)' % ctx.hex()
+            node = repo.commit(text=message, user=user,
                         date=date, extra=extra, editor=editor)
+            if node is None:
+                ui.status(_('graft for revision %s is empty\n') % ctx.rev())
     finally:
         wlock.release()
 
@@ -3018,7 +3072,9 @@
 
 @command('help',
     [('e', 'extension', None, _('show only help for extensions')),
-     ('c', 'command', None, _('show only help for commands'))],
+     ('c', 'command', None, _('show only help for commands')),
+     ('k', 'keyword', '', _('show topics matching keyword')),
+     ],
     _('[-ec] [TOPIC]'))
 def help_(ui, name=None, unknowncmd=False, full=True, **opts):
     """show help for a given topic or a help overview
@@ -3033,78 +3089,6 @@
 
     textwidth = min(ui.termwidth(), 80) - 2
 
-    def optrst(options):
-        data = []
-        multioccur = False
-        for option in options:
-            if len(option) == 5:
-                shortopt, longopt, default, desc, optlabel = option
-            else:
-                shortopt, longopt, default, desc = option
-                optlabel = _("VALUE") # default label
-
-            if _("DEPRECATED") in desc and not ui.verbose:
-                continue
-
-            so = ''
-            if shortopt:
-                so = '-' + shortopt
-            lo = '--' + longopt
-            if default:
-                desc += _(" (default: %s)") % default
-
-            if isinstance(default, list):
-                lo += " %s [+]" % optlabel
-                multioccur = True
-            elif (default is not None) and not isinstance(default, bool):
-                lo += " %s" % optlabel
-
-            data.append((so, lo, desc))
-
-        rst = minirst.maketable(data, 1)
-
-        if multioccur:
-            rst += _("\n[+] marked option can be specified multiple times\n")
-
-        return rst
-
-    # list all option lists
-    def opttext(optlist, width):
-        rst = ''
-        if not optlist:
-            return ''
-
-        for title, options in optlist:
-            rst += '\n%s\n' % title
-            if options:
-                rst += "\n"
-                rst += optrst(options)
-                rst += '\n'
-
-        return '\n' + minirst.format(rst, width)
-
-    def addglobalopts(optlist, aliases):
-        if ui.quiet:
-            return []
-
-        if ui.verbose:
-            optlist.append((_("global options:"), globalopts))
-            if name == 'shortlist':
-                optlist.append((_('use "hg help" for the full list '
-                                       'of commands'), ()))
-        else:
-            if name == 'shortlist':
-                msg = _('use "hg help" for the full list of commands '
-                        'or "hg -v" for details')
-            elif name and not full:
-                msg = _('use "hg help %s" to show the full help text') % name
-            elif aliases:
-                msg = _('use "hg -v help%s" to show builtin aliases and '
-                        'global options') % (name and " " + name or "")
-            else:
-                msg = _('use "hg -v help %s" to show more info') % name
-            optlist.append((msg, ()))
-
     def helpcmd(name):
         try:
             aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
@@ -3113,29 +3097,31 @@
             # except block, nor can be used inside a lambda. python issue4617
             prefix = inst.args[0]
             select = lambda c: c.lstrip('^').startswith(prefix)
-            helplist(select)
-            return
+            rst = helplist(select)
+            return rst
+
+        rst = []
 
         # check if it's an invalid alias and display its error if it is
         if getattr(entry[0], 'badalias', False):
             if not unknowncmd:
+                ui.pushbuffer()
                 entry[0](ui)
-            return
-
-        rst = ""
+                rst.append(ui.popbuffer())
+            return rst
 
         # synopsis
         if len(entry) > 2:
             if entry[2].startswith('hg'):
-                rst += "%s\n" % entry[2]
+                rst.append("%s\n" % entry[2])
             else:
-                rst += 'hg %s %s\n' % (aliases[0], entry[2])
+                rst.append('hg %s %s\n' % (aliases[0], entry[2]))
         else:
-            rst += 'hg %s\n' % aliases[0]
-
+            rst.append('hg %s\n' % aliases[0])
         # aliases
         if full and not ui.quiet and len(aliases) > 1:
-            rst += _("\naliases: %s\n") % ', '.join(aliases[1:])
+            rst.append(_("\naliases: %s\n") % ', '.join(aliases[1:]))
+        rst.append('\n')
 
         # description
         doc = gettext(entry[0].__doc__)
@@ -3146,9 +3132,12 @@
                 doc = _('shell alias for::\n\n    %s') % entry[0].definition[1:]
             else:
                 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
+        doc = doc.splitlines(True)
         if ui.quiet or not full:
-            doc = doc.splitlines()[0]
-        rst += "\n" + doc + "\n"
+            rst.append(doc[0])
+        else:
+            rst.extend(doc)
+        rst.append('\n')
 
         # check if this command shadows a non-trivial (multi-line)
         # extension help text
@@ -3158,33 +3147,27 @@
             if '\n' in doc.strip():
                 msg = _('use "hg help -e %s" to show help for '
                         'the %s extension') % (name, name)
-                rst += '\n%s\n' % msg
+                rst.append('\n%s\n' % msg)
         except KeyError:
             pass
 
         # options
         if not ui.quiet and entry[1]:
-            rst += '\n'
-            rst += _("options:")
-            rst += '\n\n'
-            rst += optrst(entry[1])
+            rst.append('\n%s\n\n' % _("options:"))
+            rst.append(help.optrst(entry[1], ui.verbose))
 
         if ui.verbose:
-            rst += '\n'
-            rst += _("global options:")
-            rst += '\n\n'
-            rst += optrst(globalopts)
-
-        keep = ui.verbose and ['verbose'] or []
-        formatted, pruned = minirst.format(rst, textwidth, keep=keep)
-        ui.write(formatted)
+            rst.append('\n%s\n\n' % _("global options:"))
+            rst.append(help.optrst(globalopts, ui.verbose))
 
         if not ui.verbose:
             if not full:
-                ui.write(_('\nuse "hg help %s" to show the full help text\n')
+                rst.append(_('\nuse "hg help %s" to show the full help text\n')
                            % name)
             elif not ui.quiet:
-                ui.write(_('\nuse "hg -v help %s" to show more info\n') % name)
+                rst.append(_('\nuse "hg -v help %s" to show more info\n')
+                           % name)
+        return rst
 
 
     def helplist(select=None):
@@ -3217,38 +3200,60 @@
             h[f] = doc.splitlines()[0].rstrip()
             cmds[f] = c.lstrip("^")
 
+        rst = []
         if not h:
-            ui.status(_('no commands defined\n'))
-            return
-
-        ui.status(header)
+            if not ui.quiet:
+                rst.append(_('no commands defined\n'))
+            return rst
+
+        if not ui.quiet:
+            rst.append(header)
         fns = sorted(h)
-        m = max(map(len, fns))
         for f in fns:
             if ui.verbose:
                 commands = cmds[f].replace("|",", ")
-                ui.write(" %s:\n      %s\n"%(commands, h[f]))
+                rst.append(" :%s: %s\n" % (commands, h[f]))
             else:
-                ui.write('%s\n' % (util.wrap(h[f], textwidth,
-                                             initindent=' %-*s    ' % (m, f),
-                                             hangindent=' ' * (m + 4))))
+                rst.append(' :%s: %s\n' % (f, h[f]))
 
         if not name:
-            text = help.listexts(_('enabled extensions:'), extensions.enabled())
-            if text:
-                ui.write("\n%s" % minirst.format(text, textwidth))
-
-            ui.write(_("\nadditional help topics:\n\n"))
+            exts = help.listexts(_('enabled extensions:'), extensions.enabled())
+            if exts:
+                rst.append('\n')
+                rst.extend(exts)
+
+            rst.append(_("\nadditional help topics:\n\n"))
             topics = []
             for names, header, doc in help.helptable:
                 topics.append((sorted(names, key=len, reverse=True)[0], header))
-            topics_len = max([len(s[0]) for s in topics])
             for t, desc in topics:
-                ui.write(" %-*s   %s\n" % (topics_len, t, desc))
+                rst.append(" :%s: %s\n" % (t, desc))
 
         optlist = []
-        addglobalopts(optlist, True)
-        ui.write(opttext(optlist, textwidth))
+        if not ui.quiet:
+            if ui.verbose:
+                optlist.append((_("global options:"), globalopts))
+                if name == 'shortlist':
+                    optlist.append((_('use "hg help" for the full list '
+                                           'of commands'), ()))
+            else:
+                if name == 'shortlist':
+                    msg = _('use "hg help" for the full list of commands '
+                            'or "hg -v" for details')
+                elif name and not full:
+                    msg = _('use "hg help %s" to show the full help '
+                            'text') % name
+                else:
+                    msg = _('use "hg -v help%s" to show builtin aliases and '
+                            'global options') % (name and " " + name or "")
+                optlist.append((msg, ()))
+
+        if optlist:
+            for title, options in optlist:
+                rst.append('\n%s\n' % title)
+                if options:
+                    rst.append('\n%s\n' % help.optrst(options, ui.verbose))
+        return rst
 
     def helptopic(name):
         for names, header, doc in help.helptable:
@@ -3257,20 +3262,20 @@
         else:
             raise error.UnknownCommand(name)
 
+        rst = ["%s\n\n" % header]
         # description
         if not doc:
-            doc = _("(no help text available)")
+            rst.append("    %s\n" % _("(no help text available)"))
         if util.safehasattr(doc, '__call__'):
-            doc = doc()
-
-        ui.write("%s\n\n" % header)
-        ui.write("%s" % minirst.format(doc, textwidth, indent=4))
+            rst += ["    %s\n" % l for l in doc().splitlines()]
+
         try:
             cmdutil.findcmd(name, table)
-            ui.write(_('\nuse "hg help -c %s" to see help for '
+            rst.append(_('\nuse "hg help -c %s" to see help for '
                        'the %s command\n') % (name, name))
         except error.UnknownCommand:
             pass
+        return rst
 
     def helpext(name):
         try:
@@ -3286,10 +3291,10 @@
             head, tail = doc, ""
         else:
             head, tail = doc.split('\n', 1)
-        ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
+        rst = [_('%s extension - %s\n\n') % (name.split('.')[-1], head)]
         if tail:
-            ui.write(minirst.format(tail, textwidth))
-            ui.status('\n')
+            rst.extend(tail.splitlines(True))
+            rst.append('\n')
 
         if mod:
             try:
@@ -3297,24 +3302,38 @@
             except AttributeError:
                 ct = {}
             modcmds = set([c.split('|', 1)[0] for c in ct])
-            helplist(modcmds.__contains__)
+            rst.extend(helplist(modcmds.__contains__))
         else:
-            ui.write(_('use "hg help extensions" for information on enabling '
+            rst.append(_('use "hg help extensions" for information on enabling '
                        'extensions\n'))
+        return rst
 
     def helpextcmd(name):
         cmd, ext, mod = extensions.disabledcmd(ui, name,
                                                ui.configbool('ui', 'strict'))
         doc = gettext(mod.__doc__).splitlines()[0]
 
-        msg = help.listexts(_("'%s' is provided by the following "
+        rst = help.listexts(_("'%s' is provided by the following "
                               "extension:") % cmd, {ext: doc}, indent=4)
-        ui.write(minirst.format(msg, textwidth))
-        ui.write('\n')
-        ui.write(_('use "hg help extensions" for information on enabling '
+        rst.append('\n')
+        rst.append(_('use "hg help extensions" for information on enabling '
                    'extensions\n'))
-
-    if name and name != 'shortlist':
+        return rst
+
+
+    rst = []
+    kw = opts.get('keyword')
+    if kw:
+        matches = help.topicmatch(kw)
+        for t, title in (('topics', _('Topics')),
+                         ('commands', _('Commands')),
+                         ('extensions', _('Extensions')),
+                         ('extensioncommands', _('Extension Commands'))):
+            if matches[t]:
+                rst.append('%s:\n\n' % title)
+                rst.extend(minirst.maketable(matches[t], 1))
+                rst.append('\n')
+    elif name and name != 'shortlist':
         i = None
         if unknowncmd:
             queries = (helpextcmd,)
@@ -3326,7 +3345,7 @@
             queries = (helptopic, helpcmd, helpext, helpextcmd)
         for f in queries:
             try:
-                f(name)
+                rst = f(name)
                 i = None
                 break
             except error.UnknownCommand, inst:
@@ -3335,9 +3354,13 @@
             raise i
     else:
         # program name
-        ui.status(_("Mercurial Distributed SCM\n"))
-        ui.status('\n')
-        helplist()
+        if not ui.quiet:
+            rst = [_("Mercurial Distributed SCM\n"), '\n']
+        rst.extend(helplist())
+
+    keep = ui.verbose and ['verbose'] or []
+    formatted, pruned = minirst.format(''.join(rst), textwidth, keep=keep)
+    ui.write(formatted)
 
 
 @command('identify|id',
@@ -3730,7 +3753,7 @@
                 tr.close()
             if msgs:
                 repo.savecommitmessage('\n* * *\n'.join(msgs))
-        except:
+        except: # re-raises
             # wlock.release() indirectly calls dirstate.write(): since
             # we're crashing, we do not want to change the working dir
             # parent after all, so make sure it writes nothing
@@ -4123,17 +4146,43 @@
     if not node:
         node = opts.get('rev')
 
-    if not node:
+    if node:
+        node = scmutil.revsingle(repo, node).node()
+
+    if not node and repo._bookmarkcurrent:
+        bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
+        curhead = repo[repo._bookmarkcurrent]
+        if len(bmheads) == 2:
+            if curhead == bmheads[0]:
+                node = bmheads[1]
+            else:
+                node = bmheads[0]
+        elif len(bmheads) > 2:
+            raise util.Abort(_("multiple matching bookmarks to merge - "
+                "please merge with an explicit rev or bookmark"),
+                hint=_("run 'hg heads' to see all heads"))
+        elif len(bmheads) <= 1:
+            raise util.Abort(_("no matching bookmark to merge - "
+                "please merge with an explicit rev or bookmark"),
+                hint=_("run 'hg heads' to see all heads"))
+
+    if not node and not repo._bookmarkcurrent:
         branch = repo[None].branch()
         bheads = repo.branchheads(branch)
-        if len(bheads) > 2:
+        nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
+
+        if len(nbhs) > 2:
             raise util.Abort(_("branch '%s' has %d heads - "
                                "please merge with an explicit rev")
                              % (branch, len(bheads)),
                              hint=_("run 'hg heads .' to see heads"))
 
         parent = repo.dirstate.p1()
-        if len(bheads) == 1:
+        if len(nbhs) == 1:
+            if len(bheads) > 1:
+                raise util.Abort(_("heads are bookmarked - "
+                                   "please merge with an explicit rev"),
+                                 hint=_("run 'hg heads' to see all heads"))
             if len(repo.heads()) > 1:
                 raise util.Abort(_("branch '%s' has one head - "
                                    "please merge with an explicit rev")
@@ -4148,9 +4197,10 @@
             raise util.Abort(_('working directory not at a head revision'),
                              hint=_("use 'hg update' or merge with an "
                                     "explicit revision"))
-        node = parent == bheads[0] and bheads[-1] or bheads[0]
-    else:
-        node = scmutil.revsingle(repo, node).node()
+        if parent == nbhs[0]:
+            node = nbhs[-1]
+        else:
+            node = nbhs[0]
 
     if opts.get('preview'):
         # find nodes that are ancestors of p2 but not of p1
@@ -4348,34 +4398,32 @@
         lock = repo.lock()
         try:
             # set phase
-            nodes = [ctx.node() for ctx in repo.set('%ld', revs)]
-            if not nodes:
-                raise util.Abort(_('empty revision set'))
-            olddata = repo._phaserev[:]
+            if not revs:
+                raise util.Abort(_('empty revision set'))
+            nodes = [repo[r].node() for r in revs]
+            olddata = repo._phasecache.getphaserevs(repo)[:]
             phases.advanceboundary(repo, targetphase, nodes)
             if opts['force']:
                 phases.retractboundary(repo, targetphase, nodes)
         finally:
             lock.release()
-        if olddata is not None:
-            changes = 0
-            newdata = repo._phaserev
-            changes = sum(o != newdata[i] for i, o in enumerate(olddata))
-            rejected = [n for n in nodes
-                        if newdata[repo[n].rev()] < targetphase]
-            if rejected:
-                ui.warn(_('cannot move %i changesets to a more permissive '
-                          'phase, use --force\n') % len(rejected))
-                ret = 1
-            if changes:
-                msg = _('phase changed for %i changesets\n') % changes
-                if ret:
-                    ui.status(msg)
-                else:
-                    ui.note(msg)
+        newdata = repo._phasecache.getphaserevs(repo)
+        changes = sum(o != newdata[i] for i, o in enumerate(olddata))
+        rejected = [n for n in nodes
+                    if newdata[repo[n].rev()] < targetphase]
+        if rejected:
+            ui.warn(_('cannot move %i changesets to a more permissive '
+                      'phase, use --force\n') % len(rejected))
+            ret = 1
+        if changes:
+            msg = _('phase changed for %i changesets\n') % changes
+            if ret:
+                ui.status(msg)
             else:
-                ui.warn(_('no phases changed\n'))
-                ret = 1
+                ui.note(msg)
+        else:
+            ui.warn(_('no phases changed\n'))
+            ret = 1
     return ret
 
 def postincoming(ui, repo, modheads, optupdate, checkout):
@@ -4397,7 +4445,8 @@
         if currentbranchheads == modheads:
             ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
         elif currentbranchheads > 1:
-            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
+            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
+                        "merge)\n"))
         else:
             ui.status(_("(run 'hg heads' to see heads)\n"))
     else:
@@ -5329,7 +5378,7 @@
         t += _(' (merge)')
     elif branch != parents[0].branch():
         t += _(' (new branch)')
-    elif (parents[0].extra().get('close') and
+    elif (parents[0].closesbranch() and
           pnode in repo.branchheads(branch, closed=True)):
         t += _(' (head closed)')
     elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
@@ -5348,12 +5397,12 @@
     cl = repo.changelog
     for a in [cl.rev(n) for n in bheads]:
         new[a] = 1
-    for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
+    for a in cl.ancestors([cl.rev(n) for n in bheads]):
         new[a] = 1
     for a in [p.rev() for p in parents]:
         if a >= 0:
             new[a] = 0
-    for a in cl.ancestors(*[p.rev() for p in parents]):
+    for a in cl.ancestors([p.rev() for p in parents]):
         new[a] = 0
     new = sum(new)
 
@@ -5369,7 +5418,8 @@
         t = []
         source, branches = hg.parseurl(ui.expandpath('default'))
         other = hg.peer(repo, {}, source)
-        revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
+        revs, checkout = hg.addbranchrevs(repo, other, branches,
+                                          opts.get('rev'))
         ui.debug('comparing with %s\n' % util.hidepassword(source))
         repo.ui.pushbuffer()
         commoninc = discovery.findcommonincoming(repo, other)
@@ -5587,9 +5637,9 @@
             f = url.open(ui, fname)
             gen = changegroup.readbundle(f, fname)
             modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
-        bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
     finally:
         lock.release()
+    bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
     return postincoming(ui, repo, modheads, opts.get('update'), None)
 
 @command('^update|up|checkout|co',
--- a/mercurial/commandserver.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/commandserver.py	Mon Jun 04 17:57:57 2012 -0500
@@ -142,8 +142,8 @@
             else:
                 logfile = open(logpath, 'a')
 
-        # the ui here is really the repo ui so take its baseui so we don't end up
-        # with its local configuration
+        # the ui here is really the repo ui so take its baseui so we don't end
+        # up with its local configuration
         self.ui = repo.baseui
         self.repo = repo
         self.repoui = repo.ui
@@ -166,7 +166,7 @@
 
         # is the other end closed?
         if not data:
-            raise EOFError()
+            raise EOFError
 
         return data
 
--- a/mercurial/config.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/config.py	Mon Jun 04 17:57:57 2012 -0500
@@ -35,6 +35,10 @@
     def __delitem__(self, key):
         dict.__delitem__(self, key)
         self._list.remove(key)
+    def keys(self):
+        return self._list
+    def iterkeys(self):
+        return self._list.__iter__()
 
 class config(object):
     def __init__(self, data=None):
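
The keys() and iterkeys() overrides added above make key iteration follow
insertion order by delegating to the backing _list. A stripped-down,
self-contained stand-in (not Mercurial's actual class, just an illustration
of the invariant)::

    class sortdictsketch(dict):
        """hypothetical minimal version of an insertion-ordered dict"""
        def __init__(self):
            dict.__init__(self)
            self._list = []
        def __setitem__(self, key, val):
            if key not in self:
                self._list.append(key)
            dict.__setitem__(self, key, val)
        def keys(self):
            return self._list
        def iterkeys(self):
            return self._list.__iter__()

    d = sortdictsketch()
    d['zz'] = 1
    d['aa'] = 2
    assert d.keys() == ['zz', 'aa']      # insertion order, not hash order
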
--- a/mercurial/context.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/context.py	Mon Jun 04 17:57:57 2012 -0500
@@ -8,6 +8,7 @@
 from node import nullid, nullrev, short, hex, bin
 from i18n import _
 import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
+import copies
 import match as matchmod
 import os, errno, stat
 
@@ -79,10 +80,12 @@
             self._node = repo._tagscache.tags[changeid]
             self._rev = repo.changelog.rev(self._node)
             return
-        if changeid in repo.branchtags():
-            self._node = repo.branchtags()[changeid]
+        try:
+            self._node = repo.branchtip(changeid)
             self._rev = repo.changelog.rev(self._node)
             return
+        except error.RepoLookupError:
+            pass
 
         self._node = repo.changelog._partialmatch(changeid)
         if self._node is not None:
@@ -185,6 +188,8 @@
         return self._changeset[4]
     def branch(self):
         return encoding.tolocal(self._changeset[5].get("branch"))
+    def closesbranch(self):
+        return 'close' in self._changeset[5]
     def extra(self):
         return self._changeset[5]
     def tags(self):
@@ -192,12 +197,7 @@
     def bookmarks(self):
         return self._repo.nodebookmarks(self._node)
     def phase(self):
-        if self._rev == -1:
-            return phases.public
-        if self._rev >= len(self._repo._phaserev):
-            # outdated cache
-            del self._repo._phaserev
-        return self._repo._phaserev[self._rev]
+        return self._repo._phasecache.phase(self._repo, self._rev)
     def phasestr(self):
         return phases.phasenames[self.phase()]
     def mutable(self):
@@ -223,11 +223,11 @@
         return [changectx(self._repo, x) for x in c]
 
     def ancestors(self):
-        for a in self._repo.changelog.ancestors(self._rev):
+        for a in self._repo.changelog.ancestors([self._rev]):
             yield changectx(self._repo, a)
 
     def descendants(self):
-        for d in self._repo.changelog.descendants(self._rev):
+        for d in self._repo.changelog.descendants([self._rev]):
             yield changectx(self._repo, d)
 
     def _fileinfo(self, path):
@@ -239,7 +239,8 @@
                                         _('not found in manifest'))
         if '_manifestdelta' in self.__dict__ or path in self.files():
             if path in self._manifestdelta:
-                return self._manifestdelta[path], self._manifestdelta.flags(path)
+                return (self._manifestdelta[path],
+                        self._manifestdelta.flags(path))
         node, flag = self._repo.manifest.find(self._changeset[0], path)
         if not node:
             raise error.LookupError(self._node, path,
@@ -636,27 +637,27 @@
 
         return zip(hist[base][0], hist[base][1].splitlines(True))
 
-    def ancestor(self, fc2, actx=None):
+    def ancestor(self, fc2, actx):
         """
         find the common ancestor file context, if any, of self, and fc2
 
-        If actx is given, it must be the changectx of the common ancestor
+        actx must be the changectx of the common ancestor
         of self's and fc2's respective changesets.
         """
 
-        if actx is None:
-            actx = self.changectx().ancestor(fc2.changectx())
-
-        # the trivial case: changesets are unrelated, files must be too
-        if not actx:
-            return None
-
         # the easy case: no (relevant) renames
         if fc2.path() == self.path() and self.path() in actx:
             return actx[self.path()]
-        acache = {}
+
+        # the next easiest cases: unambiguous predecessor (name trumps
+        # history)
+        if self.path() in actx and fc2.path() not in actx:
+            return actx[self.path()]
+        if fc2.path() in actx and self.path() not in actx:
+            return actx[fc2.path()]
 
         # prime the ancestor cache for the working directory
+        acache = {}
         for c in (self, fc2):
             if c._filerev is None:
                 pl = [(n.path(), n.filenode()) for n in c.parents()]
@@ -697,6 +698,14 @@
             c = visit.pop(max(visit))
             yield c
 
+    def copies(self, c2):
+        if not util.safehasattr(self, "_copycache"):
+            self._copycache = {}
+        sc2 = str(c2)
+        if sc2 not in self._copycache:
+            self._copycache[sc2] = copies.pathcopies(c2)
+        return self._copycache[sc2]
+
 class workingctx(changectx):
     """A workingctx object makes access to data related to
     the current working directory convenient.
@@ -890,6 +899,8 @@
         return self._clean
     def branch(self):
         return encoding.tolocal(self._extra['branch'])
+    def closesbranch(self):
+        return 'close' in self._extra
     def extra(self):
         return self._extra
 
@@ -1008,7 +1019,7 @@
 
     def ancestors(self):
         for a in self._repo.changelog.ancestors(
-            *[p.rev() for p in self._parents]):
+            [p.rev() for p in self._parents]):
             yield changectx(self._repo, a)
 
     def undelete(self, list):
--- a/mercurial/copies.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/copies.py	Mon Jun 04 17:57:57 2012 -0500
@@ -177,19 +177,22 @@
 
     "diverge" is a mapping of source name -> list of destination names
     for divergent renames.
+
+    "renamedelete" is a mapping of source name -> list of destination
+    names for files deleted in c1 that were renamed in c2 or vice-versa.
     """
     # avoid silly behavior for update from empty dir
     if not c1 or not c2 or c1 == c2:
-        return {}, {}
+        return {}, {}, {}
 
     # avoid silly behavior for parent -> working dir
     if c2.node() is None and c1.node() == repo.dirstate.p1():
-        return repo.dirstate.copies(), {}
+        return repo.dirstate.copies(), {}, {}
 
     limit = _findlimit(repo, c1.rev(), c2.rev())
     if limit is None:
         # no common ancestor, no copies
-        return {}, {}
+        return {}, {}, {}
     m1 = c1.manifest()
     m2 = c2.manifest()
     ma = ca.manifest()
@@ -283,26 +286,36 @@
     for f in u2:
         checkcopies(f, m2, m1)
 
+    renamedelete = {}
+    renamedelete2 = set()
     diverge2 = set()
     for of, fl in diverge.items():
-        if len(fl) == 1 or of in c2:
+        if len(fl) == 1 or of in c1 or of in c2:
             del diverge[of] # not actually divergent, or not a rename
+            if of not in c1 and of not in c2:
+                # renamed on one side, deleted on the other side, but filter
+                # out files that have been renamed and then deleted
+                renamedelete[of] = [f for f in fl if f in c1 or f in c2]
+                renamedelete2.update(fl) # reverse map for below
         else:
             diverge2.update(fl) # reverse map for below
 
     if fullcopy:
-        repo.ui.debug("  all copies found (* = to merge, ! = divergent):\n")
+        repo.ui.debug("  all copies found (* = to merge, ! = divergent, "
+                      "% = renamed and deleted):\n")
         for f in fullcopy:
             note = ""
             if f in copy:
                 note += "*"
             if f in diverge2:
                 note += "!"
+            if f in renamedelete2:
+                note += "%"
             repo.ui.debug("   %s -> %s %s\n" % (f, fullcopy[f], note))
     del diverge2
 
     if not fullcopy:
-        return copy, diverge
+        return copy, diverge, renamedelete
 
     repo.ui.debug("  checking for directory renames\n")
 
@@ -337,7 +350,7 @@
     del d1, d2, invalid
 
     if not dirmove:
-        return copy, diverge
+        return copy, diverge, renamedelete
 
     for d in dirmove:
         repo.ui.debug("  dir %s -> %s\n" % (d, dirmove[d]))
@@ -354,4 +367,4 @@
                         repo.ui.debug("  file %s -> %s\n" % (f, copy[f]))
                     break
 
-    return copy, diverge
+    return copy, diverge, renamedelete
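
To make the new return shape concrete, here is a small illustration of the
three mappings described in the docstring above; the file names are
hypothetical, and only the structure (source name -> list of destination
names for diverge and renamedelete) comes from this hunk::

    # hypothetical contents of the three mappings returned after this change
    copy = {'new.txt': 'old.txt'}                      # plain copies/renames
    diverge = {'base.txt': ['left.txt', 'right.txt']}  # divergent renames
    renamedelete = {'doc.txt': ['doc-new.txt']}        # renamed on one side,
                                                       # deleted on the other

    # callers that used to unpack two values now unpack three:
    #     copy, diverge, renamedelete = <copy-tracing call>
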
--- a/mercurial/dagparser.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/dagparser.py	Mon Jun 04 17:57:57 2012 -0500
@@ -268,7 +268,8 @@
                 s += c
                 i += 1
                 c = nextch()
-            raise util.Abort(_("invalid character in dag description: %s...") % s)
+            raise util.Abort(_('invalid character in dag description: '
+                               '%s...') % s)
 
 def dagtextlines(events,
                  addspaces=True,
@@ -436,7 +437,9 @@
         >>> dagtext([('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))])
         '+1 @ann +1'
 
-        >>> dagtext([('n', (0, [-1])), ('a', 'my annotation'), ('n', (1, [0]))])
+        >>> dagtext([('n', (0, [-1])),
+        ...          ('a', 'my annotation'),
+        ...          ('n', (1, [0]))])
         '+1 @"my annotation" +1'
 
     Commands:
@@ -447,7 +450,9 @@
         >>> dagtext([('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))])
         '+1 !"my command" +1'
 
-        >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))])
+        >>> dagtext([('n', (0, [-1])),
+        ...          ('C', 'my command line'),
+        ...          ('n', (1, [0]))])
         '+1 !!my command line\\n+1'
 
     Comments:
--- a/mercurial/dagutil.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/dagutil.py	Mon Jun 04 17:57:57 2012 -0500
@@ -26,25 +26,25 @@
 
     def nodeset(self):
         '''set of all node idxs'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def heads(self):
         '''list of head ixs'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def parents(self, ix):
         '''list of parents ixs of ix'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def inverse(self):
         '''inverse DAG, where parents becomes children, etc.'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def ancestorset(self, starts, stops=None):
         '''
         set of all ancestors of starts (incl), but stop walk at stops (excl)
         '''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def descendantset(self, starts, stops=None):
         '''
@@ -59,7 +59,7 @@
         By "connected list" we mean that if an ancestor and a descendant are in
         the list, then so is at least one path connecting them.
         '''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def externalize(self, ix):
         '''return a list of (or set if given a set) of node ids'''
--- a/mercurial/diffhelpers.c	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/diffhelpers.c	Mon Jun 04 17:57:57 2012 -0500
@@ -20,14 +20,14 @@
 /* fixup the last lines of a and b when the patch has no newline at eof */
 static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
 {
-	int hunksz = PyList_Size(hunk);
+	Py_ssize_t hunksz = PyList_Size(hunk);
 	PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
 	char *l = PyBytes_AsString(s);
-	int alen = PyList_Size(a);
-	int blen = PyList_Size(b);
+	Py_ssize_t alen = PyList_Size(a);
+	Py_ssize_t blen = PyList_Size(b);
 	char c = l[0];
 	PyObject *hline;
-	int sz = PyBytes_GET_SIZE(s);
+	Py_ssize_t sz = PyBytes_GET_SIZE(s);
 
 	if (sz > 1 && l[sz-2] == '\r')
 		/* tolerate CRLF in last line */
@@ -57,6 +57,12 @@
 	return Py_BuildValue("l", 0);
 }
 
+#if (PY_VERSION_HEX < 0x02050000)
+static const char *addlines_format = "OOiiOO";
+#else
+static const char *addlines_format = "OOnnOO";
+#endif
+
 /*
  * read lines from fp into the hunk.  The hunk is parsed into two arrays
  * a and b.  a gets the old state of the text, b gets the new state
@@ -68,13 +74,14 @@
 {
 
 	PyObject *fp, *hunk, *a, *b, *x;
-	int i;
-	int lena, lenb;
-	int num;
-	int todoa, todob;
+	Py_ssize_t i;
+	Py_ssize_t lena, lenb;
+	Py_ssize_t num;
+	Py_ssize_t todoa, todob;
 	char *s, c;
 	PyObject *l;
-	if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b))
+	if (!PyArg_ParseTuple(args, addlines_format,
+			      &fp, &hunk, &lena, &lenb, &a, &b))
 		return NULL;
 
 	while (1) {
@@ -127,8 +134,8 @@
 
 	PyObject *a, *b;
 	long bstart;
-	int alen, blen;
-	int i;
+	Py_ssize_t alen, blen;
+	Py_ssize_t i;
 	char *sa, *sb;
 
 	if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
--- a/mercurial/dirstate.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/dirstate.py	Mon Jun 04 17:57:57 2012 -0500
@@ -695,7 +695,8 @@
         if not skipstep3 and not exact:
             visit = sorted([f for f in dmap if f not in results and matchfn(f)])
             for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
-                if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
+                if (not st is None and
+                    getkind(st.st_mode) not in (regkind, lnkkind)):
                     st = None
                 results[nf] = st
         for s in subrepos:
--- a/mercurial/discovery.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/discovery.py	Mon Jun 04 17:57:57 2012 -0500
@@ -86,14 +86,14 @@
             self._computecommonmissing()
         return self._missing
 
-def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None,
-                       portable=False):
+def findcommonoutgoing(repo, other, onlyheads=None, force=False,
+                       commoninc=None, portable=False):
     '''Return an outgoing instance to identify the nodes present in repo but
     not in other.
 
-    If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
-    are included. If you already know the local repo's heads, passing them in
-    onlyheads is faster than letting them be recomputed here.
+    If onlyheads is given, only nodes ancestral to nodes in onlyheads
+    (inclusive) are included. If you already know the local repo's heads,
+    passing them in onlyheads is faster than letting them be recomputed here.
 
     If commoninc is given, it must be the result of a prior call to
     findcommonincoming(repo, other, force) to avoid recomputing it here.
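
A minimal usage sketch of the helper whose docstring appears above, assuming
Mercurial is importable, the current directory is a repository, and the peer
URL (hypothetical) is reachable; the signature and the commoninc hint come
from this hunk::

    from mercurial import discovery, hg
    from mercurial import ui as uimod

    u = uimod.ui()
    repo = hg.repository(u, '.')                             # local repository
    other = hg.peer(repo, {}, 'http://hg.example.com/repo')  # hypothetical peer

    # reuse a prior findcommonincoming() result so it is not recomputed
    commoninc = discovery.findcommonincoming(repo, other)
    og = discovery.findcommonoutgoing(repo, other, commoninc=commoninc)
    u.write('%d outgoing head(s)\n' % len(og.missingheads))
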
@@ -109,7 +109,7 @@
     og.commonheads, _any, _hds = commoninc
 
     # compute outgoing
-    if not repo._phaseroots[phases.secret]:
+    if not repo._phasecache.phaseroots[phases.secret]:
         og.missingheads = onlyheads or repo.heads()
     elif onlyheads is None:
         # use visible heads as it should be cached
@@ -140,7 +140,7 @@
         og._computecommonmissing()
         cl = repo.changelog
         missingrevs = set(cl.rev(n) for n in og._missing)
-        og._common = set(cl.ancestors(*missingrevs)) - missingrevs
+        og._common = set(cl.ancestors(missingrevs)) - missingrevs
         commonheads = set(og.commonheads)
         og.missingheads = [h for h in og.missingheads if h not in commonheads]
 
@@ -226,11 +226,20 @@
     # If there are more heads after the push than before, a suitable
     # error message, depending on unsynced status, is displayed.
     error = None
+    remotebookmarks = remote.listkeys('bookmarks')
+    localbookmarks = repo._bookmarks
+
     for branch in branches:
         newhs = set(newmap[branch])
         oldhs = set(oldmap[branch])
+        dhs = None
         if len(newhs) > len(oldhs):
-            dhs = list(newhs - oldhs)
+            # strip updates to existing remote heads from the new heads list
+            bookmarkedheads = set([repo[bm].node() for bm in localbookmarks
+                                   if bm in remotebookmarks and
+                                   remote[bm] == repo[bm].ancestor(remote[bm])])
+            dhs = list(newhs - bookmarkedheads - oldhs)
+        if dhs:
             if error is None:
                 if branch not in ('default', None):
                     error = _("push creates new remote head %s "
--- a/mercurial/dispatch.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/dispatch.py	Mon Jun 04 17:57:57 2012 -0500
@@ -12,7 +12,8 @@
 import ui as uimod
 
 class request(object):
-    def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None):
+    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
+                 ferr=None):
         self.args = args
         self.ui = ui
         self.repo = repo
@@ -87,7 +88,7 @@
                 return _dispatch(req)
             finally:
                 ui.flush()
-        except:
+        except: # re-raises
             # enter the debugger when we hit an exception
             if '--debugger' in req.args:
                 traceback.print_exc()
@@ -203,19 +204,59 @@
         return inst.code
     except socket.error, inst:
         ui.warn(_("abort: %s\n") % inst.args[-1])
-    except:
-        ui.warn(_("** unknown exception encountered,"
-                  " please report by visiting\n"))
-        ui.warn(_("**  http://mercurial.selenic.com/wiki/BugTracker\n"))
-        ui.warn(_("** Python %s\n") % sys.version.replace('\n', ''))
-        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
-               % util.version())
-        ui.warn(_("** Extensions loaded: %s\n")
-               % ", ".join([x[0] for x in extensions.extensions()]))
+    except: # re-raises
+        myver = util.version()
+        # For compatibility checking, we discard the portion of the hg
+        # version after the + on the assumption that if a "normal
+        # user" is running a build with a + in it the packager
+        # probably built from fairly close to a tag and anyone with a
+        # 'make local' copy of hg (where the version number can be out
+        # of date) will be clueful enough to notice the implausible
+        # version number and try updating.
+        compare = myver.split('+')[0]
+        ct = tuplever(compare)
+        worst = None, ct, ''
+        for name, mod in extensions.extensions():
+            testedwith = getattr(mod, 'testedwith', 'unknown')
+            report = getattr(mod, 'buglink', _('the extension author.'))
+            if testedwith == 'unknown':
+                # We found an untested extension. It's likely the culprit.
+                worst = name, testedwith, report
+                break
+            if compare not in testedwith.split() and testedwith != 'internal':
+                tested = [tuplever(v) for v in testedwith.split()]
+                nearest = max([t for t in tested if t < ct])
+                if nearest < worst[1]:
+                    worst = name, nearest, report
+        if worst[0] is not None:
+            name, testedwith, report = worst
+            if not isinstance(testedwith, str):
+                testedwith = '.'.join([str(c) for c in testedwith])
+            warning = (_('** Unknown exception encountered with '
+                         'possibly-broken third-party extension %s\n'
+                         '** which supports versions %s of Mercurial.\n'
+                         '** Please disable %s and try your action again.\n'
+                         '** If that fixes the bug please report it to %s\n')
+                       % (name, testedwith, name, report))
+        else:
+            warning = (_("** unknown exception encountered, "
+                         "please report by visiting\n") +
+                       _("** http://mercurial.selenic.com/wiki/BugTracker\n"))
+        warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
+                    (_("** Mercurial Distributed SCM (version %s)\n") % myver) +
+                    (_("** Extensions loaded: %s\n") %
+                     ", ".join([x[0] for x in extensions.extensions()])))
+        ui.warn(warning)
         raise
 
     return -1
 
+def tuplever(v):
+    try:
+        return tuple([int(i) for i in v.split('.')])
+    except ValueError:
+        return tuple()
+
 def aliasargs(fn, givenargs):
     args = getattr(fn, 'args', [])
     if args:
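
The compatibility check above compares versions as integer tuples. A
self-contained illustration of what tuplever yields (the helper is copied
from the definition above so the snippet runs on its own)::

    def tuplever(v):
        # copy of the helper defined above, for illustration
        try:
            return tuple([int(i) for i in v.split('.')])
        except ValueError:
            return tuple()

    assert tuplever('2.2.2') == (2, 2, 2)
    assert tuplever('2.3') == (2, 3)
    assert tuplever('unknown') == ()            # non-numeric versions compare lowest
    assert tuplever('2.3') > tuplever('2.2.2')  # tuple comparison orders releases
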
@@ -532,7 +573,8 @@
 
     if cmd and util.safehasattr(fn, 'shell'):
         d = lambda: fn(ui, *args[1:])
-        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
+        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
+                                  [], {})
 
     restorecommands()
 
@@ -680,7 +722,8 @@
                             return _dispatch(req)
                     if not path:
                         raise error.RepoError(_("no repository found in '%s'"
-                                                " (.hg not found)") % os.getcwd())
+                                                " (.hg not found)")
+                                              % os.getcwd())
                     raise
         if repo:
             ui = repo.ui
@@ -703,7 +746,7 @@
     field = ui.config('profiling', 'sort', default='inlinetime')
     climit = ui.configint('profiling', 'nested', default=5)
 
-    if not format in ['text', 'kcachegrind']:
+    if format not in ['text', 'kcachegrind']:
         ui.warn(_("unrecognized profiling format '%s'"
                     " - Ignored\n") % format)
         format = 'text'
--- a/mercurial/extensions.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/extensions.py	Mon Jun 04 17:57:57 2012 -0500
@@ -274,7 +274,7 @@
 
     paths = _disabledpaths()
     if not paths:
-        return None
+        return {}
 
     exts = {}
     for name, path in paths.iteritems():
@@ -301,7 +301,7 @@
 
 def disabledcmd(ui, cmd, strict=False):
     '''import disabled extensions until cmd is found.
-    returns (cmdname, extname, doc)'''
+    returns (cmdname, extname, module)'''
 
     paths = _disabledpaths(strip_init=True)
     if not paths:
@@ -329,18 +329,19 @@
             cmd = aliases[0]
         return (cmd, name, mod)
 
+    ext = None
     # first, search for an extension with the same name as the command
     path = paths.pop(cmd, None)
     if path:
         ext = findcmd(cmd, cmd, path)
-        if ext:
-            return ext
-
-    # otherwise, interrogate each extension until there's a match
-    for name, path in paths.iteritems():
-        ext = findcmd(cmd, name, path)
-        if ext:
-            return ext
+    if not ext:
+        # otherwise, interrogate each extension until there's a match
+        for name, path in paths.iteritems():
+            ext = findcmd(cmd, name, path)
+            if ext:
+                break
+    if ext and 'DEPRECATED' not in ext.__doc__:
+        return ext
 
     raise error.UnknownCommand(cmd)
 
--- a/mercurial/hbisect.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/hbisect.py	Mon Jun 04 17:57:57 2012 -0500
@@ -69,10 +69,10 @@
 
     # build children dict
     children = {}
-    visit = [badrev]
+    visit = util.deque([badrev])
     candidates = []
     while visit:
-        rev = visit.pop(0)
+        rev = visit.popleft()
         if ancestors[rev] == []:
             candidates.append(rev)
             for prev in clparents(rev):
@@ -132,7 +132,7 @@
 
 
 def load_state(repo):
-    state = {'good': [], 'bad': [], 'skip': []}
+    state = {'current': [], 'good': [], 'bad': [], 'skip': []}
     if os.path.exists(repo.join("bisect.state")):
         for l in repo.opener("bisect.state"):
             kind, node = l[:-1].split()
@@ -164,10 +164,11 @@
     - ``pruned``             : csets that are goods, bads or skipped
     - ``untested``           : csets whose fate is yet unknown
     - ``ignored``            : csets ignored due to DAG topology
+    - ``current``            : the cset currently being bisected
     """
     state = load_state(repo)
-    if status in ('good', 'bad', 'skip'):
-        return [repo.changelog.rev(n) for n in state[status]]
+    if status in ('good', 'bad', 'skip', 'current'):
+        return map(repo.changelog.rev, state[status])
     else:
         # In the following sets, we do *not* call 'bisect()' with more
         # than one level of recursion, because that can be very, very
@@ -233,7 +234,7 @@
     if rev in get(repo, 'skip'):
         # i18n: bisect changeset status
         return _('skipped')
-    if rev in get(repo, 'untested'):
+    if rev in get(repo, 'untested') or rev in get(repo, 'current'):
         # i18n: bisect changeset status
         return _('untested')
     if rev in get(repo, 'ignored'):
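
load_state() above reads one '<kind> <node>' pair per line of the
bisect.state file, and 'current' is now one of the recognized kinds. A
standalone sketch of parsing the same format (the sample contents below are
hypothetical)::

    # parse bisect state lines the same way load_state() does above
    sample = ("current 1234567890abcdef1234567890abcdef12345678\n"
              "good aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
              "bad bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n")
    state = {'current': [], 'good': [], 'bad': [], 'skip': []}
    for l in sample.splitlines(True):
        kind, node = l[:-1].split()
        state[kind].append(node)
    assert state['current'] == ['1234567890abcdef1234567890abcdef12345678']
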
--- a/mercurial/help.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/help.py	Mon Jun 04 17:57:57 2012 -0500
@@ -6,27 +6,111 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import gettext, _
-import sys, os
+import itertools, sys, os
 import extensions, revset, fileset, templatekw, templatefilters, filemerge
-import util
+import encoding, util, minirst
 
 def listexts(header, exts, indent=1):
     '''return a text listing of the given extensions'''
-    if not exts:
-        return ''
-    maxlength = max(len(e) for e in exts)
-    result = '\n%s\n\n' % header
-    for name, desc in sorted(exts.iteritems()):
-        result += '%s%-*s %s\n' % (' ' * indent, maxlength + 2,
-                                   ':%s:' % name, desc)
-    return result
+    rst = []
+    if exts:
+        rst.append('\n%s\n\n' % header)
+        for name, desc in sorted(exts.iteritems()):
+            rst.append('%s:%s: %s\n' % (' ' * indent, name, desc))
+    return rst
 
 def extshelp():
-    doc = loaddoc('extensions')()
-    doc += listexts(_('enabled extensions:'), extensions.enabled())
-    doc += listexts(_('disabled extensions:'), extensions.disabled())
+    rst = loaddoc('extensions')().splitlines(True)
+    rst.extend(listexts(_('enabled extensions:'), extensions.enabled()))
+    rst.extend(listexts(_('disabled extensions:'), extensions.disabled()))
+    doc = ''.join(rst)
     return doc
 
+def optrst(options, verbose):
+    data = []
+    multioccur = False
+    for option in options:
+        if len(option) == 5:
+            shortopt, longopt, default, desc, optlabel = option
+        else:
+            shortopt, longopt, default, desc = option
+            optlabel = _("VALUE") # default label
+
+        if _("DEPRECATED") in desc and not verbose:
+            continue
+
+        so = ''
+        if shortopt:
+            so = '-' + shortopt
+        lo = '--' + longopt
+        if default:
+            desc += _(" (default: %s)") % default
+
+        if isinstance(default, list):
+            lo += " %s [+]" % optlabel
+            multioccur = True
+        elif (default is not None) and not isinstance(default, bool):
+            lo += " %s" % optlabel
+
+        data.append((so, lo, desc))
+
+    rst = minirst.maketable(data, 1)
+
+    if multioccur:
+        rst.append(_("\n[+] marked option can be specified multiple times\n"))
+
+    return ''.join(rst)
+
+def topicmatch(kw):
+    """Return help topics matching kw.
+
+    Returns {'section': [(name, summary), ...], ...} where section is
+    one of topics, commands, extensions, or extensioncommands.
+    """
+    kw = encoding.lower(kw)
+    def lowercontains(container):
+        return kw in encoding.lower(container)  # translated in helptable
+    results = {'topics': [],
+               'commands': [],
+               'extensions': [],
+               'extensioncommands': [],
+               }
+    for names, header, doc in helptable:
+        if (sum(map(lowercontains, names))
+            or lowercontains(header)
+            or lowercontains(doc())):
+            results['topics'].append((names[0], header))
+    import commands # avoid cycle
+    for cmd, entry in commands.table.iteritems():
+        if cmd.startswith('debug'):
+            continue
+        if len(entry) == 3:
+            summary = entry[2]
+        else:
+            summary = ''
+        # translate docs *before* searching there
+        docs = _(getattr(entry[0], '__doc__', None)) or ''
+        if kw in cmd or lowercontains(summary) or lowercontains(docs):
+            doclines = docs.splitlines()
+            if doclines:
+                summary = doclines[0]
+            cmdname = cmd.split('|')[0].lstrip('^')
+            results['commands'].append((cmdname, summary))
+    for name, docs in itertools.chain(
+        extensions.enabled().iteritems(),
+        extensions.disabled().iteritems()):
+        # extensions.load ignores the UI argument
+        mod = extensions.load(None, name, '')
+        if lowercontains(name) or lowercontains(docs):
+            # extension docs are already translated
+            results['extensions'].append((name, docs.splitlines()[0]))
+        for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems():
+            if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])):
+                cmdname = cmd.split('|')[0].lstrip('^')
+                cmddoc = getattr(mod, '__doc__', '').splitlines()[0]
+                results['extensioncommands'].append((cmdname, _(cmddoc)))
+    return results
+
 def loaddoc(topic):
     """Return a delayed loader for help/topic.txt."""
 
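
To make the new help machinery concrete: optrst() consumes fancyopts-style option tuples (short, long, default, description, plus an optional value label) and renders them with minirst.maketable(), while topicmatch() returns its hits keyed by section ('topics', 'commands', 'extensions', 'extensioncommands'). The sketch below mimics only the option formatting, by hand and without minirst, purely to show the tuple layout; optlines() and the sample opts are illustrative, not part of the API:

def optlines(options, verbose=False):
    # Simplified stand-in for help.optrst(): same tuple layout, but the
    # columns are aligned by hand instead of via minirst.maketable().
    rows = []
    for opt in options:
        if len(opt) == 5:
            short, long_, default, desc, label = opt
        else:
            short, long_, default, desc = opt
            label = 'VALUE'
        if 'DEPRECATED' in desc and not verbose:
            continue
        if default:
            desc += ' (default: %s)' % default
        switches = '--' + long_
        if isinstance(default, list):
            switches += ' %s [+]' % label      # repeatable option
        elif default is not None and not isinstance(default, bool):
            switches += ' %s' % label
        if short:
            switches = '-%s %s' % (short, switches)
        rows.append((switches, desc))
    width = max(len(s) for s, _ in rows) if rows else 0
    return ['%-*s  %s' % (width, s, d) for s, d in rows]

# Illustrative tuples, not taken from any real command table:
opts = [
    ('r', 'rev', [], 'revisions to include', 'REV'),
    ('', 'template', '', 'display with template', 'TEMPLATE'),
]
for line in optlines(opts):
    print(line)
# prints, roughly:
#   -r --rev REV [+]     revisions to include
#   --template TEMPLATE  display with template
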
--- a/mercurial/help/revisions.txt	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/help/revisions.txt	Mon Jun 04 17:57:57 2012 -0500
@@ -12,13 +12,13 @@
 identifier. A short-form identifier is only valid if it is the prefix
 of exactly one full-length identifier.
 
-Any other string is treated as a tag or branch name. A tag name is a
-symbolic name associated with a revision identifier. A branch name
-denotes the tipmost revision of that branch. Tag and branch names must
-not contain the ":" character.
+Any other string is treated as a bookmark, tag, or branch name. A
+bookmark is a movable pointer to a revision. A tag is a permanent name
+associated with a revision. A branch name denotes the tipmost revision
+of that branch. Bookmark, tag, and branch names must not contain the ":"
+character.
 
-The reserved name "tip" is a special tag that always identifies the
-most recent revision.
+The reserved name "tip" always identifies the most recent revision.
 
 The reserved name "null" indicates the null revision. This is the
 revision of an empty repository, and the parent of revision 0.
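
A toy illustration of the naming rules above. The precedence of bookmark over tag over branch follows the order the paragraph lists them in, which is an assumption of this sketch rather than something the help text spells out; resolvename() is not a Mercurial API:

def resolvename(name, bookmarks, tags, branchtips):
    # bookmarks and tags map name -> node; branchtips maps a branch name
    # to its tipmost node.  'tip' and 'null' are reserved and would be
    # handled before any of these tables in real resolution code.
    if ':' in name:
        raise ValueError("revision names must not contain ':'")
    for table in (bookmarks, tags, branchtips):
        if name in table:
            return table[name]
    raise KeyError('unknown revision: %s' % name)
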
--- a/mercurial/hg.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/hg.py	Mon Jun 04 17:57:57 2012 -0500
@@ -203,7 +203,7 @@
         else:
             ui.debug("copied %d files\n" % num)
         return destlock
-    except:
+    except: # re-raises
         release(destlock)
         raise
 
--- a/mercurial/hgweb/common.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/hgweb/common.py	Mon Jun 04 17:57:57 2012 -0500
@@ -95,7 +95,7 @@
     def __getattr__(self, attr):
         if attr in ('close', 'readline', 'readlines', '__iter__'):
             return getattr(self.f, attr)
-        raise AttributeError()
+        raise AttributeError
 
 def _statusmessage(code):
     from BaseHTTPServer import BaseHTTPRequestHandler
--- a/mercurial/hgweb/hgweb_mod.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/hgweb/hgweb_mod.py	Mon Jun 04 17:57:57 2012 -0500
@@ -36,7 +36,7 @@
             self.repo = repo
 
         self.repo.ui.setconfig('ui', 'report_untrusted', 'off')
-        self.repo.ui.setconfig('ui', 'interactive', 'off')
+        self.repo.ui.setconfig('ui', 'nontty', 'true')
         hook.redirect(True)
         self.mtime = -1
         self.size = -1
@@ -73,7 +73,8 @@
             self.repo = hg.repository(self.repo.ui, self.repo.root)
             self.maxchanges = int(self.config("web", "maxchanges", 10))
             self.stripecount = int(self.config("web", "stripes", 1))
-            self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
+            self.maxshortchanges = int(self.config("web", "maxshortchanges",
+                                                   60))
             self.maxfiles = int(self.config("web", "maxfiles", 10))
             self.allowpull = self.configbool("web", "allowpull", True)
             encoding.encoding = self.config("web", "encoding",
--- a/mercurial/hgweb/hgwebdir_mod.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/hgweb/hgwebdir_mod.py	Mon Jun 04 17:57:57 2012 -0500
@@ -97,7 +97,7 @@
         else:
             u = ui.ui()
             u.setconfig('ui', 'report_untrusted', 'off')
-            u.setconfig('ui', 'interactive', 'off')
+            u.setconfig('ui', 'nontty', 'true')
 
         if not isinstance(self.conf, (dict, list, tuple)):
             map = {'paths': 'hgweb-paths'}
--- a/mercurial/hgweb/webcommands.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/hgweb/webcommands.py	Mon Jun 04 17:57:57 2012 -0500
@@ -441,7 +441,7 @@
     tips = (web.repo[n] for t, n in web.repo.branchtags().iteritems())
     heads = web.repo.heads()
     parity = paritygen(web.stripecount)
-    sortkey = lambda ctx: ('close' not in ctx.extra(), ctx.rev())
+    sortkey = lambda ctx: (not ctx.closesbranch(), ctx.rev())
 
     def entries(limit, **map):
         count = 0
@@ -784,24 +784,76 @@
 
     dag = graphmod.dagwalker(web.repo, range(startrev, downrev - 1, -1))
     tree = list(graphmod.colored(dag, web.repo))
-    canvasheight = (len(tree) + 1) * bg_height - 27
-    data = []
-    for (id, type, ctx, vtx, edges) in tree:
-        if type != graphmod.CHANGESET:
-            continue
-        node = str(ctx)
-        age = templatefilters.age(ctx.date())
-        desc = templatefilters.firstline(ctx.description())
-        desc = cgi.escape(templatefilters.nonempty(desc))
-        user = cgi.escape(templatefilters.person(ctx.user()))
-        branch = ctx.branch()
-        branch = branch, web.repo.branchtags().get(branch) == ctx.node()
-        data.append((node, vtx, edges, desc, user, age, branch, ctx.tags(),
-                     ctx.bookmarks()))
+
+    def getcolumns(tree):
+        cols = 0
+        for (id, type, ctx, vtx, edges) in tree:
+            if type != graphmod.CHANGESET:
+                continue
+            cols = max(cols, max([edge[0] for edge in edges] or [0]),
+                             max([edge[1] for edge in edges] or [0]))
+        return cols
+
+    def graphdata(usetuples, **map):
+        data = []
+
+        row = 0
+        for (id, type, ctx, vtx, edges) in tree:
+            if type != graphmod.CHANGESET:
+                continue
+            node = str(ctx)
+            age = templatefilters.age(ctx.date())
+            desc = templatefilters.firstline(ctx.description())
+            desc = cgi.escape(templatefilters.nonempty(desc))
+            user = cgi.escape(templatefilters.person(ctx.user()))
+            branch = ctx.branch()
+            try:
+                branchnode = web.repo.branchtip(branch)
+            except error.RepoLookupError:
+                branchnode = None
+            branch = branch, branchnode == ctx.node()
+
+            if usetuples:
+                data.append((node, vtx, edges, desc, user, age, branch,
+                             ctx.tags(), ctx.bookmarks()))
+            else:
+                edgedata = [dict(col=edge[0], nextcol=edge[1],
+                                 color=(edge[2] - 1) % 6 + 1,
+                                 width=edge[3], bcolor=edge[4])
+                            for edge in edges]
+
+                data.append(
+                    dict(node=node,
+                         col=vtx[0],
+                         color=(vtx[1] - 1) % 6 + 1,
+                         edges=edgedata,
+                         row=row,
+                         nextrow=row + 1,
+                         desc=desc,
+                         user=user,
+                         age=age,
+                         bookmarks=webutil.nodebookmarksdict(
+                            web.repo, ctx.node()),
+                         branches=webutil.nodebranchdict(web.repo, ctx),
+                         inbranch=webutil.nodeinbranch(web.repo, ctx),
+                         tags=webutil.nodetagsdict(web.repo, ctx.node())))
+
+            row += 1
+
+        return data
+
+    cols = getcolumns(tree)
+    rows = len(tree)
+    canvasheight = (rows + 1) * bg_height - 27
 
     return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
                 lessvars=lessvars, morevars=morevars, downrev=downrev,
-                canvasheight=canvasheight, jsdata=data, bg_height=bg_height,
+                cols=cols, rows=rows,
+                canvaswidth=(cols + 1) * bg_height,
+                truecanvasheight=rows * bg_height,
+                canvasheight=canvasheight, bg_height=bg_height,
+                jsdata=lambda **x: graphdata(True, **x),
+                nodes=lambda **x: graphdata(False, **x),
                 node=revnode_hex, changenav=changenav)
 
 def _getdoc(e):
--- a/mercurial/hgweb/webutil.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/hgweb/webutil.py	Mon Jun 04 17:57:57 2012 -0500
@@ -98,16 +98,23 @@
     branches = []
     branch = ctx.branch()
     # If this is an empty repo, ctx.node() == nullid,
-    # ctx.branch() == 'default', but branchtags() is
-    # an empty dict. Using dict.get avoids a traceback.
-    if repo.branchtags().get(branch) == ctx.node():
+    # ctx.branch() == 'default'.
+    try:
+        branchnode = repo.branchtip(branch)
+    except error.RepoLookupError:
+        branchnode = None
+    if branchnode == ctx.node():
         branches.append({"name": branch})
     return branches
 
 def nodeinbranch(repo, ctx):
     branches = []
     branch = ctx.branch()
-    if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
+    try:
+        branchnode = repo.branchtip(branch)
+    except error.RepoLookupError:
+        branchnode = None
+    if branch != 'default' and branchnode != ctx.node():
         branches.append({"name": branch})
     return branches
 
--- a/mercurial/httpclient/__init__.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/httpclient/__init__.py	Mon Jun 04 17:57:57 2012 -0500
@@ -45,6 +45,7 @@
 import select
 import socket
 
+import _readers
 import socketutil
 
 logger = logging.getLogger(__name__)
@@ -54,8 +55,6 @@
 HTTP_VER_1_0 = 'HTTP/1.0'
 HTTP_VER_1_1 = 'HTTP/1.1'
 
-_LEN_CLOSE_IS_END = -1
-
 OUTGOING_BUFFER_SIZE = 1 << 15
 INCOMING_BUFFER_SIZE = 1 << 20
 
@@ -83,23 +82,19 @@
     The response will continue to load as available. If you need the
     complete response before continuing, check the .complete() method.
     """
-    def __init__(self, sock, timeout):
+    def __init__(self, sock, timeout, method):
         self.sock = sock
+        self.method = method
         self.raw_response = ''
-        self._body = None
         self._headers_len = 0
-        self._content_len = 0
         self.headers = None
         self.will_close = False
         self.status_line = ''
         self.status = None
+        self.continued = False
         self.http_version = None
         self.reason = None
-        self._chunked = False
-        self._chunked_done = False
-        self._chunked_until_next = 0
-        self._chunked_skip_bytes = 0
-        self._chunked_preloaded_block = None
+        self._reader = None
 
         self._read_location = 0
         self._eol = EOL
@@ -117,11 +112,12 @@
         socket is closed, this will nearly always return False, even
         in cases where all the data has actually been loaded.
         """
-        if self._chunked:
-            return self._chunked_done
-        if self._content_len == _LEN_CLOSE_IS_END:
-            return False
-        return self._body is not None and len(self._body) >= self._content_len
+        if self._reader:
+            return self._reader.done()
+
+    def _close(self):
+        if self._reader is not None:
+            self._reader._close()
 
     def readline(self):
         """Read a single line from the response body.
@@ -129,30 +125,34 @@
         This may block until either a line ending is found or the
         response is complete.
         """
-        eol = self._body.find('\n', self._read_location)
-        while eol == -1 and not self.complete():
+        # TODO: move this into the reader interface where it can be
+        # smarter (and probably avoid copies)
+        bytes = []
+        while not bytes:
+            try:
+                bytes = [self._reader.read(1)]
+            except _readers.ReadNotReady:
+                self._select()
+        while bytes[-1] != '\n' and not self.complete():
             self._select()
-            eol = self._body.find('\n', self._read_location)
-        if eol != -1:
-            eol += 1
-        else:
-            eol = len(self._body)
-        data = self._body[self._read_location:eol]
-        self._read_location = eol
-        return data
+            bytes.append(self._reader.read(1))
+        if bytes[-1] != '\n':
+            next = self._reader.read(1)
+            while next and next != '\n':
+                bytes.append(next)
+                next = self._reader.read(1)
+            bytes.append(next)
+        return ''.join(bytes)
 
     def read(self, length=None):
         # if length is None, unbounded read
         while (not self.complete()  # never select on a finished read
                and (not length  # unbounded, so we wait for complete()
-                    or (self._read_location + length) > len(self._body))):
+                    or length > self._reader.available_data)):
             self._select()
         if not length:
-            length = len(self._body) - self._read_location
-        elif len(self._body) < (self._read_location + length):
-            length = len(self._body) - self._read_location
-        r = self._body[self._read_location:self._read_location + length]
-        self._read_location += len(r)
+            length = self._reader.available_data
+        r = self._reader.read(length)
         if self.complete() and self.will_close:
             self.sock.close()
         return r
@@ -160,93 +160,35 @@
     def _select(self):
         r, _, _ = select.select([self.sock], [], [], self._timeout)
         if not r:
-            # socket was not readable. If the response is not complete
-            # and we're not a _LEN_CLOSE_IS_END response, raise a timeout.
-            # If we are a _LEN_CLOSE_IS_END response and we have no data,
-            # raise a timeout.
-            if not (self.complete() or
-                    (self._content_len == _LEN_CLOSE_IS_END and self._body)):
+            # socket was not readable. If the response is not
+            # complete, raise a timeout.
+            if not self.complete():
                 logger.info('timed out with timeout of %s', self._timeout)
                 raise HTTPTimeoutException('timeout reading data')
-            logger.info('cl: %r body: %r', self._content_len, self._body)
         try:
             data = self.sock.recv(INCOMING_BUFFER_SIZE)
-            # If the socket was readable and no data was read, that
-            # means the socket was closed. If this isn't a
-            # _CLOSE_IS_END socket, then something is wrong if we're
-            # here (we shouldn't enter _select() if the response is
-            # complete), so abort.
-            if not data and self._content_len != _LEN_CLOSE_IS_END:
-                raise HTTPRemoteClosedError(
-                    'server appears to have closed the socket mid-response')
         except socket.sslerror, e:
             if e.args[0] != socket.SSL_ERROR_WANT_READ:
                 raise
             logger.debug('SSL_WANT_READ in _select, should retry later')
             return True
         logger.debug('response read %d data during _select', len(data))
+        # If the socket was readable and no data was read, that means
+        # the socket was closed. Inform the reader (if any) so it can
+        # raise an exception if this is an invalid situation.
         if not data:
-            if self.headers and self._content_len == _LEN_CLOSE_IS_END:
-                self._content_len = len(self._body)
+            if self._reader:
+                self._reader._close()
             return False
         else:
             self._load_response(data)
             return True
 
-    def _chunked_parsedata(self, data):
-        if self._chunked_preloaded_block:
-            data = self._chunked_preloaded_block + data
-            self._chunked_preloaded_block = None
-        while data:
-            logger.debug('looping with %d data remaining', len(data))
-            # Slice out anything we should skip
-            if self._chunked_skip_bytes:
-                if len(data) <= self._chunked_skip_bytes:
-                    self._chunked_skip_bytes -= len(data)
-                    data = ''
-                    break
-                else:
-                    data = data[self._chunked_skip_bytes:]
-                    self._chunked_skip_bytes = 0
-
-            # determine how much is until the next chunk
-            if self._chunked_until_next:
-                amt = self._chunked_until_next
-                logger.debug('reading remaining %d of existing chunk', amt)
-                self._chunked_until_next = 0
-                body = data
-            else:
-                try:
-                    amt, body = data.split(self._eol, 1)
-                except ValueError:
-                    self._chunked_preloaded_block = data
-                    logger.debug('saving %r as a preloaded block for chunked',
-                                 self._chunked_preloaded_block)
-                    return
-                amt = int(amt, base=16)
-                logger.debug('reading chunk of length %d', amt)
-                if amt == 0:
-                    self._chunked_done = True
-
-            # read through end of what we have or the chunk
-            self._body += body[:amt]
-            if len(body) >= amt:
-                data = body[amt:]
-                self._chunked_skip_bytes = len(self._eol)
-            else:
-                self._chunked_until_next = amt - len(body)
-                self._chunked_skip_bytes = 0
-                data = ''
-
     def _load_response(self, data):
-        if self._chunked:
-            self._chunked_parsedata(data)
-            return
-        elif self._body is not None:
-            self._body += data
-            return
-
-        # We haven't seen end of headers yet
+        # Being here implies we're not at the end of the headers yet,
+        # since at the end of this method if headers were completely
+        # loaded we replace this method with the load() method of the
+        # reader we created.
         self.raw_response += data
         # This is a bogus server with bad line endings
         if self._eol not in self.raw_response:
@@ -270,6 +212,7 @@
         http_ver, status = hdrs.split(' ', 1)
         if status.startswith('100'):
             self.raw_response = body
+            self.continued = True
             logger.debug('continue seen, setting body to %r', body)
             return
 
@@ -289,23 +232,46 @@
         if self._eol != EOL:
             hdrs = hdrs.replace(self._eol, '\r\n')
         headers = rfc822.Message(cStringIO.StringIO(hdrs))
+        content_len = None
         if HDR_CONTENT_LENGTH in headers:
-            self._content_len = int(headers[HDR_CONTENT_LENGTH])
+            content_len = int(headers[HDR_CONTENT_LENGTH])
         if self.http_version == HTTP_VER_1_0:
             self.will_close = True
         elif HDR_CONNECTION_CTRL in headers:
             self.will_close = (
                 headers[HDR_CONNECTION_CTRL].lower() == CONNECTION_CLOSE)
-            if self._content_len == 0:
-                self._content_len = _LEN_CLOSE_IS_END
         if (HDR_XFER_ENCODING in headers
             and headers[HDR_XFER_ENCODING].lower() == XFER_ENCODING_CHUNKED):
-            self._body = ''
-            self._chunked_parsedata(body)
-            self._chunked = True
-        if self._body is None:
-            self._body = body
+            self._reader = _readers.ChunkedReader(self._eol)
+            logger.debug('using a chunked reader')
+        else:
+            # HEAD responses are forbidden from returning a body, and
+            # it's implausible for a CONNECT response to use
+            # close-is-end logic for an OK response.
+            if (self.method == 'HEAD' or
+                (self.method == 'CONNECT' and content_len is None)):
+                content_len = 0
+            if content_len is not None:
+                logger.debug('using a content-length reader with length %d',
+                             content_len)
+                self._reader = _readers.ContentLengthReader(content_len)
+            else:
+                # Response body had no length specified and is not
+                # chunked, so the end of the body will only be
+                # identifiable by the termination of the socket by the
+                # server. My interpretation of the spec is that we are
+                # correct to hit this case when transfer-encoding,
+                # content-length, and connection-control are all left
+                # unspecified.
+                self._reader = _readers.CloseIsEndReader()
+                logger.debug('using a close-is-end reader')
+                self.will_close = True
+
+        if body:
+            self._reader._load(body)
+        logger.debug('headers complete')
         self.headers = headers
+        self._load_response = self._reader._load
 
 
 class HTTPConnection(object):
@@ -382,13 +348,14 @@
                                          {}, HTTP_VER_1_0)
                 sock.send(data)
                 sock.setblocking(0)
-                r = self.response_class(sock, self.timeout)
+                r = self.response_class(sock, self.timeout, 'CONNECT')
                 timeout_exc = HTTPTimeoutException(
                     'Timed out waiting for CONNECT response from proxy')
                 while not r.complete():
                     try:
                         if not r._select():
-                            raise timeout_exc
+                            if not r.complete():
+                                raise timeout_exc
                     except HTTPTimeoutException:
                         # This raise/except pattern looks goofy, but
                         # _select can raise the timeout as well as the
@@ -405,6 +372,10 @@
         else:
             sock = socketutil.create_connection((self.host, self.port))
         if self.ssl:
+            # This is the default, but in the case of proxied SSL
+            # requests the proxy logic above will have cleared
+            # blocking mode, so reenable it just to be safe.
+            sock.setblocking(1)
             logger.debug('wrapping socket for ssl with options %r',
                          self.ssl_opts)
             sock = socketutil.wrap_socket(sock, **self.ssl_opts)
@@ -527,7 +498,7 @@
             out = outgoing_headers or body
             blocking_on_continue = False
             if expect_continue and not outgoing_headers and not (
-                response and response.headers):
+                response and (response.headers or response.continued)):
                 logger.info(
                     'waiting up to %s seconds for'
                     ' continue response from server',
@@ -550,11 +521,6 @@
                                 'server, optimistically sending request body')
                 else:
                     raise HTTPTimeoutException('timeout sending data')
-            # TODO exceptional conditions with select? (what are those be?)
-            # TODO if the response is loading, must we finish sending at all?
-            #
-            # Certainly not if it's going to close the connection and/or
-            # the response is already done...I think.
             was_first = first
 
             # incoming data
@@ -572,11 +538,11 @@
                         logger.info('socket appears closed in read')
                         self.sock = None
                         self._current_response = None
+                        if response is not None:
+                            response._close()
                         # This if/elif ladder is a bit subtle,
                         # comments in each branch should help.
-                        if response is not None and (
-                            response.complete() or
-                            response._content_len == _LEN_CLOSE_IS_END):
+                        if response is not None and response.complete():
                             # Server responded completely and then
                             # closed the socket. We should just shut
                             # things down and let the caller get their
@@ -605,7 +571,7 @@
                                 'response was missing or incomplete!')
                     logger.debug('read %d bytes in request()', len(data))
                     if response is None:
-                        response = self.response_class(r[0], self.timeout)
+                        response = self.response_class(r[0], self.timeout, method)
                     response._load_response(data)
                     # Jump to the next select() call so we load more
                     # data if the server is still sending us content.
@@ -613,10 +579,6 @@
                 except socket.error, e:
                     if e[0] != errno.EPIPE and not was_first:
                         raise
-                    if (response._content_len
-                        and response._content_len != _LEN_CLOSE_IS_END):
-                        outgoing_headers = sent_data + outgoing_headers
-                        reconnect('read')
 
             # outgoing data
             if w and out:
@@ -661,7 +623,7 @@
         # close if the server response said to or responded before eating
         # the whole request
         if response is None:
-            response = self.response_class(self.sock, self.timeout)
+            response = self.response_class(self.sock, self.timeout, method)
         complete = response.complete()
         data_left = bool(outgoing_headers or body)
         if data_left:
@@ -679,7 +641,8 @@
             raise httplib.ResponseNotReady()
         r = self._current_response
         while r.headers is None:
-            r._select()
+            if not r._select() and not r.complete():
+                raise _readers.HTTPRemoteClosedError()
         if r.will_close:
             self.sock = None
             self._current_response = None
@@ -705,7 +668,7 @@
 class HTTPStateError(httplib.HTTPException):
     """Invalid internal state encountered."""
 
-
-class HTTPRemoteClosedError(httplib.HTTPException):
-    """The server closed the remote socket in the middle of a response."""
+# Forward this exception type from _readers since it needs to be part
+# of the public API.
+HTTPRemoteClosedError = _readers.HTTPRemoteClosedError
 # no-check-code
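
The reader selection that replaces the old _LEN_CLOSE_IS_END bookkeeping boils down to four cases: chunked transfer-encoding, the HEAD/CONNECT no-body special case, an explicit Content-Length, and close-is-end when nothing else applies. A condensed sketch of that decision using the reader classes the new _readers module provides; here headers is assumed to be a plain dict with lower-cased keys, unlike the rfc822.Message the real code parses:

from mercurial.httpclient import _readers

def pickreader(method, headers, eol='\r\n'):
    # Chunked transfer-encoding always wins.
    if headers.get('transfer-encoding', '').lower() == 'chunked':
        return _readers.ChunkedReader(eol)
    length = headers.get('content-length')
    # HEAD responses carry no body, and a CONNECT reply without an
    # explicit length is treated as empty rather than close-is-end.
    if method == 'HEAD' or (method == 'CONNECT' and length is None):
        length = 0
    if length is not None:
        return _readers.ContentLengthReader(int(length))
    # No length information at all: the body ends only when the server
    # closes the socket (the real code also sets will_close here).
    return _readers.CloseIsEndReader()
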
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/httpclient/_readers.py	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,195 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Reader objects to abstract out different body response types.
+
+This module is package-private. It is not expected that these will
+have any clients outside of httpplus.
+"""
+
+import httplib
+import itertools
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class ReadNotReady(Exception):
+    """Raised when read() is attempted but not enough data is loaded."""
+
+
+class HTTPRemoteClosedError(httplib.HTTPException):
+    """The server closed the remote socket in the middle of a response."""
+
+
+class AbstractReader(object):
+    """Abstract base class for response readers.
+
+    Subclasses must implement _load, and should implement _close if
+    it's not an error for the server to close their socket without
+    some termination condition being detected during _load.
+    """
+    def __init__(self):
+        self._finished = False
+        self._done_chunks = []
+
+    @property
+    def available_data(self):
+        return sum(map(len, self._done_chunks))
+
+    def done(self):
+        return self._finished
+
+    def read(self, amt):
+        if self.available_data < amt and not self._finished:
+            raise ReadNotReady()
+        need = [amt]
+        def pred(s):
+            needed = need[0] > 0
+            need[0] -= len(s)
+            return needed
+        blocks = list(itertools.takewhile(pred, self._done_chunks))
+        self._done_chunks = self._done_chunks[len(blocks):]
+        over_read = sum(map(len, blocks)) - amt
+        if over_read > 0 and blocks:
+            logger.debug('need to reinsert %d data into done chunks', over_read)
+            last = blocks[-1]
+            blocks[-1], reinsert = last[:-over_read], last[-over_read:]
+            self._done_chunks.insert(0, reinsert)
+        result = ''.join(blocks)
+        assert len(result) == amt or (self._finished and len(result) < amt)
+        return result
+
+    def _load(self, data): # pragma: no cover
+        """Subclasses must implement this.
+
+        As data is available to be read out of this object, it should
+        be placed into the _done_chunks list. Subclasses should not
+        rely on data remaining in _done_chunks forever, as it may be
+        reaped if the client is parsing data as it comes in.
+        """
+        raise NotImplementedError
+
+    def _close(self):
+        """Default implementation of close.
+
+        The default implementation assumes that the reader will mark
+        the response as finished on the _finished attribute once the
+        entire response body has been read. In the event that this is
+        not true, the subclass should override the implementation of
+        close (for example, close-is-end responses have to set
+        self._finished in the close handler.)
+        """
+        if not self._finished:
+            raise HTTPRemoteClosedError(
+                'server appears to have closed the socket mid-response')
+
+
+class AbstractSimpleReader(AbstractReader):
+    """Abstract base class for simple readers that require no response decoding.
+
+    Examples of such responses are Connection: Close (close-is-end)
+    and responses that specify a content length.
+    """
+    def _load(self, data):
+        if data:
+            assert not self._finished, (
+                'tried to add data (%r) to a closed reader!' % data)
+        logger.debug('%s read an additional %d data', self.name, len(data))
+        self._done_chunks.append(data)
+
+
+class CloseIsEndReader(AbstractSimpleReader):
+    """Reader for responses that specify Connection: Close for length."""
+    name = 'close-is-end'
+
+    def _close(self):
+        logger.info('Marking close-is-end reader as closed.')
+        self._finished = True
+
+
+class ContentLengthReader(AbstractSimpleReader):
+    """Reader for responses that specify an exact content length."""
+    name = 'content-length'
+
+    def __init__(self, amount):
+        AbstractReader.__init__(self)
+        self._amount = amount
+        if amount == 0:
+            self._finished = True
+        self._amount_seen = 0
+
+    def _load(self, data):
+        AbstractSimpleReader._load(self, data)
+        self._amount_seen += len(data)
+        if self._amount_seen >= self._amount:
+            self._finished = True
+            logger.debug('content-length read complete')
+
+
+class ChunkedReader(AbstractReader):
+    """Reader for chunked transfer encoding responses."""
+    def __init__(self, eol):
+        AbstractReader.__init__(self)
+        self._eol = eol
+        self._leftover_skip_amt = 0
+        self._leftover_data = ''
+
+    def _load(self, data):
+        assert not self._finished, 'tried to add data to a closed reader!'
+        logger.debug('chunked read an additional %d data', len(data))
+        position = 0
+        if self._leftover_data:
+            logger.debug('chunked reader trying to finish block from leftover data')
+            # TODO: avoid this string concatenation if possible
+            data = self._leftover_data + data
+            position = self._leftover_skip_amt
+            self._leftover_data = ''
+            self._leftover_skip_amt = 0
+        datalen = len(data)
+        while position < datalen:
+            split = data.find(self._eol, position)
+            if split == -1:
+                self._leftover_data = data
+                self._leftover_skip_amt = position
+                return
+            amt = int(data[position:split], base=16)
+            block_start = split + len(self._eol)
+            # If the whole data chunk plus the eol trailer hasn't
+            # loaded, we'll wait for the next load.
+            if block_start + amt + len(self._eol) > len(data):
+                self._leftover_data = data
+                self._leftover_skip_amt = position
+                return
+            if amt == 0:
+                self._finished = True
+                logger.debug('closing chunked reader due to chunk of length 0')
+                return
+            self._done_chunks.append(data[block_start:block_start + amt])
+            position = block_start + amt + len(self._eol)
+# no-check-code
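
A short usage sketch of the interface defined above, exercising the readers the way HTTPResponse now does: feed bytes in with _load(), poll done() and available_data, drain with read(). The payloads are made up; this assumes a Python 2 tree where mercurial.httpclient is importable:

from mercurial.httpclient import _readers

# Content-Length body: finished once the declared number of bytes arrives.
r = _readers.ContentLengthReader(10)
r._load('12345')
assert r.available_data == 5 and not r.done()
r._load('67890')
assert r.done() and r.read(10) == '1234567890'

# Chunked body: each block is '<hex length><eol><data><eol>'; a
# zero-length chunk marks the end of the body.
c = _readers.ChunkedReader('\r\n')
c._load('5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n')
assert c.done() and c.read(11) == 'hello world'
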
--- a/mercurial/httpclient/tests/__init__.py	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-# no-check-code
--- a/mercurial/httpclient/tests/simple_http_test.py	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,401 +0,0 @@
-# Copyright 2010, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import socket
-import unittest
-
-import http
-
-# relative import to ease embedding the library
-import util
-
-
-class SimpleHttpTest(util.HttpTestBase, unittest.TestCase):
-
-    def _run_simple_test(self, host, server_data, expected_req, expected_data):
-        con = http.HTTPConnection(host)
-        con._connect()
-        con.sock.data = server_data
-        con.request('GET', '/')
-
-        self.assertStringEqual(expected_req, con.sock.sent)
-        self.assertEqual(expected_data, con.getresponse().read())
-
-    def test_broken_data_obj(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        self.assertRaises(http.BadRequestData,
-                          con.request, 'POST', '/', body=1)
-
-    def test_no_keepalive_http_1_0(self):
-        expected_request_one = """GET /remote/.hg/requires HTTP/1.1
-Host: localhost:9999
-range: bytes=0-
-accept-encoding: identity
-accept: application/mercurial-0.1
-user-agent: mercurial/proto-1.0
-
-""".replace('\n', '\r\n')
-        expected_response_headers = """HTTP/1.0 200 OK
-Server: SimpleHTTP/0.6 Python/2.6.1
-Date: Sun, 01 May 2011 13:56:57 GMT
-Content-type: application/octet-stream
-Content-Length: 33
-Last-Modified: Sun, 01 May 2011 13:56:56 GMT
-
-""".replace('\n', '\r\n')
-        expected_response_body = """revlogv1
-store
-fncache
-dotencode
-"""
-        con = http.HTTPConnection('localhost:9999')
-        con._connect()
-        con.sock.data = [expected_response_headers, expected_response_body]
-        con.request('GET', '/remote/.hg/requires',
-                    headers={'accept-encoding': 'identity',
-                             'range': 'bytes=0-',
-                             'accept': 'application/mercurial-0.1',
-                             'user-agent': 'mercurial/proto-1.0',
-                             })
-        self.assertStringEqual(expected_request_one, con.sock.sent)
-        self.assertEqual(con.sock.closed, False)
-        self.assertNotEqual(con.sock.data, [])
-        self.assert_(con.busy())
-        resp = con.getresponse()
-        self.assertStringEqual(resp.read(), expected_response_body)
-        self.failIf(con.busy())
-        self.assertEqual(con.sock, None)
-        self.assertEqual(resp.sock.data, [])
-        self.assert_(resp.sock.closed)
-
-    def test_multiline_header(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        con.sock.data = ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'Multiline: Value\r\n',
-                         '  Rest of value\r\n',
-                         'Content-Length: 10\r\n',
-                         '\r\n'
-                         '1234567890'
-                         ]
-        con.request('GET', '/')
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-        resp = con.getresponse()
-        self.assertEqual('1234567890', resp.read())
-        self.assertEqual(['Value\n Rest of value'],
-                         resp.headers.getheaders('multiline'))
-        # Socket should not be closed
-        self.assertEqual(resp.sock.closed, False)
-        self.assertEqual(con.sock.closed, False)
-
-    def testSimpleRequest(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        con.sock.data = ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'MultiHeader: Value\r\n'
-                         'MultiHeader: Other Value\r\n'
-                         'MultiHeader: One More!\r\n'
-                         'Content-Length: 10\r\n',
-                         '\r\n'
-                         '1234567890'
-                         ]
-        con.request('GET', '/')
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-        resp = con.getresponse()
-        self.assertEqual('1234567890', resp.read())
-        self.assertEqual(['Value', 'Other Value', 'One More!'],
-                         resp.headers.getheaders('multiheader'))
-        self.assertEqual(['BogusServer 1.0'],
-                         resp.headers.getheaders('server'))
-
-    def testHeaderlessResponse(self):
-        con = http.HTTPConnection('1.2.3.4', use_ssl=False)
-        con._connect()
-        con.sock.data = ['HTTP/1.1 200 OK\r\n',
-                         '\r\n'
-                         '1234567890'
-                         ]
-        con.request('GET', '/')
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-        resp = con.getresponse()
-        self.assertEqual('1234567890', resp.read())
-        self.assertEqual({}, dict(resp.headers))
-        self.assertEqual(resp.status, 200)
-
-    def testReadline(self):
-        con = http.HTTPConnection('1.2.3.4')
-        con._connect()
-        # make sure it trickles in one byte at a time
-        # so that we touch all the cases in readline
-        con.sock.data = list(''.join(
-            ['HTTP/1.1 200 OK\r\n',
-             'Server: BogusServer 1.0\r\n',
-             'Connection: Close\r\n',
-             '\r\n'
-             '1\n2\nabcdefg\n4\n5']))
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        con.request('GET', '/')
-        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-        r = con.getresponse()
-        for expected in ['1\n', '2\n', 'abcdefg\n', '4\n', '5']:
-            actual = r.readline()
-            self.assertEqual(expected, actual,
-                             'Expected %r, got %r' % (expected, actual))
-
-    def testIPv6(self):
-        self._run_simple_test('[::1]:8221',
-                        ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'Content-Length: 10',
-                         '\r\n\r\n'
-                         '1234567890'],
-                        ('GET / HTTP/1.1\r\n'
-                         'Host: [::1]:8221\r\n'
-                         'accept-encoding: identity\r\n\r\n'),
-                        '1234567890')
-        self._run_simple_test('::2',
-                        ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'Content-Length: 10',
-                         '\r\n\r\n'
-                         '1234567890'],
-                        ('GET / HTTP/1.1\r\n'
-                         'Host: ::2\r\n'
-                         'accept-encoding: identity\r\n\r\n'),
-                        '1234567890')
-        self._run_simple_test('[::3]:443',
-                        ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'Content-Length: 10',
-                         '\r\n\r\n'
-                         '1234567890'],
-                        ('GET / HTTP/1.1\r\n'
-                         'Host: ::3\r\n'
-                         'accept-encoding: identity\r\n\r\n'),
-                        '1234567890')
-
-    def testEarlyContinueResponse(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.data = ['HTTP/1.1 403 Forbidden\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'Content-Length: 18',
-                         '\r\n\r\n'
-                         "You can't do that."]
-        expected_req = self.doPost(con, expect_body=False)
-        self.assertEqual(('1.2.3.4', 80), sock.sa)
-        self.assertStringEqual(expected_req, sock.sent)
-        self.assertEqual("You can't do that.", con.getresponse().read())
-        self.assertEqual(sock.closed, True)
-
-    def testDeniedAfterContinueTimeoutExpires(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.data = ['HTTP/1.1 403 Forbidden\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'Content-Length: 18\r\n',
-                     'Connection: close',
-                     '\r\n\r\n'
-                     "You can't do that."]
-        sock.read_wait_sentinel = 'Dear server, send response!'
-        sock.close_on_empty = True
-        # send enough data out that we'll chunk it into multiple
-        # blocks and the socket will close before we can send the
-        # whole request.
-        post_body = ('This is some POST data\n' * 1024 * 32 +
-                     'Dear server, send response!\n' +
-                     'This is some POST data\n' * 1024 * 32)
-        expected_req = self.doPost(con, expect_body=False,
-                                   body_to_send=post_body)
-        self.assertEqual(('1.2.3.4', 80), sock.sa)
-        self.assert_('POST data\n' in sock.sent)
-        self.assert_('Dear server, send response!\n' in sock.sent)
-        # We expect not all of our data was sent.
-        self.assertNotEqual(sock.sent, expected_req)
-        self.assertEqual("You can't do that.", con.getresponse().read())
-        self.assertEqual(sock.closed, True)
-
-    def testPostData(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.read_wait_sentinel = 'POST data'
-        sock.early_data = ['HTTP/1.1 100 Co', 'ntinue\r\n\r\n']
-        sock.data = ['HTTP/1.1 200 OK\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'Content-Length: 16',
-                     '\r\n\r\n',
-                     "You can do that."]
-        expected_req = self.doPost(con, expect_body=True)
-        self.assertEqual(('1.2.3.4', 80), sock.sa)
-        self.assertEqual(expected_req, sock.sent)
-        self.assertEqual("You can do that.", con.getresponse().read())
-        self.assertEqual(sock.closed, False)
-
-    def testServerWithoutContinue(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.read_wait_sentinel = 'POST data'
-        sock.data = ['HTTP/1.1 200 OK\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'Content-Length: 16',
-                     '\r\n\r\n',
-                     "You can do that."]
-        expected_req = self.doPost(con, expect_body=True)
-        self.assertEqual(('1.2.3.4', 80), sock.sa)
-        self.assertEqual(expected_req, sock.sent)
-        self.assertEqual("You can do that.", con.getresponse().read())
-        self.assertEqual(sock.closed, False)
-
-    def testServerWithSlowContinue(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.read_wait_sentinel = 'POST data'
-        sock.data = ['HTTP/1.1 100 ', 'Continue\r\n\r\n',
-                     'HTTP/1.1 200 OK\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'Content-Length: 16',
-                     '\r\n\r\n',
-                     "You can do that."]
-        expected_req = self.doPost(con, expect_body=True)
-        self.assertEqual(('1.2.3.4', 80), sock.sa)
-        self.assertEqual(expected_req, sock.sent)
-        resp = con.getresponse()
-        self.assertEqual("You can do that.", resp.read())
-        self.assertEqual(200, resp.status)
-        self.assertEqual(sock.closed, False)
-
-    def testSlowConnection(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        # simulate one byte arriving at a time, to check for various
-        # corner cases
-        con.sock.data = list('HTTP/1.1 200 OK\r\n'
-                             'Server: BogusServer 1.0\r\n'
-                             'Content-Length: 10'
-                             '\r\n\r\n'
-                             '1234567890')
-        con.request('GET', '/')
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-        self.assertEqual('1234567890', con.getresponse().read())
-
-    def testTimeout(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        con.sock.data = []
-        con.request('GET', '/')
-        self.assertRaises(http.HTTPTimeoutException,
-                          con.getresponse)
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-
-    def test_conn_keep_alive_but_server_close_anyway(self):
-        sockets = []
-        def closingsocket(*args, **kwargs):
-            s = util.MockSocket(*args, **kwargs)
-            sockets.append(s)
-            s.data = ['HTTP/1.1 200 OK\r\n',
-                      'Server: BogusServer 1.0\r\n',
-                      'Connection: Keep-Alive\r\n',
-                      'Content-Length: 16',
-                      '\r\n\r\n',
-                      'You can do that.']
-            s.close_on_empty = True
-            return s
-
-        socket.socket = closingsocket
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        con.request('GET', '/')
-        r1 = con.getresponse()
-        r1.read()
-        self.assertFalse(con.sock.closed)
-        self.assert_(con.sock.remote_closed)
-        con.request('GET', '/')
-        self.assertEqual(2, len(sockets))
-
-    def test_server_closes_before_end_of_body(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        s = con.sock
-        s.data = ['HTTP/1.1 200 OK\r\n',
-                  'Server: BogusServer 1.0\r\n',
-                  'Connection: Keep-Alive\r\n',
-                  'Content-Length: 16',
-                  '\r\n\r\n',
-                  'You can '] # Note: this is shorter than content-length
-        s.close_on_empty = True
-        con.request('GET', '/')
-        r1 = con.getresponse()
-        self.assertRaises(http.HTTPRemoteClosedError, r1.read)
-
-    def test_no_response_raises_response_not_ready(self):
-        con = http.HTTPConnection('foo')
-        self.assertRaises(http.httplib.ResponseNotReady, con.getresponse)
-# no-check-code
--- a/mercurial/httpclient/tests/test_bogus_responses.py	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,68 +0,0 @@
-# Copyright 2010, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""Tests against malformed responses.
-
-Server implementations that respond with only LF instead of CRLF have
-been observed. Checking against ones that use only CR is a hedge
-against that potential insanity.
-"""
-import unittest
-
-import http
-
-# relative import to ease embedding the library
-import util
-
-
-class SimpleHttpTest(util.HttpTestBase, unittest.TestCase):
-
-    def bogusEOL(self, eol):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        con.sock.data = ['HTTP/1.1 200 OK%s' % eol,
-                         'Server: BogusServer 1.0%s' % eol,
-                         'Content-Length: 10',
-                         eol * 2,
-                         '1234567890']
-        con.request('GET', '/')
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-        self.assertEqual('1234567890', con.getresponse().read())
-
-    def testOnlyLinefeed(self):
-        self.bogusEOL('\n')
-
-    def testOnlyCarriageReturn(self):
-        self.bogusEOL('\r')
-# no-check-code
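
The docstring above explains that these tests feed LF-only and CR-only responses. As an illustration of the tolerance being exercised (a sketch, not the library's actual parser), a header block can be split without assuming CRLF:

    import re

    def split_header_lines(blob):
        # accept any of the three EOL conventions the tests above use
        return [l for l in re.split(r'\r\n|\n|\r', blob) if l]

    assert split_header_lines('HTTP/1.1 200 OK\nServer: BogusServer 1.0\n') == \
        ['HTTP/1.1 200 OK', 'Server: BogusServer 1.0']
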
--- a/mercurial/httpclient/tests/test_chunked_transfer.py	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,153 +0,0 @@
-# Copyright 2010, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import cStringIO
-import unittest
-
-import http
-
-# relative import to ease embedding the library
-import util
-
-
-def chunkedblock(x, eol='\r\n'):
-    r"""Make a chunked transfer-encoding block.
-
-    >>> chunkedblock('hi')
-    '2\r\nhi\r\n'
-    >>> chunkedblock('hi' * 10)
-    '14\r\nhihihihihihihihihihi\r\n'
-    >>> chunkedblock('hi', eol='\n')
-    '2\nhi\n'
-    """
-    return ''.join((hex(len(x))[2:], eol, x, eol))
-
-
-class ChunkedTransferTest(util.HttpTestBase, unittest.TestCase):
-    def testChunkedUpload(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.read_wait_sentinel = '0\r\n\r\n'
-        sock.data = ['HTTP/1.1 200 OK\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'Content-Length: 6',
-                     '\r\n\r\n',
-                     "Thanks"]
-
-        zz = 'zz\n'
-        con.request('POST', '/', body=cStringIO.StringIO(
-            (zz * (0x8010 / 3)) + 'end-of-body'))
-        expected_req = ('POST / HTTP/1.1\r\n'
-                        'transfer-encoding: chunked\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-        expected_req += chunkedblock('zz\n' * (0x8000 / 3) + 'zz')
-        expected_req += chunkedblock(
-            '\n' + 'zz\n' * ((0x1b - len('end-of-body')) / 3) + 'end-of-body')
-        expected_req += '0\r\n\r\n'
-        self.assertEqual(('1.2.3.4', 80), sock.sa)
-        self.assertStringEqual(expected_req, sock.sent)
-        self.assertEqual("Thanks", con.getresponse().read())
-        self.assertEqual(sock.closed, False)
-
-    def testChunkedDownload(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.data = ['HTTP/1.1 200 OK\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'transfer-encoding: chunked',
-                     '\r\n\r\n',
-                     chunkedblock('hi '),
-                     chunkedblock('there'),
-                     chunkedblock(''),
-                     ]
-        con.request('GET', '/')
-        self.assertStringEqual('hi there', con.getresponse().read())
-
-    def testChunkedDownloadBadEOL(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.data = ['HTTP/1.1 200 OK\n',
-                     'Server: BogusServer 1.0\n',
-                     'transfer-encoding: chunked',
-                     '\n\n',
-                     chunkedblock('hi ', eol='\n'),
-                     chunkedblock('there', eol='\n'),
-                     chunkedblock('', eol='\n'),
-                     ]
-        con.request('GET', '/')
-        self.assertStringEqual('hi there', con.getresponse().read())
-
-    def testChunkedDownloadPartialChunkBadEOL(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.data = ['HTTP/1.1 200 OK\n',
-                     'Server: BogusServer 1.0\n',
-                     'transfer-encoding: chunked',
-                     '\n\n',
-                     chunkedblock('hi ', eol='\n'),
-                     ] + list(chunkedblock('there\n' * 5, eol='\n')) + [
-                         chunkedblock('', eol='\n')]
-        con.request('GET', '/')
-        self.assertStringEqual('hi there\nthere\nthere\nthere\nthere\n',
-                               con.getresponse().read())
-
-    def testChunkedDownloadPartialChunk(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        sock.data = ['HTTP/1.1 200 OK\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'transfer-encoding: chunked',
-                     '\r\n\r\n',
-                     chunkedblock('hi '),
-                     ] + list(chunkedblock('there\n' * 5)) + [chunkedblock('')]
-        con.request('GET', '/')
-        self.assertStringEqual('hi there\nthere\nthere\nthere\nthere\n',
-                               con.getresponse().read())
-
-    def testChunkedDownloadEarlyHangup(self):
-        con = http.HTTPConnection('1.2.3.4:80')
-        con._connect()
-        sock = con.sock
-        broken = chunkedblock('hi'*20)[:-1]
-        sock.data = ['HTTP/1.1 200 OK\r\n',
-                     'Server: BogusServer 1.0\r\n',
-                     'transfer-encoding: chunked',
-                     '\r\n\r\n',
-                     broken,
-                     ]
-        sock.close_on_empty = True
-        con.request('GET', '/')
-        resp = con.getresponse()
-        self.assertRaises(http.HTTPRemoteClosedError, resp.read)
-# no-check-code
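
chunkedblock() above frames payloads the way a chunked HTTP/1.1 body is encoded: the chunk size in hex, an EOL, the payload, an EOL, with a zero-length chunk terminating the stream. A minimal decoding sketch of that framing, for illustration only (this is not the library's reader):

    def dechunk(data, eol='\r\n'):
        body, pos = [], 0
        while True:
            end = data.index(eol, pos)
            size = int(data[pos:end], 16)
            if size == 0:
                return ''.join(body)
            start = end + len(eol)
            body.append(data[start:start + size])
            pos = start + size + len(eol)

    assert dechunk('2\r\nhi\r\n' + '0\r\n\r\n') == 'hi'
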
--- a/mercurial/httpclient/tests/test_proxy_support.py	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,135 +0,0 @@
-# Copyright 2010, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import unittest
-import socket
-
-import http
-
-# relative import to ease embedding the library
-import util
-
-
-def make_preloaded_socket(data):
-    """Make a socket pre-loaded with data so it can be read during connect.
-
-    Useful for https proxy tests because we have to read from the
-    socket during _connect rather than later on.
-    """
-    def s(*args, **kwargs):
-        sock = util.MockSocket(*args, **kwargs)
-        sock.early_data = data[:]
-        return sock
-    return s
-
-
-class ProxyHttpTest(util.HttpTestBase, unittest.TestCase):
-
-    def _run_simple_test(self, host, server_data, expected_req, expected_data):
-        con = http.HTTPConnection(host)
-        con._connect()
-        con.sock.data = server_data
-        con.request('GET', '/')
-
-        self.assertEqual(expected_req, con.sock.sent)
-        self.assertEqual(expected_data, con.getresponse().read())
-
-    def testSimpleRequest(self):
-        con = http.HTTPConnection('1.2.3.4:80',
-                                  proxy_hostport=('magicproxy', 4242))
-        con._connect()
-        con.sock.data = ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'MultiHeader: Value\r\n'
-                         'MultiHeader: Other Value\r\n'
-                         'MultiHeader: One More!\r\n'
-                         'Content-Length: 10\r\n',
-                         '\r\n'
-                         '1234567890'
-                         ]
-        con.request('GET', '/')
-
-        expected_req = ('GET http://1.2.3.4/ HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('127.0.0.42', 4242), con.sock.sa)
-        self.assertStringEqual(expected_req, con.sock.sent)
-        resp = con.getresponse()
-        self.assertEqual('1234567890', resp.read())
-        self.assertEqual(['Value', 'Other Value', 'One More!'],
-                         resp.headers.getheaders('multiheader'))
-        self.assertEqual(['BogusServer 1.0'],
-                         resp.headers.getheaders('server'))
-
-    def testSSLRequest(self):
-        con = http.HTTPConnection('1.2.3.4:443',
-                                  proxy_hostport=('magicproxy', 4242))
-        socket.socket = make_preloaded_socket(
-            ['HTTP/1.1 200 OK\r\n',
-             'Server: BogusServer 1.0\r\n',
-             'Content-Length: 10\r\n',
-             '\r\n'
-             '1234567890'])
-        con._connect()
-        con.sock.data = ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'Content-Length: 10\r\n',
-                         '\r\n'
-                         '1234567890'
-                         ]
-        connect_sent = con.sock.sent
-        con.sock.sent = ''
-        con.request('GET', '/')
-
-        expected_connect = ('CONNECT 1.2.3.4:443 HTTP/1.0\r\n'
-                            'Host: 1.2.3.4\r\n'
-                            'accept-encoding: identity\r\n'
-                            '\r\n')
-        expected_request = ('GET / HTTP/1.1\r\n'
-                            'Host: 1.2.3.4\r\n'
-                            'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('127.0.0.42', 4242), con.sock.sa)
-        self.assertStringEqual(expected_connect, connect_sent)
-        self.assertStringEqual(expected_request, con.sock.sent)
-        resp = con.getresponse()
-        self.assertEqual(resp.status, 200)
-        self.assertEqual('1234567890', resp.read())
-        self.assertEqual(['BogusServer 1.0'],
-                         resp.headers.getheaders('server'))
-
-    def testSSLProxyFailure(self):
-        con = http.HTTPConnection('1.2.3.4:443',
-                                  proxy_hostport=('magicproxy', 4242))
-        socket.socket = make_preloaded_socket(
-            ['HTTP/1.1 407 Proxy Authentication Required\r\n\r\n'])
-        self.assertRaises(http.HTTPProxyConnectFailedException, con._connect)
-        self.assertRaises(http.HTTPProxyConnectFailedException,
-                          con.request, 'GET', '/')
-# no-check-code
--- a/mercurial/httpclient/tests/test_ssl.py	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,93 +0,0 @@
-# Copyright 2011, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import unittest
-
-import http
-
-# relative import to ease embedding the library
-import util
-
-
-
-class HttpSslTest(util.HttpTestBase, unittest.TestCase):
-    def testSslRereadRequired(self):
-        con = http.HTTPConnection('1.2.3.4:443')
-        con._connect()
-        # extend the list instead of assign because of how
-        # MockSSLSocket works.
-        con.sock.data = ['HTTP/1.1 200 OK\r\n',
-                         'Server: BogusServer 1.0\r\n',
-                         'MultiHeader: Value\r\n'
-                         'MultiHeader: Other Value\r\n'
-                         'MultiHeader: One More!\r\n'
-                         'Content-Length: 10\r\n',
-                         '\r\n'
-                         '1234567890'
-                         ]
-        con.request('GET', '/')
-
-        expected_req = ('GET / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'accept-encoding: identity\r\n\r\n')
-
-        self.assertEqual(('1.2.3.4', 443), con.sock.sa)
-        self.assertEqual(expected_req, con.sock.sent)
-        resp = con.getresponse()
-        self.assertEqual('1234567890', resp.read())
-        self.assertEqual(['Value', 'Other Value', 'One More!'],
-                         resp.headers.getheaders('multiheader'))
-        self.assertEqual(['BogusServer 1.0'],
-                         resp.headers.getheaders('server'))
-
-    def testSslRereadInEarlyResponse(self):
-        con = http.HTTPConnection('1.2.3.4:443')
-        con._connect()
-        con.sock.early_data = ['HTTP/1.1 200 OK\r\n',
-                               'Server: BogusServer 1.0\r\n',
-                               'MultiHeader: Value\r\n'
-                               'MultiHeader: Other Value\r\n'
-                               'MultiHeader: One More!\r\n'
-                               'Content-Length: 10\r\n',
-                               '\r\n'
-                               '1234567890'
-                               ]
-
-        expected_req = self.doPost(con, False)
-        self.assertEqual(None, con.sock,
-                         'Connection should have disowned socket')
-
-        resp = con.getresponse()
-        self.assertEqual(('1.2.3.4', 443), resp.sock.sa)
-        self.assertEqual(expected_req, resp.sock.sent)
-        self.assertEqual('1234567890', resp.read())
-        self.assertEqual(['Value', 'Other Value', 'One More!'],
-                         resp.headers.getheaders('multiheader'))
-        self.assertEqual(['BogusServer 1.0'],
-                         resp.headers.getheaders('server'))
-# no-check-code
--- a/mercurial/httpclient/tests/util.py	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,195 +0,0 @@
-# Copyright 2010, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import difflib
-import socket
-
-import http
-
-
-class MockSocket(object):
-    """Mock non-blocking socket object.
-
-    This is ONLY capable of mocking a nonblocking socket.
-
-    Attributes:
-      early_data: data to always send as soon as end of headers is seen
-      data: a list of strings to return on recv(), with the
-            assumption that the socket would block between each
-            string in the list.
-      read_wait_sentinel: data that must be written to the socket before
-                          beginning the response.
-      close_on_empty: If true, close the socket when it runs out of data
-                      for the client.
-    """
-    def __init__(self, af, socktype, proto):
-        self.af = af
-        self.socktype = socktype
-        self.proto = proto
-
-        self.early_data = []
-        self.data = []
-        self.remote_closed = self.closed = False
-        self.close_on_empty = False
-        self.sent = ''
-        self.read_wait_sentinel = http._END_HEADERS
-
-    def close(self):
-        self.closed = True
-
-    def connect(self, sa):
-        self.sa = sa
-
-    def setblocking(self, timeout):
-        assert timeout == 0
-
-    def recv(self, amt=-1):
-        if self.early_data:
-            datalist = self.early_data
-        elif not self.data:
-            return ''
-        else:
-            datalist = self.data
-        if amt == -1:
-            return datalist.pop(0)
-        data = datalist.pop(0)
-        if len(data) > amt:
-            datalist.insert(0, data[amt:])
-        if not self.data and not self.early_data and self.close_on_empty:
-            self.remote_closed = True
-        return data[:amt]
-
-    @property
-    def ready_for_read(self):
-        return ((self.early_data and http._END_HEADERS in self.sent)
-                or (self.read_wait_sentinel in self.sent and self.data)
-                or self.closed or self.remote_closed)
-
-    def send(self, data):
-        # this is a horrible mock, but nothing needs us to raise the
-        # correct exception yet
-        assert not self.closed, 'attempted to write to a closed socket'
-        assert not self.remote_closed, ('attempted to write to a'
-                                        ' socket closed by the server')
-        if len(data) > 8192:
-            data = data[:8192]
-        self.sent += data
-        return len(data)
-
-
-def mockselect(r, w, x, timeout=0):
-    """Simple mock for select()
-    """
-    readable = filter(lambda s: s.ready_for_read, r)
-    return readable, w[:], []
-
-
-class MockSSLSocket(object):
-    def __init__(self, sock):
-        self._sock = sock
-        self._fail_recv = True
-
-    def __getattr__(self, key):
-        return getattr(self._sock, key)
-
-    def __setattr__(self, key, value):
-        if key not in ('_sock', '_fail_recv'):
-            return setattr(self._sock, key, value)
-        return object.__setattr__(self, key, value)
-
-    def recv(self, amt=-1):
-        try:
-            if self._fail_recv:
-                raise socket.sslerror(socket.SSL_ERROR_WANT_READ)
-            return self._sock.recv(amt=amt)
-        finally:
-            self._fail_recv = not self._fail_recv
-
-
-def mocksslwrap(sock, keyfile=None, certfile=None,
-                server_side=False, cert_reqs=http.socketutil.CERT_NONE,
-                ssl_version=None, ca_certs=None,
-                do_handshake_on_connect=True,
-                suppress_ragged_eofs=True):
-    return MockSSLSocket(sock)
-
-
-def mockgetaddrinfo(host, port, unused, streamtype):
-    assert unused == 0
-    assert streamtype == socket.SOCK_STREAM
-    if host.count('.') != 3:
-        host = '127.0.0.42'
-    return [(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, '',
-             (host, port))]
-
-
-class HttpTestBase(object):
-    def setUp(self):
-        self.orig_socket = socket.socket
-        socket.socket = MockSocket
-
-        self.orig_getaddrinfo = socket.getaddrinfo
-        socket.getaddrinfo = mockgetaddrinfo
-
-        self.orig_select = http.select.select
-        http.select.select = mockselect
-
-        self.orig_sslwrap = http.socketutil.wrap_socket
-        http.socketutil.wrap_socket = mocksslwrap
-
-    def tearDown(self):
-        socket.socket = self.orig_socket
-        http.select.select = self.orig_select
-        http.socketutil.wrap_socket = self.orig_sslwrap
-        socket.getaddrinfo = self.orig_getaddrinfo
-
-    def assertStringEqual(self, l, r):
-        try:
-            self.assertEqual(l, r, ('failed string equality check, '
-                                    'see stdout for details'))
-        except:
-            add_nl = lambda li: map(lambda x: x + '\n', li)
-            print 'failed expectation:'
-            print ''.join(difflib.unified_diff(
-                add_nl(l.splitlines()), add_nl(r.splitlines()),
-                fromfile='expected', tofile='got'))
-            raise
-
-    def doPost(self, con, expect_body, body_to_send='This is some POST data'):
-        con.request('POST', '/', body=body_to_send,
-                    expect_continue=True)
-        expected_req = ('POST / HTTP/1.1\r\n'
-                        'Host: 1.2.3.4\r\n'
-                        'content-length: %d\r\n'
-                        'Expect: 100-Continue\r\n'
-                        'accept-encoding: identity\r\n\r\n' %
-                        len(body_to_send))
-        if expect_body:
-            expected_req += body_to_send
-        return expected_req
-# no-check-code
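
HttpTestBase above works by swapping the real socket, select and ssl entry points for mocks in setUp() and restoring them in tearDown(). A stripped-down sketch of that monkey-patching pattern, using invented names rather than the suite's classes:

    import socket

    class TinyFakeSocket(object):
        def __init__(self, *args, **kwargs):
            self.sent = ''
        def connect(self, sa):
            self.sa = sa
        def send(self, data):
            self.sent += data
            return len(data)

    orig_socket = socket.socket
    socket.socket = TinyFakeSocket          # what setUp() does
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(('1.2.3.4', 80))
        s.send('GET / HTTP/1.1\r\n\r\n')
        assert s.sent.startswith('GET /')
    finally:
        socket.socket = orig_socket         # what tearDown() does
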
--- a/mercurial/keepalive.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/keepalive.py	Mon Jun 04 17:57:57 2012 -0500
@@ -136,7 +136,7 @@
     def add(self, host, connection, ready):
         self._lock.acquire()
         try:
-            if not host in self._hostmap:
+            if host not in self._hostmap:
                 self._hostmap[host] = []
             self._hostmap[host].append(connection)
             self._connmap[connection] = host
@@ -290,7 +290,7 @@
             # worked.  We'll check the version below, too.
         except (socket.error, httplib.HTTPException):
             r = None
-        except:
+        except: # re-raises
             # adding this block just in case we've missed
             # something; we will still raise the exception, but
             # let's try to close the connection and remove it
@@ -534,7 +534,7 @@
         if self.auto_open:
             self.connect()
         else:
-            raise httplib.NotConnected()
+            raise httplib.NotConnected
 
     # send the data to the server. if we get a broken pipe, then close
     # the socket. we want to reconnect when somebody tries to send again.
@@ -758,7 +758,7 @@
     try:
         N = int(sys.argv[1])
         url = sys.argv[2]
-    except:
+    except (IndexError, ValueError):
         print "%s <integer> <url>" % sys.argv[0]
     else:
         test(url, N)
--- a/mercurial/localrepo.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/localrepo.py	Mon Jun 04 17:57:57 2012 -0500
@@ -41,7 +41,6 @@
         self.wopener = scmutil.opener(self.root)
         self.baseui = baseui
         self.ui = baseui.copy()
-        self._dirtyphases = False
         # A list of callbacks to shape the phase if no data were found.
         # Callbacks are in the form: func(repo, roots) --> processed root.
         # This list is to be filled by extensions during repo setup
@@ -181,24 +180,17 @@
     def _writebookmarks(self, marks):
       bookmarks.write(self)
 
-    @storecache('phaseroots')
-    def _phaseroots(self):
-        self._dirtyphases = False
-        phaseroots = phases.readroots(self)
-        phases.filterunknown(self, phaseroots)
-        return phaseroots
+    def bookmarkheads(self, bookmark):
+        name = bookmark.split('@', 1)[0]
+        heads = []
+        for mark, n in self._bookmarks.iteritems():
+            if mark.split('@', 1)[0] == name:
+                heads.append(n)
+        return heads
 
-    @propertycache
-    def _phaserev(self):
-        cache = [phases.public] * len(self)
-        for phase in phases.trackedphases:
-            roots = map(self.changelog.rev, self._phaseroots[phase])
-            if roots:
-                for rev in roots:
-                    cache[rev] = phase
-                for rev in self.changelog.descendants(*roots):
-                    cache[rev] = phase
-        return cache
+    @storecache('phaseroots')
+    def _phasecache(self):
+        return phases.phasecache(self, self._phasedefaults)
 
     @storecache('00changelog.i')
     def changelog(self):
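
The new bookmarkheads() collects the node of every bookmark whose name before '@' matches the requested bookmark. A small sketch of that behaviour with an invented bookmark map:

    marks = {'feature': 'node1', 'feature@default': 'node2', 'other': 'node3'}
    name = 'feature@default'.split('@', 1)[0]
    heads = [n for mark, n in sorted(marks.items())
             if mark.split('@', 1)[0] == name]
    assert heads == ['node1', 'node2']
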
@@ -296,7 +288,8 @@
                 fp.write('\n')
             for name in names:
                 m = munge and munge(name) or name
-                if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
+                if (self._tagscache.tagtypes and
+                    name in self._tagscache.tagtypes):
                     old = self.tags().get(name, nullid)
                     fp.write('%s %s\n' % (hex(old), m))
                 fp.write('%s %s\n' % (hex(node), m))
@@ -376,7 +369,8 @@
 
     @propertycache
     def _tagscache(self):
-        '''Returns a tagscache object that contains various tags related caches.'''
+        '''Returns a tagscache object that contains various tags related
+        caches.'''
 
         # This simplifies its cache management by having one decorated
         # function (this one) and the rest simply fetch things from it.
@@ -505,7 +499,7 @@
             partial = self._branchcache
 
         self._branchtags(partial, lrev)
-        # this private cache holds all heads (not just tips)
+        # this private cache holds all heads (not just the branch tips)
         self._branchcache = partial
 
     def branchmap(self):
@@ -513,17 +507,27 @@
         self.updatebranchcache()
         return self._branchcache
 
+    def _branchtip(self, heads):
+        '''return the tipmost branch head in heads'''
+        tip = heads[-1]
+        for h in reversed(heads):
+            if not self[h].closesbranch():
+                tip = h
+                break
+        return tip
+
+    def branchtip(self, branch):
+        '''return the tip node for a given branch'''
+        if branch not in self.branchmap():
+            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
+        return self._branchtip(self.branchmap()[branch])
+
     def branchtags(self):
         '''return a dict where branch names map to the tipmost head of
         the branch; open heads come before closed'''
         bt = {}
         for bn, heads in self.branchmap().iteritems():
-            tip = heads[-1]
-            for h in reversed(heads):
-                if 'close' not in self.changelog.read(h)[5]:
-                    tip = h
-                    break
-            bt[bn] = tip
+            bt[bn] = self._branchtip(heads)
         return bt
 
     def _readbranchcache(self):
@@ -585,8 +589,8 @@
                 latest = newnodes.pop()
                 if latest not in bheads:
                     continue
-                minbhrev = self[bheads[0]].node()
-                reachable = self.changelog.reachable(latest, minbhrev)
+                minbhnode = self[bheads[0]].node()
+                reachable = self.changelog.reachable(latest, minbhnode)
                 reachable.remove(latest)
                 if reachable:
                     bheads = [b for b in bheads if b not in reachable]
@@ -605,10 +609,11 @@
 
     def known(self, nodes):
         nm = self.changelog.nodemap
+        pc = self._phasecache
         result = []
         for n in nodes:
             r = nm.get(n)
-            resp = not (r is None or self._phaserev[r] >= phases.secret)
+            resp = not (r is None or pc.phase(self, r) >= phases.secret)
             result.append(resp)
         return result
 
@@ -833,9 +838,6 @@
                         self.sjoin('phaseroots'))
         self.invalidate()
 
-        # Discard all cache entries to force reloading everything.
-        self._filecache.clear()
-
         parentgone = (parents[0] not in self.changelog.nodemap or
                       parents[1] not in self.changelog.nodemap)
         if parentgone:
@@ -867,7 +869,6 @@
                 pass
 
         delcache('_tagscache')
-        delcache('_phaserev')
 
         self._branchcache = None # in UTF-8
         self._branchcachetip = None
@@ -901,6 +902,9 @@
                 pass
         self.invalidatecaches()
 
+        # Discard all cache entries to force reloading everything.
+        self._filecache.clear()
+
     def _lock(self, lockname, wait, releasefn, acquirefn, desc):
         try:
             l = lock.lock(lockname, 0, releasefn, desc=desc)
@@ -937,9 +941,8 @@
 
         def unlock():
             self.store.write()
-            if self._dirtyphases:
-                phases.writeroots(self)
-                self._dirtyphases = False
+            if '_phasecache' in vars(self):
+                self._phasecache.write()
             for k, ce in self._filecache.items():
                 if k == 'dirstate':
                     continue
@@ -1195,16 +1198,17 @@
             p1, p2 = self.dirstate.parents()
             hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
             try:
-                self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
+                self.hook("precommit", throw=True, parent1=hookp1,
+                          parent2=hookp2)
                 ret = self.commitctx(cctx, True)
-            except:
+            except: # re-raises
                 if edited:
                     self.ui.write(
                         _('note: commit message saved in %s\n') % msgfn)
                 raise
 
             # update bookmarks, dirstate and mergestate
-            bookmarks.update(self, p1, ret)
+            bookmarks.update(self, [p1, p2], ret)
             for f in changes[0] + changes[1]:
                 self.dirstate.normal(f)
             for f in changes[2]:
@@ -1330,7 +1334,8 @@
     def status(self, node1='.', node2=None, match=None,
                ignored=False, clean=False, unknown=False,
                listsubrepos=False):
-        """return status of files between two nodes or node and working directory
+        """return status of files between two nodes or node and working
+        directory.
 
         If node1 is None, use the first dirstate parent instead.
         If node2 is None, compare node1 with working directory.
@@ -1338,6 +1343,8 @@
 
         def mfmatches(ctx):
             mf = ctx.manifest().copy()
+            if match.always():
+                return mf
             for fn in mf.keys():
                 if not match(fn):
                     del mf[fn]
@@ -1423,10 +1430,11 @@
                 mf2 = mfmatches(ctx2)
 
             modified, added, clean = [], [], []
+            withflags = mf1.withflags() | mf2.withflags()
             for fn in mf2:
                 if fn in mf1:
                     if (fn not in deleted and
-                        (mf1.flags(fn) != mf2.flags(fn) or
+                        ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
                          (mf1[fn] != mf2[fn] and
                           (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
                         modified.append(fn)
@@ -1501,8 +1509,7 @@
             fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
             bheads = [h for h in bheads if h in fbheads]
         if not closed:
-            bheads = [h for h in bheads if
-                      ('close' not in self.changelog.read(h)[5])]
+            bheads = [h for h in bheads if not self[h].closesbranch()]
         return bheads
 
     def branches(self, nodes):
@@ -1672,7 +1679,8 @@
                         # http: return remote's addchangegroup() or 0 for error
                         ret = remote.unbundle(cg, remoteheads, 'push')
                     else:
-                        # we return an integer indicating remote head count change
+                        # we return an integer indicating remote head count
+                        # change
                         ret = remote.addchangegroup(cg, 'push', self.url())
 
                 if ret:
@@ -1698,7 +1706,7 @@
                     # * missingheads part of common (::commonheads)
                     common = set(outgoing.common)
                     cheads = [node for node in revs if node in common]
-                    # and 
+                    # and
                     # * commonheads parents on missing
                     revset = self.set('%ln and parents(roots(%ln))',
                                      outgoing.commonheads,
@@ -1783,7 +1791,7 @@
             bases = [nullid]
         csets, bases, heads = cl.nodesbetween(bases, heads)
         # We assume that all ancestors of bases are known
-        common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
+        common = set(cl.ancestors([cl.rev(n) for n in bases]))
         return self._changegroupsubset(common, csets, heads, source)
 
     def getlocalbundle(self, source, outgoing):
@@ -1904,7 +1912,8 @@
             for fname in sorted(changedfiles):
                 filerevlog = self.file(fname)
                 if not len(filerevlog):
-                    raise util.Abort(_("empty or missing revlog for %s") % fname)
+                    raise util.Abort(_("empty or missing revlog for %s")
+                                     % fname)
                 fstate[0] = fname
                 fstate[1] = fnodes.pop(fname, {})
 
@@ -2004,7 +2013,8 @@
             for fname in sorted(changedfiles):
                 filerevlog = self.file(fname)
                 if not len(filerevlog):
-                    raise util.Abort(_("empty or missing revlog for %s") % fname)
+                    raise util.Abort(_("empty or missing revlog for %s")
+                                     % fname)
                 fstate[0] = fname
                 nodelist = gennodelst(filerevlog)
                 if nodelist:
@@ -2149,7 +2159,7 @@
                 heads = cl.heads()
                 dh = len(heads) - len(oldheads)
                 for h in heads:
-                    if h not in oldheads and 'close' in self[h].extra():
+                    if h not in oldheads and self[h].closesbranch():
                         dh -= 1
             htext = ""
             if dh:
@@ -2236,6 +2246,8 @@
                     _('Unexpected response from remote server:'), l)
             self.ui.status(_('%d files to transfer, %s of data\n') %
                            (total_files, util.bytecount(total_bytes)))
+            handled_bytes = 0
+            self.ui.progress(_('clone'), 0, total=total_bytes)
             start = time.time()
             for i in xrange(total_files):
                 # XXX doesn't support '\n' or '\r' in filenames
@@ -2252,16 +2264,21 @@
                 # for backwards compat, name was partially encoded
                 ofp = self.sopener(store.decodedir(name), 'w')
                 for chunk in util.filechunkiter(fp, limit=size):
+                    handled_bytes += len(chunk)
+                    self.ui.progress(_('clone'), handled_bytes,
+                                     total=total_bytes)
                     ofp.write(chunk)
                 ofp.close()
             elapsed = time.time() - start
             if elapsed <= 0:
                 elapsed = 0.001
+            self.ui.progress(_('clone'), None)
             self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                            (util.bytecount(total_bytes), elapsed,
                             util.bytecount(total_bytes / elapsed)))
 
-            # new requirements = old non-format requirements + new format-related
+            # new requirements = old non-format requirements +
+            #                    new format-related
             # requirements from the streamed-in repository
             requirements.update(set(self.requirements) - self.supportedformats)
             self._applyrequirements(requirements)
--- a/mercurial/lsprof.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/lsprof.py	Mon Jun 04 17:57:57 2012 -0500
@@ -38,8 +38,8 @@
             d = d[:top]
         cols = "% 12s %12s %11.4f %11.4f   %s\n"
         hcols = "% 12s %12s %12s %12s %s\n"
-        file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
-                            "Inline(ms)", "module:lineno(function)"))
+        file.write(hcols % ("CallCount", "Recursive", "Total(s)",
+                            "Inline(s)", "module:lineno(function)"))
         count = 0
         for e in d:
             file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
--- a/mercurial/manifest.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/manifest.py	Mon Jun 04 17:57:57 2012 -0500
@@ -19,6 +19,8 @@
         self._flags = flags
     def flags(self, f):
         return self._flags.get(f, "")
+    def withflags(self):
+        return set(self._flags.keys())
     def set(self, f, flags):
         self._flags[f] = flags
     def copy(self):
@@ -124,8 +126,8 @@
                     addlist[start:end] = array.array('c', content)
                 else:
                     del addlist[start:end]
-            return "".join(struct.pack(">lll", start, end, len(content)) + content
-                           for start, end, content in x)
+            return "".join(struct.pack(">lll", start, end, len(content))
+                           + content for start, end, content in x)
 
         def checkforbidden(l):
             for f in l:
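
The new withflags() feeds the status() change in localrepo.py above: flag comparison is skipped for files that carry no flag (such as 'x' for executable) in either manifest. A self-contained sketch with plain dicts standing in for the two manifests:

    flags1 = {'run.sh': 'x'}
    flags2 = {'run.sh': 'x', 'lib': 'l'}
    withflags = set(flags1) | set(flags2)      # what withflags() provides
    changed = [fn for fn in ('run.sh', 'lib', 'README')
               if fn in withflags and flags1.get(fn, '') != flags2.get(fn, '')]
    assert changed == ['lib']
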
--- a/mercurial/match.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/match.py	Mon Jun 04 17:57:57 2012 -0500
@@ -121,6 +121,8 @@
         return self._files
     def anypats(self):
         return self._anypats
+    def always(self):
+        return False
 
 class exact(match):
     def __init__(self, root, cwd, files):
@@ -129,6 +131,8 @@
 class always(match):
     def __init__(self, root, cwd):
         match.__init__(self, root, cwd, [])
+    def always(self):
+        return True
 
 class narrowmatcher(match):
     """Adapt a matcher to work on a subdirectory only.
@@ -275,7 +279,7 @@
     try:
         pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
         if len(pat) > 20000:
-            raise OverflowError()
+            raise OverflowError
         return pat, re.compile(pat).match
     except OverflowError:
         # We're using a Python with a tiny regex engine and we
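
The new always() predicate lets callers such as mfmatches() in localrepo.status() above skip per-file matching when no patterns were given. A simplified sketch of that fast path, with a dict standing in for the manifest and an invented matcher class:

    class AlwaysDemo(object):
        def always(self):
            return True
        def __call__(self, fn):
            return True

    def filter_manifest(mf, match):
        mf = dict(mf)              # ctx.manifest().copy() in the real code
        if match.always():         # plain 'hg status', no patterns
            return mf              # skip the per-file loop entirely
        for fn in list(mf):
            if not match(fn):
                del mf[fn]
        return mf

    assert filter_manifest({'a': 1, 'b': 2}, AlwaysDemo()) == {'a': 1, 'b': 2}
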
--- a/mercurial/merge.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/merge.py	Mon Jun 04 17:57:57 2012 -0500
@@ -7,7 +7,7 @@
 
 from node import nullid, nullrev, hex, bin
 from i18n import _
-import scmutil, util, filemerge, copies, subrepo
+import error, scmutil, util, filemerge, copies, subrepo
 import errno, os, shutil
 
 class mergestate(object):
@@ -198,9 +198,11 @@
     elif pa == p2: # backwards
         pa = p1.p1()
     elif pa and repo.ui.configbool("merge", "followcopies", True):
-        copy, diverge = copies.mergecopies(repo, p1, p2, pa)
+        copy, diverge, renamedelete = copies.mergecopies(repo, p1, p2, pa)
         for of, fl in diverge.iteritems():
             act("divergent renames", "dr", of, fl)
+        for of, fl in renamedelete.iteritems():
+            act("rename and delete", "rd", of, fl)
 
     repo.ui.note(_("resolving manifests\n"))
     repo.ui.debug(" overwrite: %s, partial: %s\n"
@@ -363,7 +365,8 @@
             removed += 1
         elif m == "m": # merge
             if f == '.hgsubstate': # subrepo states need updating
-                subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
+                subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
+                                 overwrite)
                 continue
             f2, fd, flags, move = a[2:]
             repo.wopener.audit(fd)
@@ -408,6 +411,12 @@
                            "multiple times to:\n") % f)
             for nf in fl:
                 repo.ui.warn(" %s\n" % nf)
+        elif m == "rd": # rename and delete
+            fl = a[2]
+            repo.ui.warn(_("note: possible conflict - %s was deleted "
+                           "and renamed to:\n") % f)
+            for nf in fl:
+                repo.ui.warn(" %s\n" % nf)
         elif m == "e": # exec
             flags = a[2]
             repo.wopener.audit(f)
@@ -479,7 +488,8 @@
                 if f:
                     repo.dirstate.drop(f)
 
-def update(repo, node, branchmerge, force, partial, ancestor=None):
+def update(repo, node, branchmerge, force, partial, ancestor=None,
+           mergeancestor=False):
     """
     Perform a merge between the working directory and the given node
 
@@ -487,6 +497,10 @@
     branchmerge = whether to merge between branches
     force = whether to force branch merging or file overwriting
     partial = a function to filter file lists (dirstate not updated)
+    mergeancestor = if false, merging with an ancestor (fast-forward)
+      is only allowed between different named branches. This flag
+      is used by the rebase extension as a temporary fix and should be
+      avoided in general.
 
     The table below shows all the behaviors of the update command
     given the -c and -C or no options, whether the working directory
@@ -523,8 +537,8 @@
         if node is None:
             # tip of current branch
             try:
-                node = repo.branchtags()[wc.branch()]
-            except KeyError:
+                node = repo.branchtip(wc.branch())
+            except error.RepoLookupError:
                 if wc.branch() == "default": # no default branch!
                     node = repo.lookup("tip") # update to tip
                 else:
@@ -547,7 +561,7 @@
                 raise util.Abort(_("merging with a working directory ancestor"
                                    " has no effect"))
             elif pa == p1:
-                if p1.branch() == p2.branch():
+                if not mergeancestor and p1.branch() == p2.branch():
                     raise util.Abort(_("nothing to merge"),
                                      hint=_("use 'hg update' "
                                             "or check 'hg heads'"))
--- a/mercurial/minirst.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/minirst.py	Mon Jun 04 17:57:57 2012 -0500
@@ -658,7 +658,7 @@
     return lines
 
 def maketable(data, indent=0, header=False):
-    '''Generate an RST table for the given table data'''
+    '''Generate an RST table for the given table data as a list of lines'''
 
     widths = [max(encoding.colwidth(e) for e in c) for c in zip(*data)]
     indent = ' ' * indent
@@ -674,4 +674,4 @@
     if header and len(data) > 1:
         out.insert(2, div)
     out.append(div)
-    return ''.join(out)
+    return out
--- a/mercurial/mpatch.c	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/mpatch.c	Mon Jun 04 17:57:57 2012 -0500
@@ -20,6 +20,7 @@
  of the GNU General Public License, incorporated herein by reference.
 */
 
+#define PY_SSIZE_T_CLEAN
 #include <Python.h>
 #include <stdlib.h>
 #include <string.h>
@@ -38,7 +39,7 @@
 	struct frag *base, *head, *tail;
 };
 
-static struct flist *lalloc(int size)
+static struct flist *lalloc(Py_ssize_t size)
 {
 	struct flist *a = NULL;
 
@@ -68,7 +69,7 @@
 	}
 }
 
-static int lsize(struct flist *a)
+static Py_ssize_t lsize(struct flist *a)
 {
 	return a->tail - a->head;
 }
@@ -197,7 +198,7 @@
 }
 
 /* decode a binary patch into a hunk list */
-static struct flist *decode(const char *bin, int len)
+static struct flist *decode(const char *bin, Py_ssize_t len)
 {
 	struct flist *l;
 	struct frag *lt;
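
decode() above reads the same delta framing that manifest.py's addlistdelta() packs with struct.pack(">lll", start, end, len(content)) + content: each hunk is three big-endian 32-bit integers followed by the bytes that replace orig[start:end]. A Python sketch, for illustration only, of applying such a delta:

    import struct

    def apply_delta_sketch(orig, delta):
        out, last, pos = [], 0, 0
        while pos < len(delta):
            start, end, length = struct.unpack('>lll', delta[pos:pos + 12])
            out.append(orig[last:start])                  # unchanged text before the hunk
            out.append(delta[pos + 12:pos + 12 + length]) # replacement bytes
            last = end
            pos += 12 + length
        out.append(orig[last:])
        return ''.join(out)

    delta = struct.pack('>lll', 0, 5, 3) + 'bye'
    assert apply_delta_sketch('hello world', delta) == 'bye world'
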
@@ -236,9 +237,9 @@
 }
 
 /* calculate the size of resultant text */
-static int calcsize(int len, struct flist *l)
+static Py_ssize_t calcsize(Py_ssize_t len, struct flist *l)
 {
-	int outlen = 0, last = 0;
+	Py_ssize_t outlen = 0, last = 0;
 	struct frag *f = l->head;
 
 	while (f != l->tail) {
@@ -258,7 +259,7 @@
 	return outlen;
 }
 
-static int apply(char *buf, const char *orig, int len, struct flist *l)
+static int apply(char *buf, const char *orig, Py_ssize_t len, struct flist *l)
 {
 	struct frag *f = l->head;
 	int last = 0;
@@ -283,10 +284,9 @@
 }
 
 /* recursively generate a patch of all bins between start and end */
-static struct flist *fold(PyObject *bins, int start, int end)
+static struct flist *fold(PyObject *bins, Py_ssize_t start, Py_ssize_t end)
 {
-	int len;
-	Py_ssize_t blen;
+	Py_ssize_t len, blen;
 	const char *buffer;
 
 	if (start + 1 == end) {
@@ -312,8 +312,7 @@
 	struct flist *patch;
 	const char *in;
 	char *out;
-	int len, outlen;
-	Py_ssize_t inlen;
+	Py_ssize_t len, outlen, inlen;
 
 	if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
 		return NULL;
@@ -357,7 +356,7 @@
 patchedsize(PyObject *self, PyObject *args)
 {
 	long orig, start, end, len, outlen = 0, last = 0;
-	int patchlen;
+	Py_ssize_t patchlen;
 	char *bin, *binend, *data;
 
 	if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
--- a/mercurial/parsers.c	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/parsers.c	Mon Jun 04 17:57:57 2012 -0500
@@ -13,8 +13,10 @@
 
 #include "util.h"
 
-static int hexdigit(char c)
+static inline int hexdigit(const char *p, Py_ssize_t off)
 {
+	char c = p[off];
+
 	if (c >= '0' && c <= '9')
 		return c - '0';
 	if (c >= 'a' && c <= 'f')
@@ -32,8 +34,8 @@
 static PyObject *unhexlify(const char *str, int len)
 {
 	PyObject *ret;
-	const char *c;
 	char *d;
+	int i;
 
 	ret = PyBytes_FromStringAndSize(NULL, len / 2);
 
@@ -42,9 +44,9 @@
 
 	d = PyBytes_AsString(ret);
 
-	for (c = str; c < str + len;) {
-		int hi = hexdigit(*c++);
-		int lo = hexdigit(*c++);
+	for (i = 0; i < len;) {
+		int hi = hexdigit(str, i++);
+		int lo = hexdigit(str, i++);
 		*d++ = (hi << 4) | lo;
 	}
 
@@ -244,6 +246,7 @@
 	Py_ssize_t raw_length; /* original number of elements */
 	Py_ssize_t length;     /* current number of elements */
 	PyObject *added;       /* populated on demand */
+	PyObject *headrevs;    /* cache, invalidated on changes */
 	nodetree *nt;          /* base-16 trie */
 	int ntlength;          /* # nodes in use */
 	int ntcapacity;        /* # nodes allocated */
@@ -273,6 +276,9 @@
 static char *tuple_format = "kiiiiiis#";
 #endif
 
+/* A RevlogNG v1 index entry is 64 bytes long. */
+static const long v1_hdrsize = 64;
+
 /*
  * Return a pointer to the beginning of a RevlogNG record.
  */
@@ -289,7 +295,7 @@
 		return self->offsets[pos];
 	}
 
-	return PyString_AS_STRING(self->data) + pos * 64;
+	return PyString_AS_STRING(self->data) + pos * v1_hdrsize;
 }
 
 /*
@@ -385,7 +391,7 @@
 	Py_ssize_t length = index_length(self);
 	const char *data;
 
-	if (pos == length - 1)
+	if (pos == length - 1 || pos == INT_MAX)
 		return nullid;
 
 	if (pos >= length)
@@ -461,6 +467,7 @@
 	if (self->nt)
 		nt_insert(self, node, (int)offset);
 
+	Py_CLEAR(self->headrevs);
 	Py_RETURN_NONE;
 }
 
@@ -469,12 +476,8 @@
 	if (self->cache) {
 		Py_ssize_t i;
 
-		for (i = 0; i < self->raw_length; i++) {
-			if (self->cache[i]) {
-				Py_DECREF(self->cache[i]);
-				self->cache[i] = NULL;
-			}
-		}
+		for (i = 0; i < self->raw_length; i++)
+			Py_CLEAR(self->cache[i]);
 		free(self->cache);
 		self->cache = NULL;
 	}
@@ -486,6 +489,7 @@
 		free(self->nt);
 		self->nt = NULL;
 	}
+	Py_CLEAR(self->headrevs);
 }
 
 static PyObject *index_clearcaches(indexObject *self)
@@ -506,13 +510,13 @@
 		return NULL;
 
 #define istat(__n, __d) \
-	if (PyDict_SetItemString(obj, __d, PyInt_FromLong(self->__n)) == -1) \
+	if (PyDict_SetItemString(obj, __d, PyInt_FromSsize_t(self->__n)) == -1) \
 		goto bail;
 
 	if (self->added) {
 		Py_ssize_t len = PyList_GET_SIZE(self->added);
 		if (PyDict_SetItemString(obj, "index entries added",
-					 PyInt_FromLong(len)) == -1)
+					 PyInt_FromSsize_t(len)) == -1)
 			goto bail;
 	}
 
@@ -536,7 +540,108 @@
 	return NULL;
 }
 
-static inline int nt_level(const char *node, int level)
+/*
+ * When we cache a list, we want to be sure the caller can't mutate
+ * the cached copy.
+ */
+static PyObject *list_copy(PyObject *list)
+{
+	Py_ssize_t len = PyList_GET_SIZE(list);
+	PyObject *newlist = PyList_New(len);
+	Py_ssize_t i;
+
+	if (newlist == NULL)
+		return NULL;
+
+	for (i = 0; i < len; i++) {
+		PyObject *obj = PyList_GET_ITEM(list, i);
+		Py_INCREF(obj);
+		PyList_SET_ITEM(newlist, i, obj);
+	}
+
+	return newlist;
+}
+
+static PyObject *index_headrevs(indexObject *self)
+{
+	Py_ssize_t i, len, addlen;
+	char *nothead = NULL;
+	PyObject *heads;
+
+	if (self->headrevs)
+		return list_copy(self->headrevs);
+
+	len = index_length(self) - 1;
+	heads = PyList_New(0);
+	if (heads == NULL)
+		goto bail;
+	if (len == 0) {
+		PyObject *nullid = PyInt_FromLong(-1);
+		if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
+			Py_XDECREF(nullid);
+			goto bail;
+		}
+		goto done;
+	}
+
+	nothead = calloc(len, 1);
+	if (nothead == NULL)
+		goto bail;
+
+	for (i = 0; i < self->raw_length; i++) {
+		const char *data = index_deref(self, i);
+		int parent_1 = getbe32(data + 24);
+		int parent_2 = getbe32(data + 28);
+		if (parent_1 >= 0)
+			nothead[parent_1] = 1;
+		if (parent_2 >= 0)
+			nothead[parent_2] = 1;
+	}
+
+	addlen = self->added ? PyList_GET_SIZE(self->added) : 0;
+
+	for (i = 0; i < addlen; i++) {
+		PyObject *rev = PyList_GET_ITEM(self->added, i);
+		PyObject *p1 = PyTuple_GET_ITEM(rev, 5);
+		PyObject *p2 = PyTuple_GET_ITEM(rev, 6);
+		long parent_1, parent_2;
+
+		if (!PyInt_Check(p1) || !PyInt_Check(p2)) {
+			PyErr_SetString(PyExc_TypeError,
+					"revlog parents are invalid");
+			goto bail;
+		}
+		parent_1 = PyInt_AS_LONG(p1);
+		parent_2 = PyInt_AS_LONG(p2);
+		if (parent_1 >= 0)
+			nothead[parent_1] = 1;
+		if (parent_2 >= 0)
+			nothead[parent_2] = 1;
+	}
+
+	for (i = 0; i < len; i++) {
+		PyObject *head;
+
+		if (nothead[i])
+			continue;
+		head = PyInt_FromLong(i);
+		if (head == NULL || PyList_Append(heads, head) == -1) {
+			Py_XDECREF(head);
+			goto bail;
+		}
+	}
+
+done:
+	self->headrevs = heads;
+	free(nothead);
+	return list_copy(self->headrevs);
+bail:
+	Py_XDECREF(heads);
+	free(nothead);
+	return NULL;
+}
+
+static inline int nt_level(const char *node, Py_ssize_t level)
 {
 	int v = node[level>>1];
 	if (!(level & 1))
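
index_headrevs() above computes heads straight from the parent fields of the index: every revision named as a parent is marked, and the unmarked ones are the heads, with -1 standing in for the null revision when the repository is empty. The same algorithm as a Python sketch:

    def headrevs_sketch(parentlist):
        # parentlist[i] = (p1, p2) for revision i, with -1 meaning "no parent"
        nothead = [False] * len(parentlist)
        for p1, p2 in parentlist:
            if p1 >= 0:
                nothead[p1] = True
            if p2 >= 0:
                nothead[p2] = True
        return [i for i, x in enumerate(nothead) if not x] or [-1]

    # 0 <- 1 <- 2 and 0 <- 3: the heads are revisions 2 and 3
    assert headrevs_sketch([(-1, -1), (0, -1), (1, -1), (0, -1)]) == [2, 3]
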
@@ -544,8 +649,17 @@
 	return v & 0xf;
 }
 
-static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen)
+/*
+ * Return values:
+ *
+ *   -4: match is ambiguous (multiple candidates)
+ *   -2: not found
+ * rest: valid rev
+ */
+static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
+		   int hex)
 {
+	int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
 	int level, maxlevel, off;
 
 	if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
@@ -554,27 +668,35 @@
 	if (self->nt == NULL)
 		return -2;
 
-	maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
+	if (hex)
+		maxlevel = nodelen > 40 ? 40 : (int)nodelen;
+	else
+		maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
 
 	for (level = off = 0; level < maxlevel; level++) {
-		int k = nt_level(node, level);
+		int k = getnybble(node, level);
 		nodetree *n = &self->nt[off];
 		int v = n->children[k];
 
 		if (v < 0) {
 			const char *n;
+			Py_ssize_t i;
+
 			v = -v - 1;
 			n = index_node(self, v);
 			if (n == NULL)
 				return -2;
-			return memcmp(node, n, nodelen > 20 ? 20 : nodelen)
-				? -2 : v;
+			for (i = level; i < maxlevel; i++)
+				if (getnybble(node, i) != nt_level(n, i))
+					return -2;
+			return v;
 		}
 		if (v == 0)
 			return -2;
 		off = v;
 	}
-	return -2;
+	/* multiple matches against an ambiguous prefix */
+	return -4;
 }
 
 static int nt_new(indexObject *self)
@@ -638,6 +760,26 @@
 	return -1;
 }
 
+static int nt_init(indexObject *self)
+{
+	if (self->nt == NULL) {
+		self->ntcapacity = self->raw_length < 4
+			? 4 : self->raw_length / 2;
+		self->nt = calloc(self->ntcapacity, sizeof(nodetree));
+		if (self->nt == NULL) {
+			PyErr_NoMemory();
+			return -1;
+		}
+		self->ntlength = 1;
+		self->ntrev = (int)index_length(self) - 1;
+		self->ntlookups = 1;
+		self->ntmisses = 0;
+		if (nt_insert(self, nullid, INT_MAX) == -1)
+			return -1;
+	}
+	return 0;
+}
+
 /*
  * Return values:
  *
@@ -651,23 +793,12 @@
 	int rev;
 
 	self->ntlookups++;
-	rev = nt_find(self, node, nodelen);
+	rev = nt_find(self, node, nodelen, 0);
 	if (rev >= -1)
 		return rev;
 
-	if (self->nt == NULL) {
-		self->ntcapacity = self->raw_length < 4
-			? 4 : self->raw_length / 2;
-		self->nt = calloc(self->ntcapacity, sizeof(nodetree));
-		if (self->nt == NULL) {
-			PyErr_SetString(PyExc_MemoryError, "out of memory");
-			return -3;
-		}
-		self->ntlength = 1;
-		self->ntrev = (int)index_length(self) - 1;
-		self->ntlookups = 1;
-		self->ntmisses = 0;
-	}
+	if (nt_init(self) == -1)
+		return -3;
 
 	/*
 	 * For the first handful of lookups, we scan the entire index,
@@ -692,10 +823,14 @@
 	} else {
 		for (rev = self->ntrev - 1; rev >= 0; rev--) {
 			const char *n = index_node(self, rev);
-			if (n == NULL)
+			if (n == NULL) {
+				self->ntrev = rev + 1;
 				return -2;
-			if (nt_insert(self, n, rev) == -1)
+			}
+			if (nt_insert(self, n, rev) == -1) {
+				self->ntrev = rev + 1;
 				return -3;
+			}
 			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
 				break;
 			}
@@ -763,6 +898,77 @@
 	return NULL;
 }
 
+static int nt_partialmatch(indexObject *self, const char *node,
+			   Py_ssize_t nodelen)
+{
+	int rev;
+
+	if (nt_init(self) == -1)
+		return -3;
+
+	if (self->ntrev > 0) {
+		/* ensure that the radix tree is fully populated */
+		for (rev = self->ntrev - 1; rev >= 0; rev--) {
+			const char *n = index_node(self, rev);
+			if (n == NULL)
+				return -2;
+			if (nt_insert(self, n, rev) == -1)
+				return -3;
+		}
+		self->ntrev = rev;
+	}
+
+	return nt_find(self, node, nodelen, 1);
+}
+
+static PyObject *index_partialmatch(indexObject *self, PyObject *args)
+{
+	const char *fullnode;
+	int nodelen;
+	char *node;
+	int rev, i;
+
+	if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
+		return NULL;
+
+	if (nodelen < 4) {
+		PyErr_SetString(PyExc_ValueError, "key too short");
+		return NULL;
+	}
+
+	if (nodelen > 40)
+		nodelen = 40;
+
+	for (i = 0; i < nodelen; i++)
+		hexdigit(node, i);
+	if (PyErr_Occurred()) {
+		/* input contains non-hex characters */
+		PyErr_Clear();
+		Py_RETURN_NONE;
+	}
+
+	rev = nt_partialmatch(self, node, nodelen);
+
+	switch (rev) {
+	case -4:
+		raise_revlog_error();
+	case -3:
+		return NULL;
+	case -2:
+		Py_RETURN_NONE;
+	case -1:
+		return PyString_FromStringAndSize(nullid, 20);
+	}
+
+	fullnode = index_node(self, rev);
+	if (fullnode == NULL) {
+		PyErr_Format(PyExc_IndexError,
+			     "could not access rev %d", rev);
+		return NULL;
+	}
+	return PyString_FromStringAndSize(fullnode, 20);
+}
+
 static PyObject *index_m_get(indexObject *self, PyObject *args)
 {
 	Py_ssize_t nodelen;
@@ -819,10 +1025,8 @@
 		nt_insert(self, PyString_AS_STRING(node), -1);
 	}
 
-	if (start == 0) {
-		Py_DECREF(self->added);
-		self->added = NULL;
-	}
+	if (start == 0)
+		Py_CLEAR(self->added);
 }
 
 /*
@@ -833,6 +1037,7 @@
 {
 	Py_ssize_t start, stop, step, slicelength;
 	Py_ssize_t length = index_length(self);
+	int ret = 0;
 
 	if (PySlice_GetIndicesEx((PySliceObject*)item, length,
 				 &start, &stop, &step, &slicelength) < 0)
@@ -878,7 +1083,9 @@
 				self->ntrev = (int)start;
 		}
 		self->length = start + 1;
-		return 0;
+		if (start < self->raw_length)
+			self->raw_length = start;
+		goto done;
 	}
 
 	if (self->nt) {
@@ -886,10 +1093,12 @@
 		if (self->ntrev > start)
 			self->ntrev = (int)start;
 	}
-	return self->added
-		? PyList_SetSlice(self->added, start - self->length + 1,
-				  PyList_GET_SIZE(self->added), NULL)
-		: 0;
+	if (self->added)
+		ret = PyList_SetSlice(self->added, start - self->length + 1,
+				      PyList_GET_SIZE(self->added), NULL);
+done:
+	Py_CLEAR(self->headrevs);
+	return ret;
 }
 
 /*
@@ -931,17 +1140,16 @@
 {
 	const char *data = PyString_AS_STRING(self->data);
 	const char *end = data + PyString_GET_SIZE(self->data);
-	const long hdrsize = 64;
-	long incr = hdrsize;
+	long incr = v1_hdrsize;
 	Py_ssize_t len = 0;
 
-	while (data + hdrsize <= end) {
+	while (data + v1_hdrsize <= end) {
 		uint32_t comp_len;
 		const char *old_data;
 		/* 3rd element of header is length of compressed inline data */
 		comp_len = getbe32(data + 8);
-		incr = hdrsize + comp_len;
-		if (incr < hdrsize)
+		incr = v1_hdrsize + comp_len;
+		if (incr < v1_hdrsize)
 			break;
 		if (offsets)
 			offsets[len] = data;
@@ -952,7 +1160,7 @@
 			break;
 	}
 
-	if (data != end && data + hdrsize != end) {
+	if (data != end && data + v1_hdrsize != end) {
 		if (!PyErr_Occurred())
 			PyErr_SetString(PyExc_ValueError, "corrupt index file");
 		return -1;
@@ -979,6 +1187,7 @@
 	self->cache = NULL;
 
 	self->added = NULL;
+	self->headrevs = NULL;
 	self->offsets = NULL;
 	self->nt = NULL;
 	self->ntlength = self->ntcapacity = 0;
@@ -994,11 +1203,11 @@
 		self->raw_length = len;
 		self->length = len + 1;
 	} else {
-		if (size % 64) {
+		if (size % v1_hdrsize) {
 			PyErr_SetString(PyExc_ValueError, "corrupt index file");
 			goto bail;
 		}
-		self->raw_length = size / 64;
+		self->raw_length = size / v1_hdrsize;
 		self->length = self->raw_length + 1;
 	}
 
@@ -1043,8 +1252,12 @@
 	 "clear the index caches"},
 	{"get", (PyCFunction)index_m_get, METH_VARARGS,
 	 "get an index entry"},
+	{"headrevs", (PyCFunction)index_headrevs, METH_NOARGS,
+	 "get head revisions"},
 	{"insert", (PyCFunction)index_insert, METH_VARARGS,
 	 "insert an index entry"},
+	{"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
+	 "match a potentially ambiguous node ID"},
 	{"stats", (PyCFunction)index_stats, METH_NOARGS,
 	 "stats for the index"},
 	{NULL} /* Sentinel */
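
The parsers.c hunks above add two methods to the C index, headrevs() and partialmatch(), the latter resolving an abbreviated hex node ID through the nodetree and reporting its outcome with the nt_find() return codes (-4 ambiguous, -2 not found, -1 the null revision, otherwise a valid rev). Below is a minimal pure-Python sketch of that return-code convention only; the linear scan, the function name and the null-ID handling are illustrative assumptions, not the C extension's behaviour.

    AMBIGUOUS, NOTFOUND, NULLREV = -4, -2, -1
    NULLID_HEX = '0' * 40

    def partialmatch(hexnodes, prefix):
        # hypothetical stand-in for index_partialmatch(): same return codes,
        # but a linear scan over full hex node IDs instead of the radix tree
        if NULLID_HEX.startswith(prefix):
            return NULLREV                      # abbreviation of the null ID
        matches = [rev for rev, node in enumerate(hexnodes)
                   if node.startswith(prefix)]
        if not matches:
            return NOTFOUND
        if len(matches) > 1:
            return AMBIGUOUS                    # caller reports an ambiguity
        return matches[0]

    nodes = ['aabbcc' + 'f' * 34, 'aabbdd' + 'f' * 34]
    assert partialmatch(nodes, 'aabbcc') == 0
    assert partialmatch(nodes, 'aabb') == AMBIGUOUS
    assert partialmatch(nodes, '1234') == NOTFOUND
    assert partialmatch(nodes, '0000') == NULLREV

The revlog.py hunk later in this changeset consumes these outcomes: the RevlogError raised for an ambiguous prefix becomes a LookupError("ambiguous identifier"), while AttributeError and ValueError fall through to the existing pure-Python lookup.
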
--- a/mercurial/patch.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/patch.py	Mon Jun 04 17:57:57 2012 -0500
@@ -245,7 +245,7 @@
                         tmpfp.write('\n')
             elif not diffs_seen and message and content_type == 'text/plain':
                 message += '\n' + payload
-    except:
+    except: # re-raises
         tmpfp.close()
         os.unlink(tmpname)
         raise
@@ -534,7 +534,7 @@
         if fname in self.data:
             return self.data[fname]
         if not self.opener or fname not in self.files:
-            raise IOError()
+            raise IOError
         fn, mode, copied = self.files[fname]
         return self.opener.read(fn), mode, copied
 
@@ -560,7 +560,7 @@
         try:
             fctx = self.ctx[fname]
         except error.LookupError:
-            raise IOError()
+            raise IOError
         flags = fctx.flags()
         return fctx.data(), ('l' in flags, 'x' in flags)
 
@@ -858,7 +858,8 @@
             self.lenb = int(self.lenb)
         self.starta = int(self.starta)
         self.startb = int(self.startb)
-        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
+        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
+                             self.b)
         # if we hit eof before finishing out the hunk, the last line will
         # be zero length.  Lets try to fix it up.
         while len(self.hunk[-1]) == 0:
@@ -1040,12 +1041,13 @@
             hunk.append(l)
             return l.rstrip('\r\n')
 
-        line = getline(lr, self.hunk)
-        while line and not line.startswith('literal '):
+        while True:
             line = getline(lr, self.hunk)
-        if not line:
-            raise PatchError(_('could not extract "%s" binary data')
-                             % self._fname)
+            if not line:
+                raise PatchError(_('could not extract "%s" binary data')
+                                 % self._fname)
+            if line.startswith('literal '):
+                break
         size = int(line[8:].rstrip())
         dec = []
         line = getline(lr, self.hunk)
@@ -1595,12 +1597,12 @@
 
     def lrugetfilectx():
         cache = {}
-        order = []
+        order = util.deque()
         def getfilectx(f, ctx):
             fctx = ctx.filectx(f, filelog=cache.get(f))
             if f not in cache:
                 if len(cache) > 20:
-                    del cache[order.pop(0)]
+                    del cache[order.popleft()]
                 cache[f] = fctx.filelog()
             else:
                 order.remove(f)
@@ -1628,13 +1630,14 @@
     if opts.git or opts.upgrade:
         copy = copies.pathcopies(ctx1, ctx2)
 
-    difffn = lambda opts, losedata: trydiff(repo, revs, ctx1, ctx2,
-                 modified, added, removed, copy, getfilectx, opts, losedata, prefix)
+    difffn = (lambda opts, losedata:
+                  trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
+                          copy, getfilectx, opts, losedata, prefix))
     if opts.upgrade and not opts.git:
         try:
             def losedata(fn):
                 if not losedatafn or not losedatafn(fn=fn):
-                    raise GitDiffRequired()
+                    raise GitDiffRequired
             # Buffer the whole output until we are sure it can be generated
             return list(difffn(opts.copy(git=False), losedata))
         except GitDiffRequired:
@@ -1665,7 +1668,7 @@
                 if line.startswith('@'):
                     head = False
             else:
-                if line and not line[0] in ' +-@\\':
+                if line and line[0] not in ' +-@\\':
                     head = True
             stripline = line
             if not head and line and line[0] in '+-':
@@ -1870,7 +1873,8 @@
                        countwidth, count, pluses, minuses))
 
     if stats:
-        output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
+        output.append(_(' %d files changed, %d insertions(+), '
+                        '%d deletions(-)\n')
                       % (len(stats), totaladds, totalremoves))
 
     return ''.join(output)
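
The lrugetfilectx() hunk above replaces a plain list, whose pop(0) is O(n), with util.deque so that evicting the oldest cached filelog is O(1). Here is a hedged, generic sketch of that bounded-cache pattern, written against collections.deque directly (util.deque is Mercurial's small compatibility wrapper); the helper name and the toy workload are made up for illustration.

    import collections

    def boundedcache(compute, maxsize=20):
        # dict for the values, deque for the eviction order; popleft()
        # drops the oldest entry in O(1), unlike list.pop(0)
        cache, order = {}, collections.deque()
        def get(key):
            if key not in cache:
                if len(cache) >= maxsize:
                    del cache[order.popleft()]
                cache[key] = compute(key)
            else:
                order.remove(key)       # keep recently used keys rightmost
            order.append(key)
            return cache[key]
        return get

    square = boundedcache(lambda n: n * n, maxsize=3)
    print([square(n) for n in (1, 2, 3, 4, 1)])    # [1, 4, 9, 16, 1]
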
--- a/mercurial/phases.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/phases.py	Mon Jun 04 17:57:57 2012 -0500
@@ -6,8 +6,8 @@
                    Logilab SA        <contact@logilab.fr>
                    Augie Fackler     <durin42@gmail.com>
 
-    This software may be used and distributed according to the terms of the
-    GNU General Public License version 2 or any later version.
+    This software may be used and distributed according to the terms
+    of the GNU General Public License version 2 or any later version.
 
     ---
 
@@ -17,17 +17,17 @@
 Basic Concept
 =============
 
-A 'changeset phases' is an indicator that tells us how a changeset is
-manipulated and communicated. The details of each phase is described below,
-here we describe the properties they have in common.
+A 'changeset phase' is an indicator that tells us how a changeset is
+manipulated and communicated. The details of each phase are described
+below; here we describe the properties they have in common.
 
-Like bookmarks, phases are not stored in history and thus are not permanent and
-leave no audit trail.
+Like bookmarks, phases are not stored in history and thus are not
+permanent and leave no audit trail.
 
-First, no changeset can be in two phases at once. Phases are ordered, so they
-can be considered from lowest to highest. The default, lowest phase is 'public'
-- this is the normal phase of existing changesets. A child changeset can not be
-in a lower phase than its parents.
+First, no changeset can be in two phases at once. Phases are ordered,
+so they can be considered from lowest to highest. The default, lowest
+phase is 'public' - this is the normal phase of existing changesets. A
+child changeset can not be in a lower phase than its parents.
 
 These phases share a hierarchy of traits:
 
@@ -36,25 +36,26 @@
     draft:               X
     secret:
 
-local commits are draft by default
+Local commits are draft by default.
+
+Phase Movement and Exchange
+===========================
 
-Phase movement and exchange
-============================
-
-Phase data are exchanged by pushkey on pull and push. Some server have a
-publish option set, we call them publishing server. Pushing to such server make
-draft changeset publish.
+Phase data is exchanged by pushkey on pull and push. Some servers have
+a publish option set; we call such a server a "publishing server".
+Pushing a draft changeset to a publishing server changes the phase to
+public.
 
 A small list of fact/rules define the exchange of phase:
 
 * old client never changes server states
 * pull never changes server states
-* publish and old server csets are seen as public by client
+* publishing and old server changesets are seen as public by the client
+* any secret changeset seen in another repository is lowered to at
+  least draft
 
-* Any secret changeset seens in another repository is lowered to at least draft
-
-
-Here is the final table summing up the 49 possible usecase of phase exchange:
+Here is the final table summing up the 49 possible use cases of phase
+exchange:
 
                            server
                   old     publish      non-publish
@@ -81,152 +82,232 @@
     * N = new/not present,
     * P = public,
     * D = draft,
-    * X = not tracked (ie: the old client or server has no internal way of
-          recording the phase.)
+    * X = not tracked (i.e., the old client or server has no internal
+          way of recording the phase.)
 
     passive = only pushes
 
 
     A cell here can be read like this:
 
-    "When a new client pushes a draft changeset (D) to a publishing server
-    where it's not present (N), it's marked public on both sides (P/P)."
+    "When a new client pushes a draft changeset (D) to a publishing
+    server where it's not present (N), it's marked public on both
+    sides (P/P)."
 
-Note: old client behave as publish server with Draft only content
+Note: old clients behave as a publishing server with draft-only content
 - other people see it as public
 - content is pushed as draft
 
 """
 
 import errno
-from node import nullid, bin, hex, short
+from node import nullid, nullrev, bin, hex, short
 from i18n import _
+import util
 
 allphases = public, draft, secret = range(3)
 trackedphases = allphases[1:]
 phasenames = ['public', 'draft', 'secret']
 
-def readroots(repo):
-    """Read phase roots from disk"""
+def _filterunknown(ui, changelog, phaseroots):
+    """remove unknown nodes from the phase boundary
+
+    Nothing is lost as unknown nodes only hold data for their descendants.
+    """
+    updated = False
+    nodemap = changelog.nodemap # to filter unknown nodes
+    for phase, nodes in enumerate(phaseroots):
+        missing = [node for node in nodes if node not in nodemap]
+        if missing:
+            for mnode in missing:
+                ui.debug(
+                    'removing unknown node %s from %i-phase boundary\n'
+                    % (short(mnode), phase))
+            nodes.symmetric_difference_update(missing)
+            updated = True
+    return updated
+
+def _readroots(repo, phasedefaults=None):
+    """Read phase roots from disk
+
+    phasedefaults is a list of fn(repo, roots) callable, which are
+    executed if the phase roots file does not exist. When phases are
+    being initialized on an existing repository, this could be used to
+set selected changesets' phase to something other than public.
+
+    Return (roots, dirty) where dirty is true if roots differ from
+    what is being stored.
+    """
+    dirty = False
     roots = [set() for i in allphases]
     try:
         f = repo.sopener('phaseroots')
         try:
             for line in f:
-                phase, nh = line.strip().split()
+                phase, nh = line.split()
                 roots[int(phase)].add(bin(nh))
         finally:
             f.close()
     except IOError, inst:
         if inst.errno != errno.ENOENT:
             raise
-        for f in repo._phasedefaults:
-            roots = f(repo, roots)
-        repo._dirtyphases = True
-    return roots
+        if phasedefaults:
+            for f in phasedefaults:
+                roots = f(repo, roots)
+        dirty = True
+    if _filterunknown(repo.ui, repo.changelog, roots):
+        dirty = True
+    return roots, dirty
+
+class phasecache(object):
+    def __init__(self, repo, phasedefaults, _load=True):
+        if _load:
+            # Cheap trick to allow shallow-copy without copy module
+            self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
+            self.opener = repo.sopener
+            self._phaserevs = None
+
+    def copy(self):
+        # Shallow copy meant to ensure isolation in
+        # advance/retractboundary(), nothing more.
+        ph = phasecache(None, None, _load=False)
+        ph.phaseroots = self.phaseroots[:]
+        ph.dirty = self.dirty
+        ph.opener = self.opener
+        ph._phaserevs = self._phaserevs
+        return ph
 
-def writeroots(repo):
-    """Write phase roots from disk"""
-    f = repo.sopener('phaseroots', 'w', atomictemp=True)
-    try:
-        for phase, roots in enumerate(repo._phaseroots):
-            for h in roots:
-                f.write('%i %s\n' % (phase, hex(h)))
-        repo._dirtyphases = False
-    finally:
-        f.close()
+    def replace(self, phcache):
+        for a in 'phaseroots dirty opener _phaserevs'.split():
+            setattr(self, a, getattr(phcache, a))
+
+    def getphaserevs(self, repo, rebuild=False):
+        if rebuild or self._phaserevs is None:
+            revs = [public] * len(repo.changelog)
+            for phase in trackedphases:
+                roots = map(repo.changelog.rev, self.phaseroots[phase])
+                if roots:
+                    for rev in roots:
+                        revs[rev] = phase
+                    for rev in repo.changelog.descendants(roots):
+                        revs[rev] = phase
+            self._phaserevs = revs
+        return self._phaserevs
+
+    def phase(self, repo, rev):
+        # We need a repo argument here to be able to build _phaserev
+        # if necessary. The repository instance is not stored in
+        # phasecache to avoid reference cycles. The changelog instance
+        # is not stored because it is a filecache() property and can
+        # be replaced without us being notified.
+        if rev == nullrev:
+            return public
+        if self._phaserevs is None or rev >= len(self._phaserevs):
+            self._phaserevs = self.getphaserevs(repo, rebuild=True)
+        return self._phaserevs[rev]
 
-def filterunknown(repo, phaseroots=None):
-    """remove unknown nodes from the phase boundary
+    def write(self):
+        if not self.dirty:
+            return
+        f = self.opener('phaseroots', 'w', atomictemp=True)
+        try:
+            for phase, roots in enumerate(self.phaseroots):
+                for h in roots:
+                    f.write('%i %s\n' % (phase, hex(h)))
+        finally:
+            f.close()
+        self.dirty = False
+
+    def _updateroots(self, phase, newroots):
+        self.phaseroots[phase] = newroots
+        self._phaserevs = None
+        self.dirty = True
+
+    def advanceboundary(self, repo, targetphase, nodes):
+        # Be careful to preserve shallow-copied values: do not update
+        # phaseroots values, replace them.
 
-    no data is lost as unknown node only old data for their descentants
-    """
-    if phaseroots is None:
-        phaseroots = repo._phaseroots
-    nodemap = repo.changelog.nodemap # to filter unknown nodes
-    for phase, nodes in enumerate(phaseroots):
-        missing = [node for node in nodes if node not in nodemap]
-        if missing:
-            for mnode in missing:
-                repo.ui.debug(
-                    'removing unknown node %s from %i-phase boundary\n'
-                    % (short(mnode), phase))
-            nodes.symmetric_difference_update(missing)
-            repo._dirtyphases = True
+        delroots = [] # set of root deleted by this path
+        for phase in xrange(targetphase + 1, len(allphases)):
+            # filter nodes that are not in a compatible phase already
+            nodes = [n for n in nodes
+                     if self.phase(repo, repo[n].rev()) >= phase]
+            if not nodes:
+                break # no roots to move anymore
+            olds = self.phaseroots[phase]
+            roots = set(ctx.node() for ctx in repo.set(
+                    'roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
+            if olds != roots:
+                self._updateroots(phase, roots)
+                # some roots may need to be declared for lower phases
+                delroots.extend(olds - roots)
+            # declare deleted root in the target phase
+            if targetphase != 0:
+                self.retractboundary(repo, targetphase, delroots)
+
+    def retractboundary(self, repo, targetphase, nodes):
+        # Be careful to preserve shallow-copied values: do not update
+        # phaseroots values, replace them.
+
+        currentroots = self.phaseroots[targetphase]
+        newroots = [n for n in nodes
+                    if self.phase(repo, repo[n].rev()) < targetphase]
+        if newroots:
+            if nullid in newroots:
+                raise util.Abort(_('cannot change null revision phase'))
+            currentroots = currentroots.copy()
+            currentroots.update(newroots)
+            ctxs = repo.set('roots(%ln::)', currentroots)
+            currentroots.intersection_update(ctx.node() for ctx in ctxs)
+            self._updateroots(targetphase, currentroots)
 
 def advanceboundary(repo, targetphase, nodes):
     """Add nodes to a phase changing other nodes phases if necessary.
 
-    This function move boundary *forward* this means that all nodes are set
-    in the target phase or kept in a *lower* phase.
+    This function moves the boundary *forward*: all nodes are set in
+    the target phase or kept in a *lower* phase.
 
     Simplify boundary to contains phase roots only."""
-    delroots = [] # set of root deleted by this path
-    for phase in xrange(targetphase + 1, len(allphases)):
-        # filter nodes that are not in a compatible phase already
-        # XXX rev phase cache might have been invalidated by a previous loop
-        # XXX we need to be smarter here
-        nodes = [n for n in nodes if repo[n].phase() >= phase]
-        if not nodes:
-            break # no roots to move anymore
-        roots = repo._phaseroots[phase]
-        olds = roots.copy()
-        ctxs = list(repo.set('roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
-        roots.clear()
-        roots.update(ctx.node() for ctx in ctxs)
-        if olds != roots:
-            # invalidate cache (we probably could be smarter here
-            if '_phaserev' in vars(repo):
-                del repo._phaserev
-            repo._dirtyphases = True
-            # some roots may need to be declared for lower phases
-            delroots.extend(olds - roots)
-        # declare deleted root in the target phase
-        if targetphase != 0:
-            retractboundary(repo, targetphase, delroots)
-
+    phcache = repo._phasecache.copy()
+    phcache.advanceboundary(repo, targetphase, nodes)
+    repo._phasecache.replace(phcache)
 
 def retractboundary(repo, targetphase, nodes):
-    """Set nodes back to a phase changing other nodes phases if necessary.
+    """Set nodes back to a phase changing other nodes phases if
+    necessary.
 
-    This function move boundary *backward* this means that all nodes are set
-    in the target phase or kept in a *higher* phase.
+    This function moves the boundary *backward*: all nodes are set in
+    the target phase or kept in a *higher* phase.
 
     Simplify boundary to contains phase roots only."""
-    currentroots = repo._phaseroots[targetphase]
-    newroots = [n for n in nodes if repo[n].phase() < targetphase]
-    if newroots:
-        currentroots.update(newroots)
-        ctxs = repo.set('roots(%ln::)', currentroots)
-        currentroots.intersection_update(ctx.node() for ctx in ctxs)
-        if '_phaserev' in vars(repo):
-            del repo._phaserev
-        repo._dirtyphases = True
-
+    phcache = repo._phasecache.copy()
+    phcache.retractboundary(repo, targetphase, nodes)
+    repo._phasecache.replace(phcache)
 
 def listphases(repo):
-    """List phases root for serialisation over pushkey"""
+    """List phases root for serialization over pushkey"""
     keys = {}
     value = '%i' % draft
-    for root in repo._phaseroots[draft]:
+    for root in repo._phasecache.phaseroots[draft]:
         keys[hex(root)] = value
 
     if repo.ui.configbool('phases', 'publish', True):
-        # Add an extra data to let remote know we are a publishing repo.
-        # Publishing repo can't just pretend they are old repo. When pushing to
-        # a publishing repo, the client still need to push phase boundary
+        # Add some extra data to let the remote know we are a
+        # publishing repo. Publishing repos can't just pretend they
+        # are old repos. When pushing to a publishing repo, the
+        # client still needs to push the phase boundary.
         #
-        # Push do not only push changeset. It also push phase data. New
-        # phase data may apply to common changeset which won't be push (as they
-        # are common).  Here is a very simple example:
+        # A push does not only transfer changesets, it also transfers
+        # phase data. New phase data may apply to common changesets
+        # which won't be pushed (as they are common). A simple example:
         #
         # 1) repo A push changeset X as draft to repo B
         # 2) repo B make changeset X public
-        # 3) repo B push to repo A. X is not pushed but the data that X as now
-        #    public should
+        # 3) repo B pushes to repo A. X is not pushed, but the data
+        #    that X is now public should be
         #
-        # The server can't handle it on it's own as it has no idea of client
-        # phase data.
+        # The server can't handle it on its own as it has no idea of
+        # client phase data.
         keys['publishing'] = 'True'
     return keys
 
@@ -251,7 +332,7 @@
 def visibleheads(repo):
     """return the set of visible head of this repo"""
     # XXX we want a cache on this
-    sroots = repo._phaseroots[secret]
+    sroots = repo._phasecache.phaseroots[secret]
     if sroots:
         # XXX very slow revset. storing heads or secret "boundary" would help.
         revset = repo.set('heads(not (%ln::))', sroots)
@@ -267,7 +348,7 @@
     """return a branchmap for the visible set"""
     # XXX Recomputing this data on the fly is very slow.  We should build a
     # XXX cached version while computin the standard branchmap version.
-    sroots = repo._phaseroots[secret]
+    sroots = repo._phasecache.phaseroots[secret]
     if sroots:
         vbranchmap = {}
         for branch, nodes in  repo.branchmap().iteritems():
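
Most of the phases.py change moves root bookkeeping into the new phasecache class; the module-level advanceboundary() and retractboundary() wrappers now mutate a copy() and only replace() the repository's cache afterwards, so an exception part-way through cannot leave a half-updated cache on the repo. A minimal sketch of that copy-then-replace pattern follows, with deliberately simplified stand-in names rather than the real phasecache attributes.

    class tinycache(object):
        # simplified stand-in for phasecache: roots plus a dirty flag
        def __init__(self, roots):
            self.roots = roots
            self.dirty = False
        def copy(self):
            other = tinycache(list(self.roots))    # shallow copy is enough
            other.dirty = self.dirty
            return other
        def replace(self, other):
            for name in ('roots', 'dirty'):
                setattr(self, name, getattr(other, name))
        def retract(self, newroots):
            self.roots = self.roots + list(newroots)
            self.dirty = True

    def retractboundary(cache, newroots):
        scratch = cache.copy()      # mutate a private copy...
        scratch.retract(newroots)
        cache.replace(scratch)      # ...and install it only once it succeeded

    cache = tinycache(['abc'])
    retractboundary(cache, ['def'])
    print((cache.roots, cache.dirty))   # (['abc', 'def'], True)
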
--- a/mercurial/pure/mpatch.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/pure/mpatch.py	Mon Jun 04 17:57:57 2012 -0500
@@ -85,10 +85,10 @@
             p1, p2, l = struct.unpack(">lll", m.read(12))
             pull(new, frags, p1 - last) # what didn't change
             pull([], frags, p2 - p1)    # what got deleted
-            new.append((l, pos + 12))        # what got added
+            new.append((l, pos + 12))   # what got added
             pos += l + 12
             last = p2
-        frags.extend(reversed(new))                    # what was left at the end
+        frags.extend(reversed(new))     # what was left at the end
 
     t = collect(b2, frags)
 
--- a/mercurial/pure/osutil.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/pure/osutil.py	Mon Jun 04 17:57:57 2012 -0500
@@ -119,7 +119,7 @@
                 flags = _O_TEXT
 
             m0 = mode[0]
-            if m0 == 'r' and not '+' in mode:
+            if m0 == 'r' and '+' not in mode:
                 flags |= _O_RDONLY
                 access = _GENERIC_READ
             else:
--- a/mercurial/repair.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/repair.py	Mon Jun 04 17:57:57 2012 -0500
@@ -6,7 +6,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from mercurial import changegroup, bookmarks, phases
+from mercurial import changegroup, bookmarks
 from mercurial.node import short
 from mercurial.i18n import _
 import os
@@ -38,14 +38,14 @@
     """return the changesets which will be broken by the truncation"""
     s = set()
     def collectone(revlog):
-        links = (revlog.linkrev(i) for i in revlog)
+        linkgen = (revlog.linkrev(i) for i in revlog)
         # find the truncation point of the revlog
-        for lrev in links:
+        for lrev in linkgen:
             if lrev >= striprev:
                 break
         # see if any revision after this point has a linkrev
         # less than striprev (those will be broken by strip)
-        for lrev in links:
+        for lrev in linkgen:
             if lrev < striprev:
                 s.add(lrev)
 
@@ -74,7 +74,7 @@
     #  base = revision in the set that has no ancestor in the set)
     tostrip = set(striplist)
     for rev in striplist:
-        for desc in cl.descendants(rev):
+        for desc in cl.descendants([rev]):
             tostrip.add(desc)
 
     files = _collectfiles(repo, striprev)
@@ -91,7 +91,7 @@
 
     # compute base nodes
     if saverevs:
-        descendants = set(cl.descendants(*saverevs))
+        descendants = set(cl.descendants(saverevs))
         saverevs.difference_update(descendants)
     savebases = [cl.node(r) for r in saverevs]
     stripbases = [cl.node(r) for r in tostrip]
@@ -131,7 +131,7 @@
                 file, troffset, ignore = tr.entries[i]
                 repo.sopener(file, 'a').truncate(troffset)
             tr.close()
-        except:
+        except: # re-raises
             tr.abort()
             raise
 
@@ -160,7 +160,7 @@
         for m in updatebm:
             bm[m] = repo['.'].node()
         bookmarks.write(repo)
-    except:
+    except: # re-raises
         if backupfile:
             ui.warn(_("strip failed, full bundle stored in '%s'\n")
                     % backupfile)
@@ -170,7 +170,3 @@
         raise
 
     repo.destroyed()
-
-    # remove potential unknown phase
-    # XXX using to_strip data would be faster
-    phases.filterunknown(repo)
--- a/mercurial/revlog.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/revlog.py	Mon Jun 04 17:57:57 2012 -0500
@@ -362,13 +362,13 @@
         """return the set of all nodes ancestral to a given node, including
          the node itself, stopping when stop is matched"""
         reachable = set((node,))
-        visit = [node]
+        visit = util.deque([node])
         if stop:
             stopn = self.rev(stop)
         else:
             stopn = 0
         while visit:
-            n = visit.pop(0)
+            n = visit.popleft()
             if n == stop:
                 continue
             if n == nullid:
@@ -381,24 +381,27 @@
                     visit.append(p)
         return reachable
 
-    def ancestors(self, *revs):
+    def ancestors(self, revs, stoprev=0):
         """Generate the ancestors of 'revs' in reverse topological order.
+        Does not generate revs lower than stoprev.
 
         Yield a sequence of revision numbers starting with the parents
         of each revision in revs, i.e., each revision is *not* considered
         an ancestor of itself.  Results are in breadth-first order:
         parents of each rev in revs, then parents of those, etc.  Result
         does not include the null revision."""
-        visit = list(revs)
+        visit = util.deque(revs)
         seen = set([nullrev])
         while visit:
-            for parent in self.parentrevs(visit.pop(0)):
+            for parent in self.parentrevs(visit.popleft()):
+                if parent < stoprev:
+                    continue
                 if parent not in seen:
                     visit.append(parent)
                     seen.add(parent)
                     yield parent
 
-    def descendants(self, *revs):
+    def descendants(self, revs):
         """Generate the descendants of 'revs' in revision order.
 
         Yield a sequence of revision numbers starting with a child of
@@ -441,15 +444,15 @@
         heads = [self.rev(n) for n in heads]
 
         # we want the ancestors, but inclusive
-        has = set(self.ancestors(*common))
+        has = set(self.ancestors(common))
         has.add(nullrev)
         has.update(common)
 
         # take all ancestors from heads that aren't in has
         missing = set()
-        visit = [r for r in heads if r not in has]
+        visit = util.deque(r for r in heads if r not in has)
         while visit:
-            r = visit.pop(0)
+            r = visit.popleft()
             if r in missing:
                 continue
             else:
@@ -635,6 +638,10 @@
         return (orderedout, roots, heads)
 
     def headrevs(self):
+        try:
+            return self.index.headrevs()
+        except AttributeError:
+            pass
         count = len(self)
         if not count:
             return [nullrev]
@@ -696,7 +703,7 @@
     def descendant(self, start, end):
         if start == nullrev:
             return True
-        for i in self.descendants(start):
+        for i in self.descendants([start]):
             if i == end:
                 return True
             elif i > end:
@@ -722,7 +729,7 @@
         return self.node(c)
 
     def _match(self, id):
-        if isinstance(id, (long, int)):
+        if isinstance(id, int):
             # rev
             return self.node(id)
         if len(id) == 20:
@@ -756,6 +763,15 @@
                 pass
 
     def _partialmatch(self, id):
+        try:
+            return self.index.partialmatch(id)
+        except RevlogError:
+            # parsers.c radix tree lookup gave multiple matches
+            raise LookupError(id, self.indexfile, _("ambiguous identifier"))
+        except (AttributeError, ValueError):
+            # we are pure python, or key was too short to search radix tree
+            pass
+
         if id in self._pcache:
             return self._pcache[id]
 
@@ -1199,7 +1215,7 @@
                     continue
 
                 for p in (p1, p2):
-                    if not p in self.nodemap:
+                    if p not in self.nodemap:
                         raise LookupError(p, self.indexfile,
                                           _('unknown parent'))
 
--- a/mercurial/revset.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/revset.py	Mon Jun 04 17:57:57 2012 -0500
@@ -17,10 +17,10 @@
     """Like revlog.ancestors(), but supports followfirst."""
     cut = followfirst and 1 or None
     cl = repo.changelog
-    visit = list(revs)
+    visit = util.deque(revs)
     seen = set([node.nullrev])
     while visit:
-        for parent in cl.parentrevs(visit.pop(0))[:cut]:
+        for parent in cl.parentrevs(visit.popleft())[:cut]:
             if parent not in seen:
                 visit.append(parent)
                 seen.add(parent)
@@ -47,6 +47,36 @@
                 yield i
                 break
 
+def _revsbetween(repo, roots, heads):
+    """Return all paths between roots and heads, inclusive of both endpoint
+    sets."""
+    if not roots:
+        return []
+    parentrevs = repo.changelog.parentrevs
+    visit = heads[:]
+    reachable = set()
+    seen = {}
+    minroot = min(roots)
+    roots = set(roots)
+    # open-code the post-order traversal due to the tiny size of
+    # sys.getrecursionlimit()
+    while visit:
+        rev = visit.pop()
+        if rev in roots:
+            reachable.add(rev)
+        parents = parentrevs(rev)
+        seen[rev] = parents
+        for parent in parents:
+            if parent >= minroot and parent not in seen:
+                visit.append(parent)
+    if not reachable:
+        return []
+    for rev in sorted(seen):
+        for parent in seen[rev]:
+            if parent in reachable:
+                reachable.add(rev)
+    return sorted(reachable)
+
 elements = {
     "(": (20, ("group", 1, ")"), ("func", 1, ")")),
     "~": (18, None, ("ancestor", 18)),
@@ -108,7 +138,8 @@
                 pos += 1
             else:
                 raise error.ParseError(_("unterminated string"), s)
-        elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
+        # gather up a symbol/keyword
+        elif c.isalnum() or c in '._' or ord(c) > 127:
             s = pos
             pos += 1
             while pos < l: # find end of symbol
@@ -190,6 +221,14 @@
     s = set(subset)
     return [x for x in r if x in s]
 
+def dagrange(repo, subset, x, y):
+    if subset:
+        r = range(len(repo))
+        xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
+        s = set(subset)
+        return [r for r in xs if r in s]
+    return []
+
 def andset(repo, subset, x, y):
     return getset(repo, getset(repo, subset, x), y)
 
@@ -257,7 +296,8 @@
 
 def ancestorspec(repo, subset, x, n):
     """``set~n``
-    Changesets that are the Nth ancestor (first parents only) of a changeset in set.
+    Changesets that are the Nth ancestor (first parents only) of a changeset
+    in set.
     """
     try:
         n = int(n[1])
@@ -277,7 +317,8 @@
     """
     # i18n: "author" is a keyword
     n = encoding.lower(getstring(x, _("author requires a string")))
-    return [r for r in subset if n in encoding.lower(repo[r].user())]
+    kind, pattern, matcher = _substringmatcher(n)
+    return [r for r in subset if matcher(encoding.lower(repo[r].user()))]
 
 def bisect(repo, subset, x):
     """``bisect(string)``
@@ -289,6 +330,7 @@
     - ``pruned``             : csets that are goods, bads or skipped
     - ``untested``           : csets whose fate is yet unknown
     - ``ignored``            : csets ignored due to DAG topology
+    - ``current``            : the cset currently being bisected
     """
     status = getstring(x, _("bisect requires a string")).lower()
     state = set(hbisect.get(repo, status))
@@ -302,6 +344,10 @@
 def bookmark(repo, subset, x):
     """``bookmark([name])``
     The named bookmark or all bookmarks.
+
+    If `name` starts with `re:`, the remainder of the name is treated as
+    a regular expression. To match a bookmark that actually starts with `re:`,
+    use the prefix `literal:`.
     """
     # i18n: "bookmark" is a keyword
     args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
@@ -309,11 +355,26 @@
         bm = getstring(args[0],
                        # i18n: "bookmark" is a keyword
                        _('the argument to bookmark must be a string'))
-        bmrev = bookmarksmod.listbookmarks(repo).get(bm, None)
-        if not bmrev:
-            raise util.Abort(_("bookmark '%s' does not exist") % bm)
-        bmrev = repo[bmrev].rev()
-        return [r for r in subset if r == bmrev]
+        kind, pattern, matcher = _stringmatcher(bm)
+        if kind == 'literal':
+            bmrev = bookmarksmod.listbookmarks(repo).get(bm, None)
+            if not bmrev:
+                raise util.Abort(_("bookmark '%s' does not exist") % bm)
+            bmrev = repo[bmrev].rev()
+            return [r for r in subset if r == bmrev]
+        else:
+            matchrevs = set()
+            for name, bmrev in bookmarksmod.listbookmarks(repo).iteritems():
+                if matcher(name):
+                    matchrevs.add(bmrev)
+            if not matchrevs:
+                raise util.Abort(_("no bookmarks exist that match '%s'")
+                                 % pattern)
+            bmrevs = set()
+            for bmrev in matchrevs:
+                bmrevs.add(repo[bmrev].rev())
+            return [r for r in subset if r in bmrevs]
+
     bms = set([repo[r].rev()
                for r in bookmarksmod.listbookmarks(repo).values()])
     return [r for r in subset if r in bms]
@@ -322,14 +383,25 @@
     """``branch(string or set)``
     All changesets belonging to the given branch or the branches of the given
     changesets.
+
+    If `string` starts with `re:`, the remainder of the name is treated as
+    a regular expression. To match a branch that actually starts with `re:`,
+    use the prefix `literal:`.
     """
     try:
         b = getstring(x, '')
-        if b in repo.branchmap():
-            return [r for r in subset if repo[r].branch() == b]
     except error.ParseError:
         # not a string, but another revspec, e.g. tip()
         pass
+    else:
+        kind, pattern, matcher = _stringmatcher(b)
+        if kind == 'literal':
+            # note: falls through to the revspec case if no branch with
+            # this name exists
+            if pattern in repo.branchmap():
+                return [r for r in subset if matcher(repo[r].branch())]
+        else:
+            return [r for r in subset if matcher(repo[r].branch())]
 
     s = getset(repo, range(len(repo)), x)
     b = set()
@@ -392,7 +464,7 @@
     """
     # i18n: "closed" is a keyword
     getargs(x, 0, 0, _("closed takes no arguments"))
-    return [r for r in subset if repo[r].extra().get('close')]
+    return [r for r in subset if repo[r].closesbranch()]
 
 def contains(repo, subset, x):
     """``contains(pattern)``
@@ -462,7 +534,32 @@
     """``draft()``
     Changeset in draft phase."""
     getargs(x, 0, 0, _("draft takes no arguments"))
-    return [r for r in subset if repo._phaserev[r] == phases.draft]
+    pc = repo._phasecache
+    return [r for r in subset if pc.phase(repo, r) == phases.draft]
+
+def extra(repo, subset, x):
+    """``extra(label, [value])``
+    Changesets with the given label in the extra metadata, with the given
+    optional value.
+
+    If `value` starts with `re:`, the remainder of the value is treated as
+    a regular expression. To match a value that actually starts with `re:`,
+    use the prefix `literal:`.
+    """
+
+    l = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
+    label = getstring(l[0], _('first argument to extra must be a string'))
+    value = None
+
+    if len(l) > 1:
+        value = getstring(l[1], _('second argument to extra must be a string'))
+        kind, value, matcher = _stringmatcher(value)
+
+    def _matchvalue(r):
+        extra = repo[r].extra()
+        return label in extra and (value is None or matcher(extra[label]))
+
+    return [r for r in subset if _matchvalue(r)]
 
 def filelog(repo, subset, x):
     """``filelog(pattern)``
@@ -859,7 +956,8 @@
     """``public()``
     Changeset in public phase."""
     getargs(x, 0, 0, _("public takes no arguments"))
-    return [r for r in subset if repo._phaserev[r] == phases.public]
+    pc = repo._phasecache
+    return [r for r in subset if pc.phase(repo, r) == phases.public]
 
 def remote(repo, subset, x):
     """``remote([id [,path]])``
@@ -1038,7 +1136,8 @@
     """``secret()``
     Changeset in secret phase."""
     getargs(x, 0, 0, _("secret takes no arguments"))
-    return [r for r in subset if repo._phaserev[r] == phases.secret]
+    pc = repo._phasecache
+    return [r for r in subset if pc.phase(repo, r) == phases.secret]
 
 def sort(repo, subset, x):
     """``sort(set[, [-]key...])``
@@ -1095,6 +1194,51 @@
     l.sort()
     return [e[-1] for e in l]
 
+def _stringmatcher(pattern):
+    """
+    accepts a string, possibly starting with 're:' or 'literal:' prefix.
+    returns the matcher name, pattern, and matcher function.
+    missing or unknown prefixes are treated as literal matches.
+
+    helper for tests:
+    >>> def test(pattern, *tests):
+    ...     kind, pattern, matcher = _stringmatcher(pattern)
+    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
+
+    exact matching (no prefix):
+    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
+    ('literal', 'abcdefg', [False, False, True])
+
+    regex matching ('re:' prefix)
+    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
+    ('re', 'a.+b', [False, False, True])
+
+    force exact matches ('literal:' prefix)
+    >>> test('literal:re:foobar', 'foobar', 're:foobar')
+    ('literal', 're:foobar', [False, True])
+
+    unknown prefixes are ignored and treated as literals
+    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
+    ('literal', 'foo:bar', [False, False, True])
+    """
+    if pattern.startswith('re:'):
+        pattern = pattern[3:]
+        try:
+            regex = re.compile(pattern)
+        except re.error, e:
+            raise error.ParseError(_('invalid regular expression: %s')
+                                   % e)
+        return 're', pattern, regex.search
+    elif pattern.startswith('literal:'):
+        pattern = pattern[8:]
+    return 'literal', pattern, pattern.__eq__
+
+def _substringmatcher(pattern):
+    kind, pattern, matcher = _stringmatcher(pattern)
+    if kind == 'literal':
+        matcher = lambda s: pattern in s
+    return kind, pattern, matcher
+
 def tag(repo, subset, x):
     """``tag([name])``
     The specified tag by name, or all tagged revisions if no name is given.
@@ -1103,12 +1247,20 @@
     args = getargs(x, 0, 1, _("tag takes one or no arguments"))
     cl = repo.changelog
     if args:
-        tn = getstring(args[0],
-                       # i18n: "tag" is a keyword
-                       _('the argument to tag must be a string'))
-        if not repo.tags().get(tn, None):
-            raise util.Abort(_("tag '%s' does not exist") % tn)
-        s = set([cl.rev(n) for t, n in repo.tagslist() if t == tn])
+        pattern = getstring(args[0],
+                            # i18n: "tag" is a keyword
+                            _('the argument to tag must be a string'))
+        kind, pattern, matcher = _stringmatcher(pattern)
+        if kind == 'literal':
+            # avoid resolving all tags
+            tn = repo._tagscache.tags.get(pattern, None)
+            if tn is None:
+                raise util.Abort(_("tag '%s' does not exist") % pattern)
+            s = set([repo[tn].rev()])
+        else:
+            s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
+            if not s:
+                raise util.Abort(_("no tags exist that match '%s'") % pattern)
     else:
         s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
     return [r for r in subset if r in s]
@@ -1119,6 +1271,10 @@
 def user(repo, subset, x):
     """``user(string)``
     User name contains string. The match is case-insensitive.
+
+    If `string` starts with `re:`, the remainder of the string is treated as
+    a regular expression. To match a user that actually contains `re:`, use
+    the prefix `literal:`.
     """
     return author(repo, subset, x)
 
@@ -1151,6 +1307,7 @@
     "descendants": descendants,
     "_firstdescendants": _firstdescendants,
     "draft": draft,
+    "extra": extra,
     "file": hasfile,
     "filelog": filelog,
     "first": first,
@@ -1190,6 +1347,7 @@
 
 methods = {
     "range": rangeset,
+    "dagrange": dagrange,
     "string": stringset,
     "symbol": symbolset,
     "and": andset,
@@ -1213,9 +1371,6 @@
     op = x[0]
     if op == 'minus':
         return optimize(('and', x[1], ('not', x[2])), small)
-    elif op == 'dagrange':
-        return optimize(('and', ('func', ('symbol', 'descendants'), x[1]),
-                         ('func', ('symbol', 'ancestors'), x[2])), small)
     elif op == 'dagrangepre':
         return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
     elif op == 'dagrangepost':
@@ -1229,7 +1384,7 @@
                          '-' + getstring(x[1], _("can't negate that"))), small)
     elif op in 'string symbol negate':
         return smallbonus, x # single revisions are small
-    elif op == 'and' or op == 'dagrange':
+    elif op == 'and':
         wa, ta = optimize(x[1], True)
         wb, tb = optimize(x[2], True)
         w = min(wa, wb)
@@ -1250,7 +1405,7 @@
         return o[0], (op, o[1])
     elif op == 'group':
         return optimize(x[1], small)
-    elif op in 'range list parent ancestorspec':
+    elif op in 'dagrange range list parent ancestorspec':
         if op == 'parent':
             # x^:y means (x^) : y, not x ^ (:y)
             post = ('parentpost', x[1])
@@ -1362,7 +1517,7 @@
         return args[arg]
     return tuple(_expandargs(t, args) for t in tree)
 
-def _expandaliases(aliases, tree, expanding):
+def _expandaliases(aliases, tree, expanding, cache):
     """Expand aliases in tree, recursively.
 
     'aliases' is a dictionary mapping user defined aliases to
@@ -1377,17 +1532,20 @@
             raise error.ParseError(_('infinite expansion of revset alias "%s" '
                                      'detected') % alias.name)
         expanding.append(alias)
-        result = _expandaliases(aliases, alias.replacement, expanding)
+        if alias.name not in cache:
+            cache[alias.name] = _expandaliases(aliases, alias.replacement,
+                                               expanding, cache)
+        result = cache[alias.name]
         expanding.pop()
         if alias.args is not None:
             l = getlist(tree[2])
             if len(l) != len(alias.args):
                 raise error.ParseError(
                     _('invalid number of arguments: %s') % len(l))
-            l = [_expandaliases(aliases, a, []) for a in l]
+            l = [_expandaliases(aliases, a, [], cache) for a in l]
             result = _expandargs(result, dict(zip(alias.args, l)))
     else:
-        result = tuple(_expandaliases(aliases, t, expanding)
+        result = tuple(_expandaliases(aliases, t, expanding, cache)
                        for t in tree)
     return result
 
@@ -1397,7 +1555,7 @@
     for k, v in ui.configitems('revsetalias'):
         alias = revsetalias(k, v)
         aliases[alias.name] = alias
-    return _expandaliases(aliases, tree, [])
+    return _expandaliases(aliases, tree, [], {})
 
 parse = parser.parser(tokenize, elements).parse
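
The new dagrange()/_revsbetween() pair above replaces the old rewrite of x::y into descendants(x) and ancestors(y) with a direct two-pass computation: walk down from the heads recording each visited rev's parents, then sweep the visited revs in increasing order and keep every rev with a reachable parent. A sketch of that algorithm on a toy parent map, assuming (as a revlog guarantees) that parents always carry lower rev numbers:

    def revsbetween(parentrevs, roots, heads):
        if not roots:
            return []
        roots, minroot = set(roots), min(roots)
        visit, seen, reachable = list(heads), {}, set()
        while visit:                      # pass 1: walk down from the heads
            rev = visit.pop()
            if rev in roots:
                reachable.add(rev)
            parents = parentrevs(rev)
            seen[rev] = parents
            for parent in parents:
                if parent >= minroot and parent not in seen:
                    visit.append(parent)
        if not reachable:
            return []
        for rev in sorted(seen):          # pass 2: propagate reachability up
            if any(p in reachable for p in seen[rev]):
                reachable.add(rev)
        return sorted(reachable)

    #   0 -- 1 -- 2 -- 3
    #         \
    #          4
    parents = {0: (), 1: (0,), 2: (1,), 3: (2,), 4: (1,)}
    print(revsbetween(parents.get, [1], [3]))    # [1, 2, 3]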
 
--- a/mercurial/scmutil.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/scmutil.py	Mon Jun 04 17:57:57 2012 -0500
@@ -141,8 +141,9 @@
                 elif (stat.S_ISDIR(st.st_mode) and
                       os.path.isdir(os.path.join(curpath, '.hg'))):
                     if not self.callback or not self.callback(curpath):
-                        raise util.Abort(_("path '%s' is inside nested repo %r") %
-                                         (path, prefix))
+                        raise util.Abort(_("path '%s' is inside nested "
+                                           "repo %r")
+                                         % (path, prefix))
             prefixes.append(normprefix)
             parts.pop()
             normparts.pop()
@@ -464,7 +465,7 @@
 
 else:
 
-    _HKEY_LOCAL_MACHINE = 0x80000002L
+    import _winreg
 
     def systemrcpath():
         '''return default os-specific hgrc search path'''
@@ -484,7 +485,7 @@
             return rcpath
         # else look for a system rcpath in the registry
         value = util.lookupreg('SOFTWARE\\Mercurial', None,
-                               _HKEY_LOCAL_MACHINE)
+                               _winreg.HKEY_LOCAL_MACHINE)
         if not isinstance(value, str) or not value:
             return rcpath
         value = util.localpath(value)
@@ -656,8 +657,9 @@
             unknown.append(abs)
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
-        elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
-            or (os.path.isdir(target) and not os.path.islink(target))):
+        elif (repo.dirstate[abs] != 'r' and
+              (not good or not os.path.lexists(target) or
+               (os.path.isdir(target) and not os.path.islink(target)))):
             deleted.append(abs)
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
@@ -766,8 +768,9 @@
             missings.append(r)
     missings.sort()
     if missings:
-        raise error.RequirementError(_("unknown repository format: "
-            "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
+        raise error.RequirementError(
+            _("unknown repository format: requires features '%s' (upgrade "
+              "Mercurial)") % "', '".join(missings))
     return requirements
 
 class filecacheentry(object):
--- a/mercurial/setdiscovery.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/setdiscovery.py	Mon Jun 04 17:57:57 2012 -0500
@@ -8,7 +8,7 @@
 
 from node import nullid
 from i18n import _
-import random, collections, util, dagutil
+import random, util, dagutil
 import phases
 
 def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
@@ -18,7 +18,7 @@
     else:
         heads = dag.heads()
     dist = {}
-    visit = collections.deque(heads)
+    visit = util.deque(heads)
     seen = set()
     factor = 1
     while visit:
@@ -134,11 +134,16 @@
         return (ownheadhashes, True, srvheadhashes,)
 
     # full blown discovery
-    undecided = dag.nodeset() # own nodes where I don't know if remote knows them
-    common = set() # own nodes I know we both know
-    missing = set() # own nodes I know remote lacks
 
-    # treat remote heads (and maybe own heads) as a first implicit sample response
+    # own nodes where I don't know if remote knows them
+    undecided = dag.nodeset()
+    # own nodes I know we both know
+    common = set()
+    # own nodes I know remote lacks
+    missing = set()
+
+    # treat remote heads (and maybe own heads) as a first implicit sample
+    # response
     common.update(dag.ancestorset(srvheads))
     undecided.difference_update(common)
 
--- a/mercurial/similar.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/similar.py	Mon Jun 04 17:57:57 2012 -0500
@@ -44,7 +44,8 @@
     '''
     copies = {}
     for i, r in enumerate(removed):
-        repo.ui.progress(_('searching for similar files'), i, total=len(removed))
+        repo.ui.progress(_('searching for similar files'), i,
+                         total=len(removed))
 
         # lazily load text
         @util.cachefunc
--- a/mercurial/simplemerge.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/simplemerge.py	Mon Jun 04 17:57:57 2012 -0500
@@ -94,7 +94,7 @@
             elif self.a[0].endswith('\r'):
                 newline = '\r'
         if base_marker and reprocess:
-            raise CantReprocessAndShowBase()
+            raise CantReprocessAndShowBase
         if name_a:
             start_marker = start_marker + ' ' + name_a
         if name_b:
@@ -222,7 +222,8 @@
         # section a[0:ia] has been disposed of, etc
         iz = ia = ib = 0
 
-        for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
+        for region in self.find_sync_regions():
+            zmatch, zend, amatch, aend, bmatch, bend = region
             #print 'match base [%d:%d]' % (zmatch, zend)
 
             matchlen = zend - zmatch
--- a/mercurial/sshrepo.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/sshrepo.py	Mon Jun 04 17:57:57 2012 -0500
@@ -29,6 +29,7 @@
     def __init__(self, ui, path, create=False):
         self._url = path
         self.ui = ui
+        self.pipeo = self.pipei = self.pipee = None
 
         u = util.url(path, parsequery=False, parsefragment=False)
         if u.scheme != 'ssh' or not u.host or u.path is None:
@@ -86,7 +87,8 @@
             lines.append(l)
             max_noise -= 1
         else:
-            self._abort(error.RepoError(_("no suitable response from remote hg")))
+            self._abort(error.RepoError(_('no suitable response from '
+                                          'remote hg')))
 
         self.capabilities = set()
         for l in reversed(lines):
@@ -110,15 +112,17 @@
         raise exception
 
     def cleanup(self):
+        if self.pipeo is None:
+            return
+        self.pipeo.close()
+        self.pipei.close()
         try:
-            self.pipeo.close()
-            self.pipei.close()
             # read the error descriptor until EOF
             for l in self.pipee:
                 self.ui.status(_("remote: "), l)
-            self.pipee.close()
-        except:
+        except (IOError, ValueError):
             pass
+        self.pipee.close()
 
     __del__ = cleanup
 
--- a/mercurial/subrepo.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/subrepo.py	Mon Jun 04 17:57:57 2012 -0500
@@ -200,7 +200,8 @@
                  'use (l)ocal source (%s) or (r)emote source (%s)?\n')
                % (subrelpath(sub), local, remote))
     else:
-        msg = (_(' subrepository sources for %s differ (in checked out version)\n'
+        msg = (_(' subrepository sources for %s differ (in checked out '
+                 'version)\n'
                  'use (l)ocal source (%s) or (r)emote source (%s)?\n')
                % (subrelpath(sub), local, remote))
     return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
@@ -267,7 +268,7 @@
     hg = h
 
     scmutil.pathauditor(ctx._repo.root)(path)
-    state = ctx.substate.get(path, nullstate)
+    state = ctx.substate[path]
     if state[2] not in types:
         raise util.Abort(_('unknown subrepo type %s') % state[2])
     return types[state[2]](ctx, path, state[:2])
@@ -498,8 +499,9 @@
                                      % (subrelpath(self), srcurl))
                 parentrepo = self._repo._subparent
                 shutil.rmtree(self._repo.path)
-                other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
-                                         self._repo.root, update=False)
+                other, self._repo = hg.clone(self._repo._subparent.ui, {},
+                                             other, self._repo.root,
+                                             update=False)
                 self._initrepo(parentrepo, source, create=True)
             else:
                 self._repo.ui.status(_('pulling subrepo %s from %s\n')
--- a/mercurial/tags.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/tags.py	Mon Jun 04 17:57:57 2012 -0500
@@ -181,7 +181,7 @@
             for line in cachelines:
                 if line == "\n":
                     break
-                line = line.rstrip().split()
+                line = line.split()
                 cacherevs.append(int(line[0]))
                 headnode = bin(line[1])
                 cacheheads.append(headnode)
@@ -228,6 +228,11 @@
 
     # N.B. in case 4 (nodes destroyed), "new head" really means "newly
     # exposed".
+    if not len(repo.file('.hgtags')):
+        # No tags have ever been committed, so we can avoid a
+        # potentially expensive search.
+        return (repoheads, cachefnode, None, True)
+
     newheads = [head
                 for head in repoheads
                 if head not in set(cacheheads)]
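
The new early return in tags.py relies on the fact that the length of a filelog is its number of revisions: if the .hgtags filelog is empty, no tag has ever been committed anywhere in history, so the per-head search for tag file nodes can be skipped entirely. A hedged one-function sketch of that check, assuming only the usual repo.file(path) interface:

    def anytags(repo):
        # len() of a filelog is its revision count, so an empty '.hgtags'
        # filelog means the repository has never committed a tag and the
        # potentially expensive cache-validation search above is unnecessary.
        return len(repo.file('.hgtags')) > 0
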
--- a/mercurial/templatefilters.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/templatefilters.py	Mon Jun 04 17:57:57 2012 -0500
@@ -260,7 +260,7 @@
     >>> person('"Foo Bar <foo@bar>')
     'Foo Bar'
     """
-    if not '@' in author:
+    if '@' not in author:
         return author
     f = author.find('<')
     if f != -1:
--- a/mercurial/templater.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/templater.py	Mon Jun 04 17:57:57 2012 -0500
@@ -312,7 +312,7 @@
 
     def load(self, t):
         '''Get the template for the given template name. Use a local cache.'''
-        if not t in self.cache:
+        if t not in self.cache:
             try:
                 self.cache[t] = util.readfile(self.map[t][1])
             except KeyError, inst:
--- a/mercurial/templates/paper/help.tmpl	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/templates/paper/help.tmpl	Mon Jun 04 17:57:57 2012 -0500
@@ -21,7 +21,7 @@
 <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li>
 </ul>
 <ul>
- <li class="active">help</li>
+ <li class="active"><a href="{url}help{sessionvars%urlparameter}">help</a></li>
 </ul>
 </div>
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/raw/graph.tmpl	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,6 @@
+{header}
+# HG graph
+# Node ID {node}
+# Rows shown {rows}
+
+{nodes%graphnode}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/raw/graphedge.tmpl	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,1 @@
+edge:        ({col}, {row}) -> ({nextcol}, {nextrow}) (color {color})
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/raw/graphnode.tmpl	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,7 @@
+changeset:   {node}
+user:        {user}
+date:        {age}
+summary:     {desc}
+{branches%branchname}{tags%tagname}{bookmarks%bookmarkname}
+node:        ({col}, {row}) (color {color})
+{edges%graphedge}
--- a/mercurial/templates/raw/map	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/templates/raw/map	Mon Jun 04 17:57:57 2012 -0500
@@ -28,3 +28,9 @@
 bookmarkentry = '{bookmark}	{node}\n'
 branches = '{entries%branchentry}'
 branchentry = '{branch}	{node}	{status}\n'
+graph = graph.tmpl
+graphnode = graphnode.tmpl
+graphedge = graphedge.tmpl
+bookmarkname = 'bookmark:    {name}\n'
+branchname = 'branch:      {name}\n'
+tagname = 'tag:         {name}\n'
--- a/mercurial/transaction.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/transaction.py	Mon Jun 04 17:57:57 2012 -0500
@@ -164,7 +164,7 @@
                 _playback(self.journal, self.report, self.opener,
                           self.entries, False)
                 self.report(_("rollback completed\n"))
-            except:
+            except Exception:
                 self.report(_("rollback failed - please run hg recover\n"))
         finally:
             self.journal = None
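
Several hunks in this changeset (transaction.py above, and verify.py, util.py and setup.py below) replace bare `except:` clauses with `except Exception:` or a narrower tuple. On Python 2.5 and later, KeyboardInterrupt and SystemExit no longer derive from Exception, so the narrowed clause still reports ordinary failures while letting Ctrl-C and interpreter shutdown propagate. A small illustrative sketch; the playback callable is hypothetical:

    import sys

    def guardedrollback(playback):
        # 'playback' stands in for the journal replay done by _playback above.
        try:
            playback()
        except Exception:
            # Catches ordinary errors (IOError, OSError, ValueError, ...) but,
            # unlike a bare 'except:', does not swallow KeyboardInterrupt or
            # SystemExit on Python 2.5+.
            sys.stderr.write("rollback failed - please run hg recover\n")
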
--- a/mercurial/treediscovery.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/treediscovery.py	Mon Jun 04 17:57:57 2012 -0500
@@ -56,11 +56,11 @@
     # a 'branch' here is a linear segment of history, with four parts:
     # head, root, first parent, second parent
     # (a branch always has two parents (or none) by definition)
-    unknown = remote.branches(unknown)
+    unknown = util.deque(remote.branches(unknown))
     while unknown:
         r = []
         while unknown:
-            n = unknown.pop(0)
+            n = unknown.popleft()
             if n[0] in seen:
                 continue
 
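
The treediscovery.py hunk swaps list.pop(0) for util.deque and popleft(): popping from the front of a list shifts every remaining element (O(n) per pop, quadratic over the whole loop), whereas a deque pops from the left in constant time. A small comparison sketch using the standard collections module:

    import collections

    def drainlist(unknown):
        while unknown:
            yield unknown.pop(0)        # shifts the whole tail: O(n) per pop

    def draindeque(unknown):
        unknown = collections.deque(unknown)
        while unknown:
            yield unknown.popleft()     # constant time per pop

    # Both produce the same order; only the cost per element differs.
    assert list(drainlist(range(5))) == list(draindeque(range(5)))
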
--- a/mercurial/ui.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/ui.py	Mon Jun 04 17:57:57 2012 -0500
@@ -488,9 +488,14 @@
 
     def flush(self):
         try: self.fout.flush()
-        except: pass
+        except (IOError, ValueError): pass
         try: self.ferr.flush()
-        except: pass
+        except (IOError, ValueError): pass
+
+    def _isatty(self, fh):
+        if self.configbool('ui', 'nontty', False):
+            return False
+        return util.isatty(fh)
 
     def interactive(self):
         '''is interactive input allowed?
@@ -510,7 +515,7 @@
         if i is None:
             # some environments replace stdin without implementing isatty
             # usually those are non-interactive
-            return util.isatty(self.fin)
+            return self._isatty(self.fin)
 
         return i
 
@@ -548,12 +553,12 @@
         if i is None:
             # some environments replace stdout without implementing isatty
             # usually those are non-interactive
-            return util.isatty(self.fout)
+            return self._isatty(self.fout)
 
         return i
 
     def _readline(self, prompt=''):
-        if util.isatty(self.fin):
+        if self._isatty(self.fin):
             try:
                 # magically add command line editing support, where
                 # available
@@ -680,7 +685,8 @@
         printed.'''
         if self.tracebackflag:
             if exc:
-                traceback.print_exception(exc[0], exc[1], exc[2], file=self.ferr)
+                traceback.print_exception(exc[0], exc[1], exc[2],
+                                          file=self.ferr)
             else:
                 traceback.print_exc(file=self.ferr)
         return self.tracebackflag
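
The new ui._isatty wrapper above lets the ui.nontty configuration knob force non-interactive behaviour even when the streams really are terminals; the callers shown in the hunks now route through it instead of calling util.isatty directly. A rough stand-alone sketch of the wrapper, with the flag passed in explicitly instead of being read via self.configbool:

    def _isatty(nontty, fh):
        if nontty:                  # 'ui.nontty' set: pretend nothing is a tty
            return False
        try:
            return fh.isatty()      # same spirit as util.isatty
        except AttributeError:
            return False

    # Example: a test harness can set ui.nontty=True to keep prompts, colour
    # and pagers disabled even when run from an interactive shell.
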
--- a/mercurial/url.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/url.py	Mon Jun 04 17:57:57 2012 -0500
@@ -377,7 +377,8 @@
                 keyfile = self.auth['key']
                 certfile = self.auth['cert']
 
-            conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
+            conn = httpsconnection(host, port, keyfile, certfile, *args,
+                                   **kwargs)
             conn.ui = self.ui
             return conn
 
--- a/mercurial/util.h	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/util.h	Mon Jun 04 17:57:57 2012 -0500
@@ -109,6 +109,7 @@
 typedef int Py_ssize_t;
 typedef Py_ssize_t (*lenfunc)(PyObject *);
 typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
+#define PyInt_FromSsize_t PyInt_FromLong
 
 #if !defined(PY_SSIZE_T_MIN)
 #define PY_SSIZE_T_MAX INT_MAX
--- a/mercurial/util.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/util.py	Mon Jun 04 17:57:57 2012 -0500
@@ -14,7 +14,7 @@
 """
 
 from i18n import _
-import error, osutil, encoding
+import error, osutil, encoding, collections
 import errno, re, shutil, sys, tempfile, traceback
 import os, time, datetime, calendar, textwrap, signal
 import imp, socket, urllib
@@ -202,15 +202,27 @@
 
     return f
 
+try:
+    collections.deque.remove
+    deque = collections.deque
+except AttributeError:
+    # python 2.4 lacks deque.remove
+    class deque(collections.deque):
+        def remove(self, val):
+            for i, v in enumerate(self):
+                if v == val:
+                    del self[i]
+                    break
+
 def lrucachefunc(func):
     '''cache most recent results of function calls'''
     cache = {}
-    order = []
+    order = deque()
     if func.func_code.co_argcount == 1:
         def f(arg):
             if arg not in cache:
                 if len(cache) > 20:
-                    del cache[order.pop(0)]
+                    del cache[order.popleft()]
                 cache[arg] = func(arg)
             else:
                 order.remove(arg)
@@ -220,7 +232,7 @@
         def f(*args):
             if args not in cache:
                 if len(cache) > 20:
-                    del cache[order.pop(0)]
+                    del cache[order.popleft()]
                 cache[args] = func(*args)
             else:
                 order.remove(args)
@@ -760,9 +772,9 @@
             ofp.write(chunk)
         ifp.close()
         ofp.close()
-    except:
+    except: # re-raises
         try: os.unlink(temp)
-        except: pass
+        except OSError: pass
         raise
     return temp
 
@@ -865,7 +877,7 @@
         Returns less than L bytes if the iterator runs dry."""
         left = l
         buf = ''
-        queue = self._queue
+        queue = deque(self._queue)
         while left > 0:
             # refill the queue
             if not queue:
@@ -878,13 +890,14 @@
                 if not queue:
                     break
 
-            chunk = queue.pop(0)
+            chunk = queue.popleft()
             left -= len(chunk)
             if left < 0:
-                queue.insert(0, chunk[left:])
+                queue.appendleft(chunk[left:])
                 buf += chunk[:left]
             else:
                 buf += chunk
+        self._queue = list(queue)
 
         return buf
 
@@ -1079,7 +1092,7 @@
             try:
                 d["d"] = days
                 return parsedate(date, extendeddateformats, d)[0]
-            except:
+            except Abort:
                 pass
         d["d"] = "28"
         return parsedate(date, extendeddateformats, d)[0]
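
The util.py hunks introduce a deque that backfills remove() on Python 2.4 and then use it both for lrucachefunc's eviction order and for chunkbuffer's pending-chunk queue, replacing the O(n) list.pop(0)/insert(0, ...) calls. A minimal sketch of an LRU-ordered cache built the same way (single-argument functions only; the cache size is illustrative):

    import collections

    def lrucachefunc(func, maxsize=20):
        cache = {}
        order = collections.deque()          # eviction order, oldest on the left
        def cached(arg):
            if arg not in cache:
                if len(cache) > maxsize:
                    del cache[order.popleft()]   # O(1) eviction of the oldest key
                cache[arg] = func(arg)
            else:
                order.remove(arg)                # move a hit to most-recently-used
            order.append(arg)
            return cache[arg]
        return cached
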
--- a/mercurial/verify.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/verify.py	Mon Jun 04 17:57:57 2012 -0500
@@ -87,7 +87,7 @@
                         # attempt to filter down to real linkrevs
                         linkrevs = [l for l in linkrevs
                                     if lrugetctx(l)[f].filenode() == node]
-                    except:
+                    except Exception:
                         pass
                 warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
             lr = None # can't be trusted
@@ -189,7 +189,7 @@
                 try:
                     fl = repo.file(f)
                     lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
-                except:
+                except Exception:
                     lr = None
                 err(lr, _("in manifest but not in changeset"), f)
 
--- a/mercurial/win32.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/win32.py	Mon Jun 04 17:57:57 2012 -0500
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import ctypes, errno, os, struct, subprocess, random
+import ctypes, errno, os, subprocess, random
 
 _kernel32 = ctypes.windll.kernel32
 _advapi32 = ctypes.windll.advapi32
@@ -69,13 +69,6 @@
 # GetExitCodeProcess
 _STILL_ACTIVE = 259
 
-# registry
-_HKEY_CURRENT_USER = 0x80000001L
-_HKEY_LOCAL_MACHINE = 0x80000002L
-_KEY_READ = 0x20019
-_REG_SZ = 1
-_REG_DWORD = 4
-
 class _STARTUPINFO(ctypes.Structure):
     _fields_ = [('cb', _DWORD),
                 ('lpReserved', _LPSTR),
@@ -179,17 +172,6 @@
 _kernel32.GetConsoleScreenBufferInfo.argtypes = [_HANDLE, ctypes.c_void_p]
 _kernel32.GetConsoleScreenBufferInfo.restype = _BOOL
 
-_advapi32.RegOpenKeyExA.argtypes = [_HANDLE, _LPCSTR, _DWORD, _DWORD,
-    ctypes.c_void_p]
-_advapi32.RegOpenKeyExA.restype = _LONG
-
-_advapi32.RegQueryValueExA.argtypes = [_HANDLE, _LPCSTR, ctypes.c_void_p,
-    ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
-_advapi32.RegQueryValueExA.restype = _LONG
-
-_advapi32.RegCloseKey.argtypes = [_HANDLE]
-_advapi32.RegCloseKey.restype = _LONG
-
 _advapi32.GetUserNameA.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
 _advapi32.GetUserNameA.restype = _BOOL
 
@@ -261,50 +243,13 @@
             _kernel32.CloseHandle(h)
     return _kernel32.GetLastError() != _ERROR_INVALID_PARAMETER
 
-def lookupreg(key, valname=None, scope=None):
-    ''' Look up a key/value name in the Windows registry.
-
-    valname: value name. If unspecified, the default value for the key
-    is used.
-    scope: optionally specify scope for registry lookup, this can be
-    a sequence of scopes to look up in order. Default (CURRENT_USER,
-    LOCAL_MACHINE).
-    '''
-    byref = ctypes.byref
-    if scope is None:
-        scope = (_HKEY_CURRENT_USER, _HKEY_LOCAL_MACHINE)
-    elif not isinstance(scope, (list, tuple)):
-        scope = (scope,)
-    for s in scope:
-        kh = _HANDLE()
-        res = _advapi32.RegOpenKeyExA(s, key, 0, _KEY_READ, ctypes.byref(kh))
-        if res != _ERROR_SUCCESS:
-            continue
-        try:
-            size = _DWORD(600)
-            type = _DWORD()
-            buf = ctypes.create_string_buffer(size.value + 1)
-            res = _advapi32.RegQueryValueExA(kh.value, valname, None,
-                                       byref(type), buf, byref(size))
-            if res != _ERROR_SUCCESS:
-                continue
-            if type.value == _REG_SZ:
-                # string is in ANSI code page, aka local encoding
-                return buf.value
-            elif type.value == _REG_DWORD:
-                fmt = '<L'
-                s = ctypes.string_at(byref(buf), struct.calcsize(fmt))
-                return struct.unpack(fmt, s)[0]
-        finally:
-            _advapi32.RegCloseKey(kh.value)
-
 def executablepath():
     '''return full path of hg.exe'''
     size = 600
     buf = ctypes.create_string_buffer(size + 1)
     len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size)
     if len == 0:
-        raise ctypes.WinError()
+        raise ctypes.WinError
     elif len == size:
         raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER)
     return buf.value
@@ -314,7 +259,7 @@
     size = _DWORD(300)
     buf = ctypes.create_string_buffer(size.value + 1)
     if not _advapi32.GetUserNameA(ctypes.byref(buf), ctypes.byref(size)):
-        raise ctypes.WinError()
+        raise ctypes.WinError
     return buf.value
 
 _signalhandler = []
@@ -332,7 +277,7 @@
     h = _SIGNAL_HANDLER(handler)
     _signalhandler.append(h) # needed to prevent garbage collection
     if not _kernel32.SetConsoleCtrlHandler(h, True):
-        raise ctypes.WinError()
+        raise ctypes.WinError
 
 def hidewindow():
 
@@ -395,7 +340,7 @@
         None, args, None, None, False, _DETACHED_PROCESS,
         env, os.getcwd(), ctypes.byref(si), ctypes.byref(pi))
     if not res:
-        raise ctypes.WinError()
+        raise ctypes.WinError
 
     return pi.dwProcessId
 
--- a/mercurial/windows.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/windows.py	Mon Jun 04 17:57:57 2012 -0500
@@ -6,14 +6,13 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import osutil
-import errno, msvcrt, os, re, sys
+import osutil, encoding
+import errno, msvcrt, os, re, sys, _winreg
 
 import win32
 executablepath = win32.executablepath
 getuser = win32.getuser
 hidewindow = win32.hidewindow
-lookupreg = win32.lookupreg
 makedir = win32.makedir
 nlinks = win32.nlinks
 oslink = win32.oslink
@@ -304,7 +303,7 @@
 
 def groupmembers(name):
     # Don't support groups on Windows for now
-    raise KeyError()
+    raise KeyError
 
 def isexec(f):
     return False
@@ -316,4 +315,25 @@
     def cacheable(self):
         return False
 
+def lookupreg(key, valname=None, scope=None):
+    ''' Look up a key/value name in the Windows registry.
+
+    valname: value name. If unspecified, the default value for the key
+    is used.
+    scope: optionally specify scope for registry lookup, this can be
+    a sequence of scopes to look up in order. Default (CURRENT_USER,
+    LOCAL_MACHINE).
+    '''
+    if scope is None:
+        scope = (_winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE)
+    elif not isinstance(scope, (list, tuple)):
+        scope = (scope,)
+    for s in scope:
+        try:
+            val = _winreg.QueryValueEx(_winreg.OpenKey(s, key), valname)[0]
+            # never let a Unicode string escape into the wild
+            return encoding.tolocal(val.encode('UTF-8'))
+        except EnvironmentError:
+            pass
+
 expandglobs = True
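
The win32.py/windows.py pair of hunks drops the hand-rolled ctypes registry access and reimplements lookupreg() on top of the standard _winreg module, re-encoding the Unicode value so it never escapes into the rest of the code. A short sketch of the core lookup (Windows-only, string-valued keys assumed; encoding.tolocal is simplified here to a plain UTF-8 encode):

    import _winreg                  # Windows-only stdlib module on Python 2

    def lookupreg(key, valname=None,
                  scope=(_winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE)):
        for s in scope:
            try:
                val = _winreg.QueryValueEx(_winreg.OpenKey(s, key), valname)[0]
                return val.encode('UTF-8')      # keep Unicode from leaking out
            except EnvironmentError:            # missing key/value: try next hive
                pass
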
--- a/mercurial/wireproto.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/mercurial/wireproto.py	Mon Jun 04 17:57:57 2012 -0500
@@ -24,9 +24,9 @@
 class batcher(object):
     '''base class for batches of commands submittable in a single request
 
-    All methods invoked on instances of this class are simply queued and return a
-    a future for the result. Once you call submit(), all the queued calls are
-    performed and the results set in their respective futures.
+    All methods invoked on instances of this class are simply queued and
+    return a future for the result. Once you call submit(), all the queued
+    calls are performed and the results set in their respective futures.
     '''
     def __init__(self):
         self.calls = []
@@ -51,7 +51,8 @@
 class remotebatch(batcher):
     '''batches the queued calls; uses as few roundtrips as possible'''
     def __init__(self, remote):
-        '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
+        '''remote must support _submitbatch(encbatch) and
+        _submitone(op, encargs)'''
         batcher.__init__(self)
         self.remote = remote
     def submit(self):
@@ -97,14 +98,14 @@
         encresref = future()
         # Return encoded arguments and future:
         yield encargs, encresref
-        # Assuming the future to be filled with the result from the batched request
-        # now. Decode it:
+        # Assuming the future to be filled with the result from the batched
+        # request now. Decode it:
         yield decode(encresref.value)
 
-    The decorator returns a function which wraps this coroutine as a plain method,
-    but adds the original method as an attribute called "batchable", which is
-    used by remotebatch to split the call into separate encoding and decoding
-    phases.
+    The decorator returns a function which wraps this coroutine as a plain
+    method, but adds the original method as an attribute called "batchable",
+    which is used by remotebatch to split the call into separate encoding and
+    decoding phases.
     '''
     def plain(*args, **opts):
         batchable = f(*args, **opts)
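
The wireproto.py hunks only rewrap long docstring lines, but the coroutine protocol they describe deserves a concrete illustration: a batchable method first yields its encoded arguments together with a future, and then, once the batcher has filled that future with the encoded reply, yields the decoded result. A self-contained toy version of the two-step dance; the future class and the driving loop are stand-ins for wireproto's real machinery:

    class future(object):
        """Placeholder filled in once the batched request has been answered."""
        def set(self, value):
            self.value = value

    def batchablelookup(key):
        # Step 1: hand the encoded arguments and a result future to the batcher.
        encargs = [('key', key)]
        resref = future()
        yield encargs, resref
        # Step 2: once the future is filled, decode and yield the final value.
        yield resref.value.strip()

    # Driving the coroutine the way remotebatch would:
    call = batchablelookup('tip')
    encargs, resref = call.next()        # collect the encoded call
    resref.set(' deadbeef \n')           # simulate the batched server reply
    print call.next()                    # -> 'deadbeef'
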
--- a/setup.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/setup.py	Mon Jun 04 17:57:57 2012 -0500
@@ -23,24 +23,25 @@
 try:
     import hashlib
     sha = hashlib.sha1()
-except:
+except ImportError:
     try:
         import sha
-    except:
+    except ImportError:
         raise SystemExit(
             "Couldn't import standard hashlib (incomplete Python install).")
 
 try:
     import zlib
-except:
+except ImportError:
     raise SystemExit(
         "Couldn't import standard zlib (incomplete Python install).")
 
 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
 isironpython = False
 try:
-    isironpython = platform.python_implementation().lower().find("ironpython") != -1
-except:
+    isironpython = (platform.python_implementation()
+                    .lower().find("ironpython") != -1)
+except AttributeError:
     pass
 
 if isironpython:
@@ -48,7 +49,7 @@
 else:
     try:
         import bz2
-    except:
+    except ImportError:
         raise SystemExit(
             "Couldn't import standard bz2 (incomplete Python install).")
 
@@ -107,7 +108,7 @@
             os.dup2(devnull.fileno(), sys.stderr.fileno())
             objects = cc.compile([fname], output_dir=tmpdir)
             cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
-        except:
+        except Exception:
             return False
         return True
     finally:
@@ -211,10 +212,12 @@
     # Insert hgbuildmo first so that files in mercurial/locale/ are found
     # when build_py is run next.
     sub_commands = [('build_mo', None),
-    # We also need build_ext before build_py. Otherwise, when 2to3 is called (in
-    # build_py), it will not find osutil & friends, thinking that those modules are
-    # global and, consequently, making a mess, now that all module imports are
-    # global.
+
+    # We also need build_ext before build_py. Otherwise, when 2to3 is
+    # called (in build_py), it will not find osutil & friends,
+    # thinking that those modules are global and, consequently, making
+    # a mess, now that all module imports are global.
+
                     ('build_ext', build.has_ext_modules),
                    ] + build.sub_commands
 
@@ -292,7 +295,8 @@
             self.distribution.ext_modules = []
         else:
             if not os.path.exists(os.path.join(get_python_inc(), 'Python.h')):
-                raise SystemExit("Python headers are required to build Mercurial")
+                raise SystemExit('Python headers are required to build '
+                                 'Mercurial')
 
     def find_modules(self):
         modules = build_py.find_modules(self)
@@ -384,8 +388,7 @@
             'build_hgextindex': buildhgextindex,
             'install_scripts': hginstallscripts}
 
-packages = ['mercurial', 'mercurial.hgweb',
-            'mercurial.httpclient', 'mercurial.httpclient.tests',
+packages = ['mercurial', 'mercurial.hgweb', 'mercurial.httpclient',
             'hgext', 'hgext.convert', 'hgext.highlight', 'hgext.zeroconf',
             'hgext.largefiles']
 
--- a/tests/hghave	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/hghave	Mon Jun 04 17:57:57 2012 -0500
@@ -31,14 +31,14 @@
 def has_bzr():
     try:
         import bzrlib
-        return bzrlib.__doc__ != None
+        return bzrlib.__doc__ is not None
     except ImportError:
         return False
 
 def has_bzr114():
     try:
         import bzrlib
-        return (bzrlib.__doc__ != None
+        return (bzrlib.__doc__ is not None
                 and bzrlib.version_info[:2] >= (1, 14))
     except ImportError:
         return False
@@ -60,7 +60,7 @@
         os.close(fd)
         os.remove(path)
         return True
-    except:
+    except (IOError, OSError):
         return False
 
 def has_executablebit():
@@ -93,7 +93,7 @@
         try:
             s2 = os.stat(p2)
             return s2 == s1
-        except:
+        except OSError:
             return False
     finally:
         os.remove(path)
@@ -106,7 +106,7 @@
         return False
 
 def has_fifo():
-    return hasattr(os, "mkfifo")
+    return getattr(os, "mkfifo", None) is not None
 
 def has_cacheable_fs():
     from mercurial import util
@@ -165,10 +165,11 @@
         return False
 
 def has_p4():
-    return matchoutput('p4 -V', r'Rev\. P4/') and matchoutput('p4d -V', r'Rev\. P4D/')
+    return (matchoutput('p4 -V', r'Rev\. P4/') and
+            matchoutput('p4d -V', r'Rev\. P4D/'))
 
 def has_symlink():
-    if not hasattr(os, "symlink"):
+    if getattr(os, "symlink", None) is None:
         return False
     name = tempfile.mktemp(dir=".", prefix='hg-checklink-')
     try:
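
The hghave hunks replace hasattr() probes with `getattr(..., None) is not None` and drop the bare excepts. On Python 2, hasattr() returns False for *any* exception raised during the attribute lookup, which can mask real failures; getattr() with a default only covers AttributeError. A tiny sketch of the difference (the Flaky class is purely illustrative):

    class Flaky(object):
        def __getattr__(self, name):
            raise IOError('transient failure')

    f = Flaky()
    print hasattr(f, 'x')        # Python 2 prints False: the IOError is hidden
    try:
        getattr(f, 'x', None)    # the default only applies to AttributeError...
    except IOError, inst:
        print 'real error surfaced:', inst   # ...so the genuine failure shows up
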
--- a/tests/run-tests.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/run-tests.py	Mon Jun 04 17:57:57 2012 -0500
@@ -594,6 +594,19 @@
     # can generate the surrounding doctest magic
     inpython = False
 
+    # True or False when in a true or false conditional section
+    skipping = None
+
+    def hghave(reqs):
+        # TODO: do something smarter when all other uses of hghave is gone
+        proc = Popen4('%s -c "%s/hghave %s"' %
+                      (options.shell, TESTDIR, ' '.join(reqs)), TESTDIR, 0)
+        proc.communicate()
+        ret = proc.wait()
+        if wifexited(ret):
+            ret = os.WEXITSTATUS(ret)
+        return ret == 0
+
     f = open(test)
     t = f.readlines()
     f.close()
@@ -606,7 +619,24 @@
     for n, l in enumerate(t):
         if not l.endswith('\n'):
             l += '\n'
-        if l.startswith('  >>> '): # python inlines
+        if l.startswith('#if'):
+            if skipping is not None:
+                after.setdefault(pos, []).append('  !!! nested #if\n')
+            skipping = not hghave(l.split()[1:])
+            after.setdefault(pos, []).append(l)
+        elif l.startswith('#else'):
+            if skipping is None:
+                after.setdefault(pos, []).append('  !!! missing #if\n')
+            skipping = not skipping
+            after.setdefault(pos, []).append(l)
+        elif l.startswith('#endif'):
+            if skipping is None:
+                after.setdefault(pos, []).append('  !!! missing #if\n')
+            skipping = None
+            after.setdefault(pos, []).append(l)
+        elif skipping:
+            after.setdefault(pos, []).append(l)
+        elif l.startswith('  >>> '): # python inlines
             after.setdefault(pos, []).append(l)
             prepos = pos
             pos = n
@@ -617,7 +647,7 @@
                 script.append('%s -m heredoctest <<EOF\n' % PYTHON)
             addsalt(n, True)
             script.append(l[2:])
-        if l.startswith('  ... '): # python inlines
+        elif l.startswith('  ... '): # python inlines
             after.setdefault(prepos, []).append(l)
             script.append(l[2:])
         elif l.startswith('  $ '): # commands
@@ -644,6 +674,8 @@
 
     if inpython:
         script.append("EOF\n")
+    if skipping is not None:
+        after.setdefault(pos, []).append('  !!! missing #endif\n')
     addsalt(n + 1, False)
 
     # Write out the script and execute it
@@ -860,7 +892,7 @@
         tf = open(testpath)
         firstline = tf.readline().rstrip()
         tf.close()
-    except:
+    except IOError:
         firstline = ''
     lctest = test.lower()
 
@@ -1187,6 +1219,7 @@
     os.environ['http_proxy'] = ''
     os.environ['no_proxy'] = ''
     os.environ['NO_PROXY'] = ''
+    os.environ['TERM'] = 'xterm'
 
     # unset env related to hooks
     for k in os.environ.keys():
@@ -1196,7 +1229,7 @@
             del os.environ[k]
 
     global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
-    TESTDIR = os.environ["TESTDIR"] = os.getcwd()
+    TESTDIR = os.environ["TESTDIR"] = os.getcwd().replace('\\', '/')
     if options.tmpdir:
         options.keep_tmpdir = True
         tmpdir = options.tmpdir
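
The run-tests.py hunk adds #if/#else/#endif support to .t tests, driven by hghave: `skipping` is None outside any conditional, True inside a branch whose requirements are not met, and False inside a satisfied branch, with unmatched directives flagged by "!!!" lines. A self-contained sketch of the same three-state filter, with hghave reduced to a dictionary lookup:

    def filterconditionals(lines, have):
        """Yield (line, active) pairs, mimicking the #if state machine above.

        'have' maps a requirement name to a boolean; error handling is reduced
        to assertions instead of the '!!!' markers used by run-tests.py.
        """
        skipping = None
        for l in lines:
            if l.startswith('#if'):
                assert skipping is None, 'nested #if'
                skipping = not all(have.get(r, False) for r in l.split()[1:])
            elif l.startswith('#else'):
                assert skipping is not None, 'missing #if'
                skipping = not skipping
            elif l.startswith('#endif'):
                assert skipping is not None, 'missing #if'
                skipping = None
            else:
                yield l, not skipping   # active unless inside a false branch
                continue
            yield l, False              # directive lines themselves never run
        assert skipping is None, 'missing #endif'

    # Example: only the branch whose requirement is available stays active.
    lines = ['#if symlink', '  $ ln -s a b', '#else', '  $ cp a b', '#endif']
    for line, active in filterconditionals(lines, {'symlink': True}):
        print active, line
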
--- a/tests/test-alias.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-alias.t	Mon Jun 04 17:57:57 2012 -0500
@@ -340,24 +340,24 @@
   
   basic commands:
   
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
   
   use "hg help" for the full list of commands or "hg -v" for details
   [255]
@@ -367,24 +367,24 @@
   
   basic commands:
   
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
   
   use "hg help" for the full list of commands or "hg -v" for details
   [255]
@@ -394,24 +394,24 @@
   
   basic commands:
   
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
   
   use "hg help" for the full list of commands or "hg -v" for details
   [255]
--- a/tests/test-bisect.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-bisect.t	Mon Jun 04 17:57:57 2012 -0500
@@ -224,6 +224,7 @@
   Testing changeset 12:1941b52820a5 (23 changesets remaining, ~4 tests)
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cat .hg/bisect.state
+  current 1941b52820a544549596820a8ae006842b0e2c64
   skip 9d7d07bc967ca98ad0600c24953fd289ad5fa991
   skip ce8f0998e922c179e80819d5066fbe46e2998784
   skip e7fa0811edb063f6319531f0d0a865882138e180
@@ -396,6 +397,12 @@
   date:        Thu Jan 01 00:00:06 1970 +0000
   summary:     msg 6
   
+  $ hg log -r "bisect(current)"
+  changeset:   5:7874a09ea728
+  user:        test
+  date:        Thu Jan 01 00:00:05 1970 +0000
+  summary:     msg 5
+  
   $ hg log -r "bisect(skip)"
   changeset:   1:5cd978ea5149
   user:        test
@@ -466,3 +473,40 @@
   date:        Thu Jan 01 00:00:06 1970 +0000
   summary:     msg 6
   
+
+
+test bisecting via a command without updating the working dir, and
+ensure that the bisect state file is updated before running a test
+command
+
+  $ hg update null
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ cat > script.sh <<'EOF'
+  > #!/bin/sh
+  > test -n "$HG_NODE" || (echo HG_NODE missing; exit 127)
+  > current="`hg log -r \"bisect(current)\" --template {node}`"
+  > test "$current" = "$HG_NODE" || (echo current is bad: $current; exit 127)
+  > rev="`hg log -r $HG_NODE --template {rev}`"
+  > test "$rev" -ge 6
+  > EOF
+  $ chmod +x script.sh
+  $ hg bisect -r
+  $ hg bisect --good tip --noupdate
+  $ hg bisect --bad 0 --noupdate
+  Testing changeset 15:e7fa0811edb0 (31 changesets remaining, ~4 tests)
+  $ hg bisect --command "'`pwd`/script.sh' and some params" --noupdate
+  Changeset 15:e7fa0811edb0: good
+  Changeset 7:03750880c6b5: good
+  Changeset 3:b53bea5e2fcb: bad
+  Changeset 5:7874a09ea728: bad
+  Changeset 6:a3d5c6fdf0d3: good
+  The first good revision is:
+  changeset:   6:a3d5c6fdf0d3
+  user:        test
+  date:        Thu Jan 01 00:00:06 1970 +0000
+  summary:     msg 6
+  
+
+ensure that we still don't have a working dir
+
+  $ hg parents
--- a/tests/test-bookmarks-current.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-bookmarks-current.t	Mon Jun 04 17:57:57 2012 -0500
@@ -126,6 +126,23 @@
      X                         0:719295282060
      Z                         0:719295282060
 
+bare update moves the active bookmark forward
+
+  $ echo a > a
+  $ hg ci -Am1
+  adding a
+  $ hg update X
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg bookmarks
+   * X                         0:719295282060
+     Z                         0:719295282060
+  $ hg update
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating bookmark X
+  $ hg bookmarks
+   * X                         1:cc586d725fbe
+     Z                         0:719295282060
+
 test deleting .hg/bookmarks.current when explicitly updating
 to a revision
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-bookmarks-merge.t	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,93 @@
+# init
+
+  $ hg init
+  $ echo a > a
+  $ hg add a
+  $ hg commit -m'a'
+  $ echo b > b
+  $ hg add b
+  $ hg commit -m'b'
+  $ hg up -C 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo c > c
+  $ hg add c
+  $ hg commit -m'c'
+  created new head
+
+# test merging of diverged bookmarks
+  $ hg bookmark -r 1 "c@diverge"
+  $ hg bookmark -r 1 b
+  $ hg bookmark c
+  $ hg bookmarks
+     b                         1:d2ae7f538514
+   * c                         2:d36c0562f908
+     c@diverge                 1:d2ae7f538514
+  $ hg merge "c@diverge"
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg commit -m'merge'
+  $ hg bookmarks
+     b                         1:d2ae7f538514
+   * c                         3:b8f96cf4688b
+
+  $ hg up -C 3
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo d > d
+  $ hg add d
+  $ hg commit -m'd'
+
+  $ hg up -C 3
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo e > e
+  $ hg add e
+  $ hg commit -m'e'
+  created new head
+  $ hg up -C 5
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg bookmark e
+  $ hg bookmarks
+     b                         1:d2ae7f538514
+     c                         3:b8f96cf4688b
+   * e                         5:26bee9c5bcf3
+
+# the picked side is bookmarked
+
+  $ hg up -C 4
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg merge
+  abort: heads are bookmarked - please merge with an explicit rev
+  (run 'hg heads' to see all heads)
+  [255]
+
+# our revision is bookmarked
+
+  $ hg up -C e
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg merge
+  abort: no matching bookmark to merge - please merge with an explicit rev or bookmark
+  (run 'hg heads' to see all heads)
+  [255]
+
+# merge bookmark heads
+
+  $ hg up -C 4
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo f > f
+  $ hg commit -Am "f"
+  adding f
+  $ hg up -C e
+  1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  $ hg bookmarks -r 4 "e@diverged"
+  $ hg bookmarks
+     b                         1:d2ae7f538514
+     c                         3:b8f96cf4688b
+   * e                         5:26bee9c5bcf3
+     e@diverged                4:a0546fcfe0fb
+  $ hg merge
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg commit -m'merge'
+  $ hg bookmarks
+     b                         1:d2ae7f538514
+     c                         3:b8f96cf4688b
+   * e                         7:ca784329f0ba
--- a/tests/test-bookmarks-pushpull.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-bookmarks-pushpull.t	Mon Jun 04 17:57:57 2012 -0500
@@ -29,9 +29,13 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files
   updating bookmark Y
+  adding remote bookmark X
+  adding remote bookmark Z
   (run 'hg update' to get a working copy)
   $ hg bookmarks
+     X                         0:4e3505fd9583
      Y                         0:4e3505fd9583
+     Z                         0:4e3505fd9583
   $ hg debugpushkey ../a namespaces
   bookmarks	
   phases	
@@ -47,6 +51,7 @@
   $ hg bookmark
      X                         0:4e3505fd9583
      Y                         0:4e3505fd9583
+     Z                         0:4e3505fd9583
 
 export bookmark by name
 
@@ -111,6 +116,7 @@
   $ hg book
    * X                         1:9b140be10808
      Y                         0:4e3505fd9583
+     Z                         0:4e3505fd9583
      foo                       -1:000000000000
      foobar                    1:9b140be10808
 
@@ -122,11 +128,13 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files (+1 heads)
   divergent bookmark X stored as X@foo
+  updating bookmark Z
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg book
    * X                         1:9b140be10808
      X@foo                     2:0d2164f0ce0d
      Y                         0:4e3505fd9583
+     Z                         2:0d2164f0ce0d
      foo                       -1:000000000000
      foobar                    1:9b140be10808
   $ hg push -f ../a
@@ -141,6 +149,45 @@
      Y                         0:4e3505fd9583
      Z                         1:0d2164f0ce0d
 
+update a remote bookmark from a non-head to a head
+
+  $ hg up -q Y
+  $ echo c3 > f2
+  $ hg ci -Am3
+  adding f2
+  created new head
+  $ hg push ../a
+  pushing to ../a
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+  updating bookmark Y
+  $ hg -R ../a book
+   * X                         1:0d2164f0ce0d
+     Y                         3:f6fc62dde3c0
+     Z                         1:0d2164f0ce0d
+
+diverging a remote bookmark fails
+
+  $ hg up -q 4e3505fd9583
+  $ echo c4 > f2
+  $ hg ci -Am4
+  adding f2
+  created new head
+  $ hg book -f Y
+  $ hg push ../a
+  pushing to ../a
+  searching for changes
+  abort: push creates new remote head 4efff6d98829!
+  (did you forget to merge? use push -f to force)
+  [255]
+  $ hg -R ../a book
+   * X                         1:0d2164f0ce0d
+     Y                         3:f6fc62dde3c0
+     Z                         1:0d2164f0ce0d
+
 hgweb
 
   $ cat <<EOF > .hg/hgrc
@@ -158,14 +205,16 @@
   phases	
   namespaces	
   $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
-  Y	4e3505fd95835d721066b76e75dbb8cc554d7f77
+  Y	4efff6d98829d9c824c621afd6e3f01865f5439f
+  foobar	9b140be1080824d768c5a4691a564088eede71f9
+  Z	0d2164f0ce0d8f1d6f94351eba04b794909be66c
+  foo	0000000000000000000000000000000000000000
   X	9b140be1080824d768c5a4691a564088eede71f9
-  foo	0000000000000000000000000000000000000000
-  foobar	9b140be1080824d768c5a4691a564088eede71f9
   $ hg out -B http://localhost:$HGPORT/
   comparing with http://localhost:$HGPORT/
   searching for changed bookmarks
-     Z                         0d2164f0ce0d
+  no changed bookmarks found
+  [1]
   $ hg push -B Z http://localhost:$HGPORT/
   pushing to http://localhost:$HGPORT/
   searching for changes
@@ -182,6 +231,9 @@
   $ hg pull -B Z http://localhost:$HGPORT/
   pulling from http://localhost:$HGPORT/
   no changes found
+  adding remote bookmark foobar
+  adding remote bookmark Z
+  adding remote bookmark foo
   divergent bookmark X stored as X@1
   importing bookmark Z
   $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
@@ -189,12 +241,12 @@
   adding changesets
   adding manifests
   adding file changes
-  added 3 changesets with 3 changes to 3 files (+1 heads)
+  added 5 changesets with 5 changes to 3 files (+3 heads)
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg -R cloned-bookmarks bookmarks
      X                         1:9b140be10808
-     Y                         0:4e3505fd9583
+     Y                         4:4efff6d98829
      Z                         2:0d2164f0ce0d
      foo                       -1:000000000000
      foobar                    1:9b140be10808
--- a/tests/test-bookmarks.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-bookmarks.t	Mon Jun 04 17:57:57 2012 -0500
@@ -84,6 +84,20 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1
   
+  $ hg log -r 'bookmark("re:X")'
+  changeset:   0:f7b1eb17ad24
+  bookmark:    X
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     0
+  
+  changeset:   1:925d80f479bb
+  bookmark:    X2
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     1
+  
   $ hg log -r 'bookmark(unknown)'
   abort: bookmark 'unknown' does not exist
   [255]
--- a/tests/test-branches.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-branches.t	Mon Jun 04 17:57:57 2012 -0500
@@ -241,6 +241,11 @@
   default                        0:19709c5a4e75 (inactive)
   $ hg branches -a
   a branch name much longer than the default justification used by branches 7:10ff5895aa57
+  $ hg branches -q
+  a branch name much longer than the default justification used by branches
+  c
+  a
+  default
   $ hg heads b
   no open branch heads found on branches b
   [1]
--- a/tests/test-check-code-hg.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-check-code-hg.t	Mon Jun 04 17:57:57 2012 -0500
@@ -8,63 +8,6 @@
   $ hg manifest | xargs "$check_code" || echo 'FAILURE IS NOT AN OPTION!!!'
 
   $ hg manifest | xargs "$check_code" --warnings --nolineno --per-file=0 || true
-  contrib/check-code.py:0:
-   > #    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', "don't use underbars in identifiers"),
-   warning: line over 80 characters
-  contrib/perf.py:0:
-   >         except:
-   warning: naked except clause
-  contrib/perf.py:0:
-   >     #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, False))))
-   warning: line over 80 characters
-  contrib/perf.py:0:
-   >     except:
-   warning: naked except clause
-  contrib/setup3k.py:0:
-   >         except:
-   warning: naked except clause
-  contrib/setup3k.py:0:
-   >     except:
-   warning: naked except clause
-  contrib/setup3k.py:0:
-   > except:
-   warning: naked except clause
-   warning: naked except clause
-   warning: naked except clause
-  contrib/shrink-revlog.py:0:
-   >         except:
-   warning: naked except clause
-  doc/gendoc.py:0:
-   >                "together with Mercurial. Help for other extensions is available "
-   warning: line over 80 characters
-  hgext/bugzilla.py:0:
-   >                 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
-   warning: line over 80 characters
-  hgext/bugzilla.py:0:
-   >             bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
-   warning: line over 80 characters
-  hgext/convert/__init__.py:0:
-   >           ('', 'ancestors', '', _('show current changeset in ancestor branches')),
-   warning: line over 80 characters
-  hgext/convert/bzr.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/common.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/common.py:0:
-   >         except:
-   warning: naked except clause
-   warning: naked except clause
-  hgext/convert/convcmd.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/cvs.py:0:
-   >                                 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
-   warning: line over 80 characters
-  hgext/convert/cvsps.py:0:
-   >                     assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
-   warning: line over 80 characters
   hgext/convert/cvsps.py:0:
    >                     ui.write('Ancestors: %s\n' % (','.join(r)))
    warning: unwrapped ui message
@@ -75,9 +18,6 @@
    >                     ui.write('Parents: %s\n' %
    warning: unwrapped ui message
   hgext/convert/cvsps.py:0:
-   >                 except:
-   warning: naked except clause
-  hgext/convert/cvsps.py:0:
    >                 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
    warning: unwrapped ui message
   hgext/convert/cvsps.py:0:
@@ -101,59 +41,6 @@
   hgext/convert/cvsps.py:0:
    >             ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
    warning: unwrapped ui message
-  hgext/convert/git.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/git.py:0:
-   >             fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
-   warning: line over 80 characters
-  hgext/convert/hg.py:0:
-   >             # detect missing revlogs and abort on errors or populate self.ignored
-   warning: line over 80 characters
-  hgext/convert/hg.py:0:
-   >             except:
-   warning: naked except clause
-   warning: naked except clause
-  hgext/convert/hg.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/monotone.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/monotone.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/subversion.py:0:
-   >                 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
-   warning: line over 80 characters
-  hgext/convert/subversion.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/subversion.py:0:
-   >         args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
-   warning: line over 80 characters
-  hgext/convert/subversion.py:0:
-   >         self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
-   warning: line over 80 characters
-  hgext/convert/subversion.py:0:
-   >     except:
-   warning: naked except clause
-  hgext/convert/subversion.py:0:
-   > def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
-   warning: line over 80 characters
-  hgext/eol.py:0:
-   >     if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
-   warning: line over 80 characters
-   warning: line over 80 characters
-  hgext/gpg.py:0:
-   >                 except:
-   warning: naked except clause
-  hgext/hgcia.py:0:
-   > except:
-   warning: naked except clause
-  hgext/hgk.py:0:
-   >         ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
-   warning: line over 80 characters
   hgext/hgk.py:0:
    >         ui.write("parent %s\n" % p)
    warning: unwrapped ui message
@@ -173,113 +60,17 @@
    >     ui.write("revision %d\n" % ctx.rev())
    warning: unwrapped ui message
   hgext/hgk.py:0:
-   >     ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
-   warning: line over 80 characters
-   warning: unwrapped ui message
-  hgext/highlight/__init__.py:0:
-   >     extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
-   warning: line over 80 characters
-  hgext/highlight/__init__.py:0:
-   >     return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
-   warning: line over 80 characters
-  hgext/inotify/__init__.py:0:
-   >             if self._inotifyon and not ignored and not subrepos and not self._dirty:
-   warning: line over 80 characters
-  hgext/inotify/server.py:0:
-   >                     except:
-   warning: naked except clause
-  hgext/inotify/server.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/keyword.py:0:
-   >     ui.note("hg ci -m '%s'\n" % msg)
+   >     ui.write("tree %s\n" % short(ctx.changeset()[0]))
    warning: unwrapped ui message
   hgext/mq.py:0:
-   >                     raise util.Abort(_("cannot push --exact with applied patches"))
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >                     raise util.Abort(_("cannot use --exact and --move together"))
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >                     self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >                 except:
-   warning: naked except clause
-   warning: naked except clause
-  hgext/mq.py:0:
-   >             except:
-   warning: naked except clause
-   warning: naked except clause
-   warning: naked except clause
-   warning: naked except clause
-  hgext/mq.py:0:
-   >             raise util.Abort(_('cannot mix -l/--list with options or arguments'))
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >             raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >           ('U', 'noupdate', None, _('do not update the new working directories')),
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >           ('e', 'exact', None, _('apply the target patch to its recorded parent')),
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/mq.py:0:
    >         ui.write("mq:     %s\n" % ', '.join(m))
    warning: unwrapped ui message
-  hgext/mq.py:0:
-   >     repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
-   warning: line over 80 characters
-  hgext/notify.py:0:
-   >                 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
-   warning: line over 80 characters
-  hgext/patchbomb.py:0:
-   >                                                   binnode, seqno=idx, total=total)
-   warning: line over 80 characters
-  hgext/patchbomb.py:0:
-   >             except:
-   warning: naked except clause
   hgext/patchbomb.py:0:
    >             ui.write('Subject: %s\n' % subj)
    warning: unwrapped ui message
   hgext/patchbomb.py:0:
-   >         p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
-   warning: line over 80 characters
-  hgext/patchbomb.py:0:
    >         ui.write('From: %s\n' % sender)
    warning: unwrapped ui message
-  hgext/record.py:0:
-   >                                   ignoreblanklines=opts.get('ignore_blank_lines'))
-   warning: line over 80 characters
-  hgext/record.py:0:
-   >                                   ignorewsamount=opts.get('ignore_space_change'),
-   warning: line over 80 characters
-  hgext/zeroconf/__init__.py:0:
-   >             publish(name, desc, path, util.getport(u.config("web", "port", 8000)))
-   warning: line over 80 characters
-  hgext/zeroconf/__init__.py:0:
-   >     except:
-   warning: naked except clause
-   warning: naked except clause
-  mercurial/bundlerepo.py:0:
-   >       is a bundlerepo for the obtained bundle when the original "other" is remote.
-   warning: line over 80 characters
-  mercurial/bundlerepo.py:0:
-   >     "local" is a local repo from which to obtain the actual incoming changesets; it
-   warning: line over 80 characters
-  mercurial/bundlerepo.py:0:
-   >     tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force)
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >                  "     size " + basehdr + "   link     p1     p2       nodeid\n")
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >                 raise util.Abort('cannot use localheads with old style discovery')
-   warning: line over 80 characters
   mercurial/commands.py:0:
    >                 ui.note('branch %s\n' % data)
    warning: unwrapped ui message
@@ -293,18 +84,6 @@
    >                 ui.write("unpruned common: %s\n" % " ".join([short(n)
    warning: unwrapped ui message
   mercurial/commands.py:0:
-   >                 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >                 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             except:
-   warning: naked except clause
-  mercurial/commands.py:0:
-   >             ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
    >             ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
    warning: unwrapped ui message
   mercurial/commands.py:0:
@@ -314,17 +93,7 @@
    >             ui.write("remote is subset\n")
    warning: unwrapped ui message
   mercurial/commands.py:0:
-   >             ui.write('    other            : ' + fmt2 % pcfmt(numoprev, numprev))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             ui.write('    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             ui.write('    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
-   warning: line over 80 characters
+   >             ui.write('deltas against other : ' + fmt % pcfmt(numother,
    warning: unwrapped ui message
   mercurial/commands.py:0:
    >             ui.write('deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas))
@@ -333,12 +102,6 @@
    >             ui.write('deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas))
    warning: unwrapped ui message
   mercurial/commands.py:0:
-   >         except:
-   warning: naked except clause
-  mercurial/commands.py:0:
-   >         revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
    >         ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
    warning: unwrapped ui message
   mercurial/commands.py:0:
@@ -354,12 +117,6 @@
    >         ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
    warning: unwrapped ui message
   mercurial/commands.py:0:
-   >     Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >     remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
    >     ui.write("digraph G {\n")
    warning: unwrapped ui message
   mercurial/commands.py:0:
@@ -402,226 +159,25 @@
   mercurial/commands.py:0:
    >     ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
    warning: unwrapped ui message
-  mercurial/commandserver.py:0:
-   >         # the ui here is really the repo ui so take its baseui so we don't end up
-   warning: line over 80 characters
-  mercurial/context.py:0:
-   >                 return self._manifestdelta[path], self._manifestdelta.flags(path)
-   warning: line over 80 characters
-  mercurial/dagparser.py:0:
-   >             raise util.Abort(_("invalid character in dag description: %s...") % s)
-   warning: line over 80 characters
-  mercurial/dagparser.py:0:
-   >         >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))])
-   warning: line over 80 characters
-  mercurial/dirstate.py:0:
-   >                 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
-   warning: line over 80 characters
-  mercurial/discovery.py:0:
-   >     If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
-   warning: line over 80 characters
-  mercurial/dispatch.py:0:
-   >                                                 " (.hg not found)") % os.getcwd())
-   warning: line over 80 characters
-  mercurial/dispatch.py:0:
-   >         except:
-   warning: naked except clause
-  mercurial/dispatch.py:0:
-   >         return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
-   warning: line over 80 characters
-  mercurial/dispatch.py:0:
-   >     def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None):
-   warning: line over 80 characters
-  mercurial/dispatch.py:0:
-   >     except:
-   warning: naked except clause
-  mercurial/hg.py:0:
-   >     except:
-   warning: naked except clause
-  mercurial/hgweb/hgweb_mod.py:0:
-   >             self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
-   warning: line over 80 characters
-  mercurial/keepalive.py:0:
-   >         except:
-   warning: naked except clause
-  mercurial/keepalive.py:0:
-   >     except:
-   warning: naked except clause
-  mercurial/localrepo.py:0:
-   >                         # we return an integer indicating remote head count change
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >                     raise util.Abort(_("empty or missing revlog for %s") % fname)
-   warning: line over 80 characters
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >                 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >                 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >             # new requirements = old non-format requirements + new format-related
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >             except:
-   warning: naked except clause
-  mercurial/localrepo.py:0:
-   >         """return status of files between two nodes or node and working directory
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >         '''Returns a tagscache object that contains various tags related caches.'''
-   warning: line over 80 characters
-  mercurial/manifest.py:0:
-   >             return "".join(struct.pack(">lll", start, end, len(content)) + content
-   warning: line over 80 characters
-  mercurial/merge.py:0:
-   >                 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
-   warning: line over 80 characters
-  mercurial/patch.py:0:
-   >                  modified, added, removed, copy, getfilectx, opts, losedata, prefix)
-   warning: line over 80 characters
-  mercurial/patch.py:0:
-   >         diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
-   warning: line over 80 characters
-  mercurial/patch.py:0:
-   >         output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
-   warning: line over 80 characters
-  mercurial/patch.py:0:
-   >     except:
-   warning: naked except clause
-  mercurial/pure/mpatch.py:0:
-   >         frags.extend(reversed(new))                    # what was left at the end
-   warning: line over 80 characters
-  mercurial/repair.py:0:
-   >         except:
-   warning: naked except clause
-  mercurial/repair.py:0:
-   >     except:
-   warning: naked except clause
-  mercurial/revset.py:0:
-   >         elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
-   warning: line over 80 characters
-  mercurial/revset.py:0:
-   >     Changesets that are the Nth ancestor (first parents only) of a changeset in set.
-   warning: line over 80 characters
-  mercurial/scmutil.py:0:
-   >                         raise util.Abort(_("path '%s' is inside nested repo %r") %
-   warning: line over 80 characters
-  mercurial/scmutil.py:0:
-   >             "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
-   warning: line over 80 characters
-  mercurial/scmutil.py:0:
-   >         elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
-   warning: line over 80 characters
-  mercurial/setdiscovery.py:0:
-   >     # treat remote heads (and maybe own heads) as a first implicit sample response
-   warning: line over 80 characters
-  mercurial/setdiscovery.py:0:
-   >     undecided = dag.nodeset() # own nodes where I don't know if remote knows them
-   warning: line over 80 characters
-  mercurial/similar.py:0:
-   >         repo.ui.progress(_('searching for similar files'), i, total=len(removed))
-   warning: line over 80 characters
-  mercurial/simplemerge.py:0:
-   >         for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
-   warning: line over 80 characters
-  mercurial/sshrepo.py:0:
-   >             self._abort(error.RepoError(_("no suitable response from remote hg")))
-   warning: line over 80 characters
-  mercurial/sshrepo.py:0:
-   >         except:
-   warning: naked except clause
-  mercurial/subrepo.py:0:
-   >                 other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
-   warning: line over 80 characters
-  mercurial/subrepo.py:0:
-   >         msg = (_(' subrepository sources for %s differ (in checked out version)\n'
-   warning: line over 80 characters
-  mercurial/transaction.py:0:
-   >             except:
-   warning: naked except clause
-  mercurial/ui.py:0:
-   >                 traceback.print_exception(exc[0], exc[1], exc[2], file=self.ferr)
-   warning: line over 80 characters
-  mercurial/url.py:0:
-   >             conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
-   warning: line over 80 characters
-  mercurial/util.py:0:
-   >             except:
-   warning: naked except clause
-  mercurial/util.py:0:
-   >     except:
-   warning: naked except clause
-  mercurial/verify.py:0:
-   >                     except:
-   warning: naked except clause
-  mercurial/verify.py:0:
-   >                 except:
-   warning: naked except clause
-  mercurial/wireproto.py:0:
-   >         # Assuming the future to be filled with the result from the batched request
-   warning: line over 80 characters
-  mercurial/wireproto.py:0:
-   >         '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
-   warning: line over 80 characters
-  mercurial/wireproto.py:0:
-   >     All methods invoked on instances of this class are simply queued and return a
-   warning: line over 80 characters
-  mercurial/wireproto.py:0:
-   >     The decorator returns a function which wraps this coroutine as a plain method,
-   warning: line over 80 characters
-  setup.py:0:
-   >                 raise SystemExit("Python headers are required to build Mercurial")
-   warning: line over 80 characters
-  setup.py:0:
-   >         except:
-   warning: naked except clause
-  setup.py:0:
-   >     # build_py), it will not find osutil & friends, thinking that those modules are
-   warning: line over 80 characters
-  setup.py:0:
-   >     except:
-   warning: naked except clause
-   warning: naked except clause
-  setup.py:0:
-   >     isironpython = platform.python_implementation().lower().find("ironpython") != -1
-   warning: line over 80 characters
-  setup.py:0:
-   > except:
-   warning: naked except clause
-   warning: naked except clause
-   warning: naked except clause
   tests/autodiff.py:0:
    >         ui.write('data lost for: %s\n' % fn)
    warning: unwrapped ui message
-  tests/run-tests.py:0:
-   >     except:
-   warning: naked except clause
-  tests/test-commandserver.py:0:
-   >                         'hooks.pre-identify=python:test-commandserver.hook', 'id'],
-   warning: line over 80 characters
-  tests/test-commandserver.py:0:
-   >     # the cached repo local hgrc contains ui.foo=bar, so showconfig should show it
-   warning: line over 80 characters
-  tests/test-commandserver.py:0:
-   >     print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***', data))
-   warning: line over 80 characters
-  tests/test-filecache.py:0:
-   >     except:
-   warning: naked except clause
-  tests/test-filecache.py:0:
-   > if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']):
-   warning: line over 80 characters
+  tests/test-convert-mtn.t:0:
+   >   > function get_passphrase(keypair_id)
+   don't use 'function', use old style
+  tests/test-import-git.t:0:
+   >   > Mc\${NkU|\`?^000jF3jhEB
+   ^ must be quoted
+  tests/test-import.t:0:
+   >   > diff -Naur proj-orig/foo proj-new/foo
+   don't use 'diff -N'
+   don't use 'diff -N'
+  tests/test-schemes.t:0:
+   >   > z = file:\$PWD/
+   don't use $PWD, use `pwd`
   tests/test-ui-color.py:0:
    > testui.warn('warning\n')
    warning: unwrapped ui message
   tests/test-ui-color.py:0:
    > testui.write('buffered\n')
    warning: unwrapped ui message
-  tests/test-walkrepo.py:0:
-   >         print "Found %d repositories when I should have found 2" % (len(reposet),)
-   warning: line over 80 characters
-  tests/test-walkrepo.py:0:
-   >         print "Found %d repositories when I should have found 3" % (len(reposet),)
-   warning: line over 80 characters
--- a/tests/test-clone-failure.t	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,80 +0,0 @@
-  $ "$TESTDIR/hghave" unix-permissions || exit 80
-
-No local source
-
-  $ hg clone a b
-  abort: repository a not found!
-  [255]
-
-No remote source
-
-  $ hg clone http://127.0.0.1:3121/a b
-  abort: error: Connection refused
-  [255]
-  $ rm -rf b # work around bug with http clone
-
-Inaccessible source
-
-  $ mkdir a
-  $ chmod 000 a
-  $ hg clone a b
-  abort: repository a not found!
-  [255]
-
-Inaccessible destination
-
-  $ hg init b
-  $ cd b
-  $ hg clone . ../a
-  abort: Permission denied: ../a
-  [255]
-  $ cd ..
-  $ chmod 700 a
-  $ rm -r a b
-
-Source of wrong type
-
-  $ if "$TESTDIR/hghave" -q fifo; then
-  >     mkfifo a
-  >     hg clone a b
-  >     rm a
-  > else
-  >     echo "abort: repository a not found!"
-  > fi
-  abort: repository a not found!
-
-Default destination, same directory
-
-  $ hg init q
-  $ hg clone q
-  destination directory: q
-  abort: destination 'q' is not empty
-  [255]
-
-destination directory not empty
-
-  $ mkdir a 
-  $ echo stuff > a/a
-  $ hg clone q a
-  abort: destination 'a' is not empty
-  [255]
-
-leave existing directory in place after clone failure
-
-  $ hg init c
-  $ cd c
-  $ echo c > c
-  $ hg commit -A -m test
-  adding c
-  $ chmod -rx .hg/store/data
-  $ cd ..
-  $ mkdir d
-  $ hg clone c d 2> err
-  [255]
-  $ test -d d
-  $ test -d d/.hg
-  [1]
-
-reenable perm to allow deletion
-
-  $ chmod +rx c/.hg/store/data
--- a/tests/test-clone.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-clone.t	Mon Jun 04 17:57:57 2012 -0500
@@ -458,3 +458,101 @@
   updating to branch stable
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ rm -r ua
+
+
+Testing failures:
+
+  $ mkdir fail
+  $ cd fail
+
+No local source
+
+  $ hg clone a b
+  abort: repository a not found!
+  [255]
+
+No remote source
+
+  $ hg clone http://127.0.0.1:3121/a b
+  abort: error: *refused* (glob)
+  [255]
+  $ rm -rf b # work around bug with http clone
+
+
+#if unix-permissions
+
+Inaccessible source
+
+  $ mkdir a
+  $ chmod 000 a
+  $ hg clone a b
+  abort: repository a not found!
+  [255]
+
+Inaccessible destination
+
+  $ hg init b
+  $ cd b
+  $ hg clone . ../a
+  abort: Permission denied: ../a
+  [255]
+  $ cd ..
+  $ chmod 700 a
+  $ rm -r a b
+
+#endif
+
+
+Source of wrong type
+
+  $ if "$TESTDIR/hghave" -q fifo; then
+  >     mkfifo a
+  >     hg clone a b
+  >     rm a
+  > else
+  >     echo "abort: repository a not found!"
+  > fi
+  abort: repository a not found!
+
+Default destination, same directory
+
+  $ hg init q
+  $ hg clone q
+  destination directory: q
+  abort: destination 'q' is not empty
+  [255]
+
+destination directory not empty
+
+  $ mkdir a 
+  $ echo stuff > a/a
+  $ hg clone q a
+  abort: destination 'a' is not empty
+  [255]
+
+
+#if unix-permissions
+
+leave existing directory in place after clone failure
+
+  $ hg init c
+  $ cd c
+  $ echo c > c
+  $ hg commit -A -m test
+  adding c
+  $ chmod -rx .hg/store/data
+  $ cd ..
+  $ mkdir d
+  $ hg clone c d 2> err
+  [255]
+  $ test -d d
+  $ test -d d/.hg
+  [1]
+
+reenable perm to allow deletion
+
+  $ chmod +rx c/.hg/store/data
+
+#endif
+
+  $ cd ..
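
Two conventions in the merged test above are worth calling out: the permission-dependent steps are wrapped in "#if unix-permissions ... #endif" sections instead of a whole-file hghave guard, and the connection-refused line is written as "abort: error: *refused* (glob)" so the exact wording may differ by platform. As a rough analogy for how such a glob line can match real output (fnmatch here is only an illustration, not what tests/run-tests.py actually uses, and the "(glob)" marker itself is stripped before matching):

import fnmatch

# Output wording taken from the removed test-clone-failure.t above;
# '*' stands in for the platform-specific text around "refused".
output = 'abort: error: Connection refused'
assert fnmatch.fnmatchcase(output, 'abort: error: *refused*')
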
--- a/tests/test-commandserver.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-commandserver.py	Mon Jun 04 17:57:57 2012 -0500
@@ -18,7 +18,7 @@
 def readchannel(server):
     data = server.stdout.read(5)
     if not data:
-        raise EOFError()
+        raise EOFError
     channel, length = struct.unpack('>cI', data)
     if channel in 'IL':
         return channel, length
@@ -71,7 +71,8 @@
 def hellomessage(server):
     ch, data = readchannel(server)
     # escaping python tests output not supported
-    print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***', data))
+    print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***',
+                                 data))
 
     # run an arbitrary command to make sure the next thing the server sends
     # isn't part of the hello message
@@ -142,7 +143,8 @@
     is used """
     readchannel(server)
 
-    # the cached repo local hgrc contains ui.foo=bar, so showconfig should show it
+    # the cached repo local hgrc contains ui.foo=bar, so showconfig should
+    # show it
     runcommand(server, ['showconfig'])
 
     # but not for this repo
@@ -157,7 +159,8 @@
 def hookoutput(server):
     readchannel(server)
     runcommand(server, ['--config',
-                        'hooks.pre-identify=python:test-commandserver.hook', 'id'],
+                        'hooks.pre-identify=python:test-commandserver.hook',
+                        'id'],
                input=cStringIO.StringIO('some input'))
 
 def outsidechanges(server):
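
For context on the readchannel() lines being rewrapped above: the command server prefixes each message with a 5-byte header, one channel character followed by a big-endian unsigned 32-bit payload length, which is what the struct.unpack('>cI', data) call decodes. A minimal sketch with assumed example values; the 'o' channel and the 12-byte length are hypothetical and not taken from this changeset.

import struct

# Build a hypothetical header: channel 'o' plus a 12-byte payload length.
header = b'o' + struct.pack('>I', 12)
assert len(header) == 5
channel, length = struct.unpack('>cI', header)
assert (channel, length) == (b'o', 12)
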
--- a/tests/test-commandserver.py.out	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-commandserver.py.out	Mon Jun 04 17:57:57 2012 -0500
@@ -16,24 +16,24 @@
 
 basic commands:
 
- add         add the specified files on the next commit
- annotate    show changeset information by line for each file
- clone       make a copy of an existing repository
- commit      commit the specified files or all outstanding changes
- diff        diff repository (or selected files)
- export      dump the header and diffs for one or more changesets
- forget      forget the specified files on the next commit
- init        create a new repository in the given directory
- log         show revision history of entire repository or files
- merge       merge working directory with another revision
- phase       set or show the current phase name
- pull        pull changes from the specified source
- push        push changes to the specified destination
- remove      remove the specified files on the next commit
- serve       start stand-alone webserver
- status      show changed files in the working directory
- summary     summarize working directory state
- update      update working directory (or switch revisions)
+ add           add the specified files on the next commit
+ annotate      show changeset information by line for each file
+ clone         make a copy of an existing repository
+ commit        commit the specified files or all outstanding changes
+ diff          diff repository (or selected files)
+ export        dump the header and diffs for one or more changesets
+ forget        forget the specified files on the next commit
+ init          create a new repository in the given directory
+ log           show revision history of entire repository or files
+ merge         merge working directory with another revision
+ phase         set or show the current phase name
+ pull          pull changes from the specified source
+ push          push changes to the specified destination
+ remove        remove the specified files on the next commit
+ serve         start stand-alone webserver
+ status        show changed files in the working directory
+ summary       summarize working directory state
+ update        update working directory (or switch revisions)
 
 use "hg help" for the full list of commands or "hg -v" for details
  runcommand id --quiet
--- a/tests/test-commit-copy.t	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,16 +0,0 @@
-  $ hg init dir
-  $ cd dir
-  $ echo bleh > bar
-  $ hg add bar
-  $ hg ci -m 'add bar'
-
-  $ hg cp bar foo
-  $ echo >> bar
-  $ hg ci -m 'cp bar foo; change bar'
-
-  $ hg debugrename foo
-  foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
-  $ hg debugindex bar
-     rev    offset  length   base linkrev nodeid       p1           p2
-       0         0       6      0       0 26d3ca0dfd18 000000000000 000000000000
-       1         6       7      1       1 d267bddd54f7 26d3ca0dfd18 000000000000
--- a/tests/test-commit.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-commit.t	Mon Jun 04 17:57:57 2012 -0500
@@ -1,5 +1,3 @@
-  $ "$TESTDIR/hghave" symlink || exit 80
-
 commit date test
 
   $ hg init test
@@ -75,10 +73,14 @@
   $ hg commit -m commit-14 does-not-exist
   abort: does-not-exist: * (glob)
   [255]
+
+#if symlink
   $ ln -s foo baz
   $ hg commit -m commit-15 baz
   abort: baz: file not tracked!
   [255]
+#endif
+
   $ touch quux
   $ hg commit -m commit-16 quux
   abort: quux: file not tracked!
@@ -281,3 +283,24 @@
   HG: removed removed
   abort: empty commit message
   [255]
+  $ cd ..
+
+
+commit copy
+
+  $ hg init dir2
+  $ cd dir2
+  $ echo bleh > bar
+  $ hg add bar
+  $ hg ci -m 'add bar'
+
+  $ hg cp bar foo
+  $ echo >> bar
+  $ hg ci -m 'cp bar foo; change bar'
+
+  $ hg debugrename foo
+  foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
+  $ hg debugindex bar
+     rev    offset  length   base linkrev nodeid       p1           p2
+       0         0       6      0       0 26d3ca0dfd18 000000000000 000000000000
+       1         6       7      1       1 d267bddd54f7 26d3ca0dfd18 000000000000
--- a/tests/test-convert-baz	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,79 +0,0 @@
-#!/bin/sh
-
-"$TESTDIR/hghave" baz || exit 80
-
-baz my-id "mercurial <mercurial@selenic.com>"
-
-echo "[extensions]" >> $HGRCPATH
-echo "convert=" >> $HGRCPATH
-echo 'graphlog =' >> $HGRCPATH
-
-echo % create baz archive
-baz make-archive baz@mercurial--convert hg-test-convert-baz
-
-echo % initialize baz repo
-mkdir baz-repo
-cd baz-repo/
-baz init-tree baz@mercurial--convert/baz--test--0
-baz import
-
-echo % create initial files
-echo 'this is a file' > a
-baz add a
-mkdir src
-baz add src
-cd src
-dd count=1 if=/dev/zero of=b > /dev/null 2> /dev/null
-baz add b
-# HACK: hide GNU tar-1.22 "tar: The --preserve option is deprecated, use --preserve-permissions --preserve-order instead"
-baz commit -s "added a file, src and src/b (binary)" 2>&1 | grep -v '^tar'
-
-echo % create link file and modify a
-ln -s ../a a-link
-baz add a-link
-echo 'this a modification to a' >> ../a
-baz commit -s "added link to a and modify a"
-
-echo % create second link and modify b
-ln -s ../a a-link-2
-baz add a-link-2
-dd count=1 seek=1 if=/dev/zero of=b > /dev/null 2> /dev/null
-baz commit -s "added second link and modify b"
-
-echo % b file to link and a-link-2 to regular file
-rm -f a-link-2
-echo 'this is now a regular file' > a-link-2
-ln -sf ../a b
-baz commit -s "file to link and link to file test"
-
-echo % move a-link-2 file and src directory
-cd ..
-baz mv src/a-link-2 c
-baz mv src test
-baz commit -s "move and rename a-link-2 file and src directory"
-
-echo % move and add the moved file again
-echo e > e
-baz add e
-baz commit -s "add e"
-baz mv e f
-echo ee > e
-baz add e
-baz commit -s "move e and recreate it again"
-cd ..
-
-echo % converting baz repo to Mercurial
-hg convert baz-repo baz-repo-hg
-
-baz register-archive -d baz@mercurial--convert
-
-glog()
-{
-    hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@"
-}
-
-echo % show graph log
-glog -R baz-repo-hg
-hg up -q -R baz-repo-hg
-hg -R baz-repo-hg manifest --debug
-hg -R baz-repo-hg log -r 5 -r 7 -C --debug | grep copies
--- a/tests/test-convert-baz.out	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,96 +0,0 @@
-% create baz archive
-% initialize baz repo
-* creating version baz@mercurial--convert/baz--test--0
-* imported baz@mercurial--convert/baz--test--0
-% create initial files
-* build pristine tree for baz@mercurial--convert/baz--test--0--base-0
-* Scanning for full-tree revision: .
-* from import revision: baz@mercurial--convert/baz--test--0--base-0
-A/ .arch-ids
-A/ src
-A/ src/.arch-ids
-A  .arch-ids/a.id
-A  a
-A  src/.arch-ids/=id
-A  src/.arch-ids/b.id
-A  src/b
-* update pristine tree (baz@mercurial--convert/baz--test--0--base-0 => baz--test--0--patch-1)
-* committed baz@mercurial--convert/baz--test--0--patch-1
-% create link file and modify a
-A  src/.arch-ids/a-link.id
-A  src/a-link
-M  a
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-1 => baz--test--0--patch-2)
-* committed baz@mercurial--convert/baz--test--0--patch-2
-% create second link and modify b
-A  src/.arch-ids/a-link-2.id
-A  src/a-link-2
-Mb src/b
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-2 => baz--test--0--patch-3)
-* committed baz@mercurial--convert/baz--test--0--patch-3
-% b file to link and a-link-2 to regular file
-fl src/b
-lf src/a-link-2
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-3 => baz--test--0--patch-4)
-* committed baz@mercurial--convert/baz--test--0--patch-4
-% move a-link-2 file and src directory
-D/ src/.arch-ids
-A/ test/.arch-ids
-/> src	test
-=> src/.arch-ids/a-link-2.id	.arch-ids/c.id
-=> src/a-link-2	c
-=> src/.arch-ids/=id	test/.arch-ids/=id
-=> src/.arch-ids/a-link.id	test/.arch-ids/a-link.id
-=> src/.arch-ids/b.id	test/.arch-ids/b.id
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-4 => baz--test--0--patch-5)
-* committed baz@mercurial--convert/baz--test--0--patch-5
-% move and add the moved file again
-A  .arch-ids/e.id
-A  e
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-5 => baz--test--0--patch-6)
-* committed baz@mercurial--convert/baz--test--0--patch-6
-A  .arch-ids/e.id
-A  e
-=> .arch-ids/e.id	.arch-ids/f.id
-=> e	f
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-6 => baz--test--0--patch-7)
-* committed baz@mercurial--convert/baz--test--0--patch-7
-% converting baz repo to Mercurial
-initializing destination baz-repo-hg repository
-analyzing tree version baz@mercurial--convert/baz--test--0...
-scanning source...
-sorting...
-converting...
-7 initial import
-6 added a file, src and src/b (binary)
-5 added link to a and modify a
-4 added second link and modify b
-3 file to link and link to file test
-2 move and rename a-link-2 file and src directory
-1 add e
-0 move e and recreate it again
-% show graph log
-o  7 "move e and recreate it again" files: e f
-|
-o  6 "add e" files: e
-|
-o  5 "move and rename a-link-2 file and src directory" files: c src/a-link src/a-link-2 src/b test/a-link test/b
-|
-o  4 "file to link and link to file test" files: src/a-link-2 src/b
-|
-o  3 "added second link and modify b" files: src/a-link-2 src/b
-|
-o  2 "added link to a and modify a" files: a src/a-link
-|
-o  1 "added a file, src and src/b (binary)" files: a src/b
-|
-o  0 "initial import" files:
-
-c4072c4b72e1cabace081888efa148ee80ca3cbb 644   a
-0201ac32a3a8e86e303dff60366382a54b48a72e 644   c
-1a4a864db0073705a11b1439f563bfa4b46d9246 644   e
-09e0222742fc3f75777fa9d68a5d8af7294cb5e7 644   f
-c0067ba5ff0b7c9a3eb17270839d04614c435623 644 @ test/a-link
-375f4263d86feacdea7e3c27100abd1560f2a973 644 @ test/b
-copies:      c (src/a-link-2) test/a-link (src/a-link) test/b (src/b)
-copies:      f (e)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-baz.t	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,163 @@
+  $ "$TESTDIR/hghave" baz || exit 80
+
+  $ baz my-id "mercurial <mercurial@selenic.com>"
+
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "convert=" >> $HGRCPATH
+  $ echo 'graphlog =' >> $HGRCPATH
+
+create baz archive
+  $ baz make-archive baz@mercurial--convert hg-test-convert-baz
+
+initialize baz repo
+  $ mkdir baz-repo
+  $ cd baz-repo/
+  $ baz init-tree baz@mercurial--convert/baz--test--0
+  $ baz import
+  * creating version baz@mercurial--convert/baz--test--0
+  * imported baz@mercurial--convert/baz--test--0
+
+create initial files
+  $ echo 'this is a file' > a
+  $ baz add a
+  $ mkdir src
+  $ baz add src
+  $ cd src
+  $ dd count=1 if=/dev/zero of=b > /dev/null 2> /dev/null
+  $ baz add b
+HACK: hide GNU tar-1.22 "tar: The --preserve option is deprecated, use --preserve-permissions --preserve-order instead"
+  $ baz commit -s "added a file, src and src/b (binary)" 2>&1 | grep -v '^tar'
+  * build pristine tree for baz@mercurial--convert/baz--test--0--base-0
+  * Scanning for full-tree revision: .
+  * from import revision: baz@mercurial--convert/baz--test--0--base-0
+  A/ .arch-ids
+  A/ src
+  A/ src/.arch-ids
+  A  .arch-ids/a.id
+  A  a
+  A  src/.arch-ids/=id
+  A  src/.arch-ids/b.id
+  A  src/b
+  * update pristine tree (baz@mercurial--convert/baz--test--0--base-0 => baz--test--0--patch-1)
+  * committed baz@mercurial--convert/baz--test--0--patch-1
+
+create link file and modify a
+  $ ln -s ../a a-link
+  $ baz add a-link
+  $ echo 'this a modification to a' >> ../a
+  $ baz commit -s "added link to a and modify a"
+  A  src/.arch-ids/a-link.id
+  A  src/a-link
+  M  a
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-1 => baz--test--0--patch-2)
+  * committed baz@mercurial--convert/baz--test--0--patch-2
+
+create second link and modify b
+  $ ln -s ../a a-link-2
+  $ baz add a-link-2
+  $ dd count=1 seek=1 if=/dev/zero of=b > /dev/null 2> /dev/null
+  $ baz commit -s "added second link and modify b"
+  A  src/.arch-ids/a-link-2.id
+  A  src/a-link-2
+  Mb src/b
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-2 => baz--test--0--patch-3)
+  * committed baz@mercurial--convert/baz--test--0--patch-3
+
+b file to link and a-link-2 to regular file
+  $ rm -f a-link-2
+  $ echo 'this is now a regular file' > a-link-2
+  $ ln -sf ../a b
+  $ baz commit -s "file to link and link to file test"
+  fl src/b
+  lf src/a-link-2
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-3 => baz--test--0--patch-4)
+  * committed baz@mercurial--convert/baz--test--0--patch-4
+
+move a-link-2 file and src directory
+  $ cd ..
+  $ baz mv src/a-link-2 c
+  $ baz mv src test
+  $ baz commit -s "move and rename a-link-2 file and src directory"
+  D/ src/.arch-ids
+  A/ test/.arch-ids
+  /> src	test
+  => src/.arch-ids/a-link-2.id	.arch-ids/c.id
+  => src/a-link-2	c
+  => src/.arch-ids/=id	test/.arch-ids/=id
+  => src/.arch-ids/a-link.id	test/.arch-ids/a-link.id
+  => src/.arch-ids/b.id	test/.arch-ids/b.id
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-4 => baz--test--0--patch-5)
+  * committed baz@mercurial--convert/baz--test--0--patch-5
+
+move and add the moved file again
+  $ echo e > e
+  $ baz add e
+  $ baz commit -s "add e"
+  A  .arch-ids/e.id
+  A  e
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-5 => baz--test--0--patch-6)
+  * committed baz@mercurial--convert/baz--test--0--patch-6
+  $ baz mv e f
+  $ echo ee > e
+  $ baz add e
+  $ baz commit -s "move e and recreate it again"
+  A  .arch-ids/e.id
+  A  e
+  => .arch-ids/e.id	.arch-ids/f.id
+  => e	f
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-6 => baz--test--0--patch-7)
+  * committed baz@mercurial--convert/baz--test--0--patch-7
+  $ cd ..
+
+converting baz repo to Mercurial
+  $ hg convert baz-repo baz-repo-hg
+  initializing destination baz-repo-hg repository
+  analyzing tree version baz@mercurial--convert/baz--test--0...
+  scanning source...
+  sorting...
+  converting...
+  7 initial import
+  6 added a file, src and src/b (binary)
+  5 added link to a and modify a
+  4 added second link and modify b
+  3 file to link and link to file test
+  2 move and rename a-link-2 file and src directory
+  1 add e
+  0 move e and recreate it again
+
+  $ baz register-archive -d baz@mercurial--convert
+
+  $ glog()
+  > {
+  >     hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@"
+  > }
+
+show graph log
+  $ glog -R baz-repo-hg
+  o  7 "move e and recreate it again" files: e f
+  |
+  o  6 "add e" files: e
+  |
+  o  5 "move and rename a-link-2 file and src directory" files: c src/a-link src/a-link-2 src/b test/a-link test/b
+  |
+  o  4 "file to link and link to file test" files: src/a-link-2 src/b
+  |
+  o  3 "added second link and modify b" files: src/a-link-2 src/b
+  |
+  o  2 "added link to a and modify a" files: a src/a-link
+  |
+  o  1 "added a file, src and src/b (binary)" files: a src/b
+  |
+  o  0 "initial import" files:
+  
+  $ hg up -q -R baz-repo-hg
+  $ hg -R baz-repo-hg manifest --debug
+  c4072c4b72e1cabace081888efa148ee80ca3cbb 644   a
+  0201ac32a3a8e86e303dff60366382a54b48a72e 644   c
+  1a4a864db0073705a11b1439f563bfa4b46d9246 644   e
+  09e0222742fc3f75777fa9d68a5d8af7294cb5e7 644   f
+  c0067ba5ff0b7c9a3eb17270839d04614c435623 644 @ test/a-link
+  375f4263d86feacdea7e3c27100abd1560f2a973 644 @ test/b
+  $ hg -R baz-repo-hg log -r 5 -r 7 -C --debug | grep copies
+  copies:      c (src/a-link-2) test/a-link (src/a-link) test/b (src/b)
+  copies:      f (e)
--- a/tests/test-convert-darcs.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-convert-darcs.t	Mon Jun 04 17:57:57 2012 -0500
@@ -32,7 +32,7 @@
 
 branch and update
 
-  $ darcs get darcs-repo darcs-clone >/dev/null
+  $ darcs get -q darcs-repo darcs-clone >/dev/null
   $ cd darcs-clone
   $ echo c >> a
   $ echo c > c
@@ -48,11 +48,10 @@
   $ darcs record -a -l -m p1.2
   Finished recording patch 'p1.2'
 
-  $ darcs pull -a --no-set-default ../darcs-clone
-  Backing up ./a(-darcs-backup0)
+  $ darcs pull -q -a --no-set-default ../darcs-clone
+  Backing up ./a(*) (glob)
   We have conflicts in the following files:
   ./a
-  Finished pulling and applying.
   $ sleep 1
   $ echo e > a
   $ echo f > f
--- a/tests/test-convert-hg-source.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-convert-hg-source.t	Mon Jun 04 17:57:57 2012 -0500
@@ -19,7 +19,7 @@
   $ hg ci -m 'make bar and baz copies of foo' -d '2 0'
   created new head
   $ hg bookmark premerge1
-  $ hg merge
+  $ hg merge -r 1
   merging baz and foo to baz
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
--- a/tests/test-convert-p4	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,75 +0,0 @@
-#!/bin/sh
-
-"$TESTDIR/hghave" p4 || exit 80
-
-echo "[extensions]" >> $HGRCPATH
-echo "convert = " >> $HGRCPATH
-
-echo % create p4 depot
-P4ROOT=`pwd`/depot; export P4ROOT
-P4AUDIT=$P4ROOT/audit; export P4AUDIT
-P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
-P4LOG=$P4ROOT/log; export P4LOG
-P4PORT=localhost:16661; export P4PORT
-P4DEBUG=1; export P4DEBUG
-
-echo % start the p4 server
-[ ! -d $P4ROOT ] && mkdir $P4ROOT
-p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
-trap "echo % stop the p4 server ; p4 admin stop" EXIT
-
-# wait for the server to initialize
-while ! p4 ; do
-   sleep 1
-done >/dev/null 2>/dev/null
-
-echo % create a client spec
-P4CLIENT=hg-p4-import; export P4CLIENT
-DEPOTPATH=//depot/test-mercurial-import/...
-p4 client -o | sed '/^View:/,$ d' >p4client
-echo View: >>p4client
-echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
-p4 client -i <p4client
-
-echo % populate the depot
-echo a > a
-mkdir b
-echo c > b/c
-p4 add a b/c
-p4 submit -d initial
-
-echo % change some files
-p4 edit a
-echo aa >> a
-p4 submit -d "change a"
-
-p4 edit b/c
-echo cc >> b/c
-p4 submit -d "change b/c"
-
-echo % convert
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
-
-echo % change some files
-p4 edit a b/c
-echo aaa >> a
-echo ccc >> b/c
-p4 submit -d "change a b/c"
-
-echo % convert again
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
-
-echo % interesting names
-echo dddd > "d d"
-mkdir " e"
-echo fff >" e/ f"
-p4 add "d d" " e/ f"
-p4 submit -d "add d e f"
-
-echo % convert again
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
-
-
--- a/tests/test-convert-p4-filetypes	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,104 +0,0 @@
-#!/bin/sh
-
-"$TESTDIR/hghave" p4 execbit symlink || exit 80
-
-echo "[extensions]" >> $HGRCPATH
-echo "convert = " >> $HGRCPATH
-
-echo % create p4 depot
-P4ROOT=`pwd`/depot; export P4ROOT
-P4AUDIT=$P4ROOT/audit; export P4AUDIT
-P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
-P4LOG=$P4ROOT/log; export P4LOG
-P4PORT=localhost:16661; export P4PORT
-P4DEBUG=1; export P4DEBUG
-P4CHARSET=utf8; export P4CHARSET
-
-echo % start the p4 server
-[ ! -d $P4ROOT ] && mkdir $P4ROOT
-p4d -f -J off -xi >$P4ROOT/stdout 2>$P4ROOT/stderr
-p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
-trap "echo % stop the p4 server ; p4 admin stop" EXIT
-
-# wait for the server to initialize
-while ! p4 ; do
-   sleep 1
-done >/dev/null 2>/dev/null
-
-echo % create a client spec
-P4CLIENT=hg-p4-import; export P4CLIENT
-DEPOTPATH=//depot/test-mercurial-import/...
-p4 client -o | sed '/^View:/,$ d' >p4client
-echo View: >>p4client
-echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
-p4 client -i <p4client
-
-echo % populate the depot
-TYPES="text binary symlink"
-TYPES="$TYPES text+m text+w text+x text+k text+kx text+ko text+l text+C text+D text+F text+S text+S2"
-TYPES="$TYPES binary+k binary+x binary+kx symlink+k"
-TYPES="$TYPES ctext cxtext ktext kxtext ltext tempobj ubinary uxbinary xbinary xltext xtempobj xtext"
-# not testing these
-#TYPES="$TYPES apple resource unicode utf16 uresource xunicode xutf16"
-for T in $TYPES ; do
-   T2=`echo $T | tr [:upper:] [:lower:]`
-   case $T in
-      apple)
-         ;;
-      symlink*)
-         echo "this is target $T" >target_$T2
-         ln -s target_$T file_$T2
-         p4 add target_$T2
-         p4 add -t $T file_$T2
-         ;;
-      binary*)
-         python -c "file('file_$T2', 'wb').write('this is $T')"
-         p4 add -t $T file_$T2
-         ;;
-      *)
-         echo "this is $T" >file_$T2
-         p4 add -t $T file_$T2
-         ;;
-   esac
-done
-p4 submit -d initial
-
-echo % test keyword expansion
-p4 edit file_* target_*
-for T in $TYPES ; do
-   T2=`echo $T | tr [:upper:] [:lower:]`
-   echo '$Id$'       >>file_$T2
-   echo '$Header$'   >>file_$T2
-   echo '$Date$'     >>file_$T2
-   echo '$DateTime$' >>file_$T2
-   echo '$Change$'   >>file_$T2
-   echo '$File$'     >>file_$T2
-   echo '$Revision$' >>file_$T2
-   echo '$Header$$Header$Header$' >>file_$T2
-done
-
-ln -s 'target_$Header$' crazy_symlink+k
-p4 add -t symlink+k crazy_symlink+k
-
-p4 submit -d keywords
-
-echo % check keywords in p4
-grep -H Header file_*
-
-echo % convert
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'g
-
-echo % revision 0
-hg -R dst update 0
-head dst/file_* | cat -v
-
-echo
-echo % revision 1
-hg -R dst update 1
-head dst/file_* | cat -v
-echo
-echo % crazy_symlink
-readlink crazy_symlink+k
-readlink dst/crazy_symlink+k
-
--- a/tests/test-convert-p4-filetypes.out	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,642 +0,0 @@
-% create p4 depot
-% start the p4 server
-% create a client spec
-Client hg-p4-import saved.
-% populate the depot
-//depot/test-mercurial-import/file_text#1 - opened for add
-//depot/test-mercurial-import/file_binary#1 - opened for add
-//depot/test-mercurial-import/target_symlink#1 - opened for add
-//depot/test-mercurial-import/file_symlink#1 - opened for add
-//depot/test-mercurial-import/file_text+m#1 - opened for add
-//depot/test-mercurial-import/file_text+w#1 - opened for add
-//depot/test-mercurial-import/file_text+x#1 - opened for add
-//depot/test-mercurial-import/file_text+k#1 - opened for add
-//depot/test-mercurial-import/file_text+kx#1 - opened for add
-//depot/test-mercurial-import/file_text+ko#1 - opened for add
-//depot/test-mercurial-import/file_text+l#1 - opened for add
-//depot/test-mercurial-import/file_text+c#1 - opened for add
-//depot/test-mercurial-import/file_text+d#1 - opened for add
-//depot/test-mercurial-import/file_text+f#1 - opened for add
-//depot/test-mercurial-import/file_text+s#1 - opened for add
-//depot/test-mercurial-import/file_text+s2#1 - opened for add
-//depot/test-mercurial-import/file_binary+k#1 - opened for add
-//depot/test-mercurial-import/file_binary+x#1 - opened for add
-//depot/test-mercurial-import/file_binary+kx#1 - opened for add
-//depot/test-mercurial-import/target_symlink+k#1 - opened for add
-//depot/test-mercurial-import/file_symlink+k#1 - opened for add
-//depot/test-mercurial-import/file_ctext#1 - opened for add
-//depot/test-mercurial-import/file_cxtext#1 - opened for add
-//depot/test-mercurial-import/file_ktext#1 - opened for add
-//depot/test-mercurial-import/file_kxtext#1 - opened for add
-//depot/test-mercurial-import/file_ltext#1 - opened for add
-//depot/test-mercurial-import/file_tempobj#1 - opened for add
-//depot/test-mercurial-import/file_ubinary#1 - opened for add
-//depot/test-mercurial-import/file_uxbinary#1 - opened for add
-//depot/test-mercurial-import/file_xbinary#1 - opened for add
-//depot/test-mercurial-import/file_xltext#1 - opened for add
-//depot/test-mercurial-import/file_xtempobj#1 - opened for add
-//depot/test-mercurial-import/file_xtext#1 - opened for add
-Submitting change 1.
-Locking 33 files ...
-add //depot/test-mercurial-import/file_binary#1
-add //depot/test-mercurial-import/file_binary+k#1
-add //depot/test-mercurial-import/file_binary+kx#1
-add //depot/test-mercurial-import/file_binary+x#1
-add //depot/test-mercurial-import/file_ctext#1
-add //depot/test-mercurial-import/file_cxtext#1
-add //depot/test-mercurial-import/file_ktext#1
-add //depot/test-mercurial-import/file_kxtext#1
-add //depot/test-mercurial-import/file_ltext#1
-add //depot/test-mercurial-import/file_symlink#1
-add //depot/test-mercurial-import/file_symlink+k#1
-add //depot/test-mercurial-import/file_tempobj#1
-add //depot/test-mercurial-import/file_text#1
-add //depot/test-mercurial-import/file_text+c#1
-add //depot/test-mercurial-import/file_text+d#1
-add //depot/test-mercurial-import/file_text+f#1
-add //depot/test-mercurial-import/file_text+k#1
-add //depot/test-mercurial-import/file_text+ko#1
-add //depot/test-mercurial-import/file_text+kx#1
-add //depot/test-mercurial-import/file_text+l#1
-add //depot/test-mercurial-import/file_text+m#1
-add //depot/test-mercurial-import/file_text+s#1
-add //depot/test-mercurial-import/file_text+s2#1
-add //depot/test-mercurial-import/file_text+w#1
-add //depot/test-mercurial-import/file_text+x#1
-add //depot/test-mercurial-import/file_ubinary#1
-add //depot/test-mercurial-import/file_uxbinary#1
-add //depot/test-mercurial-import/file_xbinary#1
-add //depot/test-mercurial-import/file_xltext#1
-add //depot/test-mercurial-import/file_xtempobj#1
-add //depot/test-mercurial-import/file_xtext#1
-add //depot/test-mercurial-import/target_symlink#1
-add //depot/test-mercurial-import/target_symlink+k#1
-Change 1 submitted.
-//depot/test-mercurial-import/file_binary+k#1 - refreshing
-//depot/test-mercurial-import/file_binary+kx#1 - refreshing
-//depot/test-mercurial-import/file_ktext#1 - refreshing
-//depot/test-mercurial-import/file_kxtext#1 - refreshing
-//depot/test-mercurial-import/file_symlink+k#1 - refreshing
-//depot/test-mercurial-import/file_text+k#1 - refreshing
-//depot/test-mercurial-import/file_text+ko#1 - refreshing
-//depot/test-mercurial-import/file_text+kx#1 - refreshing
-% test keyword expansion
-//depot/test-mercurial-import/file_binary#1 - opened for edit
-//depot/test-mercurial-import/file_binary+k#1 - opened for edit
-//depot/test-mercurial-import/file_binary+kx#1 - opened for edit
-//depot/test-mercurial-import/file_binary+x#1 - opened for edit
-//depot/test-mercurial-import/file_ctext#1 - opened for edit
-//depot/test-mercurial-import/file_cxtext#1 - opened for edit
-//depot/test-mercurial-import/file_ktext#1 - opened for edit
-//depot/test-mercurial-import/file_kxtext#1 - opened for edit
-//depot/test-mercurial-import/file_ltext#1 - opened for edit
-//depot/test-mercurial-import/file_symlink#1 - opened for edit
-//depot/test-mercurial-import/file_symlink+k#1 - opened for edit
-//depot/test-mercurial-import/file_tempobj#1 - opened for edit
-//depot/test-mercurial-import/file_text#1 - opened for edit
-//depot/test-mercurial-import/file_text+c#1 - opened for edit
-//depot/test-mercurial-import/file_text+d#1 - opened for edit
-//depot/test-mercurial-import/file_text+f#1 - opened for edit
-//depot/test-mercurial-import/file_text+k#1 - opened for edit
-//depot/test-mercurial-import/file_text+ko#1 - opened for edit
-//depot/test-mercurial-import/file_text+kx#1 - opened for edit
-//depot/test-mercurial-import/file_text+l#1 - opened for edit
-//depot/test-mercurial-import/file_text+m#1 - opened for edit
-//depot/test-mercurial-import/file_text+s#1 - opened for edit
-//depot/test-mercurial-import/file_text+s2#1 - opened for edit
-//depot/test-mercurial-import/file_text+w#1 - opened for edit
-//depot/test-mercurial-import/file_text+x#1 - opened for edit
-//depot/test-mercurial-import/file_ubinary#1 - opened for edit
-//depot/test-mercurial-import/file_uxbinary#1 - opened for edit
-//depot/test-mercurial-import/file_xbinary#1 - opened for edit
-//depot/test-mercurial-import/file_xltext#1 - opened for edit
-//depot/test-mercurial-import/file_xtempobj#1 - opened for edit
-//depot/test-mercurial-import/file_xtext#1 - opened for edit
-//depot/test-mercurial-import/target_symlink#1 - opened for edit
-//depot/test-mercurial-import/target_symlink+k#1 - opened for edit
-//depot/test-mercurial-import/crazy_symlink+k#1 - opened for add
-Submitting change 2.
-Locking 34 files ...
-add //depot/test-mercurial-import/crazy_symlink+k#1
-edit //depot/test-mercurial-import/file_binary#2
-edit //depot/test-mercurial-import/file_binary+k#2
-edit //depot/test-mercurial-import/file_binary+kx#2
-edit //depot/test-mercurial-import/file_binary+x#2
-edit //depot/test-mercurial-import/file_ctext#2
-edit //depot/test-mercurial-import/file_cxtext#2
-edit //depot/test-mercurial-import/file_ktext#2
-edit //depot/test-mercurial-import/file_kxtext#2
-edit //depot/test-mercurial-import/file_ltext#2
-edit //depot/test-mercurial-import/file_symlink#2
-edit //depot/test-mercurial-import/file_symlink+k#2
-edit //depot/test-mercurial-import/file_tempobj#2
-edit //depot/test-mercurial-import/file_text#2
-edit //depot/test-mercurial-import/file_text+c#2
-edit //depot/test-mercurial-import/file_text+d#2
-edit //depot/test-mercurial-import/file_text+f#2
-edit //depot/test-mercurial-import/file_text+k#2
-edit //depot/test-mercurial-import/file_text+ko#2
-edit //depot/test-mercurial-import/file_text+kx#2
-edit //depot/test-mercurial-import/file_text+l#2
-edit //depot/test-mercurial-import/file_text+m#2
-edit //depot/test-mercurial-import/file_text+s#2
-edit //depot/test-mercurial-import/file_text+s2#2
-edit //depot/test-mercurial-import/file_text+w#2
-edit //depot/test-mercurial-import/file_text+x#2
-edit //depot/test-mercurial-import/file_ubinary#2
-edit //depot/test-mercurial-import/file_uxbinary#2
-edit //depot/test-mercurial-import/file_xbinary#2
-edit //depot/test-mercurial-import/file_xltext#2
-edit //depot/test-mercurial-import/file_xtempobj#2
-edit //depot/test-mercurial-import/file_xtext#2
-edit //depot/test-mercurial-import/target_symlink#2
-edit //depot/test-mercurial-import/target_symlink+k#2
-Change 2 submitted.
-//depot/test-mercurial-import/crazy_symlink+k#1 - refreshing
-//depot/test-mercurial-import/file_binary+k#2 - refreshing
-//depot/test-mercurial-import/file_binary+kx#2 - refreshing
-//depot/test-mercurial-import/file_ktext#2 - refreshing
-//depot/test-mercurial-import/file_kxtext#2 - refreshing
-//depot/test-mercurial-import/file_symlink+k#2 - refreshing
-//depot/test-mercurial-import/file_text+k#2 - refreshing
-//depot/test-mercurial-import/file_text+ko#2 - refreshing
-//depot/test-mercurial-import/file_text+kx#2 - refreshing
-% check keywords in p4
-file_binary:$Header$
-file_binary:$Header$$Header$Header$
-file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $
-file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $$Header: //depot/test-mercurial-import/file_binary+k#2 $Header$
-file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $
-file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $$Header: //depot/test-mercurial-import/file_binary+kx#2 $Header$
-file_binary+x:$Header$
-file_binary+x:$Header$$Header$Header$
-file_ctext:$Header$
-file_ctext:$Header$$Header$Header$
-file_cxtext:$Header$
-file_cxtext:$Header$$Header$Header$
-file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $
-file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $$Header: //depot/test-mercurial-import/file_ktext#2 $Header$
-file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $
-file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $$Header: //depot/test-mercurial-import/file_kxtext#2 $Header$
-file_ltext:$Header$
-file_ltext:$Header$$Header$Header$
-file_symlink:$Header$
-file_symlink:$Header$$Header$Header$
-file_symlink+k:$Header$
-file_symlink+k:$Header$$Header$Header$
-file_tempobj:$Header$
-file_tempobj:$Header$$Header$Header$
-file_text:$Header$
-file_text:$Header$$Header$Header$
-file_text+c:$Header$
-file_text+c:$Header$$Header$Header$
-file_text+d:$Header$
-file_text+d:$Header$$Header$Header$
-file_text+f:$Header$
-file_text+f:$Header$$Header$Header$
-file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $
-file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $$Header: //depot/test-mercurial-import/file_text+k#2 $Header$
-file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $
-file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $$Header: //depot/test-mercurial-import/file_text+ko#2 $Header$
-file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $
-file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $$Header: //depot/test-mercurial-import/file_text+kx#2 $Header$
-file_text+l:$Header$
-file_text+l:$Header$$Header$Header$
-file_text+m:$Header$
-file_text+m:$Header$$Header$Header$
-file_text+s:$Header$
-file_text+s:$Header$$Header$Header$
-file_text+s2:$Header$
-file_text+s2:$Header$$Header$Header$
-file_text+w:$Header$
-file_text+w:$Header$$Header$Header$
-file_text+x:$Header$
-file_text+x:$Header$$Header$Header$
-file_ubinary:$Header$
-file_ubinary:$Header$$Header$Header$
-file_uxbinary:$Header$
-file_uxbinary:$Header$$Header$Header$
-file_xbinary:$Header$
-file_xbinary:$Header$$Header$Header$
-file_xltext:$Header$
-file_xltext:$Header$$Header$Header$
-file_xtempobj:$Header$
-file_xtempobj:$Header$$Header$Header$
-file_xtext:$Header$
-file_xtext:$Header$$Header$Header$
-% convert
-initializing destination dst repository
-reading p4 views
-collecting p4 changelists
-1 initial
-2 keywords
-scanning source...
-sorting...
-converting...
-1 initial
-0 keywords
-rev=1 desc="keywords" tags="tip" files="crazy_symlink+k file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
-grev=0 desc="initial" tags="" files="file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_symlink file_symlink+k file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
-g% revision 0
-30 files updated, 0 files merged, 0 files removed, 0 files unresolved
-==> dst/file_binary <==
-this is binary
-==> dst/file_binary+k <==
-this is binary+k
-==> dst/file_binary+kx <==
-this is binary+kx
-==> dst/file_binary+x <==
-this is binary+x
-==> dst/file_ctext <==
-this is ctext
-
-==> dst/file_cxtext <==
-this is cxtext
-
-==> dst/file_ktext <==
-this is ktext
-
-==> dst/file_kxtext <==
-this is kxtext
-
-==> dst/file_ltext <==
-this is ltext
-
-==> dst/file_symlink <==
-this is target symlink
-
-==> dst/file_symlink+k <==
-this is target symlink+k
-
-==> dst/file_text <==
-this is text
-
-==> dst/file_text+c <==
-this is text+C
-
-==> dst/file_text+d <==
-this is text+D
-
-==> dst/file_text+f <==
-this is text+F
-
-==> dst/file_text+k <==
-this is text+k
-
-==> dst/file_text+ko <==
-this is text+ko
-
-==> dst/file_text+kx <==
-this is text+kx
-
-==> dst/file_text+l <==
-this is text+l
-
-==> dst/file_text+m <==
-this is text+m
-
-==> dst/file_text+s2 <==
-this is text+S2
-
-==> dst/file_text+w <==
-this is text+w
-
-==> dst/file_text+x <==
-this is text+x
-
-==> dst/file_ubinary <==
-this is ubinary
-
-==> dst/file_uxbinary <==
-this is uxbinary
-
-==> dst/file_xbinary <==
-this is xbinary
-
-==> dst/file_xltext <==
-this is xltext
-
-==> dst/file_xtext <==
-this is xtext
-
-% revision 1
-30 files updated, 0 files merged, 0 files removed, 0 files unresolved
-==> dst/file_binary <==
-this is binary$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_binary+k <==
-this is binary+k$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_binary+kx <==
-this is binary+kx$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_binary+x <==
-this is binary+x$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ctext <==
-this is ctext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_cxtext <==
-this is cxtext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ktext <==
-this is ktext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_kxtext <==
-this is kxtext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ltext <==
-this is ltext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_symlink <==
-this is target symlink
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_symlink+k <==
-this is target symlink+k
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text <==
-this is text
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+c <==
-this is text+C
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+d <==
-this is text+D
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+f <==
-this is text+F
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+k <==
-this is text+k
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+ko <==
-this is text+ko
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+kx <==
-this is text+kx
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+l <==
-this is text+l
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+m <==
-this is text+m
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+s <==
-this is text+S
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+s2 <==
-this is text+S2
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+w <==
-this is text+w
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+x <==
-this is text+x
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ubinary <==
-this is ubinary
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_uxbinary <==
-this is uxbinary
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_xbinary <==
-this is xbinary
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_xltext <==
-this is xltext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_xtext <==
-this is xtext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-% crazy_symlink
-target_$Header: //depot/test-mercurial-import/crazy_symlink+k#1 $
-target_$Header$
-% stop the p4 server
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-p4-filetypes.t	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,733 @@
+  $ "$TESTDIR/hghave" p4 execbit symlink || exit 80
+
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "convert = " >> $HGRCPATH
+
+create p4 depot
+  $ P4ROOT=`pwd`/depot; export P4ROOT
+  $ P4AUDIT=$P4ROOT/audit; export P4AUDIT
+  $ P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
+  $ P4LOG=$P4ROOT/log; export P4LOG
+  $ P4PORT=localhost:16661; export P4PORT
+  $ P4DEBUG=1; export P4DEBUG
+  $ P4CHARSET=utf8; export P4CHARSET
+
+start the p4 server
+  $ [ ! -d $P4ROOT ] && mkdir $P4ROOT
+  $ p4d -f -J off -xi >$P4ROOT/stdout 2>$P4ROOT/stderr
+  $ p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
+  $ echo $! >> $DAEMON_PIDS
+  $ trap "echo stopping the p4 server ; p4 admin stop" EXIT
+
+wait for the server to initialize
+  $ while ! p4 ; do
+  >    sleep 1
+  > done >/dev/null 2>/dev/null
+
+create a client spec
+  $ P4CLIENT=hg-p4-import; export P4CLIENT
+  $ DEPOTPATH=//depot/test-mercurial-import/...
+  $ p4 client -o | sed '/^View:/,$ d' >p4client
+  $ echo View: >>p4client
+  $ echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
+  $ p4 client -i <p4client
+  Client hg-p4-import saved.
+
+populate the depot
+  $ TYPES="text binary symlink"
+  $ TYPES="$TYPES text+m text+w text+x text+k text+kx text+ko text+l text+C text+D text+F text+S text+S2"
+  $ TYPES="$TYPES binary+k binary+x binary+kx symlink+k"
+  $ TYPES="$TYPES ctext cxtext ktext kxtext ltext tempobj ubinary uxbinary xbinary xltext xtempobj xtext"
+not testing these
+  $ #TYPES="$TYPES apple resource unicode utf16 uresource xunicode xutf16"
+  $ for T in $TYPES ; do
+  >    T2=`echo $T | tr [:upper:] [:lower:]`
+  >    case $T in
+  >       apple)
+  >          ;;
+  >       symlink*)
+  >          echo "this is target $T" >target_$T2
+  >          ln -s target_$T file_$T2
+  >          p4 add target_$T2
+  >          p4 add -t $T file_$T2
+  >          ;;
+  >       binary*)
+  >          python -c "file('file_$T2', 'wb').write('this is $T')"
+  >          p4 add -t $T file_$T2
+  >          ;;
+  >       *)
+  >          echo "this is $T" >file_$T2
+  >          p4 add -t $T file_$T2
+  >          ;;
+  >    esac
+  > done
+  //depot/test-mercurial-import/file_text#1 - opened for add
+  //depot/test-mercurial-import/file_binary#1 - opened for add
+  //depot/test-mercurial-import/target_symlink#1 - opened for add
+  //depot/test-mercurial-import/file_symlink#1 - opened for add
+  //depot/test-mercurial-import/file_text+m#1 - opened for add
+  //depot/test-mercurial-import/file_text+w#1 - opened for add
+  //depot/test-mercurial-import/file_text+x#1 - opened for add
+  //depot/test-mercurial-import/file_text+k#1 - opened for add
+  //depot/test-mercurial-import/file_text+kx#1 - opened for add
+  //depot/test-mercurial-import/file_text+ko#1 - opened for add
+  //depot/test-mercurial-import/file_text+l#1 - opened for add
+  //depot/test-mercurial-import/file_text+c#1 - opened for add
+  //depot/test-mercurial-import/file_text+d#1 - opened for add
+  //depot/test-mercurial-import/file_text+f#1 - opened for add
+  //depot/test-mercurial-import/file_text+s#1 - opened for add
+  //depot/test-mercurial-import/file_text+s2#1 - opened for add
+  //depot/test-mercurial-import/file_binary+k#1 - opened for add
+  //depot/test-mercurial-import/file_binary+x#1 - opened for add
+  //depot/test-mercurial-import/file_binary+kx#1 - opened for add
+  //depot/test-mercurial-import/target_symlink+k#1 - opened for add
+  //depot/test-mercurial-import/file_symlink+k#1 - opened for add
+  //depot/test-mercurial-import/file_ctext#1 - opened for add
+  //depot/test-mercurial-import/file_cxtext#1 - opened for add
+  //depot/test-mercurial-import/file_ktext#1 - opened for add
+  //depot/test-mercurial-import/file_kxtext#1 - opened for add
+  //depot/test-mercurial-import/file_ltext#1 - opened for add
+  //depot/test-mercurial-import/file_tempobj#1 - opened for add
+  //depot/test-mercurial-import/file_ubinary#1 - opened for add
+  //depot/test-mercurial-import/file_uxbinary#1 - opened for add
+  //depot/test-mercurial-import/file_xbinary#1 - opened for add
+  //depot/test-mercurial-import/file_xltext#1 - opened for add
+  //depot/test-mercurial-import/file_xtempobj#1 - opened for add
+  //depot/test-mercurial-import/file_xtext#1 - opened for add
+  $ p4 submit -d initial
+  Submitting change 1.
+  Locking 33 files ...
+  add //depot/test-mercurial-import/file_binary#1
+  add //depot/test-mercurial-import/file_binary+k#1
+  add //depot/test-mercurial-import/file_binary+kx#1
+  add //depot/test-mercurial-import/file_binary+x#1
+  add //depot/test-mercurial-import/file_ctext#1
+  add //depot/test-mercurial-import/file_cxtext#1
+  add //depot/test-mercurial-import/file_ktext#1
+  add //depot/test-mercurial-import/file_kxtext#1
+  add //depot/test-mercurial-import/file_ltext#1
+  add //depot/test-mercurial-import/file_symlink#1
+  add //depot/test-mercurial-import/file_symlink+k#1
+  add //depot/test-mercurial-import/file_tempobj#1
+  add //depot/test-mercurial-import/file_text#1
+  add //depot/test-mercurial-import/file_text+c#1
+  add //depot/test-mercurial-import/file_text+d#1
+  add //depot/test-mercurial-import/file_text+f#1
+  add //depot/test-mercurial-import/file_text+k#1
+  add //depot/test-mercurial-import/file_text+ko#1
+  add //depot/test-mercurial-import/file_text+kx#1
+  add //depot/test-mercurial-import/file_text+l#1
+  add //depot/test-mercurial-import/file_text+m#1
+  add //depot/test-mercurial-import/file_text+s#1
+  add //depot/test-mercurial-import/file_text+s2#1
+  add //depot/test-mercurial-import/file_text+w#1
+  add //depot/test-mercurial-import/file_text+x#1
+  add //depot/test-mercurial-import/file_ubinary#1
+  add //depot/test-mercurial-import/file_uxbinary#1
+  add //depot/test-mercurial-import/file_xbinary#1
+  add //depot/test-mercurial-import/file_xltext#1
+  add //depot/test-mercurial-import/file_xtempobj#1
+  add //depot/test-mercurial-import/file_xtext#1
+  add //depot/test-mercurial-import/target_symlink#1
+  add //depot/test-mercurial-import/target_symlink+k#1
+  Change 1 submitted.
+  //depot/test-mercurial-import/file_binary+k#1 - refreshing
+  //depot/test-mercurial-import/file_binary+kx#1 - refreshing
+  //depot/test-mercurial-import/file_ktext#1 - refreshing
+  //depot/test-mercurial-import/file_kxtext#1 - refreshing
+  //depot/test-mercurial-import/file_symlink+k#1 - refreshing
+  //depot/test-mercurial-import/file_text+k#1 - refreshing
+  //depot/test-mercurial-import/file_text+ko#1 - refreshing
+  //depot/test-mercurial-import/file_text+kx#1 - refreshing
+
+test keyword expansion
+  $ p4 edit file_* target_*
+  //depot/test-mercurial-import/file_binary#1 - opened for edit
+  //depot/test-mercurial-import/file_binary+k#1 - opened for edit
+  //depot/test-mercurial-import/file_binary+kx#1 - opened for edit
+  //depot/test-mercurial-import/file_binary+x#1 - opened for edit
+  //depot/test-mercurial-import/file_ctext#1 - opened for edit
+  //depot/test-mercurial-import/file_cxtext#1 - opened for edit
+  //depot/test-mercurial-import/file_ktext#1 - opened for edit
+  //depot/test-mercurial-import/file_kxtext#1 - opened for edit
+  //depot/test-mercurial-import/file_ltext#1 - opened for edit
+  //depot/test-mercurial-import/file_symlink#1 - opened for edit
+  //depot/test-mercurial-import/file_symlink+k#1 - opened for edit
+  //depot/test-mercurial-import/file_tempobj#1 - opened for edit
+  //depot/test-mercurial-import/file_text#1 - opened for edit
+  //depot/test-mercurial-import/file_text+c#1 - opened for edit
+  //depot/test-mercurial-import/file_text+d#1 - opened for edit
+  //depot/test-mercurial-import/file_text+f#1 - opened for edit
+  //depot/test-mercurial-import/file_text+k#1 - opened for edit
+  //depot/test-mercurial-import/file_text+ko#1 - opened for edit
+  //depot/test-mercurial-import/file_text+kx#1 - opened for edit
+  //depot/test-mercurial-import/file_text+l#1 - opened for edit
+  //depot/test-mercurial-import/file_text+m#1 - opened for edit
+  //depot/test-mercurial-import/file_text+s#1 - opened for edit
+  //depot/test-mercurial-import/file_text+s2#1 - opened for edit
+  //depot/test-mercurial-import/file_text+w#1 - opened for edit
+  //depot/test-mercurial-import/file_text+x#1 - opened for edit
+  //depot/test-mercurial-import/file_ubinary#1 - opened for edit
+  //depot/test-mercurial-import/file_uxbinary#1 - opened for edit
+  //depot/test-mercurial-import/file_xbinary#1 - opened for edit
+  //depot/test-mercurial-import/file_xltext#1 - opened for edit
+  //depot/test-mercurial-import/file_xtempobj#1 - opened for edit
+  //depot/test-mercurial-import/file_xtext#1 - opened for edit
+  //depot/test-mercurial-import/target_symlink#1 - opened for edit
+  //depot/test-mercurial-import/target_symlink+k#1 - opened for edit
+  $ for T in $TYPES ; do
+  >    T2=`echo $T | tr [:upper:] [:lower:]`
+  >    echo '$Id$'       >>file_$T2
+  >    echo '$Header$'   >>file_$T2
+  >    echo '$Date$'     >>file_$T2
+  >    echo '$DateTime$' >>file_$T2
+  >    echo '$Change$'   >>file_$T2
+  >    echo '$File$'     >>file_$T2
+  >    echo '$Revision$' >>file_$T2
+  >    echo '$Header$$Header$Header$' >>file_$T2
+  > done
+
+  $ ln -s 'target_$Header$' crazy_symlink+k
+  $ p4 add -t symlink+k crazy_symlink+k
+  //depot/test-mercurial-import/crazy_symlink+k#1 - opened for add
+
+  $ p4 submit -d keywords
+  Submitting change 2.
+  Locking 34 files ...
+  add //depot/test-mercurial-import/crazy_symlink+k#1
+  edit //depot/test-mercurial-import/file_binary#2
+  edit //depot/test-mercurial-import/file_binary+k#2
+  edit //depot/test-mercurial-import/file_binary+kx#2
+  edit //depot/test-mercurial-import/file_binary+x#2
+  edit //depot/test-mercurial-import/file_ctext#2
+  edit //depot/test-mercurial-import/file_cxtext#2
+  edit //depot/test-mercurial-import/file_ktext#2
+  edit //depot/test-mercurial-import/file_kxtext#2
+  edit //depot/test-mercurial-import/file_ltext#2
+  edit //depot/test-mercurial-import/file_symlink#2
+  edit //depot/test-mercurial-import/file_symlink+k#2
+  edit //depot/test-mercurial-import/file_tempobj#2
+  edit //depot/test-mercurial-import/file_text#2
+  edit //depot/test-mercurial-import/file_text+c#2
+  edit //depot/test-mercurial-import/file_text+d#2
+  edit //depot/test-mercurial-import/file_text+f#2
+  edit //depot/test-mercurial-import/file_text+k#2
+  edit //depot/test-mercurial-import/file_text+ko#2
+  edit //depot/test-mercurial-import/file_text+kx#2
+  edit //depot/test-mercurial-import/file_text+l#2
+  edit //depot/test-mercurial-import/file_text+m#2
+  edit //depot/test-mercurial-import/file_text+s#2
+  edit //depot/test-mercurial-import/file_text+s2#2
+  edit //depot/test-mercurial-import/file_text+w#2
+  edit //depot/test-mercurial-import/file_text+x#2
+  edit //depot/test-mercurial-import/file_ubinary#2
+  edit //depot/test-mercurial-import/file_uxbinary#2
+  edit //depot/test-mercurial-import/file_xbinary#2
+  edit //depot/test-mercurial-import/file_xltext#2
+  edit //depot/test-mercurial-import/file_xtempobj#2
+  edit //depot/test-mercurial-import/file_xtext#2
+  edit //depot/test-mercurial-import/target_symlink#2
+  edit //depot/test-mercurial-import/target_symlink+k#2
+  Change 2 submitted.
+  //depot/test-mercurial-import/crazy_symlink+k#1 - refreshing
+  //depot/test-mercurial-import/file_binary+k#2 - refreshing
+  //depot/test-mercurial-import/file_binary+kx#2 - refreshing
+  //depot/test-mercurial-import/file_ktext#2 - refreshing
+  //depot/test-mercurial-import/file_kxtext#2 - refreshing
+  //depot/test-mercurial-import/file_symlink+k#2 - refreshing
+  //depot/test-mercurial-import/file_text+k#2 - refreshing
+  //depot/test-mercurial-import/file_text+ko#2 - refreshing
+  //depot/test-mercurial-import/file_text+kx#2 - refreshing
+
+check keywords in p4
+  $ grep -H Header file_*
+  file_binary:$Header$
+  file_binary:$Header$$Header$Header$
+  file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $
+  file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $$Header: //depot/test-mercurial-import/file_binary+k#2 $Header$
+  file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $
+  file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $$Header: //depot/test-mercurial-import/file_binary+kx#2 $Header$
+  file_binary+x:$Header$
+  file_binary+x:$Header$$Header$Header$
+  file_ctext:$Header$
+  file_ctext:$Header$$Header$Header$
+  file_cxtext:$Header$
+  file_cxtext:$Header$$Header$Header$
+  file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $
+  file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $$Header: //depot/test-mercurial-import/file_ktext#2 $Header$
+  file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $
+  file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $$Header: //depot/test-mercurial-import/file_kxtext#2 $Header$
+  file_ltext:$Header$
+  file_ltext:$Header$$Header$Header$
+  file_symlink:$Header$
+  file_symlink:$Header$$Header$Header$
+  file_symlink+k:$Header$
+  file_symlink+k:$Header$$Header$Header$
+  file_tempobj:$Header$
+  file_tempobj:$Header$$Header$Header$
+  file_text:$Header$
+  file_text:$Header$$Header$Header$
+  file_text+c:$Header$
+  file_text+c:$Header$$Header$Header$
+  file_text+d:$Header$
+  file_text+d:$Header$$Header$Header$
+  file_text+f:$Header$
+  file_text+f:$Header$$Header$Header$
+  file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $
+  file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $$Header: //depot/test-mercurial-import/file_text+k#2 $Header$
+  file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $
+  file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $$Header: //depot/test-mercurial-import/file_text+ko#2 $Header$
+  file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $
+  file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $$Header: //depot/test-mercurial-import/file_text+kx#2 $Header$
+  file_text+l:$Header$
+  file_text+l:$Header$$Header$Header$
+  file_text+m:$Header$
+  file_text+m:$Header$$Header$Header$
+  file_text+s:$Header$
+  file_text+s:$Header$$Header$Header$
+  file_text+s2:$Header$
+  file_text+s2:$Header$$Header$Header$
+  file_text+w:$Header$
+  file_text+w:$Header$$Header$Header$
+  file_text+x:$Header$
+  file_text+x:$Header$$Header$Header$
+  file_ubinary:$Header$
+  file_ubinary:$Header$$Header$Header$
+  file_uxbinary:$Header$
+  file_uxbinary:$Header$$Header$Header$
+  file_xbinary:$Header$
+  file_xbinary:$Header$$Header$Header$
+  file_xltext:$Header$
+  file_xltext:$Header$$Header$Header$
+  file_xtempobj:$Header$
+  file_xtempobj:$Header$$Header$Header$
+  file_xtext:$Header$
+  file_xtext:$Header$$Header$Header$
+
+convert
+  $ hg convert -s p4 $DEPOTPATH dst
+  initializing destination dst repository
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 keywords
+  scanning source...
+  sorting...
+  converting...
+  1 initial
+  0 keywords
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=1 desc="keywords" tags="tip" files="crazy_symlink+k file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
+  rev=0 desc="initial" tags="" files="file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_symlink file_symlink+k file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
+
+revision 0
+  $ hg -R dst update 0
+  30 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ head dst/file_* | cat -v
+  ==> dst/file_binary <==
+  this is binary
+  ==> dst/file_binary+k <==
+  this is binary+k
+  ==> dst/file_binary+kx <==
+  this is binary+kx
+  ==> dst/file_binary+x <==
+  this is binary+x
+  ==> dst/file_ctext <==
+  this is ctext
+  
+  ==> dst/file_cxtext <==
+  this is cxtext
+  
+  ==> dst/file_ktext <==
+  this is ktext
+  
+  ==> dst/file_kxtext <==
+  this is kxtext
+  
+  ==> dst/file_ltext <==
+  this is ltext
+  
+  ==> dst/file_symlink <==
+  this is target symlink
+  
+  ==> dst/file_symlink+k <==
+  this is target symlink+k
+  
+  ==> dst/file_text <==
+  this is text
+  
+  ==> dst/file_text+c <==
+  this is text+C
+  
+  ==> dst/file_text+d <==
+  this is text+D
+  
+  ==> dst/file_text+f <==
+  this is text+F
+  
+  ==> dst/file_text+k <==
+  this is text+k
+  
+  ==> dst/file_text+ko <==
+  this is text+ko
+  
+  ==> dst/file_text+kx <==
+  this is text+kx
+  
+  ==> dst/file_text+l <==
+  this is text+l
+  
+  ==> dst/file_text+m <==
+  this is text+m
+  
+  ==> dst/file_text+s2 <==
+  this is text+S2
+  
+  ==> dst/file_text+w <==
+  this is text+w
+  
+  ==> dst/file_text+x <==
+  this is text+x
+  
+  ==> dst/file_ubinary <==
+  this is ubinary
+  
+  ==> dst/file_uxbinary <==
+  this is uxbinary
+  
+  ==> dst/file_xbinary <==
+  this is xbinary
+  
+  ==> dst/file_xltext <==
+  this is xltext
+  
+  ==> dst/file_xtext <==
+  this is xtext
+
+revision 1
+  $ hg -R dst update 1
+  30 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ head dst/file_* | cat -v
+  ==> dst/file_binary <==
+  this is binary$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_binary+k <==
+  this is binary+k$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_binary+kx <==
+  this is binary+kx$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_binary+x <==
+  this is binary+x$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ctext <==
+  this is ctext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_cxtext <==
+  this is cxtext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ktext <==
+  this is ktext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_kxtext <==
+  this is kxtext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ltext <==
+  this is ltext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_symlink <==
+  this is target symlink
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_symlink+k <==
+  this is target symlink+k
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text <==
+  this is text
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+c <==
+  this is text+C
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+d <==
+  this is text+D
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+f <==
+  this is text+F
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+k <==
+  this is text+k
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+ko <==
+  this is text+ko
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+kx <==
+  this is text+kx
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+l <==
+  this is text+l
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+m <==
+  this is text+m
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+s <==
+  this is text+S
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+s2 <==
+  this is text+S2
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+w <==
+  this is text+w
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+x <==
+  this is text+x
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ubinary <==
+  this is ubinary
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_uxbinary <==
+  this is uxbinary
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_xbinary <==
+  this is xbinary
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_xltext <==
+  this is xltext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_xtext <==
+  this is xtext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+
+crazy_symlink
+  $ readlink crazy_symlink+k
+  target_$Header: //depot/test-mercurial-import/crazy_symlink+k#1 $
+  $ readlink dst/crazy_symlink+k
+  target_$Header$
+
+exit trap:
+  stopping the p4 server
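The revision 1 listings above show that content expanded by p4's +k/+ko keyword handling ("$Header: //depot/... $") comes out of the conversion collapsed back to the bare "$Header$" form, while non-keyword types pass through untouched. A minimal Python sketch of that un-expansion, illustrating the observed behaviour rather than the convert extension's actual code:

  import re

  # Collapse expanded p4/RCS-style keywords back to their bare form, as seen
  # in the dst/ output above ("$Header: ... $" -> "$Header$").  The keyword
  # list mirrors the fields written by the test; purely illustrative.
  KEYWORD_RE = re.compile(
      r'\$(Id|Header|Date|DateTime|Change|File|Revision): [^$\n]*\$')

  def unexpand_keywords(data):
      return KEYWORD_RE.sub(r'$\1$', data)

  # unexpand_keywords('$Header: //depot/test-mercurial-import/file_ktext#2 $')
  # returns '$Header$'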
--- a/tests/test-convert-p4.out	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,88 +0,0 @@
-% create p4 depot
-% start the p4 server
-% create a client spec
-Client hg-p4-import saved.
-% populate the depot
-//depot/test-mercurial-import/a#1 - opened for add
-//depot/test-mercurial-import/b/c#1 - opened for add
-Submitting change 1.
-Locking 2 files ...
-add //depot/test-mercurial-import/a#1
-add //depot/test-mercurial-import/b/c#1
-Change 1 submitted.
-% change some files
-//depot/test-mercurial-import/a#1 - opened for edit
-Submitting change 2.
-Locking 1 files ...
-edit //depot/test-mercurial-import/a#2
-Change 2 submitted.
-//depot/test-mercurial-import/b/c#1 - opened for edit
-Submitting change 3.
-Locking 1 files ...
-edit //depot/test-mercurial-import/b/c#2
-Change 3 submitted.
-% convert
-initializing destination dst repository
-reading p4 views
-collecting p4 changelists
-1 initial
-2 change a
-3 change b/c
-scanning source...
-sorting...
-converting...
-2 initial
-1 change a
-0 change b/c
-rev=2 desc="change b/c" tags="tip" files="b/c"
-rev=1 desc="change a" tags="" files="a"
-rev=0 desc="initial" tags="" files="a b/c"
-% change some files
-//depot/test-mercurial-import/a#2 - opened for edit
-//depot/test-mercurial-import/b/c#2 - opened for edit
-Submitting change 4.
-Locking 2 files ...
-edit //depot/test-mercurial-import/a#3
-edit //depot/test-mercurial-import/b/c#3
-Change 4 submitted.
-% convert again
-reading p4 views
-collecting p4 changelists
-1 initial
-2 change a
-3 change b/c
-4 change a b/c
-scanning source...
-sorting...
-converting...
-0 change a b/c
-rev=3 desc="change a b/c" tags="tip" files="a b/c"
-rev=2 desc="change b/c" tags="" files="b/c"
-rev=1 desc="change a" tags="" files="a"
-rev=0 desc="initial" tags="" files="a b/c"
-% interesting names
-//depot/test-mercurial-import/d d#1 - opened for add
-//depot/test-mercurial-import/ e/ f#1 - opened for add
-Submitting change 5.
-Locking 2 files ...
-add //depot/test-mercurial-import/ e/ f#1
-add //depot/test-mercurial-import/d d#1
-Change 5 submitted.
-% convert again
-reading p4 views
-collecting p4 changelists
-1 initial
-2 change a
-3 change b/c
-4 change a b/c
-5 add d e f
-scanning source...
-sorting...
-converting...
-0 add d e f
-rev=4 desc="add d e f" tags="tip" files=" e/ f d d"
-rev=3 desc="change a b/c" tags="" files="a b/c"
-rev=2 desc="change b/c" tags="" files="b/c"
-rev=1 desc="change a" tags="" files="a"
-rev=0 desc="initial" tags="" files="a b/c"
-% stop the p4 server
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-p4.t	Mon Jun 04 17:57:57 2012 -0500
@@ -0,0 +1,152 @@
+  $ "$TESTDIR/hghave" p4 || exit 80
+
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "convert = " >> $HGRCPATH
+
+create p4 depot
+  $ P4ROOT=`pwd`/depot; export P4ROOT
+  $ P4AUDIT=$P4ROOT/audit; export P4AUDIT
+  $ P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
+  $ P4LOG=$P4ROOT/log; export P4LOG
+  $ P4PORT=localhost:16661; export P4PORT
+  $ P4DEBUG=1; export P4DEBUG
+
+start the p4 server
+  $ [ ! -d $P4ROOT ] && mkdir $P4ROOT
+  $ p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
+  $ echo $! >> $DAEMON_PIDS
+  $ trap "echo stopping the p4 server ; p4 admin stop" EXIT
+
+  $ # wait for the server to initialize
+  $ while ! p4 ; do
+  >    sleep 1
+  > done >/dev/null 2>/dev/null
+
+create a client spec
+  $ P4CLIENT=hg-p4-import; export P4CLIENT
+  $ DEPOTPATH=//depot/test-mercurial-import/...
+  $ p4 client -o | sed '/^View:/,$ d' >p4client
+  $ echo View: >>p4client
+  $ echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
+  $ p4 client -i <p4client
+  Client hg-p4-import saved.
+
+populate the depot
+  $ echo a > a
+  $ mkdir b
+  $ echo c > b/c
+  $ p4 add a b/c
+  //depot/test-mercurial-import/a#1 - opened for add
+  //depot/test-mercurial-import/b/c#1 - opened for add
+  $ p4 submit -d initial
+  Submitting change 1.
+  Locking 2 files ...
+  add //depot/test-mercurial-import/a#1
+  add //depot/test-mercurial-import/b/c#1
+  Change 1 submitted.
+
+change some files
+  $ p4 edit a
+  //depot/test-mercurial-import/a#1 - opened for edit
+  $ echo aa >> a
+  $ p4 submit -d "change a"
+  Submitting change 2.
+  Locking 1 files ...
+  edit //depot/test-mercurial-import/a#2
+  Change 2 submitted.
+
+  $ p4 edit b/c
+  //depot/test-mercurial-import/b/c#1 - opened for edit
+  $ echo cc >> b/c
+  $ p4 submit -d "change b/c"
+  Submitting change 3.
+  Locking 1 files ...
+  edit //depot/test-mercurial-import/b/c#2
+  Change 3 submitted.
+
+convert
+  $ hg convert -s p4 $DEPOTPATH dst
+  initializing destination dst repository
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 change a
+  3 change b/c
+  scanning source...
+  sorting...
+  converting...
+  2 initial
+  1 change a
+  0 change b/c
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=2 desc="change b/c" tags="tip" files="b/c"
+  rev=1 desc="change a" tags="" files="a"
+  rev=0 desc="initial" tags="" files="a b/c"
+
+change some files
+  $ p4 edit a b/c
+  //depot/test-mercurial-import/a#2 - opened for edit
+  //depot/test-mercurial-import/b/c#2 - opened for edit
+  $ echo aaa >> a
+  $ echo ccc >> b/c
+  $ p4 submit -d "change a b/c"
+  Submitting change 4.
+  Locking 2 files ...
+  edit //depot/test-mercurial-import/a#3
+  edit //depot/test-mercurial-import/b/c#3
+  Change 4 submitted.
+
+convert again
+  $ hg convert -s p4 $DEPOTPATH dst
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 change a
+  3 change b/c
+  4 change a b/c
+  scanning source...
+  sorting...
+  converting...
+  0 change a b/c
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=3 desc="change a b/c" tags="tip" files="a b/c"
+  rev=2 desc="change b/c" tags="" files="b/c"
+  rev=1 desc="change a" tags="" files="a"
+  rev=0 desc="initial" tags="" files="a b/c"
+
+interesting names
+  $ echo dddd > "d d"
+  $ mkdir " e"
+  $ echo fff >" e/ f"
+  $ p4 add "d d" " e/ f"
+  //depot/test-mercurial-import/d d#1 - opened for add
+  //depot/test-mercurial-import/ e/ f#1 - opened for add
+  $ p4 submit -d "add d e f"
+  Submitting change 5.
+  Locking 2 files ...
+  add //depot/test-mercurial-import/ e/ f#1
+  add //depot/test-mercurial-import/d d#1
+  Change 5 submitted.
+
+convert again
+  $ hg convert -s p4 $DEPOTPATH dst
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 change a
+  3 change b/c
+  4 change a b/c
+  5 add d e f
+  scanning source...
+  sorting...
+  converting...
+  0 add d e f
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=4 desc="add d e f" tags="tip" files=" e/ f d d"
+  rev=3 desc="change a b/c" tags="" files="a b/c"
+  rev=2 desc="change b/c" tags="" files="b/c"
+  rev=1 desc="change a" tags="" files="a"
+  rev=0 desc="initial" tags="" files="a b/c"
+
+exit trap:
+  stopping the p4 server
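The two "convert again" runs above rely on hg convert being incremental: re-running the same command against an existing destination converts only the changelists it has not seen before. A hypothetical helper wrapping the exact command line used in the test (the DEPOTPATH value and the dst directory name come from the test; this is a sketch, not part of Mercurial):

  import subprocess

  def refresh_mirror(depotpath='//depot/test-mercurial-import/...', dest='dst'):
      # Repeating "hg convert" with the same source and destination picks up
      # only new p4 changelists, as the repeated runs in the test show.
      subprocess.check_call(['hg', 'convert', '-s', 'p4', depotpath, dest])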
--- a/tests/test-copy-move-merge.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-copy-move-merge.t	Mon Jun 04 17:57:57 2012 -0500
@@ -24,7 +24,7 @@
     unmatched files in other:
      b
      c
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      c -> a *
      b -> a *
     checking for directory renames
--- a/tests/test-copy.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-copy.t	Mon Jun 04 17:57:57 2012 -0500
@@ -1,3 +1,7 @@
+
+  $ mkdir part1
+  $ cd part1
+
   $ hg init
   $ echo a > a
   $ hg add a
@@ -92,3 +96,119 @@
   crosschecking files in changesets and manifests
   checking files
   2 files, 2 changesets, 2 total revisions
+
+  $ cd ..
+
+
+  $ mkdir part2
+  $ cd part2
+
+  $ hg init
+  $ echo foo > foo
+should fail - foo is not managed
+  $ hg mv foo bar
+  foo: not copying - file is not managed
+  abort: no files to copy
+  [255]
+  $ hg st -A
+  ? foo
+  $ hg add foo
+dry-run; print a warning that this is not a real copy; foo is added
+  $ hg mv --dry-run foo bar
+  foo has not been committed yet, so no copy data will be stored for bar.
+  $ hg st -A
+  A foo
+should print a warning that this is not a real copy; bar is added
+  $ hg mv foo bar
+  foo has not been committed yet, so no copy data will be stored for bar.
+  $ hg st -A
+  A bar
+should print a warning that this is not a real copy; foo is added
+  $ hg cp bar foo
+  bar has not been committed yet, so no copy data will be stored for foo.
+  $ hg rm -f bar
+  $ rm bar
+  $ hg st -A
+  A foo
+  $ hg commit -m1
+
+moving a missing file
+  $ rm foo
+  $ hg mv foo foo3
+  foo: deleted in working copy
+  foo3 does not exist!
+  $ hg up -qC .
+
+copy --after to a nonexistent target filename
+  $ hg cp -A foo dummy
+  foo: not recording copy - dummy does not exist
+
+dry-run; should show that foo is clean
+  $ hg copy --dry-run foo bar
+  $ hg st -A
+  C foo
+should show copy
+  $ hg copy foo bar
+  $ hg st -C
+  A bar
+    foo
+
+shouldn't show copy
+  $ hg commit -m2
+  $ hg st -C
+
+should match
+  $ hg debugindex foo
+     rev    offset  length   base linkrev nodeid       p1           p2
+       0         0       5      0       0 2ed2a3912a0b 000000000000 000000000000
+  $ hg debugrename bar
+  bar renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
+
+  $ echo bleah > foo
+  $ echo quux > bar
+  $ hg commit -m3
+
+should not be renamed
+  $ hg debugrename bar
+  bar not renamed
+
+  $ hg copy -f foo bar
+should show copy
+  $ hg st -C
+  M bar
+    foo
+  $ hg commit -m3
+
+should show no parents for tip
+  $ hg debugindex bar
+     rev    offset  length   base linkrev nodeid       p1           p2
+       0         0      69      0       1 7711d36246cc 000000000000 000000000000
+       1        69       6      1       2 bdf70a2b8d03 7711d36246cc 000000000000
+       2        75      81      1       3 b2558327ea8d 000000000000 000000000000
+should match
+  $ hg debugindex foo
+     rev    offset  length   base linkrev nodeid       p1           p2
+       0         0       5      0       0 2ed2a3912a0b 000000000000 000000000000
+       1         5       7      1       2 dd12c926cf16 2ed2a3912a0b 000000000000
+  $ hg debugrename bar
+  bar renamed from foo:dd12c926cf165e3eb4cf87b084955cb617221c17
+
+should show no copies
+  $ hg st -C
+
+copy --after on an added file
+  $ cp bar baz
+  $ hg add baz
+  $ hg cp -A bar baz
+  $ hg st -C
+  A baz
+    bar
+
+foo was clean:
+  $ hg st -AC foo
+  C foo
+but it's considered modified after a copy --after --force
+  $ hg copy -Af bar foo
+  $ hg st -AC foo
+  M foo
+    bar
--- a/tests/test-copy2.t	Mon Jun 04 17:22:09 2012 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,109 +0,0 @@
-  $ hg init
-  $ echo foo > foo
-should fail - foo is not managed
-  $ hg mv foo bar
-  foo: not copying - file is not managed
-  abort: no files to copy
-  [255]
-  $ hg st -A
-  ? foo
-  $ hg add foo
-dry-run; print a warning that this is not a real copy; foo is added
-  $ hg mv --dry-run foo bar
-  foo has not been committed yet, so no copy data will be stored for bar.
-  $ hg st -A
-  A foo
-should print a warning that this is not a real copy; bar is added
-  $ hg mv foo bar
-  foo has not been committed yet, so no copy data will be stored for bar.
-  $ hg st -A
-  A bar
-should print a warning that this is not a real copy; foo is added
-  $ hg cp bar foo
-  bar has not been committed yet, so no copy data will be stored for foo.
-  $ hg rm -f bar
-  $ rm bar
-  $ hg st -A
-  A foo
-  $ hg commit -m1
-
-moving a missing file
-  $ rm foo
-  $ hg mv foo foo3
-  foo: deleted in working copy
-  foo3 does not exist!
-  $ hg up -qC .
-
-copy --after to a nonexistant target filename
-  $ hg cp -A foo dummy
-  foo: not recording copy - dummy does not exist
-
-dry-run; should show that foo is clean
-  $ hg copy --dry-run foo bar
-  $ hg st -A
-  C foo
-should show copy
-  $ hg copy foo bar
-  $ hg st -C
-  A bar
-    foo
-
-shouldn't show copy
-  $ hg commit -m2
-  $ hg st -C
-
-should match
-  $ hg debugindex foo
-     rev    offset  length   base linkrev nodeid       p1           p2
-       0         0       5      0       0 2ed2a3912a0b 000000000000 000000000000
-  $ hg debugrename bar
-  bar renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
-
-  $ echo bleah > foo
-  $ echo quux > bar
-  $ hg commit -m3
-
-should not be renamed
-  $ hg debugrename bar
-  bar not renamed
-
-  $ hg copy -f foo bar
-should show copy
-  $ hg st -C
-  M bar
-    foo
-  $ hg commit -m3
-
-should show no parents for tip
-  $ hg debugindex bar
-     rev    offset  length   base linkrev nodeid       p1           p2
-       0         0      69      0       1 7711d36246cc 000000000000 000000000000
-       1        69       6      1       2 bdf70a2b8d03 7711d36246cc 000000000000
-       2        75      81      1       3 b2558327ea8d 000000000000 000000000000
-should match
-  $ hg debugindex foo
-     rev    offset  length   base linkrev nodeid       p1           p2
-       0         0       5      0       0 2ed2a3912a0b 000000000000 000000000000
-       1         5       7      1       2 dd12c926cf16 2ed2a3912a0b 000000000000
-  $ hg debugrename bar
-  bar renamed from foo:dd12c926cf165e3eb4cf87b084955cb617221c17
-
-should show no copies
-  $ hg st -C
-
-copy --after on an added file
-  $ cp bar baz
-  $ hg add baz
-  $ hg cp -A bar baz
-  $ hg st -C
-  A baz
-    bar
-
-foo was clean:
-  $ hg st -AC foo
-  C foo
-but it's considered modified after a copy --after --force
-  $ hg copy -Af bar foo
-  $ hg st -AC foo
-  M foo
-    bar
--- a/tests/test-debugcomplete.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-debugcomplete.t	Mon Jun 04 17:57:57 2012 -0500
@@ -247,10 +247,10 @@
   debugsub: rev
   debugwalk: include, exclude
   debugwireargs: three, four, five, ssh, remotecmd, insecure
-  graft: continue, edit, currentdate, currentuser, date, user, tool, dry-run
+  graft: continue, edit, log, currentdate, currentuser, date, user, tool, dry-run
   grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
   heads: rev, topo, active, closed, style, template
-  help: extension, command
+  help: extension, command, keyword
   identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure
   import: strip, base, edit, force, no-commit, bypass, exact, import-branch, message, logfile, date, user, similarity
   incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
--- a/tests/test-double-merge.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-double-merge.t	Mon Jun 04 17:57:57 2012 -0500
@@ -29,7 +29,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      bar
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      bar -> foo *
     checking for directory renames
   resolving manifests
--- a/tests/test-extension.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-extension.t	Mon Jun 04 17:57:57 2012 -0500
@@ -178,8 +178,7 @@
   
   list of commands:
   
-   foo:
-        yet another foo command
+   foo           yet another foo command
   
   global options:
   
@@ -208,10 +207,8 @@
   
   list of commands:
   
-   debugfoobar:
-        yet another debug command
-   foo:
-        yet another foo command
+   debugfoobar   yet another debug command
+   foo           yet another foo command
   
   global options:
   
@@ -328,7 +325,7 @@
   
   list of commands:
   
-   extdiff    use external program to diff repository (or selected files)
+   extdiff       use external program to diff repository (or selected files)
   
   use "hg -v help extdiff" to show builtin aliases and global options
 
@@ -478,3 +475,60 @@
   hg: unknown command 'foo'
   warning: error finding commands in $TESTTMP/hgext/forest.py (glob)
   [255]
+
+  $ cat > throw.py <<EOF
+  > from mercurial import cmdutil, commands
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > class Bogon(Exception): pass
+  > 
+  > @command('throw', [], 'hg throw')
+  > def throw(ui, **opts):
+  >     """throws an exception"""
+  >     raise Bogon()
+  > commands.norepo += " throw"
+  > EOF
+No declared supported version, extension complains:
+  $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
+  ** Unknown exception encountered with possibly-broken third-party extension throw
+  ** which supports versions unknown of Mercurial.
+  ** Please disable throw and try your action again.
+  ** If that fixes the bug please report it to the extension author.
+  ** Python * (glob)
+  ** Mercurial Distributed SCM * (glob)
+  ** Extensions loaded: throw
+If the extension specifies a buglink, show that:
+  $ echo 'buglink = "http://example.com/bts"' >> throw.py
+  $ rm -f throw.pyc throw.pyo
+  $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
+  ** Unknown exception encountered with possibly-broken third-party extension throw
+  ** which supports versions unknown of Mercurial.
+  ** Please disable throw and try your action again.
+  ** If that fixes the bug please report it to http://example.com/bts
+  ** Python * (glob)
+  ** Mercurial Distributed SCM (*) (glob)
+  ** Extensions loaded: throw
+If the extensions declare outdated versions, accuse the older extension first:
+  $ echo "testedwith = '1.9.3'" >> older.py
+  $ echo "testedwith = '2.1.1'" >> throw.py
+  $ rm -f throw.pyc throw.pyo
+  $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
+  >   throw 2>&1 | egrep '^\*\*'
+  ** Unknown exception encountered with possibly-broken third-party extension older
+  ** which supports versions 1.9.3 of Mercurial.
+  ** Please disable older and try your action again.
+  ** If that fixes the bug please report it to the extension author.
+  ** Python * (glob)
+  ** Mercurial Distributed SCM (*) (glob)
+  ** Extensions loaded: throw, older
+
+Declare the extension as tested with this hg version; the regular bts link is shown:
+  $ hgver=`python -c 'from mercurial import util; print util.version().split("+")[0]'`
+  $ echo 'testedwith = """'"$hgver"'"""' >> throw.py
+  $ rm -f throw.pyc throw.pyo
+  $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
+  ** unknown exception encountered, please report by visiting
+  ** http://mercurial.selenic.com/wiki/BugTracker
+  ** Python * (glob)
+  ** Mercurial Distributed SCM (*) (glob)
+  ** Extensions loaded: throw
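The throw.py fragments above exercise the module-level testedwith and buglink attributes that Mercurial's crash handler inspects when an extension raises an unexpected exception. A minimal hypothetical extension declaring both up front, assuming the same cmdutil.command API the test itself uses:

  # minimal.py - illustrative third-party extension, not part of this changeset
  from mercurial import cmdutil

  cmdtable = {}
  command = cmdutil.command(cmdtable)

  testedwith = '2.2'                  # hg versions the author tested against
  buglink = 'http://example.com/bts'  # shown instead of the generic wiki link

  @command('hello', [], 'hg hello')
  def hello(ui, repo, **opts):
      """print a greeting (illustrative only)"""
      ui.write('hello\n')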
--- a/tests/test-filecache.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-filecache.py	Mon Jun 04 17:57:57 2012 -0500
@@ -1,6 +1,7 @@
 import sys, os, subprocess
 
-if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']):
+if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
+                    'cacheable']):
     sys.exit(80)
 
 from mercurial import util, scmutil, extensions
@@ -77,7 +78,7 @@
 
     try:
         os.remove('x')
-    except:
+    except OSError:
         pass
 
     basic(fakerepo())
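Replacing the bare except with except OSError narrows the cleanup to the error os.remove can actually raise, instead of also swallowing KeyboardInterrupt and genuine programming errors. The same pattern in isolation:

  import os

  def remove_if_exists(path):
      try:
          os.remove(path)
      except OSError:
          # Only the expected failure (missing file, bad permissions) is
          # ignored; anything else still propagates.
          pass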
--- a/tests/test-graft.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-graft.t	Mon Jun 04 17:57:57 2012 -0500
@@ -72,28 +72,23 @@
   # HG changeset patch
   # User foo
   # Date 0 0
-  # Node ID d2e44c99fd3f31c176ea4efb9eca9f6306c81756
+  # Node ID ef0ef43d49e79e81ddafdc7997401ba0041efc82
   # Parent  68795b066622ca79a25816a662041d8f78f3cd9e
   2
   
   diff --git a/a b/b
   rename from a
   rename to b
-  --- a/a
-  +++ b/b
-  @@ -1,1 +1,1 @@
-  -a
-  +b
 
 Look for extra:source
 
   $ hg log --debug -r tip
-  changeset:   7:d2e44c99fd3f31c176ea4efb9eca9f6306c81756
+  changeset:   7:ef0ef43d49e79e81ddafdc7997401ba0041efc82
   tag:         tip
   phase:       draft
   parent:      0:68795b066622ca79a25816a662041d8f78f3cd9e
   parent:      -1:0000000000000000000000000000000000000000
-  manifest:    7:5d59766436fd8fbcd38e7bebef0f6eaf3eebe637
+  manifest:    7:e59b6b228f9cbf9903d5e9abf996e083a1f533eb
   user:        foo
   date:        Thu Jan 01 00:00:00 1970 +0000
   files+:      b
@@ -123,20 +118,25 @@
     searching for copies back to rev 1
     unmatched files in local:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
    overwrite: False, partial: False
-   ancestor: 68795b066622, local: d2e44c99fd3f+, remote: 5d205f8b35b6
+   ancestor: 68795b066622, local: ef0ef43d49e7+, remote: 5d205f8b35b6
    b: local copied/moved to a -> m
   preserving b for resolve of b
   updating: b 1/1 files (100.00%)
+  picked tool 'internal:merge' for b (binary False symlink False)
+  merging b and a to b
+  my b@ef0ef43d49e7+ other a@5d205f8b35b6 ancestor a@68795b066622
+   premerge successful
+  b
   grafting revision 5
     searching for copies back to rev 1
   resolving manifests
    overwrite: False, partial: False
-   ancestor: 4c60f11aa304, local: d2e44c99fd3f+, remote: 97f8bfe72746
+   ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746
    e: remote is newer -> g
   updating: e 1/1 files (100.00%)
   getting e
@@ -145,7 +145,7 @@
     searching for copies back to rev 1
   resolving manifests
    overwrite: False, partial: False
-   ancestor: 4c60f11aa304, local: 839a7e8fcf80+, remote: 9c233e8e184d
+   ancestor: 4c60f11aa304, local: 1905859650ec+, remote: 9c233e8e184d
    e: versions differ -> m
    d: remote is newer -> g
   preserving e for resolve of e
@@ -154,7 +154,7 @@
   updating: e 2/2 files (100.00%)
   picked tool 'internal:merge' for e (binary False symlink False)
   merging e
-  my e@839a7e8fcf80+ other e@9c233e8e184d ancestor e@68795b066622
+  my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622
   warning: conflicts during merge.
   merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: unresolved conflicts, can't continue
@@ -200,11 +200,13 @@
 View graph:
 
   $ hg --config extensions.graphlog= log -G --template '{author}@{rev}.{phase}: {desc}\n'
-  @  test@10.draft: 3
+  @  test@11.draft: 3
+  |
+  o  test@10.draft: 4
   |
-  o  test@9.draft: 4
+  o  test@9.draft: 5
   |
-  o  test@8.draft: 5
+  o  bar@8.draft: 1
   |
   o  foo@7.draft: 2
   |
@@ -232,17 +234,17 @@
   grafting revision 7
 
   $ hg log -r 7 --template '{rev}:{node}\n'
-  7:d2e44c99fd3f31c176ea4efb9eca9f6306c81756
+  7:ef0ef43d49e79e81ddafdc7997401ba0041efc82
   $ hg log -r 2 --template '{rev}:{node}\n'
   2:5c095ad7e90f871700f02dd1fa5012cb4498a2d4
 
   $ hg log --debug -r tip
-  changeset:   12:95adbe5de6b10f376b699ece9ed5a57cd7b4b0f6
+  changeset:   13:9db0f28fd3747e92c57d015f53b5593aeec53c2d
   tag:         tip
   phase:       draft
-  parent:      11:b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f
+  parent:      12:b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f
   parent:      -1:0000000000000000000000000000000000000000
-  manifest:    12:9944044f82a462bbaccc9bdf7e0ac5b811db7d1b
+  manifest:    13:dc313617b8c32457c0d589e0dbbedfe71f3cd637
   user:        foo
   date:        Thu Jan 01 00:00:00 1970 +0000
   files+:      b
@@ -260,7 +262,7 @@
   [255]
 
 Disallow grafting already grafted csets with the same origin onto each other
-  $ hg up -q 12
+  $ hg up -q 13
   $ hg graft 2
   skipping already grafted revision 2
   [255]
@@ -273,5 +275,15 @@
   skipping already grafted revision 2
   [255]
   $ hg graft tip
-  skipping already grafted revision 12 (same origin 2)
+  skipping already grafted revision 13 (same origin 2)
   [255]
+
+Graft with --log
+
+  $ hg up -Cq 1
+  $ hg graft 3 --log -u foo
+  grafting revision 3
+  warning: can't find ancestor for 'c' copied from 'b'!
+  $ hg log --template '{rev} {parents} {desc}\n' -r tip
+  14 1:5d205f8b35b6  3
+  (grafted from 4c60f11aa304a54ae1c199feb94e7fc771e51ed8)
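The new --log test shows that graft records its origin by appending a "(grafted from <node>)" line to the grafted changeset's description. A tiny sketch of that bookkeeping, mirroring the test output rather than Mercurial's actual commands.py code:

  def append_graft_log(desc, sourcenode):
      # With --log, the full source node is added to the description, e.g.
      # "(grafted from 4c60f11aa304a54ae1c199feb94e7fc771e51ed8)".
      return desc + '\n(grafted from %s)' % sourcenode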
--- a/tests/test-help.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-help.t	Mon Jun 04 17:57:57 2012 -0500
@@ -5,46 +5,46 @@
   
   basic commands:
   
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
   
   use "hg help" for the full list of commands or "hg -v" for details
 
   $ hg -q
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
 
   $ hg help
   Mercurial Distributed SCM
@@ -211,42 +211,26 @@
   
   basic commands:
   
-   add:
-        add the specified files on the next commit
-   annotate, blame:
-        show changeset information by line for each file
-   clone:
-        make a copy of an existing repository
-   commit, ci:
-        commit the specified files or all outstanding changes
-   diff:
-        diff repository (or selected files)
-   export:
-        dump the header and diffs for one or more changesets
-   forget:
-        forget the specified files on the next commit
-   init:
-        create a new repository in the given directory
-   log, history:
-        show revision history of entire repository or files
-   merge:
-        merge working directory with another revision
-   phase:
-        set or show the current phase name
-   pull:
-        pull changes from the specified source
-   push:
-        push changes to the specified destination
-   remove, rm:
-        remove the specified files on the next commit
-   serve:
-        start stand-alone webserver
-   status, st:
-        show changed files in the working directory
-   summary, sum:
-        summarize working directory state
-   update, up, checkout, co:
-        update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate, blame
+                 show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit, ci    commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log, history  show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove, rm    remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status, st    show changed files in the working directory
+   summary, sum  summarize working directory state
+   update, up, checkout, co
+                 update working directory (or switch revisions)
   
   global options:
   
@@ -389,8 +373,8 @@
   $ hg help ad
   list of commands:
   
-   add          add the specified files on the next commit
-   addremove    add all new files, delete all missing files
+   add           add the specified files on the next commit
+   addremove     add all new files, delete all missing files
   
   use "hg -v help ad" to show builtin aliases and global options
 
@@ -539,24 +523,24 @@
   
   basic commands:
   
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
   
   use "hg help" for the full list of commands or "hg -v" for details
   [255]
@@ -567,24 +551,24 @@
   
   basic commands:
   
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
   
   use "hg help" for the full list of commands or "hg -v" for details
   [255]
@@ -711,7 +695,7 @@
   
   list of commands:
   
-   nohelp    (no help text available)
+   nohelp        (no help text available)
   
   use "hg -v help helpext" to show builtin aliases and global options
 
@@ -733,13 +717,13 @@
       short-form identifier is only valid if it is the prefix of exactly one
       full-length identifier.
   
-      Any other string is treated as a tag or branch name. A tag name is a
-      symbolic name associated with a revision identifier. A branch name denotes
-      the tipmost revision of that branch. Tag and branch names must not contain
-      the ":" character.
+      Any other string is treated as a bookmark, tag, or branch name. A bookmark
+      is a movable pointer to a revision. A tag is a permanent name associated
+      with a revision. A branch name denotes the tipmost revision of that
+      branch. Bookmark, tag, and branch names must not contain the ":"
+      character.
   
-      The reserved name "tip" is a special tag that always identifies the most
-      recent revision.
+      The reserved name "tip" always identifies the most recent revision.
   
       The reserved name "null" indicates the null revision. This is the revision
       of an empty repository, and the parent of revision 0.
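
A note on the reworked help paragraph above: it enumerates the ways a revision can be named (number, hash, bookmark, tag, branch, and the reserved names "tip" and "null"). The sketch below resolves a few of those forms through the changectx API; it is illustrative only and assumes it runs inside an existing repository.

  from mercurial import ui as uimod, hg

  repo = hg.repository(uimod.ui(), '.')  # assumes the cwd is a repository
  print repo['tip'].hex()     # reserved name: the most recent revision
  print repo['null'].rev()    # the null revision (-1), parent of revision 0
  print repo[0].branch()      # a plain integer is a revision number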
--- a/tests/test-hgweb-commands.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-hgweb-commands.t	Mon Jun 04 17:57:57 2012 -0500
@@ -1047,6 +1047,55 @@
   </body>
   </html>
   
+raw graph
+
+  $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/?style=raw'
+  200 Script output follows
+  
+  
+  # HG graph
+  # Node ID ba87b23d29ca67a305625d81a20ac279c1e3f444
+  # Rows shown 4
+  
+  changeset:   ba87b23d29ca
+  user:        test
+  date:        1970-01-01
+  summary:     branch
+  branch:      unstable
+  tag:         tip
+  bookmark:    something
+  
+  node:        (0, 0) (color 1)
+  edge:        (0, 0) -> (0, 1) (color 1)
+  
+  changeset:   1d22e65f027e
+  user:        test
+  date:        1970-01-01
+  summary:     branch
+  branch:      stable
+  
+  node:        (0, 1) (color 1)
+  edge:        (0, 1) -> (0, 2) (color 1)
+  
+  changeset:   a4f92ed23982
+  user:        test
+  date:        1970-01-01
+  summary:     Added tag 1.0 for changeset 2ef0ac749a14
+  branch:      default
+  
+  node:        (0, 2) (color 1)
+  edge:        (0, 2) -> (0, 3) (color 1)
+  
+  changeset:   2ef0ac749a14
+  user:        test
+  date:        1970-01-01
+  summary:     base
+  tag:         1.0
+  bookmark:    anotherthing
+  
+  node:        (0, 3) (color 1)
+  
+  
 
 capabilities
 
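The hunk above adds coverage for hgweb's raw graph style. Outside the test harness the same page can be fetched over plain HTTP; a minimal sketch, where the host, port and running 'hg serve' instance are assumptions (the test itself goes through get-with-headers.py):

  import urllib2

  # assumes an 'hg serve' instance is listening on this address
  print urllib2.urlopen('http://127.0.0.1:8000/graph/?style=raw').read()
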
--- a/tests/test-hook.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-hook.t	Mon Jun 04 17:57:57 2012 -0500
@@ -195,6 +195,7 @@
   no changes found
   listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} 
   listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} 
+  adding remote bookmark bar
   importing bookmark bar
   $ cd ../a
 
@@ -279,6 +280,7 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
+  adding remote bookmark quux
   (run 'hg update' to get a working copy)
   $ hg rollback
   repository tip rolled back to revision 3 (undo pull)
@@ -447,6 +449,7 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
+  adding remote bookmark quux
   (run 'hg update' to get a working copy)
 
 make sure --traceback works
--- a/tests/test-issue672.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-issue672.t	Mon Jun 04 17:57:57 2012 -0500
@@ -28,7 +28,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      1a
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      1a -> 1 
     checking for directory renames
   resolving manifests
@@ -59,7 +59,7 @@
     searching for copies back to rev 1
     unmatched files in local:
      1a
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      1a -> 1 *
     checking for directory renames
   resolving manifests
@@ -82,7 +82,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      1a
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      1a -> 1 *
     checking for directory renames
   resolving manifests
--- a/tests/test-keyword.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-keyword.t	Mon Jun 04 17:57:57 2012 -0500
@@ -495,6 +495,22 @@
   $ hg forget i
   $ rm i
 
+amend
+
+  $ echo amend >> a
+  $ echo amend >> b
+  $ hg -q commit -d '1 14' -m 'prepare amend'
+
+  $ hg --debug commit --amend -d '1 15' -m 'amend without changes' | grep keywords
+  invalidating branch cache (tip differs)
+  overwriting a expanding keywords
+  $ hg -q id
+  a71343332ea9
+  $ head -1 a
+  expand $Id: a,v a71343332ea9 1970/01/01 00:00:01 test $
+
+  $ hg -q strip -n tip
+
 Test patch queue repo
 
   $ hg init --mq
@@ -558,6 +574,7 @@
   $ hg --debug commit -ma2c -d '1 0' -u 'User Name <user@example.com>'
   c
    c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292
+  removing unknown node 40a904bbbe4c from 1-phase boundary
   overwriting c expanding keywords
   committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d
   $ cat a c
@@ -722,6 +739,7 @@
 
   $ hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>'
   a
+  removing unknown node 40a904bbbe4c from 1-phase boundary
   overwriting a expanding keywords
   committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
   $ rm log
--- a/tests/test-largefiles.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-largefiles.t	Mon Jun 04 17:57:57 2012 -0500
@@ -432,11 +432,52 @@
   large11
   $ cat sub/large2
   large22
+  $ cd ..
+
+Test cloning with --all-largefiles flag
+
+  $ rm -Rf ${USERCACHE}/*
+  $ hg clone --all-largefiles a a-backup
+  updating to branch default
+  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  getting changed largefiles
+  3 largefiles updated, 0 removed
+  8 additional largefiles cached
+
+  $ hg clone --all-largefiles a ssh://localhost/a
+  abort: --all-largefiles is incompatible with non-local destination ssh://localhost/a
+  [255]
+
+Test pulling with --all-largefiles flag
+
+  $ rm -Rf a-backup
+  $ hg clone -r 1 a a-backup
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 8 changes to 4 files
+  updating to branch default
+  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  getting changed largefiles
+  2 largefiles updated, 0 removed
+  $ rm -Rf ${USERCACHE}/*
+  $ cd a-backup
+  $ hg pull --all-largefiles
+  pulling from $TESTTMP/a
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 6 changesets with 16 changes to 8 files
+  (run 'hg update' to get a working copy)
+  caching new largefiles
+  3 largefiles cached
+  3 additional largefiles cached
+  $ cd ..
 
 Rebasing between two repositories does not revert largefiles to old
 revisions (this was a very bad bug that took a lot of work to fix).
 
-  $ cd ..
   $ hg clone a d
   updating to branch default
   5 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -1136,4 +1177,37 @@
   abort: uncommitted changes in subrepo subrepo
   (use --subrepos for recursive commit)
   [255]
+
+Add a normal file to the subrepo, then test archiving
+
+  $ echo 'normal file' > subrepo/normal.txt
+  $ hg -R subrepo add subrepo/normal.txt
+
+Lock in subrepo, otherwise the change isn't archived
+
+  $ hg ci -S -m "add normal file to top level"
+  committing subrepository subrepo
+  Invoking status precommit hook
+  M large.txt
+  A normal.txt
+  Invoking status precommit hook
+  M .hgsubstate
+  $ hg archive -S lf_subrepo_archive
+  $ find lf_subrepo_archive | sort
+  lf_subrepo_archive
+  lf_subrepo_archive/.hg_archival.txt
+  lf_subrepo_archive/.hgsub
+  lf_subrepo_archive/.hgsubstate
+  lf_subrepo_archive/a
+  lf_subrepo_archive/a/b
+  lf_subrepo_archive/a/b/c
+  lf_subrepo_archive/a/b/c/d
+  lf_subrepo_archive/a/b/c/d/e.large.txt
+  lf_subrepo_archive/a/b/c/d/e.normal.txt
+  lf_subrepo_archive/a/b/c/x
+  lf_subrepo_archive/a/b/c/x/y.normal.txt
+  lf_subrepo_archive/subrepo
+  lf_subrepo_archive/subrepo/large.txt
+  lf_subrepo_archive/subrepo/normal.txt
+
   $ cd ..
--- a/tests/test-minirst.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-minirst.py	Mon Jun 04 17:57:57 2012 -0500
@@ -237,7 +237,8 @@
          ['1', '2', '3'],
          ['foo', 'bar', 'baz this list is very very very long man']]
 
-table = minirst.maketable(data, 2, True)
+rst = minirst.maketable(data, 2, True)
+table = ''.join(rst)
 
 print table
 
--- a/tests/test-mq-qpush-fail.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-mq-qpush-fail.t	Mon Jun 04 17:57:57 2012 -0500
@@ -202,6 +202,42 @@
   $ test -f a.orig && echo 'error: backup with --no-backup'
   [1]
 
+test qpop --keep-changes
+
+  $ hg qpush
+  applying p1
+  now at: p1
+  $ hg qpop --keep-changes --force
+  abort: cannot use both --force and --keep-changes
+  [255]
+  $ echo a >> a
+  $ hg qpop --keep-changes
+  abort: local changes found, refresh first
+  [255]
+  $ hg revert -qa a
+  $ rm a
+  $ hg qpop --keep-changes
+  abort: local changes found, refresh first
+  [255]
+  $ hg rm -A a
+  $ hg qpop --keep-changes
+  abort: local changes found, refresh first
+  [255]
+  $ hg revert -qa a
+  $ echo b > b
+  $ hg add b
+  $ hg qpop --keep-changes
+  abort: local changes found, refresh first
+  [255]
+  $ hg forget b
+  $ echo d > d
+  $ hg add d
+  $ hg qpop --keep-changes
+  popping p1
+  patch queue now empty
+  $ hg forget d
+  $ rm d
+
 test qpush --force and backup files
 
   $ echo a >> a
@@ -281,3 +317,108 @@
   now at: p2
   $ test -f a.orig && echo 'error: backup with --no-backup'
   [1]
+
+test qpush --keep-changes
+
+  $ hg qpush --keep-changes --force
+  abort: cannot use both --force and --keep-changes
+  [255]
+  $ hg qpush --keep-changes --exact
+  abort: cannot use --exact and --keep-changes together
+  [255]
+  $ echo b >> b
+  $ hg qpush --keep-changes
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ rm b
+  $ hg qpush --keep-changes
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg rm -A b
+  $ hg qpush --keep-changes
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg revert -aq b
+  $ echo d > d
+  $ hg add d
+  $ hg qpush --keep-changes
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg forget d
+  $ rm d
+  $ hg qpop
+  popping p2
+  patch queue now empty
+  $ echo b >> b
+  $ hg qpush -a --keep-changes
+  applying p2
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg qtop
+  p2
+  $ hg parents --template "{rev} {desc}\n"
+  2 imported patch p2
+  $ hg st b
+  M b
+  $ cat b
+  b
+  b
+
+test qgoto --keep-changes
+
+  $ hg revert -aq b
+  $ rm e
+  $ hg qgoto --keep-changes --force p3
+  abort: cannot use both --force and --keep-changes
+  [255]
+  $ echo a >> a
+  $ hg qgoto --keep-changes p3
+  applying p3
+  now at: p3
+  $ hg st a
+  M a
+  $ hg qgoto --keep-changes p2
+  popping p3
+  now at: p2
+  $ hg st a
+  M a
+
+test mq.keepchanges setting
+
+  $ hg --config mq.keepchanges=1 qpush
+  applying p3
+  now at: p3
+  $ hg st a
+  M a
+  $ hg --config mq.keepchanges=1 qpop
+  popping p3
+  now at: p2
+  $ hg st a
+  M a
+  $ hg --config mq.keepchanges=1 qgoto p3
+  applying p3
+  now at: p3
+  $ hg st a
+  M a
+  $ echo b >> b
+  $ hg --config mq.keepchanges=1 qpop --force
+  popping p3
+  now at: p2
+  $ hg st b
+  $ hg --config mq.keepchanges=1 qpush --exact
+  abort: local changes found, refresh first
+  [255]
+  $ hg revert -qa a
+  $ hg qpop
+  popping p2
+  patch queue now empty
+  $ echo a >> a
+  $ hg --config mq.keepchanges=1 qpush --force
+  applying p2
+  now at: p2
+  $ hg st a
--- a/tests/test-mq-strip.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-mq-strip.t	Mon Jun 04 17:57:57 2012 -0500
@@ -430,3 +430,37 @@
   $ hg strip 'not ancestors(x)'
   saved backup bundle to $TESTTMP/issue3299/.hg/strip-backup/*-backup.hg (glob)
 
+test hg strip -B bookmark
+
+  $ cd ..
+  $ hg init bookmarks
+  $ cd bookmarks
+  $ hg debugbuilddag '..<2.*1/2:m<2+3:c<m+3:a<2.:b'
+  $ hg bookmark -r 'a' 'todelete'
+  $ hg bookmark -r 'b' 'B'
+  $ hg bookmark -r 'b' 'nostrip'
+  $ hg bookmark -r 'c' 'delete'
+  $ hg up -C todelete
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg strip -B nostrip
+  bookmark 'nostrip' deleted
+  abort: empty revision set
+  [255]
+  $ hg strip -B todelete
+  bookmark 'todelete' deleted
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/*-backup.hg (glob)
+  $ hg id -ir dcbb326fdec2
+  abort: unknown revision 'dcbb326fdec2'!
+  [255]
+  $ hg id -ir d62d843c9a01
+  d62d843c9a01
+  $ hg bookmarks
+     B                         9:ff43616e5d0f
+     delete                    6:2702dd0c91e7
+  $ hg strip -B delete
+  bookmark 'delete' deleted
+  saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/*-backup.hg (glob)
+  $ hg id -ir 6:2702dd0c91e7
+  abort: unknown revision '2702dd0c91e7'!
+  [255]
--- a/tests/test-mq.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-mq.t	Mon Jun 04 17:57:57 2012 -0500
@@ -59,6 +59,16 @@
   You will by default be managing a patch queue named "patches". You can create
   other, independent patch queues with the "hg qqueue" command.
   
+  If the working directory contains uncommitted files, qpush, qpop and qgoto
+  abort immediately. If -f/--force is used, the changes are discarded. Setting:
+  
+    [mq]
+    keepchanges = True
+  
+  make them behave as if --keep-changes were passed, and non-conflicting local
+  changes will be tolerated and preserved. If incompatible options such as
+  -f/--force or --exact are passed, this setting is ignored.
+  
   list of commands:
   
    qapplied      print the patches already applied
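
The help text above introduces the mq.keepchanges setting and notes that incompatible options such as -f/--force or --exact cause it to be ignored. The following is not mq's actual code, only a hedged sketch of how such a boolean knob could be resolved with ui.configbool; the helper name is hypothetical:

  def resolve_keep_changes(ui, opts):
      # hypothetical helper mirroring the documented behaviour
      if opts.get('force') or opts.get('exact'):
          # incompatible options: the config setting is ignored
          return bool(opts.get('keep_changes'))
      return bool(opts.get('keep_changes')) or ui.configbool('mq', 'keepchanges')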
--- a/tests/test-paths.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-paths.t	Mon Jun 04 17:57:57 2012 -0500
@@ -1,5 +1,3 @@
-  $ "$TESTDIR/hghave" no-msys || exit 80 # MSYS will translate /foo/bar as if it was a real file path
-
   $ hg init a
   $ hg clone a b
   updating to branch default
@@ -24,9 +22,15 @@
   $ SOMETHING=foo hg paths
   dupe = $TESTTMP/b (glob)
   expand = $TESTTMP/a/foo/bar (glob)
+#if msys
+  $ SOMETHING=//foo hg paths
+  dupe = $TESTTMP/b (glob)
+  expand = /foo/bar
+#else
   $ SOMETHING=/foo hg paths
   dupe = $TESTTMP/b (glob)
   expand = /foo/bar
+#endif
   $ hg paths -q
   dupe
   expand
--- a/tests/test-phases.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-phases.t	Mon Jun 04 17:57:57 2012 -0500
@@ -9,6 +9,15 @@
 
   $ hg init initialrepo
   $ cd initialrepo
+
+Cannot change null revision phase
+
+  $ hg phase --force --secret null
+  abort: cannot change null revision phase
+  [255]
+  $ hg phase null
+  -1: public
+
   $ mkcommit A
 
 New commit are draft by default
--- a/tests/test-progress.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-progress.t	Mon Jun 04 17:57:57 2012 -0500
@@ -40,7 +40,7 @@
   $ echo "progress=" >> $HGRCPATH
   $ echo "loop=`pwd`/loop.py" >> $HGRCPATH
   $ echo "[progress]" >> $HGRCPATH
-  $ echo  "format = topic bar number" >> $HGRCPATH
+  $ echo "format = topic bar number" >> $HGRCPATH
   $ echo "assume-tty=1" >> $HGRCPATH
   $ echo "width=60" >> $HGRCPATH
 
--- a/tests/test-qrecord.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-qrecord.t	Mon Jun 04 17:57:57 2012 -0500
@@ -6,7 +6,8 @@
 help record (no record)
 
   $ hg help record
-  record extension - commands to interactively select changes for commit/qrefresh
+  record extension - commands to interactively select changes for
+  commit/qrefresh
   
   use "hg help extensions" for information on enabling extensions
 
--- a/tests/test-rebase-collapse.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-rebase-collapse.t	Mon Jun 04 17:57:57 2012 -0500
@@ -589,4 +589,44 @@
   b
   $ hg log -r . --template "{file_copies}\n"
   d (a)g (b)
+
+Test collapsing a middle revision in-place
+
+  $ hg tglog
+  @  2: 'Collapsed revision
+  |  * move1
+  |  * move2'
+  o  1: 'change'
+  |
+  o  0: 'add'
+  
+  $ hg rebase --collapse -r 1 -d 0
+  abort: can't remove original changesets with unrebased descendants
+  (use --keep to keep original changesets)
+  [255]
+
+Test collapsing in place
+
+  $ hg rebase --collapse -b . -d 0
+  saved backup bundle to $TESTTMP/copies/.hg/strip-backup/*-backup.hg (glob)
+  $ hg st --change . --copies
+  M a
+  M c
+  A d
+    a
+  A g
+    b
+  R b
+  $ cat a
+  a
+  a
+  $ cat c
+  c
+  c
+  $ cat d
+  a
+  a
+  $ cat g
+  b
+  b
   $ cd ..
--- a/tests/test-rebase-parameters.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-rebase-parameters.t	Mon Jun 04 17:57:57 2012 -0500
@@ -158,12 +158,12 @@
   $ cd ..
 
 
-Rebase with dest == `hg branch` => same as no arguments (from 3 onto 8):
+Rebase with dest == branch(.) => same as no arguments (from 3 onto 8):
 
   $ hg clone -q -u 3 a a3
   $ cd a3
 
-  $ hg rebase --dest `hg branch`
+  $ hg rebase --dest 'branch(.)'
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
 
   $ hg tglog
--- a/tests/test-rename-dir-merge.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-rename-dir-merge.t	Mon Jun 04 17:57:57 2012 -0500
@@ -30,7 +30,7 @@
     unmatched files in other:
      b/a
      b/b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b/a -> a/a 
      b/b -> a/b 
     checking for directory renames
@@ -81,7 +81,7 @@
      b/b
     unmatched files in other:
      a/c
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b/a -> a/a 
      b/b -> a/b 
     checking for directory renames
--- a/tests/test-rename-merge1.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-rename-merge1.t	Mon Jun 04 17:57:57 2012 -0500
@@ -28,7 +28,7 @@
     unmatched files in other:
      b
      b2
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      c2 -> a2 !
      b -> a *
      b2 -> a2 !
@@ -95,9 +95,6 @@
   $ hg up c761c6948de0
   1 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ hg up
-  note: possible conflict - b was renamed multiple times to:
-   b3
-   b4
   2 files updated, 0 files merged, 1 files removed, 0 files unresolved
 
 Check for issue2642
@@ -126,6 +123,8 @@
   $ cat f2
   c0
 
+  $ cd ..
+
 Check for issue2089
 
   $ hg init repo2089
@@ -155,3 +154,42 @@
 
   $ cat f2
   c2
+
+  $ cd ..
+
+Check for issue3074
+
+  $ hg init repo3074
+  $ cd repo3074
+  $ echo foo > file
+  $ hg add file
+  $ hg commit -m "added file"
+  $ hg mv file newfile
+  $ hg commit -m "renamed file"
+  $ hg update 0
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg rm file
+  $ hg commit -m "deleted file"
+  created new head
+  $ hg merge --debug
+    searching for copies back to rev 1
+    unmatched files in other:
+     newfile
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+     newfile -> file %
+    checking for directory renames
+   file: rename and delete -> rd
+  resolving manifests
+   overwrite: False, partial: False
+   ancestor: 19d7f95df299, local: 0084274f6b67+, remote: 5d32493049f0
+   newfile: remote created -> g
+  updating: file 1/2 files (50.00%)
+  note: possible conflict - file was deleted and renamed to:
+   newfile
+  updating: newfile 2/2 files (100.00%)
+  getting newfile
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg status
+  M newfile
+  $ cd ..
--- a/tests/test-rename-merge2.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-rename-merge2.t	Mon Jun 04 17:57:57 2012 -0500
@@ -80,7 +80,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
@@ -115,7 +115,7 @@
     searching for copies back to rev 1
     unmatched files in local:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
@@ -153,7 +153,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
@@ -188,7 +188,7 @@
     searching for copies back to rev 1
     unmatched files in local:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
@@ -222,7 +222,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a 
     checking for directory renames
   resolving manifests
@@ -252,7 +252,7 @@
     searching for copies back to rev 1
     unmatched files in local:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a 
     checking for directory renames
   resolving manifests
@@ -279,7 +279,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a 
     checking for directory renames
   resolving manifests
@@ -311,7 +311,7 @@
     searching for copies back to rev 1
     unmatched files in local:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a 
     checking for directory renames
   resolving manifests
@@ -369,7 +369,7 @@
      b
     unmatched files in other:
      c
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      c -> a !
      b -> a !
     checking for directory renames
@@ -648,7 +648,7 @@
     searching for copies back to rev 1
     unmatched files in other:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
@@ -682,7 +682,7 @@
     searching for copies back to rev 1
     unmatched files in local:
      b
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
@@ -720,7 +720,7 @@
      b
     unmatched files in other:
      c
-    all copies found (* = to merge, ! = divergent):
+    all copies found (* = to merge, ! = divergent, % = renamed and deleted):
      b -> a *
     checking for directory renames
   resolving manifests
--- a/tests/test-revlog-ancestry.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-revlog-ancestry.py	Mon Jun 04 17:57:57 2012 -0500
@@ -47,27 +47,31 @@
 
     # Ancestors
     print 'Ancestors of 5'
-    for r in repo.changelog.ancestors(5):
+    for r in repo.changelog.ancestors([5]):
         print r,
 
     print '\nAncestors of 6 and 5'
-    for r in repo.changelog.ancestors(6, 5):
+    for r in repo.changelog.ancestors([6, 5]):
         print r,
 
     print '\nAncestors of 5 and 4'
-    for r in repo.changelog.ancestors(5, 4):
+    for r in repo.changelog.ancestors([5, 4]):
+        print r,
+
+    print '\nAncestors of 7, stop at 6'
+    for r in repo.changelog.ancestors([7], 6):
         print r,
 
     # Descendants
     print '\n\nDescendants of 5'
-    for r in repo.changelog.descendants(5):
+    for r in repo.changelog.descendants([5]):
         print r,
 
     print '\nDescendants of 5 and 3'
-    for r in repo.changelog.descendants(5, 3):
+    for r in repo.changelog.descendants([5, 3]):
         print r,
 
     print '\nDescendants of 5 and 4'
-    for r in repo.changelog.descendants(5, 4):
+    for r in repo.changelog.descendants([5, 4]):
         print r,
 
--- a/tests/test-revlog-ancestry.py.out	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-revlog-ancestry.py.out	Mon Jun 04 17:57:57 2012 -0500
@@ -4,6 +4,8 @@
 3 4 2 1 0 
 Ancestors of 5 and 4
 4 2 0 
+Ancestors of 7, stop at 6
+6 
 
 Descendants of 5
 7 8 
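
The two hunks above track the changelog API change: ancestors() and descendants() now take a list of revisions, and ancestors() also accepts an optional stop revision, as the new "stop at 6" case shows. A minimal sketch of the updated calls, assuming an existing repository:

  from mercurial import ui as uimod, hg

  repo = hg.repository(uimod.ui(), '.')            # assumes an existing repository
  print list(repo.changelog.ancestors([5]))        # ancestors of rev 5
  print list(repo.changelog.ancestors([7], 6))     # do not walk below rev 6
  print list(repo.changelog.descendants([5, 3]))   # descendants of rev 5 or 3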
--- a/tests/test-revset.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-revset.t	Mon Jun 04 17:57:57 2012 -0500
@@ -1,5 +1,3 @@
-  $ "$TESTDIR/hghave" no-msys || exit 80 # MSYS will translate /a/b/c/ as if it was a real file path
-
   $ HGENCODING=utf-8
   $ export HGENCODING
 
@@ -32,6 +30,16 @@
   (branches are permanent and global, did you want a bookmark?)
   $ hg ci -Aqm2 -u Bob
 
+  $ hg log -r "extra('branch', 'a-b-c-')" --template '{rev}\n'
+  2
+  $ hg log -r "extra('branch')" --template '{rev}\n'
+  0
+  1
+  2
+  $ hg log -r "extra('branch', 're:a')" --template '{rev} {branch}\n'
+  0 a
+  2 a-b-c-
+
   $ hg co 1
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg branch +a+b+c+
@@ -49,8 +57,8 @@
 
   $ hg co 3
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg branch /a/b/c/
-  marked working directory as branch /a/b/c/
+  $ hg branch !a/b/c/
+  marked working directory as branch !a/b/c/
   (branches are permanent and global, did you want a bookmark?)
   $ hg ci -Aqm"5 bug"
 
@@ -223,9 +231,30 @@
   5
   $ log 'author(bob)'
   2
+  $ log 'author("re:bob|test")'
+  0
+  1
+  2
+  3
+  4
+  5
+  6
+  7
+  8
+  9
   $ log 'branch(é)'
   8
   9
+  $ log 'branch(a)'
+  0
+  $ hg log -r 'branch("re:a")' --template '{rev} {branch}\n'
+  0 a
+  2 a-b-c-
+  3 +a+b+c+
+  4 -a-b-c-
+  5 !a/b/c/
+  6 _a_b_c_
+  7 .a.b.c.
   $ log 'children(ancestor(4,5))'
   2
   3
@@ -362,6 +391,22 @@
   6
   $ log 'tag(tip)'
   9
+
+we can use patterns when searching for tags
+
+  $ log 'tag("1..*")'
+  abort: tag '1..*' does not exist
+  [255]
+  $ log 'tag("re:1..*")'
+  6
+  $ log 'tag("re:[0-9].[0-9]")'
+  6
+  $ log 'tag("literal:1.0")'
+  6
+  $ log 'tag("re:0..*")'
+  abort: no tags exist that match '0..*'
+  [255]
+
   $ log 'tag(unknown)'
   abort: tag 'unknown' does not exist
   [255]
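
The revset tests added above exercise the extra() predicate and 're:'/'literal:' pattern matching in author(), branch() and tag(). A small sketch of evaluating such expressions from Python through scmutil.revrange(), a helper the command layer uses for -r arguments; the repository path is an assumption:

  from mercurial import ui as uimod, hg, scmutil

  repo = hg.repository(uimod.ui(), '.')  # assumes an existing repository
  for spec in ["extra('branch', 're:a')", "tag('re:1..*')"]:
      print spec, scmutil.revrange(repo, [spec])  # list of matching revision numbers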
--- a/tests/test-run-tests.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-run-tests.t	Mon Jun 04 17:57:57 2012 -0500
@@ -52,6 +52,15 @@
   $ echo 'foo (re)'
   foo (re)
 
+Conditional sections based on hghave:
+
+#if fifo no-fifo
+  $ echo skipped
+#else
+  $ echo tested
+  tested
+#endif
+
 Exit code:
 
   $ (exit 1) 
--- a/tests/test-ssh.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-ssh.t	Mon Jun 04 17:57:57 2012 -0500
@@ -308,6 +308,41 @@
   Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
   [255]
 
+Test hg-ssh in read-only mode:
+
+  $ cat > ssh.sh << EOF
+  > userhost="\$1"
+  > SSH_ORIGINAL_COMMAND="\$2"
+  > export SSH_ORIGINAL_COMMAND
+  > PYTHONPATH="$PYTHONPATH"
+  > export PYTHONPATH
+  > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
+  > EOF
+
+  $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 4 changesets with 5 changes to 4 files (+1 heads)
+  updating to branch default
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ cd read-only-local
+  $ echo "baz" > bar
+  $ hg ci -A -m "unpushable commit" bar
+  $ hg push --ssh "sh ../ssh.sh"
+  pushing to ssh://user@dummy/$TESTTMP/remote
+  searching for changes
+  remote: Permission denied
+  remote: abort: prechangegroup.hg-ssh hook failed
+  remote: Permission denied
+  remote: abort: prepushkey.hg-ssh hook failed
+  abort: unexpected response: empty string
+  [255]
+
+  $ cd ..
+
   $ cat dummylog
   Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
   Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
--- a/tests/test-strict.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-strict.t	Mon Jun 04 17:57:57 2012 -0500
@@ -19,24 +19,24 @@
   
   basic commands:
   
-   add         add the specified files on the next commit
-   annotate    show changeset information by line for each file
-   clone       make a copy of an existing repository
-   commit      commit the specified files or all outstanding changes
-   diff        diff repository (or selected files)
-   export      dump the header and diffs for one or more changesets
-   forget      forget the specified files on the next commit
-   init        create a new repository in the given directory
-   log         show revision history of entire repository or files
-   merge       merge working directory with another revision
-   phase       set or show the current phase name
-   pull        pull changes from the specified source
-   push        push changes to the specified destination
-   remove      remove the specified files on the next commit
-   serve       start stand-alone webserver
-   status      show changed files in the working directory
-   summary     summarize working directory state
-   update      update working directory (or switch revisions)
+   add           add the specified files on the next commit
+   annotate      show changeset information by line for each file
+   clone         make a copy of an existing repository
+   commit        commit the specified files or all outstanding changes
+   diff          diff repository (or selected files)
+   export        dump the header and diffs for one or more changesets
+   forget        forget the specified files on the next commit
+   init          create a new repository in the given directory
+   log           show revision history of entire repository or files
+   merge         merge working directory with another revision
+   phase         set or show the current phase name
+   pull          pull changes from the specified source
+   push          push changes to the specified destination
+   remove        remove the specified files on the next commit
+   serve         start stand-alone webserver
+   status        show changed files in the working directory
+   summary       summarize working directory state
+   update        update working directory (or switch revisions)
   
   use "hg help" for the full list of commands or "hg -v" for details
   [255]
--- a/tests/test-tags.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-tags.t	Mon Jun 04 17:57:57 2012 -0500
@@ -1,5 +1,3 @@
-  $ "$TESTDIR/hghave" unix-permissions || exit 80
-
 Helper functions:
 
   $ cacheexists() {
@@ -77,11 +75,13 @@
 
 And again, but now unable to write tag cache:
 
+#if unix-permissions
   $ rm -f .hg/cache/tags
   $ chmod 555 .hg
   $ hg identify
   b9154636be93 tip
   $ chmod 755 .hg
+#endif
 
 Create a branch:
 
--- a/tests/test-transplant.t	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-transplant.t	Mon Jun 04 17:57:57 2012 -0500
@@ -120,7 +120,25 @@
   1  r2
   0  r1
 
+test same-parent transplant with --log
 
+  $ hg clone -r 1 ../t ../sameparent
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd ../sameparent
+  $ hg transplant --log -s ../prune 5
+  searching for changes
+  applying e234d668f844
+  e234d668f844 transplanted to e07aea8ecf9c
+  $ hg log --template '{rev} {parents} {desc}\n'
+  2  b1
+  (transplanted from e234d668f844e1b1a765f01db83a32c0c7bfa170)
+  1  r2
+  0  r1
 remote transplant
 
   $ hg clone -r 1 ../t ../remote
--- a/tests/test-walkrepo.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/test-walkrepo.py	Mon Jun 04 17:57:57 2012 -0500
@@ -24,10 +24,12 @@
     reposet = frozenset(walkrepos('.', followsym=True))
     if sym and (len(reposet) != 3):
         print "reposet = %r" % (reposet,)
-        print "Found %d repositories when I should have found 3" % (len(reposet),)
+        print ("Found %d repositories when I should have found 3"
+               % (len(reposet),))
     if (not sym) and (len(reposet) != 2):
         print "reposet = %r" % (reposet,)
-        print "Found %d repositories when I should have found 2" % (len(reposet),)
+        print ("Found %d repositories when I should have found 2"
+               % (len(reposet),))
     sub1set = frozenset((pjoin('.', 'sub1'),
                          pjoin('.', 'circle', 'subdir', 'sub1')))
     if len(sub1set & reposet) != 1:
@@ -41,7 +43,7 @@
         print "reposet = %r" % (reposet,)
         print "sub1set and reposet should have exactly one path in common."
     sub3 = pjoin('.', 'circle', 'top1')
-    if sym and not (sub3 in reposet):
+    if sym and sub3 not in reposet:
         print "reposet = %r" % (reposet,)
         print "Symbolic links are supported and %s is not in reposet" % (sub3,)
 
--- a/tests/tinyproxy.py	Mon Jun 04 17:22:09 2012 -0500
+++ b/tests/tinyproxy.py	Mon Jun 04 17:57:57 2012 -0500
@@ -47,7 +47,7 @@
         try: soc.connect(host_port)
         except socket.error, arg:
             try: msg = arg[1]
-            except: msg = arg
+            except (IndexError, TypeError): msg = arg
             self.send_error(404, msg)
             return 0
         return 1
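
The tinyproxy fix above narrows a bare except clause to the two exceptions that indexing the socket.error argument can actually raise. A standalone sketch of the same pattern, with illustrative inputs:

  def errmsg(arg):
      try:
          return arg[1]                   # (errno, message) pair
      except (IndexError, TypeError):
          return arg                      # fall back to the whole argument

  print errmsg((61, 'Connection refused'))  # -> 'Connection refused'
  print errmsg(('timed out',))              # IndexError -> ('timed out',)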