merge with stable
author Matt Mackall <mpm@selenic.com>
Sun, 13 May 2012 12:52:24 +0200
changeset 16699 d947e1da1259
parent 16698 26756d9d8143
parent 16682 40cdf8bc8d40
child 16700 28001e8a5149
files:
hgext/mq.py
mercurial/cmdutil.py
mercurial/localrepo.py
mercurial/parsers.c
tests/test-hook.t
--- a/contrib/check-code.py	Sun May 13 11:19:48 2012 +0200
+++ b/contrib/check-code.py	Sun May 13 12:52:24 2012 +0200
@@ -45,12 +45,10 @@
   [
     (r'pushd|popd', "don't use 'pushd' or 'popd', use 'cd'"),
     (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
-    (r'^function', "don't use 'function', use old style"),
     (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
     (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
     (r'echo.*\\n', "don't use 'echo \\n', use printf"),
     (r'echo -n', "don't use 'echo -n', use printf"),
-    (r'^diff.*-\w*N', "don't use 'diff -N'"),
     (r'(^| )wc[^|]*$\n(?!.*\(re\))', "filter wc output"),
     (r'head -c', "don't use 'head -c', use 'dd'"),
     (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
@@ -62,10 +60,8 @@
     (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
      "use egrep for extended grep syntax"),
     (r'/bin/', "don't use explicit paths for tools"),
-    (r'\$PWD', "don't use $PWD, use `pwd`"),
     (r'[^\n]\Z', "no trailing newline"),
     (r'export.*=', "don't export and assign at once"),
-    (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
     (r'^source\b', "don't use 'source', use '.'"),
     (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
     (r'ls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
@@ -79,7 +75,12 @@
     (r'^( *)\t', "don't use tabs to indent"),
   ],
   # warnings
-  []
+  [
+    (r'^function', "don't use 'function', use old style"),
+    (r'^diff.*-\w*N', "don't use 'diff -N'"),
+    (r'\$PWD', "don't use $PWD, use `pwd`"),
+    (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
+  ]
 ]
 
 testfilters = [
@@ -91,7 +92,8 @@
 utestpats = [
   [
     (r'^(\S|  $ ).*(\S[ \t]+|^[ \t]+)\n', "trailing whitespace on non-output"),
-    (uprefix + r'.*\|\s*sed', "use regex test output patterns instead of sed"),
+    (uprefix + r'.*\|\s*sed[^|>\n]*\n',
+     "use regex test output patterns instead of sed"),
     (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
     (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
     (uprefix + r'.*\|\| echo.*(fail|error)',
@@ -106,9 +108,9 @@
 for i in [0, 1]:
     for p, m in testpats[i]:
         if p.startswith(r'^'):
-            p = r"^  \$ (%s)" % p[1:]
+            p = r"^  [$>] (%s)" % p[1:]
         else:
-            p = r"^  \$ .*(%s)" % p
+            p = r"^  [$>] .*(%s)" % p
         utestpats[i].append((p, m))
 
 utestfilters = [
@@ -137,7 +139,8 @@
     (r' x+[xo][\'"]\n\s+[\'"]x', 'string join across lines with no space'),
     (r'[^\n]\Z', "no trailing newline"),
     (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
-#    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', "don't use underbars in identifiers"),
+#    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
+#     "don't use underbars in identifiers"),
     (r'^\s+(self\.)?[A-za-z][a-z0-9]+[A-Z]\w* = ',
      "don't use camelcase in identifiers"),
     (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
@@ -199,6 +202,7 @@
      "always assign an opened file to a variable, and close it afterwards"),
     (r'(?i)descendent', "the proper spelling is descendAnt"),
     (r'\.debug\(\_', "don't mark debug messages for translation"),
+    (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
   ],
   # warnings
   [
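
The check-code.py hunks above demote four shell-script checks from errors to warnings and tighten the sed check for .t tests. The pattern tables are plain (regex, message) pairs grouped into an error list and a warning list. A minimal sketch of how such a table can be applied to a line, assuming a simplified two-level layout (the names and driver below are illustrative, not check-code.py's actual API):

    import re

    # Two-level table mirroring contrib/check-code.py: errors first, warnings second.
    shellpats = [
        [  # errors
            (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
            (r'echo -n', "don't use 'echo -n', use printf"),
        ],
        [  # warnings
            (r'^function', "don't use 'function', use old style"),
            (r'\$PWD', "don't use $PWD, use `pwd`"),
        ],
    ]

    def checkline(line):
        """Return (severity, message) pairs for every pattern matching line."""
        hits = []
        for severity, pats in zip(("error", "warning"), shellpats):
            for pat, msg in pats:
                if re.search(pat, line):
                    hits.append((severity, msg))
        return hits

    print(checkline('echo -n "building in $PWD"'))
    # [('error', "don't use 'echo -n', use printf"),
    #  ('warning', "don't use $PWD, use `pwd`")]
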
--- a/contrib/debugcmdserver.py	Sun May 13 11:19:48 2012 +0200
+++ b/contrib/debugcmdserver.py	Sun May 13 12:52:24 2012 +0200
@@ -24,7 +24,7 @@
 def read(size):
     data = sys.stdin.read(size)
     if not data:
-        raise EOFError()
+        raise EOFError
     sys.stdout.write(data)
     sys.stdout.flush()
     return data
--- a/contrib/perf.py	Sun May 13 11:19:48 2012 +0200
+++ b/contrib/perf.py	Sun May 13 12:52:24 2012 +0200
@@ -33,16 +33,17 @@
     try:
         m = scmutil.match(repo[None], pats, {})
         timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
-    except:
+    except Exception:
         try:
             m = scmutil.match(repo[None], pats, {})
             timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
-        except:
+        except Exception:
             timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
 
 def perfstatus(ui, repo, *pats):
     #m = match.always(repo.root, repo.getcwd())
-    #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, False))))
+    #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
+    #                                                False))))
     timer(lambda: sum(map(len, repo.status())))
 
 def perfheads(ui, repo):
--- a/contrib/setup3k.py	Sun May 13 11:19:48 2012 +0200
+++ b/contrib/setup3k.py	Sun May 13 12:52:24 2012 +0200
@@ -26,22 +26,22 @@
 try:
     import hashlib
     sha = hashlib.sha1()
-except:
+except ImportError:
     try:
         import sha
-    except:
+    except ImportError:
         raise SystemExit(
             "Couldn't import standard hashlib (incomplete Python install).")
 
 try:
     import zlib
-except:
+except ImportError:
     raise SystemExit(
         "Couldn't import standard zlib (incomplete Python install).")
 
 try:
     import bz2
-except:
+except ImportError:
     raise SystemExit(
         "Couldn't import standard bz2 (incomplete Python install).")
 
@@ -84,7 +84,7 @@
             os.dup2(devnull.fileno(), sys.stderr.fileno())
             objects = cc.compile([fname], output_dir=tmpdir)
             cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
-        except:
+        except Exception:
             return False
         return True
     finally:
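
perf.py and setup3k.py above (and several convert/* files below) replace bare "except:" clauses with a named exception class. A bare clause also traps KeyboardInterrupt and SystemExit, so an interrupted or aborting command can be silently "handled". A small, self-contained illustration of the narrower style (the helper name is invented):

    def optional_import(name):
        """Import a module if available; only a missing module is tolerated."""
        try:
            return __import__(name)
        except ImportError:   # other errors, e.g. a bug in the module, still surface
            return None

    print(optional_import("bz2") is not None)   # True on a complete install
    print(optional_import("no_such_module"))    # None

    # "except Exception", used where any failure should fall back to another
    # strategy, is still preferable to a bare "except:" because it leaves
    # SystemExit and KeyboardInterrupt alone.
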
--- a/doc/gendoc.py	Sun May 13 11:19:48 2012 +0200
+++ b/doc/gendoc.py	Sun May 13 12:52:24 2012 +0200
@@ -102,9 +102,9 @@
         ui.write("\n")
 
     section(ui, _("Extensions"))
-    ui.write(_("This section contains help for extensions that are distributed "
-               "together with Mercurial. Help for other extensions is available "
-               "in the help system."))
+    ui.write(_("This section contains help for extensions that are "
+               "distributed together with Mercurial. Help for other "
+               "extensions is available in the help system."))
     ui.write("\n\n"
              ".. contents::\n"
              "   :class: htmlonly\n"
--- a/doc/hgmanpage.py	Sun May 13 11:19:48 2012 +0200
+++ b/doc/hgmanpage.py	Sun May 13 12:52:24 2012 +0200
@@ -582,7 +582,7 @@
                                     self._docinfo[name],
                                     self.defs['indent'][1],
                                     self.defs['indent'][1]))
-            elif not name in skip:
+            elif name not in skip:
                 if name in self._docinfo_names:
                     label = self._docinfo_names[name]
                 else:
--- a/hgext/bugzilla.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/bugzilla.py	Sun May 13 12:52:24 2012 +0200
@@ -416,7 +416,8 @@
         for id in bugs.keys():
             self.ui.status(_('  bug %s\n') % id)
             cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
-            bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
+            bzdir = self.ui.config('bugzilla', 'bzdir',
+                                   '/var/www/html/bugzilla')
             try:
                 # Backwards-compatible with old notify string, which
                 # took one string. This will throw with a new format
@@ -468,8 +469,8 @@
                 userid = self.get_user_id(defaultuser)
                 user = defaultuser
             except KeyError:
-                raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
-                                 (user, defaultuser))
+                raise util.Abort(_('cannot find bugzilla user id for %s or %s')
+                                 % (user, defaultuser))
         return (user, userid)
 
     def updatebug(self, bugid, newstate, text, committer):
--- a/hgext/children.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/children.py	Sun May 13 12:52:24 2012 +0200
@@ -8,7 +8,11 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-'''command to display child changesets'''
+'''command to display child changesets (DEPRECATED)
+
+This extension is deprecated. You should use :hg:`log -r
+"children(REV)"` instead.
+'''
 
 from mercurial import cmdutil
 from mercurial.commands import templateopts
--- a/hgext/convert/__init__.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/__init__.py	Sun May 13 12:52:24 2012 +0200
@@ -328,7 +328,8 @@
           ('', 'root', '', _('specify cvsroot')),
           # Options specific to builtin cvsps
           ('', 'parents', '', _('show parent changesets')),
-          ('', 'ancestors', '', _('show current changeset in ancestor branches')),
+          ('', 'ancestors', '',
+           _('show current changeset in ancestor branches')),
           # Options that are ignored for compatibility with cvsps-2.1
           ('A', 'cvs-direct', None, _('ignored for compatibility')),
          ],
--- a/hgext/convert/bzr.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/bzr.py	Sun May 13 12:52:24 2012 +0200
@@ -72,7 +72,7 @@
                 self.ui.warn(_('warning: lightweight checkouts may cause '
                                'conversion failures, try with a regular '
                                'branch instead.\n'))
-        except:
+        except Exception:
             self.ui.note(_('bzr source type could not be determined\n'))
 
     def before(self):
--- a/hgext/convert/common.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/common.py	Sun May 13 12:52:24 2012 +0200
@@ -76,7 +76,7 @@
 
     def getheads(self):
         """Return a list of this repository's heads"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def getfile(self, name, rev):
         """Return a pair (data, mode) where data is the file content
@@ -84,7 +84,7 @@
         identifier returned by a previous call to getchanges(). Raise
         IOError to indicate that name was deleted in rev.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def getchanges(self, version):
         """Returns a tuple of (files, copies).
@@ -95,18 +95,18 @@
 
         copies is a dictionary of dest: source
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def getcommit(self, version):
         """Return the commit object for version"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def gettags(self):
         """Return the tags as a dictionary of name: revision
 
         Tag names must be UTF-8 strings.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def recode(self, s, encoding=None):
         if not encoding:
@@ -116,10 +116,10 @@
             return s.encode("utf-8")
         try:
             return s.decode(encoding).encode("utf-8")
-        except:
+        except UnicodeError:
             try:
                 return s.decode("latin-1").encode("utf-8")
-            except:
+            except UnicodeError:
                 return s.decode(encoding, "replace").encode("utf-8")
 
     def getchangedfiles(self, rev, i):
@@ -133,7 +133,7 @@
 
         This function is only needed to support --filemap
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def converted(self, rev, sinkrev):
         '''Notify the source that a revision has been converted.'''
@@ -175,13 +175,13 @@
 
     def getheads(self):
         """Return a list of this repository's heads"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def revmapfile(self):
         """Path to a file that will contain lines
         source_rev_id sink_rev_id
         mapping equivalent revision identifiers for each system."""
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def authorfile(self):
         """Path to a file that will contain lines
@@ -203,7 +203,7 @@
         a particular revision (or even what that revision would be)
         before it receives the file data.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def puttags(self, tags):
         """Put tags into sink.
@@ -212,7 +212,7 @@
         Return a pair (tag_revision, tag_parent_revision), or (None, None)
         if nothing was changed.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def setbranch(self, branch, pbranches):
         """Set the current branch name. Called before the first putcommit
@@ -247,7 +247,7 @@
 
     def hascommit(self, rev):
         """Return True if the sink contains rev"""
-        raise NotImplementedError()
+        raise NotImplementedError
 
 class commandline(object):
     def __init__(self, ui, command):
@@ -333,7 +333,7 @@
         argmax = 4096
         try:
             argmax = os.sysconf("SC_ARG_MAX")
-        except:
+        except (AttributeError, ValueError):
             pass
 
         # Windows shells impose their own limits on command line length,
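
The recode() hunk above narrows its fallbacks to UnicodeError: decode with the declared encoding, fall back to latin-1, and as a last resort decode with replacement characters. Since latin-1 maps every byte, the final branch is effectively belt and braces. A standalone Python 2 sketch of the same fallback chain (an illustrative helper, not the converter's API):

    def to_utf8(s, encoding="utf-8"):
        """Best-effort conversion of a byte string to UTF-8."""
        try:
            return s.decode(encoding).encode("utf-8")
        except UnicodeError:
            # latin-1 decodes any byte sequence, so this cannot fail
            return s.decode("latin-1").encode("utf-8")

    print(repr(to_utf8("caf\xe9")))        # invalid UTF-8, latin-1 fallback: 'caf\xc3\xa9'
    print(repr(to_utf8("caf\xc3\xa9")))    # already valid UTF-8: unchanged
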
--- a/hgext/convert/convcmd.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/convcmd.py	Sun May 13 12:52:24 2012 +0200
@@ -190,7 +190,7 @@
                 children.setdefault(n, [])
                 hasparent = False
                 for p in parents[n]:
-                    if not p in self.map:
+                    if p not in self.map:
                         visit.append(p)
                         hasparent = True
                     children.setdefault(p, []).append(n)
@@ -462,7 +462,7 @@
     if not revmapfile:
         try:
             revmapfile = destc.revmapfile()
-        except:
+        except Exception:
             revmapfile = os.path.join(destc, "map")
 
     c = converter(ui, srcc, destc, revmapfile, opts)
--- a/hgext/convert/cvs.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/cvs.py	Sun May 13 12:52:24 2012 +0200
@@ -121,12 +121,13 @@
                         pf = open(cvspass)
                         for line in pf.read().splitlines():
                             part1, part2 = line.split(' ', 1)
+                            # /1 :pserver:user@example.com:2401/cvsroot/foo
+                            # Ah<Z
                             if part1 == '/1':
-                                # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
                                 part1, part2 = part2.split(' ', 1)
                                 format = format1
+                            # :pserver:user@example.com:/cvsroot/foo Ah<Z
                             else:
-                                # :pserver:user@example.com:/cvsroot/foo Ah<Z
                                 format = format0
                             if part1 == format:
                                 passw = part2
--- a/hgext/convert/cvsps.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/cvsps.py	Sun May 13 12:52:24 2012 +0200
@@ -336,7 +336,8 @@
                 else:
                     myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                     branches = [b for b in branchmap if branchmap[b] == myrev]
-                    assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
+                    assert len(branches) == 1, ('unknown branch: %s'
+                                                % e.mergepoint)
                     e.mergepoint = branches[0]
             else:
                 e.mergepoint = None
@@ -705,11 +706,11 @@
         if mergeto:
             m = mergeto.search(c.comment)
             if m:
-                try:
+                if m.groups():
                     m = m.group(1)
                     if m == 'HEAD':
                         m = None
-                except:
+                else:
                     m = None   # if no group found then merge to HEAD
                 if m in branches and c.branch != m:
                     # insert empty changeset for merge
--- a/hgext/convert/git.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/git.py	Sun May 13 12:52:24 2012 +0200
@@ -69,7 +69,7 @@
 
     def catfile(self, rev, type):
         if rev == hex(nullid):
-            raise IOError()
+            raise IOError
         data, ret = self.gitread("git cat-file %s %s" % (type, rev))
         if ret:
             raise util.Abort(_('cannot read %r object at %s') % (type, rev))
@@ -181,8 +181,8 @@
                 m, f = l[:-1].split("\t")
                 changes.append(f)
         else:
-            fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
-                             % (version, version, i + 1))
+            fh = self.gitopen('git diff-tree --name-only --root -r %s '
+                              '"%s^%s" --' % (version, version, i + 1))
             changes = [f.rstrip('\n') for f in fh]
         if fh.close():
             raise util.Abort(_('cannot read changes in %s') % version)
@@ -211,7 +211,7 @@
                         continue
                     name = '%s%s' % (reftype, name[prefixlen:])
                     bookmarks[name] = rev
-            except:
+            except Exception:
                 pass
 
         return bookmarks
--- a/hgext/convert/hg.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/hg.py	Sun May 13 12:52:24 2012 +0200
@@ -95,7 +95,7 @@
             self.after()
             try:
                 self.repo = hg.repository(self.ui, branchpath)
-            except:
+            except Exception:
                 self.repo = hg.repository(self.ui, branchpath, create=True)
             self.before()
 
@@ -105,7 +105,7 @@
         for b in pbranches:
             try:
                 self.repo.lookup(b[0])
-            except:
+            except Exception:
                 missings.setdefault(b[1], []).append(b[0])
 
         if missings:
@@ -192,7 +192,7 @@
 
         try:
             oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
-        except:
+        except Exception:
             oldlines = []
 
         newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
@@ -224,7 +224,7 @@
             bookmarks.write(self.repo)
 
     def hascommit(self, rev):
-        if not rev in self.repo and self.clonebranches:
+        if rev not in self.repo and self.clonebranches:
             raise util.Abort(_('revision %s not found in destination '
                                'repository (lookups with clonebranches=true '
                                'are not implemented)') % rev)
@@ -241,7 +241,7 @@
             # try to provoke an exception if this isn't really a hg
             # repo, but some other bogus compatible-looking url
             if not self.repo.local():
-                raise error.RepoError()
+                raise error.RepoError
         except error.RepoError:
             ui.traceback()
             raise NoRepo(_("%s is not a local Mercurial repository") % path)
@@ -294,7 +294,8 @@
         if not parents:
             files = sorted(ctx.manifest())
             # getcopies() is not needed for roots, but it is a simple way to
-            # detect missing revlogs and abort on errors or populate self.ignored
+            # detect missing revlogs and abort on errors or populate
+            # self.ignored
             self.getcopies(ctx, parents, files)
             return [(f, rev) for f in files if f not in self.ignored], {}
         if self._changescache and self._changescache[0] == rev:
--- a/hgext/convert/monotone.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/monotone.py	Sun May 13 12:52:24 2012 +0200
@@ -30,7 +30,7 @@
                 f = file(path, 'rb')
                 header = f.read(16)
                 f.close()
-            except:
+            except IOError:
                 header = ''
             if header != 'SQLite format 3\x00':
                 raise norepo
@@ -283,11 +283,11 @@
 
     def getfile(self, name, rev):
         if not self.mtnisfile(name, rev):
-            raise IOError() # file was deleted or renamed
+            raise IOError # file was deleted or renamed
         try:
             data = self.mtnrun("get_file_of", name, r=rev)
-        except:
-            raise IOError() # file was deleted or renamed
+        except Exception:
+            raise IOError # file was deleted or renamed
         self.mtnloadmanifest(rev)
         node, attr = self.files.get(name, (None, ""))
         return data, attr
@@ -317,7 +317,7 @@
     def getchangedfiles(self, rev, i):
         # This function is only needed to support --filemap
         # ... and we don't support that
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def before(self):
         # Check if we have a new enough version to use automate stdio
--- a/hgext/convert/subversion.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/convert/subversion.py	Sun May 13 12:52:24 2012 +0200
@@ -85,8 +85,8 @@
         self.copyfrom_rev = p.copyfrom_rev
         self.action = p.action
 
-def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
-                    strict_node_history=False):
+def get_log_child(fp, url, paths, start, end, limit=0,
+                  discover_changed_paths=True, strict_node_history=False):
     protocol = -1
     def receiver(orig_paths, revnum, author, date, message, pool):
         if orig_paths is not None:
@@ -139,7 +139,7 @@
                                    ' hg executable is in PATH'))
             try:
                 orig_paths, revnum, author, date, message = entry
-            except:
+            except (TypeError, ValueError):
                 if entry is None:
                     break
                 raise util.Abort(_("log stream exception '%s'") % entry)
@@ -176,7 +176,7 @@
                       'know better.\n'))
             return True
         data = inst.fp.read()
-    except:
+    except Exception:
         # Could be urllib2.URLError if the URL is invalid or anything else.
         return False
     return '<m:human-readable errcode="160013">' in data
@@ -276,7 +276,8 @@
             except ValueError:
                 raise util.Abort(_('svn: revision %s is not an integer') % rev)
 
-        self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
+        self.trunkname = self.ui.config('convert', 'svn.trunk',
+                                        'trunk').strip('/')
         self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
         try:
             self.startrev = int(self.startrev)
@@ -862,13 +863,14 @@
                     pass
         except SubversionException, (inst, num):
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
-                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
+                raise util.Abort(_('svn: branch has no revision %s')
+                                 % to_revnum)
             raise
 
     def getfile(self, file, rev):
         # TODO: ra.get_file transmits the whole file instead of diffs.
         if file in self.removed:
-            raise IOError()
+            raise IOError
         mode = ''
         try:
             new_module, revnum = revsplit(rev)[1:]
@@ -889,7 +891,7 @@
             notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
             if e.apr_err in notfound: # File not found
-                raise IOError()
+                raise IOError
             raise
         if mode == 'l':
             link_prefix = "link "
@@ -949,8 +951,8 @@
             if not p.startswith('/'):
                 p = self.module + '/' + p
             relpaths.append(p.strip('/'))
-        args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
-                strict_node_history]
+        args = [self.baseurl, relpaths, start, end, limit,
+                discover_changed_paths, strict_node_history]
         arg = encodeargs(args)
         hgexe = util.hgexecutable()
         cmd = '%s debugsvnlog' % util.shellquote(hgexe)
--- a/hgext/eol.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/eol.py	Sun May 13 12:52:24 2012 +0200
@@ -111,7 +111,8 @@
         return s
     if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
         return s
-    if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
+    if (ui.configbool('eol', 'fix-trailing-newline', False)
+        and s and s[-1] != '\n'):
         s = s + '\n'
     return eolre.sub('\n', s)
 
@@ -121,7 +122,8 @@
         return s
     if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
         return s
-    if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
+    if (ui.configbool('eol', 'fix-trailing-newline', False)
+        and s and s[-1] != '\n'):
         s = s + '\n'
     return eolre.sub('\r\n', s)
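
The two eol.py hunks only rewrap the fix-trailing-newline condition, but they document the filter's behaviour: optionally append a missing final newline, then rewrite line endings with eolre.sub(). A trimmed-down sketch of that behaviour, assuming the usual CR*LF pattern for eolre and leaving out the only-consistent and config checks the extension performs:

    import re

    eolre = re.compile('\r*\n')   # assumed pattern: any CRs before an LF

    def tolf(s, fix_trailing_newline=False):
        if fix_trailing_newline and s and s[-1] != '\n':
            s = s + '\n'
        return eolre.sub('\n', s)

    def tocrlf(s, fix_trailing_newline=False):
        if fix_trailing_newline and s and s[-1] != '\n':
            s = s + '\n'
        return eolre.sub('\r\n', s)

    print(repr(tolf('a\r\nb', fix_trailing_newline=True)))   # 'a\nb\n'
    print(repr(tocrlf('a\nb\n')))                            # 'a\r\nb\r\n'
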
 
--- a/hgext/extdiff.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/extdiff.py	Sun May 13 12:52:24 2012 +0200
@@ -88,7 +88,7 @@
     ctx = repo[node]
     for fn in files:
         wfn = util.pconvert(fn)
-        if not wfn in ctx:
+        if wfn not in ctx:
             # File doesn't exist; could be a bogus modify
             continue
         ui.note('  %s\n' % wfn)
--- a/hgext/fetch.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/fetch.py	Sun May 13 12:52:24 2012 +0200
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-'''pull, update and merge in one command'''
+'''pull, update and merge in one command (DEPRECATED)'''
 
 from mercurial.i18n import _
 from mercurial.node import nullid, short
--- a/hgext/gpg.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/gpg.py	Sun May 13 12:52:24 2012 +0200
@@ -43,7 +43,7 @@
                 try:
                     if f:
                         os.unlink(f)
-                except:
+                except OSError:
                     pass
         keys = []
         key, fingerprint = None, None
--- a/hgext/hgcia.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/hgcia.py	Sun May 13 12:52:24 2012 +0200
@@ -46,17 +46,14 @@
 from mercurial import cmdutil, patch, templater, util, mail
 import email.Parser
 
-import xmlrpclib
+import socket, xmlrpclib
 from xml.sax import saxutils
 
 socket_timeout = 30 # seconds
-try:
+if util.safehasattr(socket, 'setdefaulttimeout'):
     # set a timeout for the socket so you don't have to wait so looooong
     # when cia.vc is having problems. requires python >= 2.3:
-    import socket
     socket.setdefaulttimeout(socket_timeout)
-except:
-    pass
 
 HGCIA_VERSION = '0.1'
 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
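
The hgcia.py hunk above swaps a try/except around socket.setdefaulttimeout() for an explicit capability check. util.safehasattr is Mercurial's hasattr wrapper (plain hasattr on Python 2 swallows any exception raised during the lookup); the guard style itself is plain Python, sketched here with the builtin for illustration:

    import socket

    SOCKET_TIMEOUT = 30  # seconds

    # Guarded feature use instead of a blanket try/except: if the function
    # is missing the guard is simply false, and unrelated errors are not hidden.
    if hasattr(socket, "setdefaulttimeout"):
        socket.setdefaulttimeout(SOCKET_TIMEOUT)

    print(socket.getdefaulttimeout())   # 30.0
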
--- a/hgext/hgk.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/hgk.py	Sun May 13 12:52:24 2012 +0200
@@ -95,7 +95,8 @@
     nlprefix = '\n' + prefix
     if ctx is None:
         ctx = repo[n]
-    ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
+    # use ctx.node() instead ??
+    ui.write("tree %s\n" % short(ctx.changeset()[0]))
     for p in ctx.parents():
         ui.write("parent %s\n" % p)
 
@@ -113,7 +114,8 @@
     ui.write("branch %s\n\n" % ctx.branch())
 
     if prefix != "":
-        ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
+        ui.write("%s%s\n" % (prefix,
+                             description.replace('\n', nlprefix).strip()))
     else:
         ui.write(description + "\n")
     if prefix:
--- a/hgext/highlight/__init__.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/highlight/__init__.py	Sun May 13 12:52:24 2012 +0200
@@ -51,11 +51,13 @@
     pg_style = web.config('web', 'pygments_style', 'colorful')
     fmter = highlight.HtmlFormatter(style = pg_style)
     req.respond(common.HTTP_OK, 'text/css')
-    return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
+    return ['/* pygments_style = %s */\n\n' % pg_style,
+            fmter.get_style_defs('')]
 
 def extsetup():
     # monkeypatch in the new version
-    extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
+    extensions.wrapfunction(webcommands, '_filerevision',
+                            filerevision_highlight)
     extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
     webcommands.highlightcss = generate_css
     webcommands.__all__.append('highlightcss')
--- a/hgext/inotify/__init__.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/inotify/__init__.py	Sun May 13 12:52:24 2012 +0200
@@ -46,7 +46,8 @@
             files = match.files()
             if '.' in files:
                 files = []
-            if self._inotifyon and not ignored and not subrepos and not self._dirty:
+            if (self._inotifyon and not ignored and not subrepos and
+                not self._dirty):
                 cli = client(ui, repo)
                 try:
                     result = cli.statusquery(files, match, False,
--- a/hgext/inotify/server.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/inotify/server.py	Sun May 13 12:52:24 2012 +0200
@@ -355,7 +355,7 @@
                 except (OSError, socket.error), inst:
                     try:
                         os.unlink(self.realsockpath)
-                    except:
+                    except OSError:
                         pass
                     os.rmdir(tempdir)
                     if inst.errno == errno.EEXIST:
@@ -416,7 +416,7 @@
                 # try to send back our version to the client
                 # this way, the client too is informed of the mismatch
                 sock.sendall(chr(common.version))
-            except:
+            except socket.error:
                 pass
             return
 
--- a/hgext/keyword.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/keyword.py	Sun May 13 12:52:24 2012 +0200
@@ -238,7 +238,7 @@
     def iskwfile(self, cand, ctx):
         '''Returns subset of candidates which are configured for keyword
         expansion but are not symbolic links.'''
-        return [f for f in cand if self.match(f) and not 'l' in ctx.flags(f)]
+        return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
 
     def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
         '''Overwrites selected files expanding/shrinking keywords.'''
@@ -651,7 +651,7 @@
             return kwt.match(source)
 
         candidates = [f for f in repo.dirstate.copies() if
-                      not 'l' in wctx.flags(f) and haskwsource(f)]
+                      'l' not in wctx.flags(f) and haskwsource(f)]
         kwt.overwrite(wctx, candidates, False, False)
 
     def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
@@ -680,7 +680,7 @@
         # not make sense
         if (fctx._filerev is None and
             (self._repo._encodefilterpats or
-             kwt.match(fctx.path()) and not 'l' in fctx.flags() or
+             kwt.match(fctx.path()) and 'l' not in fctx.flags() or
              self.size() - 4 == fctx.size()) or
             self.size() == fctx.size()):
             return self._filelog.cmp(self._filenode, fctx.data())
--- a/hgext/largefiles/lfcommands.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/largefiles/lfcommands.py	Sun May 13 12:52:24 2012 +0200
@@ -11,7 +11,8 @@
 import os
 import shutil
 
-from mercurial import util, match as match_, hg, node, context, error, cmdutil
+from mercurial import util, match as match_, hg, node, context, error, \
+    cmdutil, scmutil
 from mercurial.i18n import _
 
 import lfutil
@@ -129,7 +130,7 @@
             try:
                 fctx = ctx.filectx(lfutil.standin(f))
             except error.LookupError:
-                raise IOError()
+                raise IOError
             renamed = fctx.renamed()
             if renamed:
                 renamed = lfutil.splitstandin(renamed[0])
@@ -229,7 +230,7 @@
             try:
                 fctx = ctx.filectx(srcfname)
             except error.LookupError:
-                raise IOError()
+                raise IOError
             renamed = fctx.renamed()
             if renamed:
                 # standin is always a largefile because largefile-ness
@@ -278,7 +279,7 @@
     try:
         fctx = ctx.filectx(f)
     except error.LookupError:
-        raise IOError()
+        raise IOError
     renamed = fctx.renamed()
     if renamed:
         renamed = renamed[0]
@@ -400,6 +401,23 @@
 
     return ([], [])
 
+def downloadlfiles(ui, repo, rev=None):
+    matchfn = scmutil.match(repo[None],
+                            [repo.wjoin(lfutil.shortname)], {})
+    def prepare(ctx, fns):
+        pass
+    totalsuccess = 0
+    totalmissing = 0
+    for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
+                                      prepare):
+        success, missing = cachelfiles(ui, repo, ctx.node())
+        totalsuccess += len(success)
+        totalmissing += len(missing)
+    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
+    if totalmissing > 0:
+        ui.status(_("%d largefiles failed to download\n") % totalmissing)
+    return totalsuccess, totalmissing
+
 def updatelfiles(ui, repo, filelist=None, printmessage=True):
     wlock = repo.wlock()
     try:
--- a/hgext/largefiles/overrides.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/largefiles/overrides.py	Sun May 13 12:52:24 2012 +0200
@@ -651,6 +651,7 @@
 # take some extra care so that the largefiles are correctly updated in the
 # working copy
 def overridepull(orig, ui, repo, source=None, **opts):
+    revsprepull = len(repo)
     if opts.get('rebase', False):
         repo._isrebasing = True
         try:
@@ -660,7 +661,6 @@
                           'the update flag\n')
             del opts['rebase']
             cmdutil.bailifchanged(repo)
-            revsprepull = len(repo)
             origpostincoming = commands.postincoming
             def _dummy(*args, **kwargs):
                 pass
@@ -695,8 +695,29 @@
             (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
             numcached += len(cached)
         ui.status(_("%d largefiles cached\n") % numcached)
+    if opts.get('all_largefiles'):
+        revspostpull = len(repo)
+        revs = []
+        for rev in xrange(revsprepull + 1, revspostpull):
+            revs.append(repo[rev].rev())
+        lfcommands.downloadlfiles(ui, repo, revs)
     return result
 
+def overrideclone(orig, ui, source, dest=None, **opts):
+    result = hg.clone(ui, opts, source, dest,
+                      pull=opts.get('pull'),
+                      stream=opts.get('uncompressed'),
+                      rev=opts.get('rev'),
+                      update=True, # required for successful walkchangerevs
+                      branch=opts.get('branch'))
+    if result is None:
+        return True
+    if opts.get('all_largefiles'):
+        sourcerepo, destrepo = result
+        success, missing = lfcommands.downloadlfiles(ui, destrepo, None)
+        return missing != 0
+    return result is None
+
 def overriderebase(orig, ui, repo, **opts):
     repo._isrebasing = True
     try:
@@ -782,6 +803,47 @@
 
     archiver.done()
 
+def hgsubrepoarchive(orig, repo, ui, archiver, prefix):
+    rev = repo._state[1]
+    ctx = repo._repo[rev]
+
+    lfcommands.cachelfiles(ui, repo._repo, ctx.node())
+
+    def write(name, mode, islink, getdata):
+        if lfutil.isstandin(name):
+            return
+        data = getdata()
+
+        archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
+
+    for f in ctx:
+        ff = ctx.flags(f)
+        getdata = ctx[f].data
+        if lfutil.isstandin(f):
+            path = lfutil.findfile(repo._repo, getdata().strip())
+            if path is None:
+                raise util.Abort(
+                    _('largefile %s not found in repo store or system cache')
+                    % lfutil.splitstandin(f))
+            f = lfutil.splitstandin(f)
+
+            def getdatafn():
+                fd = None
+                try:
+                    fd = open(os.path.join(prefix, path), 'rb')
+                    return fd.read()
+                finally:
+                    if fd:
+                        fd.close()
+
+            getdata = getdatafn
+
+        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
+
+    for subpath in ctx.substate:
+        sub = ctx.sub(subpath)
+        sub.archive(repo.ui, archiver, prefix)
+
 # If a largefile is modified, the change is not reflected in its
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
--- a/hgext/largefiles/uisetup.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/largefiles/uisetup.py	Sun May 13 12:52:24 2012 +0200
@@ -70,6 +70,15 @@
                                    overrides.overrideupdate)
     entry = extensions.wrapcommand(commands.table, 'pull',
                                    overrides.overridepull)
+    pullopt = [('', 'all-largefiles', None,
+                 _('download all pulled versions of largefiles'))]
+    entry[1].extend(pullopt)
+    entry = extensions.wrapcommand(commands.table, 'clone',
+                                   overrides.overrideclone)
+    cloneopt = [('', 'all-largefiles', None,
+                 _('download all versions of all largefiles'))]
+
+    entry[1].extend(cloneopt)
     entry = extensions.wrapcommand(commands.table, 'cat',
                                    overrides.overridecat)
     entry = extensions.wrapfunction(merge, '_checkunknownfile',
@@ -100,6 +109,7 @@
     extensions.wrapfunction(hg, 'merge', overrides.hgmerge)
 
     extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
+    extensions.wrapfunction(hgsubrepo, 'archive', overrides.hgsubrepoarchive)
     extensions.wrapfunction(cmdutil, 'bailifchanged',
                             overrides.overridebailifchanged)
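
The first uisetup.py hunk shows the usual way an extension grows a built-in command: extensions.wrapcommand() installs the wrapper and returns the command-table entry, and new flags are appended to its option list (entry[1]). A pared-down sketch of an extension module doing the same for pull; the wrapper body and status message are invented for illustration, while the flag mirrors the one added here:

    from mercurial import extensions, commands
    from mercurial.i18n import _

    def pullwrapper(orig, ui, repo, source=None, **opts):
        # run the original command, then do the extension's extra work
        result = orig(ui, repo, source, **opts)
        if opts.get('all_largefiles'):
            ui.status(_('would fetch every pulled largefile here\n'))
        return result

    def uisetup(ui):
        entry = extensions.wrapcommand(commands.table, 'pull', pullwrapper)
        # entry is the command-table entry (func, options, ...); extend the options
        entry[1].extend([('', 'all-largefiles', None,
                          _('download all pulled versions of largefiles'))])
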
 
--- a/hgext/largefiles/wirestore.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/largefiles/wirestore.py	Sun May 13 12:52:24 2012 +0200
@@ -14,7 +14,7 @@
         if not cap:
             raise lfutil.storeprotonotcapable([])
         storetypes = cap.split(',')
-        if not 'serve' in storetypes:
+        if 'serve' not in storetypes:
             raise lfutil.storeprotonotcapable(storetypes)
         self.remote = remote
         super(wirestore, self).__init__(ui, repo, remote.url())
--- a/hgext/mq.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/mq.py	Sun May 13 12:52:24 2012 +0200
@@ -46,6 +46,17 @@
 
 You will by default be managing a patch queue named "patches". You can
 create other, independent patch queues with the :hg:`qqueue` command.
+
+If the working directory contains uncommitted files, qpush, qpop and
+qgoto abort immediately. If -f/--force is used, the changes are
+discarded. Setting:
+
+  [mq]
+  check = True
+
+make them behave as if -c/--check were passed, and non-conflicting
+local changes will be tolerated and preserved. If incompatible options
+such as -f/--force or --exact are passed, this setting is ignored.
 '''
 
 from mercurial.i18n import _
@@ -280,6 +291,9 @@
         if phase is not None:
             repo.ui.restoreconfig(backup)
 
+class AbortNoCleanup(error.Abort):
+    pass
+
 class queue(object):
     def __init__(self, ui, path, patchdir=None):
         self.basepath = path
@@ -308,7 +322,7 @@
         try:
             gitmode = ui.configbool('mq', 'git', None)
             if gitmode is None:
-                raise error.ConfigError()
+                raise error.ConfigError
             self.gitmode = gitmode and 'yes' or 'no'
         except error.ConfigError:
             self.gitmode = ui.config('mq', 'git', 'auto').lower()
@@ -599,7 +613,7 @@
             raise util.Abort(_("repo commit failed"))
         try:
             ph = patchheader(mergeq.join(patch), self.plainmode)
-        except:
+        except Exception:
             raise util.Abort(_("unable to read %s") % patch)
 
         diffopts = self.patchopts(diffopts, patch)
@@ -681,7 +695,7 @@
 
     def apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
-              tobackup=None):
+              tobackup=None, check=False):
         wlock = lock = tr = None
         try:
             wlock = repo.wlock()
@@ -690,10 +704,14 @@
             try:
                 ret = self._apply(repo, series, list, update_status,
                                   strict, patchdir, merge, all_files=all_files,
-                                  tobackup=tobackup)
+                                  tobackup=tobackup, check=check)
                 tr.close()
                 self.savedirty()
                 return ret
+            except AbortNoCleanup:
+                tr.close()
+                self.savedirty()
+                return 2, repo.dirstate.p1()
             except:
                 try:
                     tr.abort()
@@ -708,7 +726,7 @@
 
     def _apply(self, repo, series, list=False, update_status=True,
                strict=False, patchdir=None, merge=None, all_files=None,
-               tobackup=None):
+               tobackup=None, check=False):
         """returns (error, hash)
 
         error = 1 for unable to read, 2 for patch failed, 3 for patch
@@ -749,6 +767,9 @@
                 if tobackup:
                     touched = patchmod.changedfiles(self.ui, repo, pf)
                     touched = set(touched) & tobackup
+                    if touched and check:
+                        raise AbortNoCleanup(
+                            _("local changes found, refresh first"))
                     self.backup(repo, touched, copy=True)
                     tobackup = tobackup - touched
                 (patcherr, files, fuzz) = self.patch(repo, pf)
@@ -862,7 +883,7 @@
     def finish(self, repo, revs):
         # Manually trigger phase computation to ensure phasedefaults is
         # executed before we remove the patches.
-        repo._phaserev
+        repo._phasecache
         patches = self._revpatches(repo, sorted(revs))
         qfinished = self._cleanup(patches, len(patches))
         if qfinished and repo.ui.configbool('mq', 'secret', False):
@@ -959,6 +980,10 @@
             else:
                 raise util.Abort(_('patch "%s" already exists') % name)
 
+    def checkforcecheck(self, check, force):
+        if force and check:
+            raise util.Abort(_('cannot use both --force and --check'))
+
     def new(self, repo, patchfn, *pats, **opts):
         """options:
            msg: a string or a no-argument function returning a string
@@ -1059,7 +1084,7 @@
                 patchpath = self.join(patchfn)
                 try:
                     os.unlink(patchpath)
-                except:
+                except OSError:
                     self.ui.warn(_('error unlinking %s\n') % patchpath)
                 raise
             self.removeundo(repo)
@@ -1156,8 +1181,9 @@
                                 return self.series[i + off]
         raise util.Abort(_("patch %s not in series") % patch)
 
-    def push(self, repo, patch=None, force=False, list=False,
-             mergeq=None, all=False, move=False, exact=False, nobackup=False):
+    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
+             all=False, move=False, exact=False, nobackup=False, check=False):
+        self.checkforcecheck(check, force)
         diffopts = self.diffopts()
         wlock = repo.wlock()
         try:
@@ -1212,14 +1238,19 @@
             if start == len(self.series):
                 self.ui.warn(_('patch series already fully applied\n'))
                 return 1
-            if not force:
+            if not force and not check:
                 self.checklocalchanges(repo, refresh=self.applied)
 
             if exact:
+                if check:
+                    raise util.Abort(
+                        _("cannot use --exact and --check together"))
                 if move:
-                    raise util.Abort(_("cannot use --exact and --move together"))
+                    raise util.Abort(_('cannot use --exact and --move '
+                                       'together'))
                 if self.applied:
-                    raise util.Abort(_("cannot push --exact with applied patches"))
+                    raise util.Abort(_('cannot push --exact with applied '
+                                       'patches'))
                 root = self.series[start]
                 target = patchheader(self.join(root), self.plainmode).parent
                 if not target:
@@ -1257,9 +1288,12 @@
                 end = self.series.index(patch, start) + 1
 
             tobackup = set()
-            if not nobackup and force:
+            if (not nobackup and force) or check:
                 m, a, r, d = self.checklocalchanges(repo, force=True)
-                tobackup.update(m + a)
+                if check:
+                    tobackup.update(m + a + r + d)
+                else:
+                    tobackup.update(m + a)
 
             s = self.series[start:end]
             all_files = set()
@@ -1268,7 +1302,7 @@
                     ret = self.mergepatch(repo, mergeq, s, diffopts)
                 else:
                     ret = self.apply(repo, s, list, all_files=all_files,
-                                     tobackup=tobackup)
+                                     tobackup=tobackup, check=check)
             except:
                 self.ui.warn(_('cleaning up working directory...'))
                 node = repo.dirstate.p1()
@@ -1299,7 +1333,8 @@
             wlock.release()
 
     def pop(self, repo, patch=None, force=False, update=True, all=False,
-            nobackup=False):
+            nobackup=False, check=False):
+        self.checkforcecheck(check, force)
         wlock = repo.wlock()
         try:
             if patch:
@@ -1346,9 +1381,12 @@
 
             tobackup = set()
             if update:
-                m, a, r, d = self.checklocalchanges(repo, force=force)
-                if not nobackup and force:
-                    tobackup.update(m + a)
+                m, a, r, d = self.checklocalchanges(repo, force=force or check)
+                if force:
+                    if not nobackup:
+                        tobackup.update(m + a)
+                elif check:
+                    tobackup.update(m + a + r + d)
 
             self.applieddirty = True
             end = len(self.applied)
@@ -1379,8 +1417,10 @@
                 if d:
                     raise util.Abort(_("deletions found between repo revs"))
 
-                # backup local changes in --force case
-                self.backup(repo, set(a + m + r) & tobackup)
+                tobackup = set(a + m + r) & tobackup
+                if check and tobackup:
+                    self.localchangesfound()
+                self.backup(repo, tobackup)
 
                 for f in a:
                     try:
@@ -1959,6 +1999,14 @@
         self.removeundo(repo)
         return imported
 
+def fixcheckopts(ui, opts):
+    if (not ui.configbool('mq', 'check') or opts.get('force')
+        or opts.get('exact')):
+        return opts
+    opts = dict(opts)
+    opts['check'] = True
+    return opts
+
 @command("qdelete|qremove|qrm",
          [('k', 'keep', None, _('keep patch file')),
           ('r', 'rev', [],
@@ -2140,7 +2188,8 @@
 
 @command("qclone",
          [('', 'pull', None, _('use pull protocol to copy metadata')),
-          ('U', 'noupdate', None, _('do not update the new working directories')),
+          ('U', 'noupdate', None,
+           _('do not update the new working directories')),
           ('', 'uncompressed', None,
            _('use uncompressed transfer (fast over LAN)')),
           ('p', 'patches', '',
@@ -2242,7 +2291,8 @@
     """print the entire series file
 
     Returns 0 on success."""
-    repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
+    repo.mq.qseries(repo, missing=opts.get('missing'),
+                    summary=opts.get('summary'))
     return 0
 
 @command("qtop", seriesopts, _('hg qtop [-s]'))
@@ -2463,7 +2513,8 @@
         if p in patches or p == parent:
             ui.warn(_('Skipping already folded patch %s\n') % p)
         if q.isapplied(p):
-            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
+            raise util.Abort(_('qfold cannot fold already applied patch %s')
+                             % p)
         patches.append(p)
 
     for p in patches:
@@ -2497,20 +2548,25 @@
         wlock.release()
 
 @command("qgoto",
-         [('f', 'force', None, _('overwrite any local changes')),
+         [('c', 'check', None, _('tolerate non-conflicting local changes')),
+          ('f', 'force', None, _('overwrite any local changes')),
           ('', 'no-backup', None, _('do not save backup copies of files'))],
          _('hg qgoto [OPTION]... PATCH'))
 def goto(ui, repo, patch, **opts):
     '''push or pop patches until named patch is at top of stack
 
     Returns 0 on success.'''
+    opts = fixcheckopts(ui, opts)
     q = repo.mq
     patch = q.lookup(patch)
     nobackup = opts.get('no_backup')
+    check = opts.get('check')
     if q.isapplied(patch):
-        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup)
+        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
+                    check=check)
     else:
-        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup)
+        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
+                     check=check)
     q.savedirty()
     return ret
 
@@ -2566,7 +2622,8 @@
     args = list(args)
     if opts.get('list'):
         if args or opts.get('none'):
-            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
+            raise util.Abort(_('cannot mix -l/--list with options or '
+                               'arguments'))
         for i in xrange(len(q.series)):
             status(i)
         return
@@ -2630,8 +2687,10 @@
     return newpath
 
 @command("^qpush",
-         [('f', 'force', None, _('apply on top of local changes')),
-          ('e', 'exact', None, _('apply the target patch to its recorded parent')),
+         [('c', 'check', None, _('tolerate non-conflicting local changes')),
+          ('f', 'force', None, _('apply on top of local changes')),
+          ('e', 'exact', None,
+           _('apply the target patch to its recorded parent')),
           ('l', 'list', None, _('list patch name in commit text')),
           ('a', 'all', None, _('apply all patches')),
           ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
@@ -2644,14 +2703,17 @@
 def push(ui, repo, patch=None, **opts):
     """push the next patch onto the stack
 
-    When -f/--force is applied, all local changes in patched files
-    will be lost.
+    By default, abort if the working directory contains uncommitted
+    changes. With -c/--check, abort only if the uncommitted files
+    overlap with patched files. With -f/--force, backup and patch over
+    uncommitted changes.
 
     Return 0 on success.
     """
     q = repo.mq
     mergeq = None
 
+    opts = fixcheckopts(ui, opts)
     if opts.get('merge'):
         if opts.get('name'):
             newpath = repo.join(opts.get('name'))
@@ -2664,25 +2726,33 @@
         ui.warn(_("merging with queue at: %s\n") % mergeq.path)
     ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                  mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
-                 exact=opts.get('exact'), nobackup=opts.get('no_backup'))
+                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
+                 check=opts.get('check'))
     return ret
 
 @command("^qpop",
          [('a', 'all', None, _('pop all patches')),
           ('n', 'name', '',
            _('queue name to pop (DEPRECATED)'), _('NAME')),
+          ('c', 'check', None, _('tolerate non-conflicting local changes')),
           ('f', 'force', None, _('forget any local changes to patched files')),
           ('', 'no-backup', None, _('do not save backup copies of files'))],
          _('hg qpop [-a] [-f] [PATCH | INDEX]'))
 def pop(ui, repo, patch=None, **opts):
     """pop the current patch off the stack
 
-    By default, pops off the top of the patch stack. If given a patch
-    name, keeps popping off patches until the named patch is at the
-    top of the stack.
+    Without argument, pops off the top of the patch stack. If given a
+    patch name, keeps popping off patches until the named patch is at
+    the top of the stack.
+
+    By default, abort if the working directory contains uncommitted
+    changes. With -c/--check, abort only if the uncommitted files
+    overlap with patched files. With -f/--force, backup and discard
+    changes made to such files.
 
     Return 0 on success.
     """
+    opts = fixcheckopts(ui, opts)
     localupdate = True
     if opts.get('name'):
         q = queue(ui, repo.path, repo.join(opts.get('name')))
@@ -2691,7 +2761,8 @@
     else:
         q = repo.mq
     ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
-                all=opts.get('all'), nobackup=opts.get('no_backup'))
+                all=opts.get('all'), nobackup=opts.get('no_backup'),
+                check=opts.get('check'))
     q.savedirty()
     return ret
 
@@ -3309,8 +3380,8 @@
             tags = result[0]
             for patch in mqtags:
                 if patch[1] in tags:
-                    self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
-                                 % patch[1])
+                    self.ui.warn(_('Tag %s overrides mq patch of the same '
+                                   'name\n') % patch[1])
                 else:
                     tags[patch[1]] = patch[0]
 
--- a/hgext/notify.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/notify.py	Sun May 13 12:52:24 2012 +0200
@@ -353,8 +353,8 @@
                     author = repo[rev].user()
             else:
                 data += ui.popbuffer()
-                ui.note(_('notify: suppressing notification for merge %d:%s\n') %
-                        (rev, repo[rev].hex()[:12]))
+                ui.note(_('notify: suppressing notification for merge %d:%s\n')
+                        % (rev, repo[rev].hex()[:12]))
                 ui.pushbuffer()
         if count:
             n.diff(ctx, repo['tip'])
--- a/hgext/patchbomb.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/patchbomb.py	Sun May 13 12:52:24 2012 +0200
@@ -109,7 +109,8 @@
         msg = email.MIMEMultipart.MIMEMultipart()
         if body:
             msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
-        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
+        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
+                               opts.get('test'))
         binnode = bin(node)
         # if node is mq patch, it will have the patch file's name as a tag
         if not patchname:
@@ -119,7 +120,8 @@
                 patchname = patchtags[0]
             elif total > 1:
                 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
-                                                  binnode, seqno=idx, total=total)
+                                                 binnode, seqno=idx,
+                                                 total=total)
             else:
                 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
         disposition = 'inline'
@@ -302,7 +304,7 @@
         finally:
             try:
                 os.unlink(tmpfn)
-            except:
+            except OSError:
                 pass
             os.rmdir(tmpdir)
 
--- a/hgext/progress.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/progress.py	Sun May 13 12:52:24 2012 +0200
@@ -237,7 +237,7 @@
             # truncate the list of topics assuming all topics within
             # this one are also closed
             if topic in self.topics:
-              self.topics = self.topics[:self.topics.index(topic)]
+                self.topics = self.topics[:self.topics.index(topic)]
         else:
             if topic not in self.topics:
                 self.starttimes[topic] = now
--- a/hgext/rebase.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/rebase.py	Sun May 13 12:52:24 2012 +0200
@@ -182,7 +182,7 @@
                 branch = repo[None].branch()
                 dest = repo[branch]
             else:
-                dest = repo[destf]
+                dest = scmutil.revsingle(repo, destf)
 
             if revf:
                 rebaseset = repo.revs('%lr', revf)
@@ -201,7 +201,7 @@
                 root = None
 
             if not rebaseset:
-                repo.ui.debug('base is ancestor of destination')
+                repo.ui.debug('base is ancestor of destination\n')
                 result = None
             elif not keepf and list(repo.revs('first(children(%ld) - %ld)',
                                               rebaseset, rebaseset)):
@@ -214,7 +214,7 @@
                                  % repo[root],
                                  hint=_('see hg help phases for details'))
             else:
-                result = buildstate(repo, dest, rebaseset, detachf)
+                result = buildstate(repo, dest, rebaseset, detachf, collapsef)
 
             if not result:
                 # Empty state built, nothing to rebase
@@ -265,7 +265,7 @@
                 else:
                     try:
                         ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
-                        stats = rebasenode(repo, rev, p1, state)
+                        stats = rebasenode(repo, rev, p1, state, collapsef)
                         if stats and stats[3] > 0:
                             raise util.Abort(_('unresolved conflicts (see hg '
                                         'resolve, then hg rebase --continue)'))
@@ -383,7 +383,7 @@
         repo.dirstate.invalidate()
         raise
 
-def rebasenode(repo, rev, p1, state):
+def rebasenode(repo, rev, p1, state, collapse):
     'Rebase a single revision'
     # Merge phase
     # Update to target and merge it with local
@@ -397,7 +397,9 @@
     base = None
     if repo[rev].rev() != repo[min(state)].rev():
         base = repo[rev].p1().node()
-    return merge.update(repo, rev, True, True, False, base)
+    # When collapsing in-place, the parent is the common ancestor, so we
+    # have to allow merging with it.
+    return merge.update(repo, rev, True, True, False, base, collapse)
 
 def defineparents(repo, rev, target, state, targetancestors):
     'Return the new parent relationship of the revision that will be rebased'
@@ -589,7 +591,7 @@
         repo.ui.warn(_('rebase aborted\n'))
         return 0
 
-def buildstate(repo, dest, rebaseset, detach):
+def buildstate(repo, dest, rebaseset, detach, collapse):
     '''Define which revisions are going to be rebased and where
 
     repo: repo
@@ -617,9 +619,9 @@
         raise util.Abort(_('source is ancestor of destination'))
     if commonbase == dest:
         samebranch = root.branch() == dest.branch()
-        if samebranch and root in dest.children():
-           repo.ui.debug('source is a child of destination')
-           return None
+        if not collapse and samebranch and root in dest.children():
+            repo.ui.debug('source is a child of destination\n')
+            return None
         # rebase on ancestor, force detach
         detach = True
     if detach:
--- a/hgext/record.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/record.py	Sun May 13 12:52:24 2012 +0200
@@ -516,10 +516,11 @@
                                '(use "hg commit" instead)'))
 
         changes = repo.status(match=match)[:3]
-        diffopts = mdiff.diffopts(git=True, nodates=True,
-                                  ignorews=opts.get('ignore_all_space'),
-                                  ignorewsamount=opts.get('ignore_space_change'),
-                                  ignoreblanklines=opts.get('ignore_blank_lines'))
+        diffopts = mdiff.diffopts(
+            git=True, nodates=True,
+            ignorews=opts.get('ignore_all_space'),
+            ignorewsamount=opts.get('ignore_space_change'),
+            ignoreblanklines=opts.get('ignore_blank_lines'))
         chunks = patch.diff(repo, changes=changes, opts=diffopts)
         fp = cStringIO.StringIO()
         fp.write(''.join(chunks))
--- a/hgext/relink.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/relink.py	Sun May 13 12:52:24 2012 +0200
@@ -79,7 +79,7 @@
         dirnames.sort()
         relpath = dirpath[len(src) + seplen:]
         for filename in sorted(filenames):
-            if not filename[-2:] in ('.d', '.i'):
+            if filename[-2:] not in ('.d', '.i'):
                 continue
             st = os.stat(os.path.join(dirpath, filename))
             if not stat.S_ISREG(st.st_mode):
--- a/hgext/transplant.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/transplant.py	Sun May 13 12:52:24 2012 +0200
@@ -124,7 +124,7 @@
                     continue
 
                 parents = source.changelog.parents(node)
-                if not opts.get('filter'):
+                if not (opts.get('filter') or opts.get('log')):
                     # If the changeset parent is the same as the
                     # wdir's parent, just pull it.
                     if parents[0] == p1:
--- a/hgext/zeroconf/__init__.py	Sun May 13 11:19:48 2012 +0200
+++ b/hgext/zeroconf/__init__.py	Sun May 13 12:52:24 2012 +0200
@@ -44,7 +44,7 @@
         s.connect(('1.0.0.1', 0))
         ip = s.getsockname()[0]
         return ip
-    except:
+    except socket.error:
         pass
 
     # Generic method, sometimes gives useless results
@@ -61,7 +61,7 @@
         s.connect(('1.0.0.1', 1))
         ip = s.getsockname()[0]
         return ip
-    except:
+    except socket.error:
         pass
 
     return dumbip
@@ -119,7 +119,8 @@
             name = os.path.basename(repo)
             path = (prefix + repo).strip('/')
             desc = u.config('web', 'description', name)
-            publish(name, desc, path, util.getport(u.config("web", "port", 8000)))
+            publish(name, desc, path,
+                    util.getport(u.config("web", "port", 8000)))
 
 # listen
 
--- a/mercurial/bookmarks.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/bookmarks.py	Sun May 13 12:52:24 2012 +0200
@@ -221,6 +221,11 @@
                     repo._bookmarks[n] = cr.node()
                     changed = True
                     ui.warn(_("divergent bookmark %s stored as %s\n") % (k, n))
+        elif rb[k] in repo:
+            # add remote bookmarks for changes we already have
+            repo._bookmarks[k] = repo[rb[k]].node()
+            changed = True
+            ui.status(_("adding remote bookmark %s\n") % k)
 
     if changed:
         write(repo)
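
The rule introduced above can be shown in isolation with a small sketch
(plain sets, dicts and node strings stand in for the repo and the remote
bookmark store; the helper name is hypothetical)::

    def adoptremotebookmarks(localnodes, remotemarks):
        # A remote bookmark whose target changeset already exists locally
        # is simply recorded on our side, as in the hunk above.
        return dict((name, node) for name, node in remotemarks.items()
                    if node in localnodes)

    print(adoptremotebookmarks(set(['abc123']), {'feature': 'abc123'}))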
--- a/mercurial/bundlerepo.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/bundlerepo.py	Sun May 13 12:52:24 2012 +0200
@@ -54,7 +54,7 @@
                 continue
 
             for p in (p1, p2):
-                if not p in self.nodemap:
+                if p not in self.nodemap:
                     raise error.LookupError(p, self.indexfile,
                                             _("unknown parent"))
             # start, size, full unc. size, base (unused), link, p1, p2, node
@@ -323,13 +323,16 @@
 
     Returns a tuple (local, csets, cleanupfn):
 
-    "local" is a local repo from which to obtain the actual incoming changesets; it
-      is a bundlerepo for the obtained bundle when the original "other" is remote.
+    "local" is a local repo from which to obtain the actual incoming
+      changesets; it is a bundlerepo for the obtained bundle when the
+      original "other" is remote.
     "csets" lists the incoming changeset node ids.
-    "cleanupfn" must be called without arguments when you're done processing the
-      changes; it closes both the original "other" and the one returned here.
+    "cleanupfn" must be called without arguments when you're done processing
+      the changes; it closes both the original "other" and the one returned
+      here.
     '''
-    tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force)
+    tmp = discovery.findcommonincoming(repo, other, heads=onlyheads,
+                                       force=force)
     common, incoming, rheads = tmp
     if not incoming:
         try:
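
A sketch of the calling convention documented above (only the returned tuple
is handled here, since getremotechanges's full signature is not shown in this
hunk, and the helper name is made up for illustration)::

    def consumeincoming(result, ui):
        # result is the (local, csets, cleanupfn) tuple described above;
        # cleanupfn() must run even if processing the changesets fails.
        local, csets, cleanupfn = result
        try:
            for node in csets:
                ui.status('incoming %s\n' % local[node])
        finally:
            cleanupfn()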
--- a/mercurial/cmdutil.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/cmdutil.py	Sun May 13 12:52:24 2012 +0200
@@ -1363,7 +1363,7 @@
                                               copied=copied.get(path))
                     return mctx
                 except KeyError:
-                    raise IOError()
+                    raise IOError
         else:
             ui.note(_('copying changeset %s to %s\n') % (old, base))
 
@@ -1372,7 +1372,7 @@
                 try:
                     return old.filectx(path)
                 except KeyError:
-                    raise IOError()
+                    raise IOError
 
             # See if we got a message from -m or -l, if not, open the editor
             # with the message of the changeset to amend
@@ -1489,7 +1489,7 @@
         def badfn(path, msg):
             if path in names:
                 return
-            if path in repo[node].substate:
+            if path in ctx.substate:
                 return
             path_ = path + '/'
             for f in names:
@@ -1497,14 +1497,14 @@
                     return
             ui.warn("%s: %s\n" % (m.rel(path), msg))
 
-        m = scmutil.match(repo[node], pats, opts)
+        m = scmutil.match(ctx, pats, opts)
         m.bad = badfn
-        for abs in repo[node].walk(m):
+        for abs in ctx.walk(m):
             if abs not in names:
                 names[abs] = m.rel(abs), m.exact(abs)
 
         # get the list of subrepos that must be reverted
-        targetsubs = [s for s in repo[node].substate if m(s)]
+        targetsubs = [s for s in ctx.substate if m(s)]
         m = scmutil.matchfiles(repo, names)
         changes = repo.status(match=m)[:4]
         modified, added, removed, deleted = map(set, changes)
--- a/mercurial/commands.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/commands.py	Sun May 13 12:52:24 2012 +0200
@@ -520,10 +520,12 @@
     revision as good or bad without checking it out first.
 
     If you supply a command, it will be used for automatic bisection.
-    Its exit status will be used to mark revisions as good or bad:
-    status 0 means good, 125 means to skip the revision, 127
-    (command not found) will abort the bisection, and any other
-    non-zero exit status means the revision is bad.
+    The environment variable HG_NODE will contain the ID of the
+    changeset being tested. The exit status of the command will be
+    used to mark revisions as good or bad: status 0 means good, 125
+    means to skip the revision, 127 (command not found) will abort the
+    bisection, and any other non-zero exit status means the revision
+    is bad.
 
     .. container:: verbose
 
@@ -563,6 +565,11 @@
 
           hg log -r "bisect(pruned)"
 
+      - see the changeset currently being bisected (especially useful
+        if running with -U/--noupdate)::
+
+          hg log -r "bisect(current)"
+
       - see all changesets that took part in the current bisection::
 
           hg log -r "bisect(range)"
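
A minimal test script following the protocol described above might look like
this (the build and test commands are placeholders, not part of Mercurial)::

    #!/usr/bin/env python
    import os
    import subprocess
    import sys

    # HG_NODE is exported by 'hg bisect --command', as documented above.
    node = os.environ.get('HG_NODE', 'unknown')
    sys.stderr.write('testing changeset %s\n' % node)
    if subprocess.call(['make', 'build']) != 0:
        sys.exit(125)  # could not build: ask bisect to skip this revision
    # 0 marks the revision good; any other non-125/127 status marks it bad
    sys.exit(0 if subprocess.call(['make', 'test']) == 0 else 1)

Such a script (the name check.py is arbitrary) would be run as
'hg bisect --command ./check.py', optionally with -U/--noupdate while
following progress with 'hg log -r "bisect(current)"'.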
@@ -647,10 +654,22 @@
     if command:
         changesets = 1
         try:
+            node = state['current'][0]
+        except LookupError:
+            if noupdate:
+                raise util.Abort(_('current bisect revision is unknown - '
+                                   'start a new bisect to fix'))
+            node, p2 = repo.dirstate.parents()
+            if p2 != nullid:
+                raise util.Abort(_('current bisect revision is a merge'))
+        try:
             while changesets:
                 # update state
+                state['current'] = [node]
                 hbisect.save_state(repo, state)
-                status = util.system(command, out=ui.fout)
+                status = util.system(command,
+                                     environ={'HG_NODE': hex(node)},
+                                     out=ui.fout)
                 if status == 125:
                     transition = "skip"
                 elif status == 0:
@@ -662,7 +681,7 @@
                     raise util.Abort(_("%s killed") % command)
                 else:
                     transition = "bad"
-                ctx = scmutil.revsingle(repo, rev)
+                ctx = scmutil.revsingle(repo, rev, node)
                 rev = None # clear for future iterations
                 state[transition].append(ctx.node())
                 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
@@ -670,9 +689,12 @@
                 # bisect
                 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                 # update to next check
-                cmdutil.bailifchanged(repo)
-                hg.clean(repo, nodes[0], show_stats=False)
+                node = nodes[0]
+                if not noupdate:
+                    cmdutil.bailifchanged(repo)
+                    hg.clean(repo, node, show_stats=False)
         finally:
+            state['current'] = [node]
             hbisect.save_state(repo, state)
         print_result(nodes, good)
         return
@@ -704,6 +726,8 @@
             if extendnode is not None:
                 ui.write(_("Extending search to changeset %d:%s\n"
                          % (extendnode.rev(), extendnode)))
+                state['current'] = [extendnode.node()]
+                hbisect.save_state(repo, state)
                 if noupdate:
                     return
                 cmdutil.bailifchanged(repo)
@@ -723,6 +747,8 @@
         ui.write(_("Testing changeset %d:%s "
                    "(%d changesets remaining, ~%d tests)\n")
                  % (rev, short(node), changesets, tests))
+        state['current'] = [node]
+        hbisect.save_state(repo, state)
         if not noupdate:
             cmdutil.bailifchanged(repo)
             return hg.clean(repo, node)
@@ -921,26 +947,26 @@
 
     for isactive, node, tag in branches:
         if (not active) or isactive:
+            hn = repo.lookup(node)
+            if isactive:
+                label = 'branches.active'
+                notice = ''
+            elif hn not in repo.branchheads(tag, closed=False):
+                if not closed:
+                    continue
+                label = 'branches.closed'
+                notice = _(' (closed)')
+            else:
+                label = 'branches.inactive'
+                notice = _(' (inactive)')
+            if tag == repo.dirstate.branch():
+                label = 'branches.current'
+            rev = str(node).rjust(31 - encoding.colwidth(tag))
+            rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
+            tag = ui.label(tag, label)
             if ui.quiet:
                 ui.write("%s\n" % tag)
             else:
-                hn = repo.lookup(node)
-                if isactive:
-                    label = 'branches.active'
-                    notice = ''
-                elif hn not in repo.branchheads(tag, closed=False):
-                    if not closed:
-                        continue
-                    label = 'branches.closed'
-                    notice = _(' (closed)')
-                else:
-                    label = 'branches.inactive'
-                    notice = _(' (inactive)')
-                if tag == repo.dirstate.branch():
-                    label = 'branches.current'
-                rev = str(node).rjust(31 - encoding.colwidth(tag))
-                rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
-                tag = ui.label(tag, label)
                 ui.write("%s %s%s\n" % (tag, rev, notice))
 
 @command('bundle',
@@ -1661,7 +1687,8 @@
         revs = set((int(r) for r in revs))
         def events():
             for r in rlog:
-                yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
+                yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
+                                        if p != -1)))
                 if r in revs:
                     yield 'l', (r, "r%i" % r)
     elif repo:
@@ -1680,7 +1707,8 @@
                     if newb != b:
                         yield 'a', newb
                         b = newb
-                yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
+                yield 'n', (r, list(set(p for p in cl.parentrevs(r)
+                                        if p != -1)))
                 if tags:
                     ls = labels.get(r)
                     if ls:
@@ -1738,7 +1766,8 @@
     _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
 def debugdiscovery(ui, repo, remoteurl="default", **opts):
     """runs the changeset discovery protocol in isolation"""
-    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
+    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
+                                      opts.get('branch'))
     remote = hg.peer(repo, opts, remoteurl)
     ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
 
@@ -1748,7 +1777,8 @@
     def doit(localheads, remoteheads):
         if opts.get('old'):
             if localheads:
-                raise util.Abort('cannot use localheads with old style discovery')
+                raise util.Abort('cannot use localheads with old style '
+                                 'discovery')
             common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                 force=True)
             common = set(common)
@@ -1875,7 +1905,8 @@
                  " nodeid       p1           p2\n")
     elif format == 1:
         ui.write("   rev flag   offset   length"
-                 "     size " + basehdr + "   link     p1     p2       nodeid\n")
+                 "     size " + basehdr + "   link     p1     p2"
+                 "       nodeid\n")
 
     for i in r:
         node = r.node(i)
@@ -1886,7 +1917,7 @@
         if format == 0:
             try:
                 pp = r.parents(node)
-            except:
+            except Exception:
                 pp = [nullid, nullid]
             ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                     i, r.start(i), r.length(i), base, r.linkrev(i),
@@ -2000,8 +2031,8 @@
 def debugknown(ui, repopath, *ids, **opts):
     """test whether node ids are known to a repo
 
-    Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
-    indicating unknown/known.
+    Every ID must be a full-length hex node id string. Returns a list of 0s
+    and 1s indicating unknown/known.
     """
     repo = hg.peer(ui, opts, repopath)
     if not repo.capable('known'):
@@ -2233,13 +2264,17 @@
         fmt2 = pcfmtstr(numdeltas, 4)
         ui.write('deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas))
         if numprev > 0:
-            ui.write('    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev))
-            ui.write('    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev))
-            ui.write('    other            : ' + fmt2 % pcfmt(numoprev, numprev))
+            ui.write('    where prev = p1  : ' + fmt2 % pcfmt(nump1prev,
+                                                              numprev))
+            ui.write('    where prev = p2  : ' + fmt2 % pcfmt(nump2prev,
+                                                              numprev))
+            ui.write('    other            : ' + fmt2 % pcfmt(numoprev,
+                                                              numprev))
         if gdelta:
             ui.write('deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas))
             ui.write('deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas))
-            ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
+            ui.write('deltas against other : ' + fmt % pcfmt(numother,
+                                                             numdeltas))
 
 @command('debugrevspec', [], ('REVSPEC'))
 def debugrevspec(ui, repo, expr):
@@ -2555,6 +2590,7 @@
     'graft',
     [('c', 'continue', False, _('resume interrupted graft')),
      ('e', 'edit', False, _('invoke editor on commit messages')),
+     ('', 'log', None, _('append graft info to log message')),
      ('D', 'currentdate', False,
       _('record the current date as commit date')),
      ('U', 'currentuser', False,
@@ -2573,6 +2609,11 @@
     Changesets that are ancestors of the current revision, that have
     already been grafted, or that are merges will be skipped.
 
+    If --log is specified, log messages will have a comment appended
+    of the form::
+
+      (grafted from CHANGESETHASH)
+
     If a graft merge results in conflicts, the graft process is
     interrupted so that the current merge can be manually resolved.
     Once all conflicts are addressed, the graft process can be
@@ -2722,8 +2763,13 @@
             date = ctx.date()
             if opts.get('date'):
                 date = opts['date']
-            repo.commit(text=ctx.description(), user=user,
+            message = ctx.description()
+            if opts.get('log'):
+                message += '\n(grafted from %s)' % ctx.hex()
+            node = repo.commit(text=message, user=user,
                         date=date, extra=extra, editor=editor)
+            if node is None:
+                ui.status(_('graft for revision %s is empty\n') % ctx.rev())
     finally:
         wlock.release()
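
As a standalone illustration of the --log behaviour described earlier (not
the graft implementation itself; the helper name is hypothetical), the note
is a plain suffix on the commit message::

    def addgraftnote(message, ctxhex):
        # Append the "(grafted from ...)" line; ctxhex is the full hex
        # node of the changeset being grafted.
        return message + '\n(grafted from %s)' % ctxhex

    print(addgraftnote('fix parser crash', '9' * 40))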
 
@@ -4347,10 +4393,10 @@
         lock = repo.lock()
         try:
             # set phase
-            nodes = [ctx.node() for ctx in repo.set('%ld', revs)]
-            if not nodes:
-                raise util.Abort(_('empty revision set'))
-            olddata = repo._phaserev[:]
+            if not revs:
+                raise util.Abort(_('empty revision set'))
+            nodes = [repo[r].node() for r in revs]
+            olddata = repo._phasecache.getphaserevs(repo)[:]
             phases.advanceboundary(repo, targetphase, nodes)
             if opts['force']:
                 phases.retractboundary(repo, targetphase, nodes)
@@ -4358,7 +4404,7 @@
             lock.release()
         if olddata is not None:
             changes = 0
-            newdata = repo._phaserev
+            newdata = repo._phasecache.getphaserevs(repo)
             changes = sum(o != newdata[i] for i, o in enumerate(olddata))
             rejected = [n for n in nodes
                         if newdata[repo[n].rev()] < targetphase]
@@ -4396,7 +4442,8 @@
         if currentbranchheads == modheads:
             ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
         elif currentbranchheads > 1:
-            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
+            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
+                        "merge)\n"))
         else:
             ui.status(_("(run 'hg heads' to see heads)\n"))
     else:
@@ -5368,7 +5415,8 @@
         t = []
         source, branches = hg.parseurl(ui.expandpath('default'))
         other = hg.peer(repo, {}, source)
-        revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
+        revs, checkout = hg.addbranchrevs(repo, other, branches,
+                                          opts.get('rev'))
         ui.debug('comparing with %s\n' % util.hidepassword(source))
         repo.ui.pushbuffer()
         commoninc = discovery.findcommonincoming(repo, other)
@@ -5586,9 +5634,9 @@
             f = url.open(ui, fname)
             gen = changegroup.readbundle(f, fname)
             modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
-        bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
     finally:
         lock.release()
+    bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
     return postincoming(ui, repo, modheads, opts.get('update'), None)
 
 @command('^update|up|checkout|co',
--- a/mercurial/commandserver.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/commandserver.py	Sun May 13 12:52:24 2012 +0200
@@ -142,8 +142,8 @@
             else:
                 logfile = open(logpath, 'a')
 
-        # the ui here is really the repo ui so take its baseui so we don't end up
-        # with its local configuration
+        # the ui here is really the repo ui so take its baseui so we don't end
+        # up with its local configuration
         self.ui = repo.baseui
         self.repo = repo
         self.repoui = repo.ui
@@ -166,7 +166,7 @@
 
         # is the other end closed?
         if not data:
-            raise EOFError()
+            raise EOFError
 
         return data
 
--- a/mercurial/context.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/context.py	Sun May 13 12:52:24 2012 +0200
@@ -8,6 +8,7 @@
 from node import nullid, nullrev, short, hex, bin
 from i18n import _
 import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
+import copies
 import match as matchmod
 import os, errno, stat
 
@@ -190,12 +191,7 @@
     def bookmarks(self):
         return self._repo.nodebookmarks(self._node)
     def phase(self):
-        if self._rev == -1:
-            return phases.public
-        if self._rev >= len(self._repo._phaserev):
-            # outdated cache
-            del self._repo._phaserev
-        return self._repo._phaserev[self._rev]
+        return self._repo._phasecache.phase(self._repo, self._rev)
     def phasestr(self):
         return phases.phasenames[self.phase()]
     def mutable(self):
@@ -237,7 +233,8 @@
                                         _('not found in manifest'))
         if '_manifestdelta' in self.__dict__ or path in self.files():
             if path in self._manifestdelta:
-                return self._manifestdelta[path], self._manifestdelta.flags(path)
+                return (self._manifestdelta[path],
+                        self._manifestdelta.flags(path))
         node, flag = self._repo.manifest.find(self._changeset[0], path)
         if not node:
             raise error.LookupError(self._node, path,
@@ -634,27 +631,27 @@
 
         return zip(hist[base][0], hist[base][1].splitlines(True))
 
-    def ancestor(self, fc2, actx=None):
+    def ancestor(self, fc2, actx):
         """
         find the common ancestor file context, if any, of self, and fc2
 
-        If actx is given, it must be the changectx of the common ancestor
+        actx must be the changectx of the common ancestor
         of self's and fc2's respective changesets.
         """
 
-        if actx is None:
-            actx = self.changectx().ancestor(fc2.changectx())
-
-        # the trivial case: changesets are unrelated, files must be too
-        if not actx:
-            return None
-
         # the easy case: no (relevant) renames
         if fc2.path() == self.path() and self.path() in actx:
             return actx[self.path()]
-        acache = {}
+
+        # the next easiest cases: unambiguous predecessor (name trumps
+        # history)
+        if self.path() in actx and fc2.path() not in actx:
+            return actx[self.path()]
+        if fc2.path() in actx and self.path() not in actx:
+            return actx[fc2.path()]
 
         # prime the ancestor cache for the working directory
+        acache = {}
         for c in (self, fc2):
             if c._filerev is None:
                 pl = [(n.path(), n.filenode()) for n in c.parents()]
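
The "name trumps history" shortcut added above can be illustrated over plain
paths (a sketch using a set of ancestor paths rather than filectx objects;
the function name is made up for illustration)::

    def pickancestorpath(path1, path2, ancestorpaths):
        # Prefer the path that unambiguously exists in the ancestor;
        # otherwise signal that rename history must be walked.
        if path1 == path2 and path1 in ancestorpaths:
            return path1
        if path1 in ancestorpaths and path2 not in ancestorpaths:
            return path1
        if path2 in ancestorpaths and path1 not in ancestorpaths:
            return path2
        return None  # ambiguous: fall back to the ancestor-cache walk

    print(pickancestorpath('a.txt', 'b.txt', set(['a.txt'])))  # -> a.txt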
@@ -695,6 +692,14 @@
             c = visit.pop(max(visit))
             yield c
 
+    def copies(self, c2):
+        if not util.safehasattr(self, "_copycache"):
+            self._copycache = {}
+        sc2 = str(c2)
+        if sc2 not in self._copycache:
+            self._copycache[sc2] = copies.pathcopies(c2)
+        return self._copycache[sc2]
+
 class workingctx(changectx):
     """A workingctx object makes access to data related to
     the current working directory convenient.
--- a/mercurial/dagparser.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/dagparser.py	Sun May 13 12:52:24 2012 +0200
@@ -268,7 +268,8 @@
                 s += c
                 i += 1
                 c = nextch()
-            raise util.Abort(_("invalid character in dag description: %s...") % s)
+            raise util.Abort(_('invalid character in dag description: '
+                               '%s...') % s)
 
 def dagtextlines(events,
                  addspaces=True,
@@ -436,7 +437,9 @@
         >>> dagtext([('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))])
         '+1 @ann +1'
 
-        >>> dagtext([('n', (0, [-1])), ('a', 'my annotation'), ('n', (1, [0]))])
+        >>> dagtext([('n', (0, [-1])),
+        ...          ('a', 'my annotation'),
+        ...          ('n', (1, [0]))])
         '+1 @"my annotation" +1'
 
     Commands:
@@ -447,7 +450,9 @@
         >>> dagtext([('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))])
         '+1 !"my command" +1'
 
-        >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))])
+        >>> dagtext([('n', (0, [-1])),
+        ...          ('C', 'my command line'),
+        ...          ('n', (1, [0]))])
         '+1 !!my command line\\n+1'
 
     Comments:
--- a/mercurial/dagutil.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/dagutil.py	Sun May 13 12:52:24 2012 +0200
@@ -26,25 +26,25 @@
 
     def nodeset(self):
         '''set of all node idxs'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def heads(self):
         '''list of head ixs'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def parents(self, ix):
         '''list of parents ixs of ix'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def inverse(self):
         '''inverse DAG, where parents becomes children, etc.'''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def ancestorset(self, starts, stops=None):
         '''
         set of all ancestors of starts (incl), but stop walk at stops (excl)
         '''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def descendantset(self, starts, stops=None):
         '''
@@ -59,7 +59,7 @@
         By "connected list" we mean that if an ancestor and a descendant are in
         the list, then so is at least one path connecting them.
         '''
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def externalize(self, ix):
         '''return a list of (or set if given a set) of node ids'''
--- a/mercurial/diffhelpers.c	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/diffhelpers.c	Sun May 13 12:52:24 2012 +0200
@@ -20,14 +20,14 @@
 /* fixup the last lines of a and b when the patch has no newline at eof */
 static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
 {
-	int hunksz = PyList_Size(hunk);
+	Py_ssize_t hunksz = PyList_Size(hunk);
 	PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
 	char *l = PyBytes_AsString(s);
-	int alen = PyList_Size(a);
-	int blen = PyList_Size(b);
+	Py_ssize_t alen = PyList_Size(a);
+	Py_ssize_t blen = PyList_Size(b);
 	char c = l[0];
 	PyObject *hline;
-	int sz = PyBytes_GET_SIZE(s);
+	Py_ssize_t sz = PyBytes_GET_SIZE(s);
 
 	if (sz > 1 && l[sz-2] == '\r')
 		/* tolerate CRLF in last line */
@@ -57,6 +57,12 @@
 	return Py_BuildValue("l", 0);
 }
 
+#if (PY_VERSION_HEX < 0x02050000)
+static const char *addlines_format = "OOiiOO";
+#else
+static const char *addlines_format = "OOnnOO";
+#endif
+
 /*
  * read lines from fp into the hunk.  The hunk is parsed into two arrays
  * a and b.  a gets the old state of the text, b gets the new state
@@ -68,13 +74,14 @@
 {
 
 	PyObject *fp, *hunk, *a, *b, *x;
-	int i;
-	int lena, lenb;
-	int num;
-	int todoa, todob;
+	Py_ssize_t i;
+	Py_ssize_t lena, lenb;
+	Py_ssize_t num;
+	Py_ssize_t todoa, todob;
 	char *s, c;
 	PyObject *l;
-	if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b))
+	if (!PyArg_ParseTuple(args, addlines_format,
+			      &fp, &hunk, &lena, &lenb, &a, &b))
 		return NULL;
 
 	while (1) {
@@ -127,8 +134,8 @@
 
 	PyObject *a, *b;
 	long bstart;
-	int alen, blen;
-	int i;
+	Py_ssize_t alen, blen;
+	Py_ssize_t i;
 	char *sa, *sb;
 
 	if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
--- a/mercurial/dirstate.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/dirstate.py	Sun May 13 12:52:24 2012 +0200
@@ -695,7 +695,8 @@
         if not skipstep3 and not exact:
             visit = sorted([f for f in dmap if f not in results and matchfn(f)])
             for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
-                if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
+                if (not st is None and
+                    getkind(st.st_mode) not in (regkind, lnkkind)):
                     st = None
                 results[nf] = st
         for s in subrepos:
--- a/mercurial/discovery.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/discovery.py	Sun May 13 12:52:24 2012 +0200
@@ -86,13 +86,14 @@
             self._computecommonmissing()
         return self._missing
 
-def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None):
+def findcommonoutgoing(repo, other, onlyheads=None, force=False,
+                       commoninc=None):
     '''Return an outgoing instance to identify the nodes present in repo but
     not in other.
 
-    If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
-    are included. If you already know the local repo's heads, passing them in
-    onlyheads is faster than letting them be recomputed here.
+    If onlyheads is given, only nodes ancestral to nodes in onlyheads
+    (inclusive) are included. If you already know the local repo's heads,
+    passing them in onlyheads is faster than letting them be recomputed here.
 
     If commoninc is given, it must be the result of a prior call to
     findcommonincoming(repo, other, force) to avoid recomputing it here.'''
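
A sketch of the reuse pattern the docstring above suggests, assuming repo and
other are an existing local repository and peer (the wrapper function itself
is hypothetical)::

    from mercurial import discovery

    def summarize(repo, other):
        # Compute the common/incoming data once and hand it back in via
        # commoninc so findcommonoutgoing() does not recompute it.
        commoninc = discovery.findcommonincoming(repo, other)
        out = discovery.findcommonoutgoing(repo, other, commoninc=commoninc)
        return bool(commoninc[1]), len(out.missing)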
@@ -105,7 +106,7 @@
     og.commonheads, _any, _hds = commoninc
 
     # compute outgoing
-    if not repo._phaseroots[phases.secret]:
+    if not repo._phasecache.phaseroots[phases.secret]:
         og.missingheads = onlyheads or repo.heads()
     elif onlyheads is None:
         # use visible heads as it should be cached
--- a/mercurial/dispatch.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/dispatch.py	Sun May 13 12:52:24 2012 +0200
@@ -12,7 +12,8 @@
 import ui as uimod
 
 class request(object):
-    def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None):
+    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
+                 ferr=None):
         self.args = args
         self.ui = ui
         self.repo = repo
@@ -532,7 +533,8 @@
 
     if cmd and util.safehasattr(fn, 'shell'):
         d = lambda: fn(ui, *args[1:])
-        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
+        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
+                                  [], {})
 
     restorecommands()
 
@@ -680,7 +682,8 @@
                             return _dispatch(req)
                     if not path:
                         raise error.RepoError(_("no repository found in '%s'"
-                                                " (.hg not found)") % os.getcwd())
+                                                " (.hg not found)")
+                                              % os.getcwd())
                     raise
         if repo:
             ui = repo.ui
@@ -703,7 +706,7 @@
     field = ui.config('profiling', 'sort', default='inlinetime')
     climit = ui.configint('profiling', 'nested', default=5)
 
-    if not format in ['text', 'kcachegrind']:
+    if format not in ['text', 'kcachegrind']:
         ui.warn(_("unrecognized profiling format '%s'"
                     " - Ignored\n") % format)
         format = 'text'
--- a/mercurial/extensions.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/extensions.py	Sun May 13 12:52:24 2012 +0200
@@ -301,7 +301,7 @@
 
 def disabledcmd(ui, cmd, strict=False):
     '''import disabled extensions until cmd is found.
-    returns (cmdname, extname, doc)'''
+    returns (cmdname, extname, module)'''
 
     paths = _disabledpaths(strip_init=True)
     if not paths:
@@ -329,18 +329,19 @@
             cmd = aliases[0]
         return (cmd, name, mod)
 
+    ext = None
     # first, search for an extension with the same name as the command
     path = paths.pop(cmd, None)
     if path:
         ext = findcmd(cmd, cmd, path)
-        if ext:
-            return ext
-
-    # otherwise, interrogate each extension until there's a match
-    for name, path in paths.iteritems():
-        ext = findcmd(cmd, name, path)
-        if ext:
-            return ext
+    if not ext:
+        # otherwise, interrogate each extension until there's a match
+        for name, path in paths.iteritems():
+            ext = findcmd(cmd, name, path)
+            if ext:
+                break
+    if ext and 'DEPRECATED' not in ext.__doc__:
+        return ext
 
     raise error.UnknownCommand(cmd)
 
--- a/mercurial/hbisect.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/hbisect.py	Sun May 13 12:52:24 2012 +0200
@@ -132,7 +132,7 @@
 
 
 def load_state(repo):
-    state = {'good': [], 'bad': [], 'skip': []}
+    state = {'current': [], 'good': [], 'bad': [], 'skip': []}
     if os.path.exists(repo.join("bisect.state")):
         for l in repo.opener("bisect.state"):
             kind, node = l[:-1].split()
@@ -164,10 +164,11 @@
     - ``pruned``             : csets that are goods, bads or skipped
     - ``untested``           : csets whose fate is yet unknown
     - ``ignored``            : csets ignored due to DAG topology
+    - ``current``            : the cset currently being bisected
     """
     state = load_state(repo)
-    if status in ('good', 'bad', 'skip'):
-        return [repo.changelog.rev(n) for n in state[status]]
+    if status in ('good', 'bad', 'skip', 'current'):
+        return map(repo.changelog.rev, state[status])
     else:
         # In the following sets, we do *not* call 'bisect()' with more
         # than one level of recursion, because that can be very, very
@@ -233,7 +234,7 @@
     if rev in get(repo, 'skip'):
         # i18n: bisect changeset status
         return _('skipped')
-    if rev in get(repo, 'untested'):
+    if rev in get(repo, 'untested') or rev in get(repo, 'current'):
         # i18n: bisect changeset status
         return _('untested')
     if rev in get(repo, 'ignored'):
--- a/mercurial/hgweb/common.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/hgweb/common.py	Sun May 13 12:52:24 2012 +0200
@@ -95,7 +95,7 @@
     def __getattr__(self, attr):
         if attr in ('close', 'readline', 'readlines', '__iter__'):
             return getattr(self.f, attr)
-        raise AttributeError()
+        raise AttributeError
 
 def _statusmessage(code):
     from BaseHTTPServer import BaseHTTPRequestHandler
--- a/mercurial/hgweb/hgweb_mod.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/hgweb/hgweb_mod.py	Sun May 13 12:52:24 2012 +0200
@@ -73,7 +73,8 @@
             self.repo = hg.repository(self.repo.ui, self.repo.root)
             self.maxchanges = int(self.config("web", "maxchanges", 10))
             self.stripecount = int(self.config("web", "stripes", 1))
-            self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
+            self.maxshortchanges = int(self.config("web", "maxshortchanges",
+                                                   60))
             self.maxfiles = int(self.config("web", "maxfiles", 10))
             self.allowpull = self.configbool("web", "allowpull", True)
             encoding.encoding = self.config("web", "encoding",
--- a/mercurial/httpclient/__init__.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/httpclient/__init__.py	Sun May 13 12:52:24 2012 +0200
@@ -45,6 +45,7 @@
 import select
 import socket
 
+import _readers
 import socketutil
 
 logger = logging.getLogger(__name__)
@@ -54,8 +55,6 @@
 HTTP_VER_1_0 = 'HTTP/1.0'
 HTTP_VER_1_1 = 'HTTP/1.1'
 
-_LEN_CLOSE_IS_END = -1
-
 OUTGOING_BUFFER_SIZE = 1 << 15
 INCOMING_BUFFER_SIZE = 1 << 20
 
@@ -83,23 +82,19 @@
     The response will continue to load as available. If you need the
     complete response before continuing, check the .complete() method.
     """
-    def __init__(self, sock, timeout):
+    def __init__(self, sock, timeout, method):
         self.sock = sock
+        self.method = method
         self.raw_response = ''
-        self._body = None
         self._headers_len = 0
-        self._content_len = 0
         self.headers = None
         self.will_close = False
         self.status_line = ''
         self.status = None
+        self.continued = False
         self.http_version = None
         self.reason = None
-        self._chunked = False
-        self._chunked_done = False
-        self._chunked_until_next = 0
-        self._chunked_skip_bytes = 0
-        self._chunked_preloaded_block = None
+        self._reader = None
 
         self._read_location = 0
         self._eol = EOL
@@ -117,11 +112,12 @@
         socket is closed, this will nearly always return False, even
         in cases where all the data has actually been loaded.
         """
-        if self._chunked:
-            return self._chunked_done
-        if self._content_len == _LEN_CLOSE_IS_END:
-            return False
-        return self._body is not None and len(self._body) >= self._content_len
+        if self._reader:
+            return self._reader.done()
+
+    def _close(self):
+        if self._reader is not None:
+            self._reader._close()
 
     def readline(self):
         """Read a single line from the response body.
@@ -129,30 +125,34 @@
         This may block until either a line ending is found or the
         response is complete.
         """
-        eol = self._body.find('\n', self._read_location)
-        while eol == -1 and not self.complete():
+        # TODO: move this into the reader interface where it can be
+        # smarter (and probably avoid copies)
+        bytes = []
+        while not bytes:
+            try:
+                bytes = [self._reader.read(1)]
+            except _readers.ReadNotReady:
+                self._select()
+        while bytes[-1] != '\n' and not self.complete():
             self._select()
-            eol = self._body.find('\n', self._read_location)
-        if eol != -1:
-            eol += 1
-        else:
-            eol = len(self._body)
-        data = self._body[self._read_location:eol]
-        self._read_location = eol
-        return data
+            bytes.append(self._reader.read(1))
+        if bytes[-1] != '\n':
+            next = self._reader.read(1)
+            while next and next != '\n':
+                bytes.append(next)
+                next = self._reader.read(1)
+            bytes.append(next)
+        return ''.join(bytes)
 
     def read(self, length=None):
         # if length is None, unbounded read
         while (not self.complete()  # never select on a finished read
                and (not length  # unbounded, so we wait for complete()
-                    or (self._read_location + length) > len(self._body))):
+                    or length > self._reader.available_data)):
             self._select()
         if not length:
-            length = len(self._body) - self._read_location
-        elif len(self._body) < (self._read_location + length):
-            length = len(self._body) - self._read_location
-        r = self._body[self._read_location:self._read_location + length]
-        self._read_location += len(r)
+            length = self._reader.available_data
+        r = self._reader.read(length)
         if self.complete() and self.will_close:
             self.sock.close()
         return r
@@ -160,93 +160,35 @@
     def _select(self):
         r, _, _ = select.select([self.sock], [], [], self._timeout)
         if not r:
-            # socket was not readable. If the response is not complete
-            # and we're not a _LEN_CLOSE_IS_END response, raise a timeout.
-            # If we are a _LEN_CLOSE_IS_END response and we have no data,
-            # raise a timeout.
-            if not (self.complete() or
-                    (self._content_len == _LEN_CLOSE_IS_END and self._body)):
+            # socket was not readable. If the response is not
+            # complete, raise a timeout.
+            if not self.complete():
                 logger.info('timed out with timeout of %s', self._timeout)
                 raise HTTPTimeoutException('timeout reading data')
-            logger.info('cl: %r body: %r', self._content_len, self._body)
         try:
             data = self.sock.recv(INCOMING_BUFFER_SIZE)
-            # If the socket was readable and no data was read, that
-            # means the socket was closed. If this isn't a
-            # _CLOSE_IS_END socket, then something is wrong if we're
-            # here (we shouldn't enter _select() if the response is
-            # complete), so abort.
-            if not data and self._content_len != _LEN_CLOSE_IS_END:
-                raise HTTPRemoteClosedError(
-                    'server appears to have closed the socket mid-response')
         except socket.sslerror, e:
             if e.args[0] != socket.SSL_ERROR_WANT_READ:
                 raise
             logger.debug('SSL_WANT_READ in _select, should retry later')
             return True
         logger.debug('response read %d data during _select', len(data))
+        # If the socket was readable and no data was read, that means
+        # the socket was closed. Inform the reader (if any) so it can
+        # raise an exception if this is an invalid situation.
         if not data:
-            if self.headers and self._content_len == _LEN_CLOSE_IS_END:
-                self._content_len = len(self._body)
+            if self._reader:
+                self._reader._close()
             return False
         else:
             self._load_response(data)
             return True
 
-    def _chunked_parsedata(self, data):
-        if self._chunked_preloaded_block:
-            data = self._chunked_preloaded_block + data
-            self._chunked_preloaded_block = None
-        while data:
-            logger.debug('looping with %d data remaining', len(data))
-            # Slice out anything we should skip
-            if self._chunked_skip_bytes:
-                if len(data) <= self._chunked_skip_bytes:
-                    self._chunked_skip_bytes -= len(data)
-                    data = ''
-                    break
-                else:
-                    data = data[self._chunked_skip_bytes:]
-                    self._chunked_skip_bytes = 0
-
-            # determine how much is until the next chunk
-            if self._chunked_until_next:
-                amt = self._chunked_until_next
-                logger.debug('reading remaining %d of existing chunk', amt)
-                self._chunked_until_next = 0
-                body = data
-            else:
-                try:
-                    amt, body = data.split(self._eol, 1)
-                except ValueError:
-                    self._chunked_preloaded_block = data
-                    logger.debug('saving %r as a preloaded block for chunked',
-                                 self._chunked_preloaded_block)
-                    return
-                amt = int(amt, base=16)
-                logger.debug('reading chunk of length %d', amt)
-                if amt == 0:
-                    self._chunked_done = True
-
-            # read through end of what we have or the chunk
-            self._body += body[:amt]
-            if len(body) >= amt:
-                data = body[amt:]
-                self._chunked_skip_bytes = len(self._eol)
-            else:
-                self._chunked_until_next = amt - len(body)
-                self._chunked_skip_bytes = 0
-                data = ''
-
     def _load_response(self, data):
-        if self._chunked:
-            self._chunked_parsedata(data)
-            return
-        elif self._body is not None:
-            self._body += data
-            return
-
-        # We haven't seen end of headers yet
+        # Being here implies we're not at the end of the headers yet,
+        # since at the end of this method if headers were completely
+        # loaded we replace this method with the load() method of the
+        # reader we created.
         self.raw_response += data
         # This is a bogus server with bad line endings
         if self._eol not in self.raw_response:
@@ -270,6 +212,7 @@
         http_ver, status = hdrs.split(' ', 1)
         if status.startswith('100'):
             self.raw_response = body
+            self.continued = True
             logger.debug('continue seen, setting body to %r', body)
             return
 
@@ -289,23 +232,46 @@
         if self._eol != EOL:
             hdrs = hdrs.replace(self._eol, '\r\n')
         headers = rfc822.Message(cStringIO.StringIO(hdrs))
+        content_len = None
         if HDR_CONTENT_LENGTH in headers:
-            self._content_len = int(headers[HDR_CONTENT_LENGTH])
+            content_len = int(headers[HDR_CONTENT_LENGTH])
         if self.http_version == HTTP_VER_1_0:
             self.will_close = True
         elif HDR_CONNECTION_CTRL in headers:
             self.will_close = (
                 headers[HDR_CONNECTION_CTRL].lower() == CONNECTION_CLOSE)
-            if self._content_len == 0:
-                self._content_len = _LEN_CLOSE_IS_END
         if (HDR_XFER_ENCODING in headers
             and headers[HDR_XFER_ENCODING].lower() == XFER_ENCODING_CHUNKED):
-            self._body = ''
-            self._chunked_parsedata(body)
-            self._chunked = True
-        if self._body is None:
-            self._body = body
+            self._reader = _readers.ChunkedReader(self._eol)
+            logger.debug('using a chunked reader')
+        else:
+            # HEAD responses are forbidden from returning a body, and
+            # it's implausible for a CONNECT response to use
+            # close-is-end logic for an OK response.
+            if (self.method == 'HEAD' or
+                (self.method == 'CONNECT' and content_len is None)):
+                content_len = 0
+            if content_len is not None:
+                logger.debug('using a content-length reader with length %d',
+                             content_len)
+                self._reader = _readers.ContentLengthReader(content_len)
+            else:
+                # Response body had no length specified and is not
+                # chunked, so the end of the body will only be
+                # identifiable by the termination of the socket by the
+                # server. My interpretation of the spec means that we
+                # are correct in hitting this case if
+                # transfer-encoding, content-length, and
+                # connection-control were left unspecified.
+                self._reader = _readers.CloseIsEndReader()
+                logger.debug('using a close-is-end reader')
+                self.will_close = True
+
+        if body:
+            self._reader._load(body)
+        logger.debug('headers complete')
         self.headers = headers
+        self._load_response = self._reader._load
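
The reader selection spelled out in the comments above can be condensed into
a standalone sketch (a plain dict of lower-cased header names stands in for
the parsed headers, and string labels stand in for the _readers classes; the
function name is made up for illustration)::

    def choosereader(method, headers):
        if headers.get('transfer-encoding', '').lower() == 'chunked':
            return 'ChunkedReader'
        contentlen = headers.get('content-length')
        # HEAD never carries a body; a CONNECT reply without a length is
        # treated the same way, as explained above.
        if method == 'HEAD' or (method == 'CONNECT' and contentlen is None):
            contentlen = 0
        if contentlen is not None:
            return 'ContentLengthReader(%d)' % int(contentlen)
        # not chunked and no length: the body ends when the server closes
        return 'CloseIsEndReader'

    print(choosereader('GET', {'content-length': '12'}))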
 
 
 class HTTPConnection(object):
@@ -382,13 +348,14 @@
                                          {}, HTTP_VER_1_0)
                 sock.send(data)
                 sock.setblocking(0)
-                r = self.response_class(sock, self.timeout)
+                r = self.response_class(sock, self.timeout, 'CONNECT')
                 timeout_exc = HTTPTimeoutException(
                     'Timed out waiting for CONNECT response from proxy')
                 while not r.complete():
                     try:
                         if not r._select():
-                            raise timeout_exc
+                            if not r.complete():
+                                raise timeout_exc
                     except HTTPTimeoutException:
                         # This raise/except pattern looks goofy, but
                         # _select can raise the timeout as well as the
@@ -527,7 +494,7 @@
             out = outgoing_headers or body
             blocking_on_continue = False
             if expect_continue and not outgoing_headers and not (
-                response and response.headers):
+                response and (response.headers or response.continued)):
                 logger.info(
                     'waiting up to %s seconds for'
                     ' continue response from server',
@@ -550,11 +517,6 @@
                                 'server, optimistically sending request body')
                 else:
                     raise HTTPTimeoutException('timeout sending data')
-            # TODO exceptional conditions with select? (what are those be?)
-            # TODO if the response is loading, must we finish sending at all?
-            #
-            # Certainly not if it's going to close the connection and/or
-            # the response is already done...I think.
             was_first = first
 
             # incoming data
@@ -572,11 +534,11 @@
                         logger.info('socket appears closed in read')
                         self.sock = None
                         self._current_response = None
+                        if response is not None:
+                            response._close()
                         # This if/elif ladder is a bit subtle,
                         # comments in each branch should help.
-                        if response is not None and (
-                            response.complete() or
-                            response._content_len == _LEN_CLOSE_IS_END):
+                        if response is not None and response.complete():
                             # Server responded completely and then
                             # closed the socket. We should just shut
                             # things down and let the caller get their
@@ -605,7 +567,7 @@
                                 'response was missing or incomplete!')
                     logger.debug('read %d bytes in request()', len(data))
                     if response is None:
-                        response = self.response_class(r[0], self.timeout)
+                        response = self.response_class(r[0], self.timeout, method)
                     response._load_response(data)
                     # Jump to the next select() call so we load more
                     # data if the server is still sending us content.
@@ -613,10 +575,6 @@
                 except socket.error, e:
                     if e[0] != errno.EPIPE and not was_first:
                         raise
-                    if (response._content_len
-                        and response._content_len != _LEN_CLOSE_IS_END):
-                        outgoing_headers = sent_data + outgoing_headers
-                        reconnect('read')
 
             # outgoing data
             if w and out:
@@ -661,7 +619,7 @@
         # close if the server response said to or responded before eating
         # the whole request
         if response is None:
-            response = self.response_class(self.sock, self.timeout)
+            response = self.response_class(self.sock, self.timeout, method)
         complete = response.complete()
         data_left = bool(outgoing_headers or body)
         if data_left:
@@ -679,7 +637,8 @@
             raise httplib.ResponseNotReady()
         r = self._current_response
         while r.headers is None:
-            r._select()
+            if not r._select() and not r.complete():
+                raise _readers.HTTPRemoteClosedError()
         if r.will_close:
             self.sock = None
             self._current_response = None
@@ -705,7 +664,7 @@
 class HTTPStateError(httplib.HTTPException):
     """Invalid internal state encountered."""
 
-
-class HTTPRemoteClosedError(httplib.HTTPException):
-    """The server closed the remote socket in the middle of a response."""
+# Forward this exception type from _readers since it needs to be part
+# of the public API.
+HTTPRemoteClosedError = _readers.HTTPRemoteClosedError
 # no-check-code
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/httpclient/_readers.py	Sun May 13 12:52:24 2012 +0200
@@ -0,0 +1,195 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Reader objects to abstract out different body response types.
+
+This module is package-private. It is not expected that these will
+have any clients outside of httpplus.
+"""
+
+import httplib
+import itertools
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class ReadNotReady(Exception):
+    """Raised when read() is attempted but not enough data is loaded."""
+
+
+class HTTPRemoteClosedError(httplib.HTTPException):
+    """The server closed the remote socket in the middle of a response."""
+
+
+class AbstractReader(object):
+    """Abstract base class for response readers.
+
+    Subclasses must implement _load, and should implement _close if
+    it's not an error for the server to close their socket without
+    some termination condition being detected during _load.
+    """
+    def __init__(self):
+        self._finished = False
+        self._done_chunks = []
+
+    @property
+    def available_data(self):
+        return sum(map(len, self._done_chunks))
+
+    def done(self):
+        return self._finished
+
+    def read(self, amt):
+        if self.available_data < amt and not self._finished:
+            raise ReadNotReady()
+        need = [amt]
+        def pred(s):
+            needed = need[0] > 0
+            need[0] -= len(s)
+            return needed
+        blocks = list(itertools.takewhile(pred, self._done_chunks))
+        self._done_chunks = self._done_chunks[len(blocks):]
+        over_read = sum(map(len, blocks)) - amt
+        if over_read > 0 and blocks:
+            logger.debug('need to reinsert %d bytes into done chunks', over_read)
+            last = blocks[-1]
+            blocks[-1], reinsert = last[:-over_read], last[-over_read:]
+            self._done_chunks.insert(0, reinsert)
+        result = ''.join(blocks)
+        assert len(result) == amt or (self._finished and len(result) < amt)
+        return result
+
+    def _load(self, data): # pragma: no cover
+        """Subclasses must implement this.
+
+        As data is available to be read out of this object, it should
+        be placed into the _done_chunks list. Subclasses should not
+        rely on data remaining in _done_chunks forever, as it may be
+        reaped if the client is parsing data as it comes in.
+        """
+        raise NotImplementedError
+
+    def _close(self):
+        """Default implementation of close.
+
+        The default implementation assumes that the reader will mark
+        the response as finished on the _finished attribute once the
+        entire response body has been read. In the event that this is
+        not true, the subclass should override the implementation of
+        close (for example, close-is-end responses have to set
+        self._finished in the close handler).
+        """
+        if not self._finished:
+            raise HTTPRemoteClosedError(
+                'server appears to have closed the socket mid-response')
+
+
+class AbstractSimpleReader(AbstractReader):
+    """Abstract base class for simple readers that require no response decoding.
+
+    Examples of such responses are Connection: Close (close-is-end)
+    and responses that specify a content length.
+    """
+    def _load(self, data):
+        if data:
+            assert not self._finished, (
+                'tried to add data (%r) to a closed reader!' % data)
+        logger.debug('%s read an additional %d bytes of data', self.name, len(data))
+        self._done_chunks.append(data)
+
+
+class CloseIsEndReader(AbstractSimpleReader):
+    """Reader for responses that specify Connection: Close for length."""
+    name = 'close-is-end'
+
+    def _close(self):
+        logger.info('Marking close-is-end reader as closed.')
+        self._finished = True
+
+
+class ContentLengthReader(AbstractSimpleReader):
+    """Reader for responses that specify an exact content length."""
+    name = 'content-length'
+
+    def __init__(self, amount):
+        AbstractReader.__init__(self)
+        self._amount = amount
+        if amount == 0:
+            self._finished = True
+        self._amount_seen = 0
+
+    def _load(self, data):
+        AbstractSimpleReader._load(self, data)
+        self._amount_seen += len(data)
+        if self._amount_seen >= self._amount:
+            self._finished = True
+            logger.debug('content-length read complete')
+
+
+class ChunkedReader(AbstractReader):
+    """Reader for chunked transfer encoding responses."""
+    def __init__(self, eol):
+        AbstractReader.__init__(self)
+        self._eol = eol
+        self._leftover_skip_amt = 0
+        self._leftover_data = ''
+
+    def _load(self, data):
+        assert not self._finished, 'tried to add data to a closed reader!'
+        logger.debug('chunked read an additional %d bytes of data', len(data))
+        position = 0
+        if self._leftover_data:
+            logger.debug('chunked reader trying to finish block from leftover data')
+            # TODO: avoid this string concatenation if possible
+            data = self._leftover_data + data
+            position = self._leftover_skip_amt
+            self._leftover_data = ''
+            self._leftover_skip_amt = 0
+        datalen = len(data)
+        while position < datalen:
+            split = data.find(self._eol, position)
+            if split == -1:
+                self._leftover_data = data
+                self._leftover_skip_amt = position
+                return
+            amt = int(data[position:split], base=16)
+            block_start = split + len(self._eol)
+            # If the whole data chunk plus the eol trailer hasn't
+            # loaded, we'll wait for the next load.
+            if block_start + amt + len(self._eol) > len(data):
+                self._leftover_data = data
+                self._leftover_skip_amt = position
+                return
+            if amt == 0:
+                self._finished = True
+                logger.debug('closing chunked reader due to chunk of length 0')
+                return
+            self._done_chunks.append(data[block_start:block_start + amt])
+            position = block_start + amt + len(self._eol)
+# no-check-code
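
(Editorial note: the reader protocol described in AbstractReader's docstring above boils down to this: the connection feeds raw wire data in with _load(), and pulls bytes back out with read() once done() or available_data says enough has arrived. A minimal illustration using ContentLengthReader from this new module; this is module-private API shown only to clarify the flow, and the import path assumes the file lands as mercurial/httpclient/_readers.py as in this patch.)

from mercurial.httpclient import _readers

r = _readers.ContentLengthReader(10)
r._load('12345')             # first chunk off the wire
assert r.available_data == 5 and not r.done()
r._load('67890')             # content length reached, reader finishes
assert r.done()
assert r.read(4) == '1234'   # partial read; the rest stays queued
assert r.read(6) == '567890'
r._close()                   # no error: the body completed before close
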
--- a/mercurial/httpclient/tests/simple_http_test.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/httpclient/tests/simple_http_test.py	Sun May 13 12:52:24 2012 +0200
@@ -29,7 +29,7 @@
 import socket
 import unittest
 
-import http
+import httpplus
 
 # relative import to ease embedding the library
 import util
@@ -38,7 +38,7 @@
 class SimpleHttpTest(util.HttpTestBase, unittest.TestCase):
 
     def _run_simple_test(self, host, server_data, expected_req, expected_data):
-        con = http.HTTPConnection(host)
+        con = httpplus.HTTPConnection(host)
         con._connect()
         con.sock.data = server_data
         con.request('GET', '/')
@@ -47,9 +47,9 @@
         self.assertEqual(expected_data, con.getresponse().read())
 
     def test_broken_data_obj(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
-        self.assertRaises(http.BadRequestData,
+        self.assertRaises(httpplus.BadRequestData,
                           con.request, 'POST', '/', body=1)
 
     def test_no_keepalive_http_1_0(self):
@@ -74,7 +74,7 @@
 fncache
 dotencode
 """
-        con = http.HTTPConnection('localhost:9999')
+        con = httpplus.HTTPConnection('localhost:9999')
         con._connect()
         con.sock.data = [expected_response_headers, expected_response_body]
         con.request('GET', '/remote/.hg/requires',
@@ -95,7 +95,7 @@
         self.assert_(resp.sock.closed)
 
     def test_multiline_header(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         con.sock.data = ['HTTP/1.1 200 OK\r\n',
                          'Server: BogusServer 1.0\r\n',
@@ -122,7 +122,7 @@
         self.assertEqual(con.sock.closed, False)
 
     def testSimpleRequest(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         con.sock.data = ['HTTP/1.1 200 OK\r\n',
                          'Server: BogusServer 1.0\r\n',
@@ -149,12 +149,13 @@
                          resp.headers.getheaders('server'))
 
     def testHeaderlessResponse(self):
-        con = http.HTTPConnection('1.2.3.4', use_ssl=False)
+        con = httpplus.HTTPConnection('1.2.3.4', use_ssl=False)
         con._connect()
         con.sock.data = ['HTTP/1.1 200 OK\r\n',
                          '\r\n'
                          '1234567890'
                          ]
+        con.sock.close_on_empty = True
         con.request('GET', '/')
 
         expected_req = ('GET / HTTP/1.1\r\n'
@@ -169,7 +170,30 @@
         self.assertEqual(resp.status, 200)
 
     def testReadline(self):
-        con = http.HTTPConnection('1.2.3.4')
+        con = httpplus.HTTPConnection('1.2.3.4')
+        con._connect()
+        con.sock.data = ['HTTP/1.1 200 OK\r\n',
+                         'Server: BogusServer 1.0\r\n',
+                         'Connection: Close\r\n',
+                         '\r\n'
+                         '1\n2\nabcdefg\n4\n5']
+        con.sock.close_on_empty = True
+
+        expected_req = ('GET / HTTP/1.1\r\n'
+                        'Host: 1.2.3.4\r\n'
+                        'accept-encoding: identity\r\n\r\n')
+
+        con.request('GET', '/')
+        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+        self.assertEqual(expected_req, con.sock.sent)
+        r = con.getresponse()
+        for expected in ['1\n', '2\n', 'abcdefg\n', '4\n', '5']:
+            actual = r.readline()
+            self.assertEqual(expected, actual,
+                             'Expected %r, got %r' % (expected, actual))
+
+    def testReadlineTrickle(self):
+        con = httpplus.HTTPConnection('1.2.3.4')
         con._connect()
         # make sure it trickles in one byte at a time
         # so that we touch all the cases in readline
@@ -179,6 +203,7 @@
              'Connection: Close\r\n',
              '\r\n'
              '1\n2\nabcdefg\n4\n5']))
+        con.sock.close_on_empty = True
 
         expected_req = ('GET / HTTP/1.1\r\n'
                         'Host: 1.2.3.4\r\n'
@@ -193,6 +218,59 @@
             self.assertEqual(expected, actual,
                              'Expected %r, got %r' % (expected, actual))
 
+    def testVariousReads(self):
+        con = httpplus.HTTPConnection('1.2.3.4')
+        con._connect()
+        # make sure it trickles in one byte at a time
+        # so that we touch all the cases in readline
+        con.sock.data = list(''.join(
+            ['HTTP/1.1 200 OK\r\n',
+             'Server: BogusServer 1.0\r\n',
+             'Connection: Close\r\n',
+             '\r\n'
+             '1\n2',
+             '\na', 'bc',
+             'defg\n4\n5']))
+        con.sock.close_on_empty = True
+
+        expected_req = ('GET / HTTP/1.1\r\n'
+                        'Host: 1.2.3.4\r\n'
+                        'accept-encoding: identity\r\n\r\n')
+
+        con.request('GET', '/')
+        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+        self.assertEqual(expected_req, con.sock.sent)
+        r = con.getresponse()
+        for read_amt, expect in [(1, '1'), (1, '\n'),
+                                 (4, '2\nab'),
+                                 ('line', 'cdefg\n'),
+                                 (None, '4\n5')]:
+            if read_amt == 'line':
+                self.assertEqual(expect, r.readline())
+            else:
+                self.assertEqual(expect, r.read(read_amt))
+
+    def testZeroLengthBody(self):
+        con = httpplus.HTTPConnection('1.2.3.4')
+        con._connect()
+        # make sure it trickles in one byte at a time
+        # so that we touch all the cases in readline
+        con.sock.data = list(''.join(
+            ['HTTP/1.1 200 OK\r\n',
+             'Server: BogusServer 1.0\r\n',
+             'Content-length: 0\r\n',
+             '\r\n']))
+
+        expected_req = ('GET / HTTP/1.1\r\n'
+                        'Host: 1.2.3.4\r\n'
+                        'accept-encoding: identity\r\n\r\n')
+
+        con.request('GET', '/')
+        self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+        self.assertEqual(expected_req, con.sock.sent)
+        r = con.getresponse()
+        self.assertEqual('', r.read())
+
     def testIPv6(self):
         self._run_simple_test('[::1]:8221',
                         ['HTTP/1.1 200 OK\r\n',
@@ -226,7 +304,7 @@
                         '1234567890')
 
     def testEarlyContinueResponse(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.data = ['HTTP/1.1 403 Forbidden\r\n',
@@ -240,8 +318,23 @@
         self.assertEqual("You can't do that.", con.getresponse().read())
         self.assertEqual(sock.closed, True)
 
+    def testEarlyContinueResponseNoContentLength(self):
+        con = httpplus.HTTPConnection('1.2.3.4:80')
+        con._connect()
+        sock = con.sock
+        sock.data = ['HTTP/1.1 403 Forbidden\r\n',
+                         'Server: BogusServer 1.0\r\n',
+                         '\r\n'
+                         "You can't do that."]
+        sock.close_on_empty = True
+        expected_req = self.doPost(con, expect_body=False)
+        self.assertEqual(('1.2.3.4', 80), sock.sa)
+        self.assertStringEqual(expected_req, sock.sent)
+        self.assertEqual("You can't do that.", con.getresponse().read())
+        self.assertEqual(sock.closed, True)
+
     def testDeniedAfterContinueTimeoutExpires(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.data = ['HTTP/1.1 403 Forbidden\r\n',
@@ -269,7 +362,7 @@
         self.assertEqual(sock.closed, True)
 
     def testPostData(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.read_wait_sentinel = 'POST data'
@@ -286,7 +379,7 @@
         self.assertEqual(sock.closed, False)
 
     def testServerWithoutContinue(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.read_wait_sentinel = 'POST data'
@@ -302,7 +395,7 @@
         self.assertEqual(sock.closed, False)
 
     def testServerWithSlowContinue(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.read_wait_sentinel = 'POST data'
@@ -321,7 +414,7 @@
         self.assertEqual(sock.closed, False)
 
     def testSlowConnection(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         # simulate one byte arriving at a time, to check for various
         # corner cases
@@ -340,12 +433,26 @@
         self.assertEqual(expected_req, con.sock.sent)
         self.assertEqual('1234567890', con.getresponse().read())
 
+    def testCloseAfterNotAllOfHeaders(self):
+        con = httpplus.HTTPConnection('1.2.3.4:80')
+        con._connect()
+        con.sock.data = ['HTTP/1.1 200 OK\r\n',
+                         'Server: NO CARRIER']
+        con.sock.close_on_empty = True
+        con.request('GET', '/')
+        self.assertRaises(httpplus.HTTPRemoteClosedError,
+                          con.getresponse)
+
+        expected_req = ('GET / HTTP/1.1\r\n'
+                        'Host: 1.2.3.4\r\n'
+                        'accept-encoding: identity\r\n\r\n')
+
     def testTimeout(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         con.sock.data = []
         con.request('GET', '/')
-        self.assertRaises(http.HTTPTimeoutException,
+        self.assertRaises(httpplus.HTTPTimeoutException,
                           con.getresponse)
 
         expected_req = ('GET / HTTP/1.1\r\n'
@@ -370,7 +477,7 @@
             return s
 
         socket.socket = closingsocket
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         con.request('GET', '/')
         r1 = con.getresponse()
@@ -381,7 +488,7 @@
         self.assertEqual(2, len(sockets))
 
     def test_server_closes_before_end_of_body(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         s = con.sock
         s.data = ['HTTP/1.1 200 OK\r\n',
@@ -393,9 +500,9 @@
         s.close_on_empty = True
         con.request('GET', '/')
         r1 = con.getresponse()
-        self.assertRaises(http.HTTPRemoteClosedError, r1.read)
+        self.assertRaises(httpplus.HTTPRemoteClosedError, r1.read)
 
     def test_no_response_raises_response_not_ready(self):
-        con = http.HTTPConnection('foo')
-        self.assertRaises(http.httplib.ResponseNotReady, con.getresponse)
+        con = httpplus.HTTPConnection('foo')
+        self.assertRaises(httpplus.httplib.ResponseNotReady, con.getresponse)
 # no-check-code
--- a/mercurial/httpclient/tests/test_bogus_responses.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/httpclient/tests/test_bogus_responses.py	Sun May 13 12:52:24 2012 +0200
@@ -34,7 +34,7 @@
 """
 import unittest
 
-import http
+import httpplus
 
 # relative import to ease embedding the library
 import util
@@ -43,7 +43,7 @@
 class SimpleHttpTest(util.HttpTestBase, unittest.TestCase):
 
     def bogusEOL(self, eol):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         con.sock.data = ['HTTP/1.1 200 OK%s' % eol,
                          'Server: BogusServer 1.0%s' % eol,
--- a/mercurial/httpclient/tests/test_chunked_transfer.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/httpclient/tests/test_chunked_transfer.py	Sun May 13 12:52:24 2012 +0200
@@ -29,7 +29,7 @@
 import cStringIO
 import unittest
 
-import http
+import httpplus
 
 # relative import to ease embedding the library
 import util
@@ -50,7 +50,7 @@
 
 class ChunkedTransferTest(util.HttpTestBase, unittest.TestCase):
     def testChunkedUpload(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.read_wait_sentinel = '0\r\n\r\n'
@@ -77,7 +77,7 @@
         self.assertEqual(sock.closed, False)
 
     def testChunkedDownload(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.data = ['HTTP/1.1 200 OK\r\n',
@@ -85,14 +85,31 @@
                      'transfer-encoding: chunked',
                      '\r\n\r\n',
                      chunkedblock('hi '),
-                     chunkedblock('there'),
+                     ] + list(chunkedblock('there')) + [
                      chunkedblock(''),
                      ]
         con.request('GET', '/')
         self.assertStringEqual('hi there', con.getresponse().read())
 
+    def testChunkedDownloadOddReadBoundaries(self):
+        con = httpplus.HTTPConnection('1.2.3.4:80')
+        con._connect()
+        sock = con.sock
+        sock.data = ['HTTP/1.1 200 OK\r\n',
+                     'Server: BogusServer 1.0\r\n',
+                     'transfer-encoding: chunked',
+                     '\r\n\r\n',
+                     chunkedblock('hi '),
+                     ] + list(chunkedblock('there')) + [
+                     chunkedblock(''),
+                     ]
+        con.request('GET', '/')
+        resp = con.getresponse()
+        for amt, expect in [(1, 'h'), (5, 'i the'), (100, 're')]:
+            self.assertEqual(expect, resp.read(amt))
+
     def testChunkedDownloadBadEOL(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.data = ['HTTP/1.1 200 OK\n',
@@ -107,7 +124,7 @@
         self.assertStringEqual('hi there', con.getresponse().read())
 
     def testChunkedDownloadPartialChunkBadEOL(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.data = ['HTTP/1.1 200 OK\n',
@@ -122,7 +139,7 @@
                                con.getresponse().read())
 
     def testChunkedDownloadPartialChunk(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         sock.data = ['HTTP/1.1 200 OK\r\n',
@@ -136,7 +153,7 @@
                                con.getresponse().read())
 
     def testChunkedDownloadEarlyHangup(self):
-        con = http.HTTPConnection('1.2.3.4:80')
+        con = httpplus.HTTPConnection('1.2.3.4:80')
         con._connect()
         sock = con.sock
         broken = chunkedblock('hi'*20)[:-1]
@@ -149,5 +166,5 @@
         sock.close_on_empty = True
         con.request('GET', '/')
         resp = con.getresponse()
-        self.assertRaises(http.HTTPRemoteClosedError, resp.read)
+        self.assertRaises(httpplus.HTTPRemoteClosedError, resp.read)
 # no-check-code
--- a/mercurial/httpclient/tests/test_proxy_support.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/httpclient/tests/test_proxy_support.py	Sun May 13 12:52:24 2012 +0200
@@ -29,13 +29,13 @@
 import unittest
 import socket
 
-import http
+import httpplus
 
 # relative import to ease embedding the library
 import util
 
 
-def make_preloaded_socket(data):
+def make_preloaded_socket(data, close=False):
     """Make a socket pre-loaded with data so it can be read during connect.
 
     Useful for https proxy tests because we have to read from the
@@ -44,6 +44,7 @@
     def s(*args, **kwargs):
         sock = util.MockSocket(*args, **kwargs)
         sock.early_data = data[:]
+        sock.close_on_empty = close
         return sock
     return s
 
@@ -51,7 +52,7 @@
 class ProxyHttpTest(util.HttpTestBase, unittest.TestCase):
 
     def _run_simple_test(self, host, server_data, expected_req, expected_data):
-        con = http.HTTPConnection(host)
+        con = httpplus.HTTPConnection(host)
         con._connect()
         con.sock.data = server_data
         con.request('GET', '/')
@@ -60,7 +61,7 @@
         self.assertEqual(expected_data, con.getresponse().read())
 
     def testSimpleRequest(self):
-        con = http.HTTPConnection('1.2.3.4:80',
+        con = httpplus.HTTPConnection('1.2.3.4:80',
                                   proxy_hostport=('magicproxy', 4242))
         con._connect()
         con.sock.data = ['HTTP/1.1 200 OK\r\n',
@@ -88,7 +89,7 @@
                          resp.headers.getheaders('server'))
 
     def testSSLRequest(self):
-        con = http.HTTPConnection('1.2.3.4:443',
+        con = httpplus.HTTPConnection('1.2.3.4:443',
                                   proxy_hostport=('magicproxy', 4242))
         socket.socket = make_preloaded_socket(
             ['HTTP/1.1 200 OK\r\n',
@@ -124,12 +125,47 @@
         self.assertEqual(['BogusServer 1.0'],
                          resp.headers.getheaders('server'))
 
-    def testSSLProxyFailure(self):
-        con = http.HTTPConnection('1.2.3.4:443',
+    def testSSLRequestNoConnectBody(self):
+        con = httpplus.HTTPConnection('1.2.3.4:443',
                                   proxy_hostport=('magicproxy', 4242))
         socket.socket = make_preloaded_socket(
-            ['HTTP/1.1 407 Proxy Authentication Required\r\n\r\n'])
-        self.assertRaises(http.HTTPProxyConnectFailedException, con._connect)
-        self.assertRaises(http.HTTPProxyConnectFailedException,
+            ['HTTP/1.1 200 OK\r\n',
+             'Server: BogusServer 1.0\r\n',
+             '\r\n'])
+        con._connect()
+        con.sock.data = ['HTTP/1.1 200 OK\r\n',
+                         'Server: BogusServer 1.0\r\n',
+                         'Content-Length: 10\r\n',
+                         '\r\n'
+                         '1234567890'
+                         ]
+        connect_sent = con.sock.sent
+        con.sock.sent = ''
+        con.request('GET', '/')
+
+        expected_connect = ('CONNECT 1.2.3.4:443 HTTP/1.0\r\n'
+                            'Host: 1.2.3.4\r\n'
+                            'accept-encoding: identity\r\n'
+                            '\r\n')
+        expected_request = ('GET / HTTP/1.1\r\n'
+                            'Host: 1.2.3.4\r\n'
+                            'accept-encoding: identity\r\n\r\n')
+
+        self.assertEqual(('127.0.0.42', 4242), con.sock.sa)
+        self.assertStringEqual(expected_connect, connect_sent)
+        self.assertStringEqual(expected_request, con.sock.sent)
+        resp = con.getresponse()
+        self.assertEqual(resp.status, 200)
+        self.assertEqual('1234567890', resp.read())
+        self.assertEqual(['BogusServer 1.0'],
+                         resp.headers.getheaders('server'))
+
+    def testSSLProxyFailure(self):
+        con = httpplus.HTTPConnection('1.2.3.4:443',
+                                  proxy_hostport=('magicproxy', 4242))
+        socket.socket = make_preloaded_socket(
+            ['HTTP/1.1 407 Proxy Authentication Required\r\n\r\n'], close=True)
+        self.assertRaises(httpplus.HTTPProxyConnectFailedException, con._connect)
+        self.assertRaises(httpplus.HTTPProxyConnectFailedException,
                           con.request, 'GET', '/')
 # no-check-code
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/httpclient/tests/test_readers.py	Sun May 13 12:52:24 2012 +0200
@@ -0,0 +1,70 @@
+# Copyright 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from httpplus import _readers
+
+def chunkedblock(x, eol='\r\n'):
+    r"""Make a chunked transfer-encoding block.
+
+    >>> chunkedblock('hi')
+    '2\r\nhi\r\n'
+    >>> chunkedblock('hi' * 10)
+    '14\r\nhihihihihihihihihihi\r\n'
+    >>> chunkedblock('hi', eol='\n')
+    '2\nhi\n'
+    """
+    return ''.join((hex(len(x))[2:], eol, x, eol))
+
+corpus = 'foo\r\nbar\r\nbaz\r\n'
+
+
+class ChunkedReaderTest(unittest.TestCase):
+    def test_many_block_boundaries(self):
+        for step in xrange(1, len(corpus)):
+            data = ''.join(chunkedblock(corpus[start:start+step]) for
+                           start in xrange(0, len(corpus), step))
+            for istep in xrange(1, len(data)):
+                rdr = _readers.ChunkedReader('\r\n')
+                print 'step', step, 'load', istep
+                for start in xrange(0, len(data), istep):
+                    rdr._load(data[start:start+istep])
+                rdr._load(chunkedblock(''))
+                self.assertEqual(corpus, rdr.read(len(corpus) + 1))
+
+    def test_small_chunk_blocks_large_wire_blocks(self):
+        data = ''.join(map(chunkedblock, corpus)) + chunkedblock('')
+        rdr = _readers.ChunkedReader('\r\n')
+        for start in xrange(0, len(data), 4):
+            d = data[start:start + 4]
+            if d:
+                rdr._load(d)
+        self.assertEqual(corpus, rdr.read(len(corpus)+100))
+# no-check-code
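
(Editorial note: for reference, a hand-worked example of the chunked framing these tests exercise, illustrative only. Each block on the wire is '<hex length>\r\n<data>\r\n' and a zero-length block terminates the body, which is why ChunkedReader marks itself finished when it sees a chunk of length 0.)

from mercurial.httpclient import _readers

wire = '3\r\nfoo\r\n' + '3\r\nbar\r\n' + '0\r\n\r\n'
rdr = _readers.ChunkedReader('\r\n')
rdr._load(wire[:5])          # partial block is kept as leftover data
assert rdr.available_data == 0
rdr._load(wire[5:])          # rest of the stream, incl. the 0-length block
assert rdr.done()
assert rdr.read(6) == 'foobar'
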
--- a/mercurial/httpclient/tests/test_ssl.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/httpclient/tests/test_ssl.py	Sun May 13 12:52:24 2012 +0200
@@ -28,7 +28,7 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 import unittest
 
-import http
+import httpplus
 
 # relative import to ease embedding the library
 import util
@@ -37,7 +37,7 @@
 
 class HttpSslTest(util.HttpTestBase, unittest.TestCase):
     def testSslRereadRequired(self):
-        con = http.HTTPConnection('1.2.3.4:443')
+        con = httpplus.HTTPConnection('1.2.3.4:443')
         con._connect()
         # extend the list instead of assign because of how
         # MockSSLSocket works.
@@ -66,7 +66,7 @@
                          resp.headers.getheaders('server'))
 
     def testSslRereadInEarlyResponse(self):
-        con = http.HTTPConnection('1.2.3.4:443')
+        con = httpplus.HTTPConnection('1.2.3.4:443')
         con._connect()
         con.sock.early_data = ['HTTP/1.1 200 OK\r\n',
                                'Server: BogusServer 1.0\r\n',
--- a/mercurial/httpclient/tests/util.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/httpclient/tests/util.py	Sun May 13 12:52:24 2012 +0200
@@ -29,7 +29,7 @@
 import difflib
 import socket
 
-import http
+import httpplus
 
 
 class MockSocket(object):
@@ -57,7 +57,7 @@
         self.remote_closed = self.closed = False
         self.close_on_empty = False
         self.sent = ''
-        self.read_wait_sentinel = http._END_HEADERS
+        self.read_wait_sentinel = httpplus._END_HEADERS
 
     def close(self):
         self.closed = True
@@ -86,7 +86,7 @@
 
     @property
     def ready_for_read(self):
-        return ((self.early_data and http._END_HEADERS in self.sent)
+        return ((self.early_data and httpplus._END_HEADERS in self.sent)
                 or (self.read_wait_sentinel in self.sent and self.data)
                 or self.closed or self.remote_closed)
 
@@ -132,7 +132,7 @@
 
 
 def mocksslwrap(sock, keyfile=None, certfile=None,
-                server_side=False, cert_reqs=http.socketutil.CERT_NONE,
+                server_side=False, cert_reqs=httpplus.socketutil.CERT_NONE,
                 ssl_version=None, ca_certs=None,
                 do_handshake_on_connect=True,
                 suppress_ragged_eofs=True):
@@ -156,16 +156,16 @@
         self.orig_getaddrinfo = socket.getaddrinfo
         socket.getaddrinfo = mockgetaddrinfo
 
-        self.orig_select = http.select.select
-        http.select.select = mockselect
+        self.orig_select = httpplus.select.select
+        httpplus.select.select = mockselect
 
-        self.orig_sslwrap = http.socketutil.wrap_socket
-        http.socketutil.wrap_socket = mocksslwrap
+        self.orig_sslwrap = httpplus.socketutil.wrap_socket
+        httpplus.socketutil.wrap_socket = mocksslwrap
 
     def tearDown(self):
         socket.socket = self.orig_socket
-        http.select.select = self.orig_select
-        http.socketutil.wrap_socket = self.orig_sslwrap
+        httpplus.select.select = self.orig_select
+        httpplus.socketutil.wrap_socket = self.orig_sslwrap
         socket.getaddrinfo = self.orig_getaddrinfo
 
     def assertStringEqual(self, l, r):
--- a/mercurial/keepalive.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/keepalive.py	Sun May 13 12:52:24 2012 +0200
@@ -136,7 +136,7 @@
     def add(self, host, connection, ready):
         self._lock.acquire()
         try:
-            if not host in self._hostmap:
+            if host not in self._hostmap:
                 self._hostmap[host] = []
             self._hostmap[host].append(connection)
             self._connmap[connection] = host
@@ -534,7 +534,7 @@
         if self.auto_open:
             self.connect()
         else:
-            raise httplib.NotConnected()
+            raise httplib.NotConnected
 
     # send the data to the server. if we get a broken pipe, then close
     # the socket. we want to reconnect when somebody tries to send again.
@@ -758,7 +758,7 @@
     try:
         N = int(sys.argv[1])
         url = sys.argv[2]
-    except:
+    except (IndexError, ValueError):
         print "%s <integer> <url>" % sys.argv[0]
     else:
         test(url, N)
--- a/mercurial/localrepo.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/localrepo.py	Sun May 13 12:52:24 2012 +0200
@@ -41,7 +41,6 @@
         self.wopener = scmutil.opener(self.root)
         self.baseui = baseui
         self.ui = baseui.copy()
-        self._dirtyphases = False
         # A list of callback to shape the phase if no data were found.
         # Callback are in the form: func(repo, roots) --> processed root.
         # This list it to be filled by extension during repo setup
@@ -182,23 +181,8 @@
       bookmarks.write(self)
 
     @storecache('phaseroots')
-    def _phaseroots(self):
-        self._dirtyphases = False
-        phaseroots = phases.readroots(self)
-        phases.filterunknown(self, phaseroots)
-        return phaseroots
-
-    @propertycache
-    def _phaserev(self):
-        cache = [phases.public] * len(self)
-        for phase in phases.trackedphases:
-            roots = map(self.changelog.rev, self._phaseroots[phase])
-            if roots:
-                for rev in roots:
-                    cache[rev] = phase
-                for rev in self.changelog.descendants(*roots):
-                    cache[rev] = phase
-        return cache
+    def _phasecache(self):
+        return phases.phasecache(self, self._phasedefaults)
 
     @storecache('00changelog.i')
     def changelog(self):
@@ -296,7 +280,8 @@
                 fp.write('\n')
             for name in names:
                 m = munge and munge(name) or name
-                if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
+                if (self._tagscache.tagtypes and
+                    name in self._tagscache.tagtypes):
                     old = self.tags().get(name, nullid)
                     fp.write('%s %s\n' % (hex(old), m))
                 fp.write('%s %s\n' % (hex(node), m))
@@ -376,7 +361,8 @@
 
     @propertycache
     def _tagscache(self):
-        '''Returns a tagscache object that contains various tags related caches.'''
+        '''Returns a tagscache object that contains various tags-related
+        caches.'''
 
         # This simplifies its cache management by having one decorated
         # function (this one) and the rest simply fetch things from it.
@@ -505,7 +491,7 @@
             partial = self._branchcache
 
         self._branchtags(partial, lrev)
-        # this private cache holds all heads (not just tips)
+        # this private cache holds all heads (not just the branch tips)
         self._branchcache = partial
 
     def branchmap(self):
@@ -585,8 +571,8 @@
                 latest = newnodes.pop()
                 if latest not in bheads:
                     continue
-                minbhrev = self[bheads[0]].node()
-                reachable = self.changelog.reachable(latest, minbhrev)
+                minbhnode = self[bheads[0]].node()
+                reachable = self.changelog.reachable(latest, minbhnode)
                 reachable.remove(latest)
                 if reachable:
                     bheads = [b for b in bheads if b not in reachable]
@@ -605,10 +591,11 @@
 
     def known(self, nodes):
         nm = self.changelog.nodemap
+        pc = self._phasecache
         result = []
         for n in nodes:
             r = nm.get(n)
-            resp = not (r is None or self._phaserev[r] >= phases.secret)
+            resp = not (r is None or pc.phase(self, r) >= phases.secret)
             result.append(resp)
         return result
 
@@ -864,7 +851,6 @@
                 pass
 
         delcache('_tagscache')
-        delcache('_phaserev')
 
         self._branchcache = None # in UTF-8
         self._branchcachetip = None
@@ -934,9 +920,8 @@
 
         def unlock():
             self.store.write()
-            if self._dirtyphases:
-                phases.writeroots(self)
-                self._dirtyphases = False
+            if '_phasecache' in vars(self):
+                self._phasecache.write()
             for k, ce in self._filecache.items():
                 if k == 'dirstate':
                     continue
@@ -1192,7 +1177,8 @@
             p1, p2 = self.dirstate.parents()
             hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
             try:
-                self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
+                self.hook("precommit", throw=True, parent1=hookp1,
+                          parent2=hookp2)
                 ret = self.commitctx(cctx, True)
             except:
                 if edited:
@@ -1330,7 +1316,8 @@
     def status(self, node1='.', node2=None, match=None,
                ignored=False, clean=False, unknown=False,
                listsubrepos=False):
-        """return status of files between two nodes or node and working directory
+        """return status of files between two nodes or node and working
+        directory.
 
         If node1 is None, use the first dirstate parent instead.
         If node2 is None, compare node1 with working directory.
@@ -1338,6 +1325,8 @@
 
         def mfmatches(ctx):
             mf = ctx.manifest().copy()
+            if match.always():
+                return mf
             for fn in mf.keys():
                 if not match(fn):
                     del mf[fn]
@@ -1423,10 +1412,11 @@
                 mf2 = mfmatches(ctx2)
 
             modified, added, clean = [], [], []
+            withflags = mf1.withflags() | mf2.withflags()
             for fn in mf2:
                 if fn in mf1:
                     if (fn not in deleted and
-                        (mf1.flags(fn) != mf2.flags(fn) or
+                        ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
                          (mf1[fn] != mf2[fn] and
                           (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
                         modified.append(fn)
@@ -1672,7 +1662,8 @@
                         # http: return remote's addchangegroup() or 0 for error
                         ret = remote.unbundle(cg, remoteheads, 'push')
                     else:
-                        # we return an integer indicating remote head count change
+                        # we return an integer indicating remote head count
+                        # change
                         ret = remote.addchangegroup(cg, 'push', self.url())
 
                 if ret:
@@ -1698,7 +1689,7 @@
                     # * missingheads part of comon (::commonheads)
                     common = set(outgoing.common)
                     cheads = [node for node in revs if node in common]
-                    # and 
+                    # and
                     # * commonheads parents on missing
                     revset = self.set('%ln and parents(roots(%ln))',
                                      outgoing.commonheads,
@@ -1904,7 +1895,8 @@
             for fname in sorted(changedfiles):
                 filerevlog = self.file(fname)
                 if not len(filerevlog):
-                    raise util.Abort(_("empty or missing revlog for %s") % fname)
+                    raise util.Abort(_("empty or missing revlog for %s")
+                                     % fname)
                 fstate[0] = fname
                 fstate[1] = fnodes.pop(fname, {})
 
@@ -2004,7 +1996,8 @@
             for fname in sorted(changedfiles):
                 filerevlog = self.file(fname)
                 if not len(filerevlog):
-                    raise util.Abort(_("empty or missing revlog for %s") % fname)
+                    raise util.Abort(_("empty or missing revlog for %s")
+                                     % fname)
                 fstate[0] = fname
                 nodelist = gennodelst(filerevlog)
                 if nodelist:
@@ -2261,7 +2254,8 @@
                            (util.bytecount(total_bytes), elapsed,
                             util.bytecount(total_bytes / elapsed)))
 
-            # new requirements = old non-format requirements + new format-related
+            # new requirements = old non-format requirements +
+            #                    new format-related
             # requirements from the streamed-in repository
             requirements.update(set(self.requirements) - self.supportedformats)
             self._applyrequirements(requirements)
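
(Editorial note: the localrepo changes above replace the old _phaserev list with lookups on the new phasecache object, as seen in known(). A minimal sketch of that pattern; repo and rev are placeholders for an existing repository object and a revision number.)

from mercurial import phases

def issecret(repo, rev):
    # repo._phasecache is the @storecache('phaseroots') phases.phasecache
    return repo._phasecache.phase(repo, rev) >= phases.secret
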
--- a/mercurial/manifest.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/manifest.py	Sun May 13 12:52:24 2012 +0200
@@ -19,6 +19,8 @@
         self._flags = flags
     def flags(self, f):
         return self._flags.get(f, "")
+    def withflags(self):
+        return set(self._flags.keys())
     def set(self, f, flags):
         self._flags[f] = flags
     def copy(self):
@@ -124,8 +126,8 @@
                     addlist[start:end] = array.array('c', content)
                 else:
                     del addlist[start:end]
-            return "".join(struct.pack(">lll", start, end, len(content)) + content
-                           for start, end, content in x)
+            return "".join(struct.pack(">lll", start, end, len(content))
+                           + content for start, end, content in x)
 
         def checkforbidden(l):
             for f in l:
--- a/mercurial/match.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/match.py	Sun May 13 12:52:24 2012 +0200
@@ -118,6 +118,8 @@
         return self._files
     def anypats(self):
         return self._anypats
+    def always(self):
+        return False
 
 class exact(match):
     def __init__(self, root, cwd, files):
@@ -126,6 +128,8 @@
 class always(match):
     def __init__(self, root, cwd):
         match.__init__(self, root, cwd, [])
+    def always(self):
+        return True
 
 class narrowmatcher(match):
     """Adapt a matcher to work on a subdirectory only.
@@ -272,7 +276,7 @@
     try:
         pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
         if len(pat) > 20000:
-            raise OverflowError()
+            raise OverflowError
         return pat, re.compile(pat).match
     except OverflowError:
         # We're using a Python with a tiny regex engine and we
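
(Editorial note: the new always() predicate above lets callers short-circuit per-file filtering when the matcher accepts everything, which is exactly what the mfmatches() fast path in localrepo.py uses. A small sketch; the paths are placeholders and matchmod is just an import alias.)

from mercurial import match as matchmod

def filteredmanifest(mf, m):
    # mirrors the mfmatches() change in localrepo.py above
    mf = mf.copy()
    if m.always():
        return mf
    for fn in mf.keys():
        if not m(fn):
            del mf[fn]
    return mf

m = matchmod.always('/repo/root', '/repo/root')
assert m.always()
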
--- a/mercurial/merge.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/merge.py	Sun May 13 12:52:24 2012 +0200
@@ -363,7 +363,8 @@
             removed += 1
         elif m == "m": # merge
             if f == '.hgsubstate': # subrepo states need updating
-                subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
+                subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
+                                 overwrite)
                 continue
             f2, fd, flags, move = a[2:]
             repo.wopener.audit(fd)
@@ -479,7 +480,8 @@
                 if f:
                     repo.dirstate.drop(f)
 
-def update(repo, node, branchmerge, force, partial, ancestor=None):
+def update(repo, node, branchmerge, force, partial, ancestor=None,
+           mergeancestor=False):
     """
     Perform a merge between the working directory and the given node
 
@@ -487,6 +489,10 @@
     branchmerge = whether to merge between branches
     force = whether to force branch merging or file overwriting
     partial = a function to filter file lists (dirstate not updated)
+    mergeancestor = if false, merging with an ancestor (fast-forward)
+      is only allowed between different named branches. This flag
+      is used by the rebase extension as a temporary fix and should be
+      avoided in general.
 
     The table below shows all the behaviors of the update command
     given the -c and -C or no options, whether the working directory
@@ -547,7 +553,7 @@
                 raise util.Abort(_("merging with a working directory ancestor"
                                    " has no effect"))
             elif pa == p1:
-                if p1.branch() == p2.branch():
+                if not mergeancestor and p1.branch() == p2.branch():
                     raise util.Abort(_("nothing to merge"),
                                      hint=_("use 'hg update' "
                                             "or check 'hg heads'"))
--- a/mercurial/parsers.c	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/parsers.c	Sun May 13 12:52:24 2012 +0200
@@ -13,8 +13,10 @@
 
 #include "util.h"
 
-static int hexdigit(char c)
+static inline int hexdigit(const char *p, Py_ssize_t off)
 {
+	char c = p[off];
+
 	if (c >= '0' && c <= '9')
 		return c - '0';
 	if (c >= 'a' && c <= 'f')
@@ -32,8 +34,8 @@
 static PyObject *unhexlify(const char *str, int len)
 {
 	PyObject *ret;
-	const char *c;
 	char *d;
+	int i;
 
 	ret = PyBytes_FromStringAndSize(NULL, len / 2);
 
@@ -42,9 +44,9 @@
 
 	d = PyBytes_AsString(ret);
 
-	for (c = str; c < str + len;) {
-		int hi = hexdigit(*c++);
-		int lo = hexdigit(*c++);
+	for (i = 0; i < len;) {
+		int hi = hexdigit(str, i++);
+		int lo = hexdigit(str, i++);
 		*d++ = (hi << 4) | lo;
 	}
 
@@ -385,7 +387,7 @@
 	Py_ssize_t length = index_length(self);
 	const char *data;
 
-	if (pos == length - 1)
+	if (pos == length - 1 || pos == INT_MAX)
 		return nullid;
 
 	if (pos >= length)
@@ -506,13 +508,13 @@
 		return NULL;
 
 #define istat(__n, __d) \
-	if (PyDict_SetItemString(obj, __d, PyInt_FromLong(self->__n)) == -1) \
+	if (PyDict_SetItemString(obj, __d, PyInt_FromSsize_t(self->__n)) == -1) \
 		goto bail;
 
 	if (self->added) {
 		Py_ssize_t len = PyList_GET_SIZE(self->added);
 		if (PyDict_SetItemString(obj, "index entries added",
-					 PyInt_FromLong(len)) == -1)
+					 PyInt_FromSsize_t(len)) == -1)
 			goto bail;
 	}
 
@@ -536,7 +538,7 @@
 	return NULL;
 }
 
-static inline int nt_level(const char *node, int level)
+static inline int nt_level(const char *node, Py_ssize_t level)
 {
 	int v = node[level>>1];
 	if (!(level & 1))
@@ -544,8 +546,17 @@
 	return v & 0xf;
 }
 
-static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen)
+/*
+ * Return values:
+ *
+ *   -4: match is ambiguous (multiple candidates)
+ *   -2: not found
+ * rest: valid rev
+ */
+static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
+		   int hex)
 {
+	int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
 	int level, maxlevel, off;
 
 	if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
@@ -554,27 +565,35 @@
 	if (self->nt == NULL)
 		return -2;
 
-	maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
+	if (hex)
+		maxlevel = nodelen > 40 ? 40 : (int)nodelen;
+	else
+		maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
 
 	for (level = off = 0; level < maxlevel; level++) {
-		int k = nt_level(node, level);
+		int k = getnybble(node, level);
 		nodetree *n = &self->nt[off];
 		int v = n->children[k];
 
 		if (v < 0) {
 			const char *n;
+			Py_ssize_t i;
+
 			v = -v - 1;
 			n = index_node(self, v);
 			if (n == NULL)
 				return -2;
-			return memcmp(node, n, nodelen > 20 ? 20 : nodelen)
-				? -2 : v;
+			for (i = level; i < maxlevel; i++)
+				if (getnybble(node, i) != nt_level(n, i))
+					return -2;
+			return v;
 		}
 		if (v == 0)
 			return -2;
 		off = v;
 	}
-	return -2;
+	/* multiple matches against an ambiguous prefix */
+	return -4;
 }
 
 static int nt_new(indexObject *self)
@@ -638,6 +657,26 @@
 	return -1;
 }
 
+static int nt_init(indexObject *self)
+{
+	if (self->nt == NULL) {
+		self->ntcapacity = self->raw_length < 4
+			? 4 : self->raw_length / 2;
+		self->nt = calloc(self->ntcapacity, sizeof(nodetree));
+		if (self->nt == NULL) {
+			PyErr_NoMemory();
+			return -1;
+		}
+		self->ntlength = 1;
+		self->ntrev = (int)index_length(self) - 1;
+		self->ntlookups = 1;
+		self->ntmisses = 0;
+		if (nt_insert(self, nullid, INT_MAX) == -1)
+			return -1;
+	}
+	return 0;
+}
+
 /*
  * Return values:
  *
@@ -651,23 +690,12 @@
 	int rev;
 
 	self->ntlookups++;
-	rev = nt_find(self, node, nodelen);
+	rev = nt_find(self, node, nodelen, 0);
 	if (rev >= -1)
 		return rev;
 
-	if (self->nt == NULL) {
-		self->ntcapacity = self->raw_length < 4
-			? 4 : self->raw_length / 2;
-		self->nt = calloc(self->ntcapacity, sizeof(nodetree));
-		if (self->nt == NULL) {
-			PyErr_SetString(PyExc_MemoryError, "out of memory");
-			return -3;
-		}
-		self->ntlength = 1;
-		self->ntrev = (int)index_length(self) - 1;
-		self->ntlookups = 1;
-		self->ntmisses = 0;
-	}
+	if (nt_init(self) == -1)
+		return -3;
 
 	/*
 	 * For the first handful of lookups, we scan the entire index,
@@ -692,10 +720,14 @@
 	} else {
 		for (rev = self->ntrev - 1; rev >= 0; rev--) {
 			const char *n = index_node(self, rev);
-			if (n == NULL)
+			if (n == NULL) {
+				self->ntrev = rev + 1;
 				return -2;
-			if (nt_insert(self, n, rev) == -1)
+			}
+			if (nt_insert(self, n, rev) == -1) {
+				self->ntrev = rev + 1;
 				return -3;
+			}
 			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
 				break;
 			}
@@ -763,6 +795,77 @@
 	return NULL;
 }
 
+static int nt_partialmatch(indexObject *self, const char *node,
+			   Py_ssize_t nodelen)
+{
+	int rev;
+
+	if (nt_init(self) == -1)
+		return -3;
+
+	if (self->ntrev > 0) {
+		/* ensure that the radix tree is fully populated */
+		for (rev = self->ntrev - 1; rev >= 0; rev--) {
+			const char *n = index_node(self, rev);
+			if (n == NULL)
+				return -2;
+			if (nt_insert(self, n, rev) == -1)
+				return -3;
+		}
+		self->ntrev = rev;
+	}
+
+	return nt_find(self, node, nodelen, 1);
+}
+
+static PyObject *index_partialmatch(indexObject *self, PyObject *args)
+{
+	const char *fullnode;
+	int nodelen;
+	char *node;
+	int rev, i;
+
+	if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
+		return NULL;
+
+	if (nodelen < 4) {
+		PyErr_SetString(PyExc_ValueError, "key too short");
+		return NULL;
+	}
+
+	if (nodelen > 40)
+		nodelen = 40;
+
+	for (i = 0; i < nodelen; i++)
+		hexdigit(node, i);
+	if (PyErr_Occurred()) {
+		/* input contains non-hex characters */
+		PyErr_Clear();
+		Py_RETURN_NONE;
+	}
+
+	rev = nt_partialmatch(self, node, nodelen);
+
+	switch (rev) {
+	case -4:
+		raise_revlog_error();
+	case -3:
+		return NULL;
+	case -2:
+		Py_RETURN_NONE;
+	case -1:
+		return PyString_FromStringAndSize(nullid, 20);
+	}
+
+	fullnode = index_node(self, rev);
+	if (fullnode == NULL) {
+		PyErr_Format(PyExc_IndexError,
+			     "could not access rev %d", rev);
+		return NULL;
+	}
+	return PyString_FromStringAndSize(fullnode, 20);
+}
+
 static PyObject *index_m_get(indexObject *self, PyObject *args)
 {
 	Py_ssize_t nodelen;
@@ -1045,6 +1148,8 @@
 	 "get an index entry"},
 	{"insert", (PyCFunction)index_insert, METH_VARARGS,
 	 "insert an index entry"},
+	{"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
+	 "match a potentially ambiguous node ID"},
 	{"stats", (PyCFunction)index_stats, METH_NOARGS,
 	 "stats for the index"},
 	{NULL} /* Sentinel */
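
(Editorial note: from Python, the new partialmatch index method registered above takes a hex node prefix of at least four characters, returns the full binary node or None when nothing matches, and signals an ambiguous prefix, the -4 return value of nt_find, by raising a revlog error. A hedged sketch, assuming the C index is exposed as revlog.index and that raise_revlog_error() surfaces as mercurial.error.RevlogError.)

from mercurial import error

def lookupprefix(revlog, hexprefix):
    try:
        node = revlog.index.partialmatch(hexprefix)
    except error.RevlogError:
        return None  # ambiguous prefix
    return node      # 20-byte node, or None if no revision matches
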
--- a/mercurial/patch.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/patch.py	Sun May 13 12:52:24 2012 +0200
@@ -534,7 +534,7 @@
         if fname in self.data:
             return self.data[fname]
         if not self.opener or fname not in self.files:
-            raise IOError()
+            raise IOError
         fn, mode, copied = self.files[fname]
         return self.opener.read(fn), mode, copied
 
@@ -560,7 +560,7 @@
         try:
             fctx = self.ctx[fname]
         except error.LookupError:
-            raise IOError()
+            raise IOError
         flags = fctx.flags()
         return fctx.data(), ('l' in flags, 'x' in flags)
 
@@ -858,7 +858,8 @@
             self.lenb = int(self.lenb)
         self.starta = int(self.starta)
         self.startb = int(self.startb)
-        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
+        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
+                             self.b)
         # if we hit eof before finishing out the hunk, the last line will
         # be zero length.  Lets try to fix it up.
         while len(self.hunk[-1]) == 0:
@@ -1040,12 +1041,13 @@
             hunk.append(l)
             return l.rstrip('\r\n')
 
-        line = getline(lr, self.hunk)
-        while line and not line.startswith('literal '):
+        while True:
             line = getline(lr, self.hunk)
-        if not line:
-            raise PatchError(_('could not extract "%s" binary data')
-                             % self._fname)
+            if not line:
+                raise PatchError(_('could not extract "%s" binary data')
+                                 % self._fname)
+            if line.startswith('literal '):
+                break
         size = int(line[8:].rstrip())
         dec = []
         line = getline(lr, self.hunk)
@@ -1619,13 +1621,14 @@
     if opts.git or opts.upgrade:
         copy = copies.pathcopies(ctx1, ctx2)
 
-    difffn = lambda opts, losedata: trydiff(repo, revs, ctx1, ctx2,
-                 modified, added, removed, copy, getfilectx, opts, losedata, prefix)
+    difffn = (lambda opts, losedata:
+                  trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
+                          copy, getfilectx, opts, losedata, prefix))
     if opts.upgrade and not opts.git:
         try:
             def losedata(fn):
                 if not losedatafn or not losedatafn(fn=fn):
-                    raise GitDiffRequired()
+                    raise GitDiffRequired
             # Buffer the whole output until we are sure it can be generated
             return list(difffn(opts.copy(git=False), losedata))
         except GitDiffRequired:
@@ -1656,7 +1659,7 @@
                 if line.startswith('@'):
                     head = False
             else:
-                if line and not line[0] in ' +-@\\':
+                if line and line[0] not in ' +-@\\':
                     head = True
             stripline = line
             if not head and line and line[0] in '+-':
@@ -1861,7 +1864,8 @@
                        countwidth, count, pluses, minuses))
 
     if stats:
-        output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
+        output.append(_(' %d files changed, %d insertions(+), '
+                        '%d deletions(-)\n')
                       % (len(stats), totaladds, totalremoves))
 
     return ''.join(output)
--- a/mercurial/phases.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/phases.py	Sun May 13 12:52:24 2012 +0200
@@ -99,60 +99,165 @@
 """
 
 import errno
-from node import nullid, bin, hex, short
+from node import nullid, nullrev, bin, hex, short
 from i18n import _
+import util
 
 allphases = public, draft, secret = range(3)
 trackedphases = allphases[1:]
 phasenames = ['public', 'draft', 'secret']
 
-def readroots(repo):
-    """Read phase roots from disk"""
+def _filterunknown(ui, changelog, phaseroots):
+    """remove unknown nodes from the phase boundary
+
+    Nothing is lost as unknown nodes only hold data for their descendants
+    """
+    updated = False
+    nodemap = changelog.nodemap # to filter unknown nodes
+    for phase, nodes in enumerate(phaseroots):
+        missing = [node for node in nodes if node not in nodemap]
+        if missing:
+            for mnode in missing:
+                ui.debug(
+                    'removing unknown node %s from %i-phase boundary\n'
+                    % (short(mnode), phase))
+            nodes.symmetric_difference_update(missing)
+            updated = True
+    return updated
+
+def _readroots(repo, phasedefaults=None):
+    """Read phase roots from disk
+
+    phasedefaults is a list of fn(repo, roots) callables, which are
+    executed if the phase roots file does not exist. When phases are
+    being initialized on an existing repository, this can be used to
+    set the phase of selected changesets to something other than public.
+
+    Return (roots, dirty) where dirty is true if roots differ from
+    what is being stored.
+    """
+    dirty = False
     roots = [set() for i in allphases]
     try:
         f = repo.sopener('phaseroots')
         try:
             for line in f:
-                phase, nh = line.strip().split()
+                phase, nh = line.split()
                 roots[int(phase)].add(bin(nh))
         finally:
             f.close()
     except IOError, inst:
         if inst.errno != errno.ENOENT:
             raise
-        for f in repo._phasedefaults:
-            roots = f(repo, roots)
-        repo._dirtyphases = True
-    return roots
+        if phasedefaults:
+            for f in phasedefaults:
+                roots = f(repo, roots)
+        dirty = True
+    if _filterunknown(repo.ui, repo.changelog, roots):
+        dirty = True
+    return roots, dirty
+
+class phasecache(object):
+    def __init__(self, repo, phasedefaults, _load=True):
+        if _load:
+            # Cheap trick to allow shallow-copy without copy module
+            self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
+            self.opener = repo.sopener
+            self._phaserevs = None
+
+    def copy(self):
+        # Shallow copy meant to ensure isolation in
+        # advance/retractboundary(), nothing more.
+        ph = phasecache(None, None, _load=False)
+        ph.phaseroots = self.phaseroots[:]
+        ph.dirty = self.dirty
+        ph.opener = self.opener
+        ph._phaserevs = self._phaserevs
+        return ph
 
-def writeroots(repo):
-    """Write phase roots from disk"""
-    f = repo.sopener('phaseroots', 'w', atomictemp=True)
-    try:
-        for phase, roots in enumerate(repo._phaseroots):
-            for h in roots:
-                f.write('%i %s\n' % (phase, hex(h)))
-        repo._dirtyphases = False
-    finally:
-        f.close()
+    def replace(self, phcache):
+        for a in 'phaseroots dirty opener _phaserevs'.split():
+            setattr(self, a, getattr(phcache, a))
+
+    def getphaserevs(self, repo, rebuild=False):
+        if rebuild or self._phaserevs is None:
+            revs = [public] * len(repo.changelog)
+            for phase in trackedphases:
+                roots = map(repo.changelog.rev, self.phaseroots[phase])
+                if roots:
+                    for rev in roots:
+                        revs[rev] = phase
+                    for rev in repo.changelog.descendants(*roots):
+                        revs[rev] = phase
+            self._phaserevs = revs
+        return self._phaserevs
+
+    def phase(self, repo, rev):
+        # We need a repo argument here to be able to build _phaserev
+        # if necessary. The repository instance is not stored in
+        # phasecache to avoid reference cycles. The changelog instance
+        # is not stored because it is a filecache() property and can
+        # be replaced without us being notified.
+        if rev == nullrev:
+            return public
+        if self._phaserevs is None or rev >= len(self._phaserevs):
+            self._phaserevs = self.getphaserevs(repo, rebuild=True)
+        return self._phaserevs[rev]
 
-def filterunknown(repo, phaseroots=None):
-    """remove unknown nodes from the phase boundary
+    def write(self):
+        if not self.dirty:
+            return
+        f = self.opener('phaseroots', 'w', atomictemp=True)
+        try:
+            for phase, roots in enumerate(self.phaseroots):
+                for h in roots:
+                    f.write('%i %s\n' % (phase, hex(h)))
+        finally:
+            f.close()
+        self.dirty = False
+
+    def _updateroots(self, phase, newroots):
+        self.phaseroots[phase] = newroots
+        self._phaserevs = None
+        self.dirty = True
+
+    def advanceboundary(self, repo, targetphase, nodes):
+        # Be careful to preserve shallow-copied values: do not update
+        # phaseroots values, replace them.
 
-    no data is lost as unknown node only old data for their descentants
-    """
-    if phaseroots is None:
-        phaseroots = repo._phaseroots
-    nodemap = repo.changelog.nodemap # to filter unknown nodes
-    for phase, nodes in enumerate(phaseroots):
-        missing = [node for node in nodes if node not in nodemap]
-        if missing:
-            for mnode in missing:
-                repo.ui.debug(
-                    'removing unknown node %s from %i-phase boundary\n'
-                    % (short(mnode), phase))
-            nodes.symmetric_difference_update(missing)
-            repo._dirtyphases = True
+        delroots = [] # set of roots deleted by this path
+        for phase in xrange(targetphase + 1, len(allphases)):
+            # filter nodes that are not in a compatible phase already
+            nodes = [n for n in nodes
+                     if self.phase(repo, repo[n].rev()) >= phase]
+            if not nodes:
+                break # no roots to move anymore
+            olds = self.phaseroots[phase]
+            roots = set(ctx.node() for ctx in repo.set(
+                    'roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
+            if olds != roots:
+                self._updateroots(phase, roots)
+                # some roots may need to be declared for lower phases
+                delroots.extend(olds - roots)
+            # declare deleted root in the target phase
+            if targetphase != 0:
+                self.retractboundary(repo, targetphase, delroots)
+
+    def retractboundary(self, repo, targetphase, nodes):
+        # Be careful to preserve shallow-copied values: do not update
+        # phaseroots values, replace them.
+
+        currentroots = self.phaseroots[targetphase]
+        newroots = [n for n in nodes
+                    if self.phase(repo, repo[n].rev()) < targetphase]
+        if newroots:
+            if nullid in newroots:
+                raise util.Abort(_('cannot change null revision phase'))
+            currentroots = currentroots.copy()
+            currentroots.update(newroots)
+            ctxs = repo.set('roots(%ln::)', currentroots)
+            currentroots.intersection_update(ctx.node() for ctx in ctxs)
+            self._updateroots(targetphase, currentroots)
 
 def advanceboundary(repo, targetphase, nodes):
     """Add nodes to a phase changing other nodes phases if necessary.
@@ -161,30 +266,9 @@
     in the target phase or kept in a *lower* phase.
 
     Simplify boundary to contains phase roots only."""
-    delroots = [] # set of root deleted by this path
-    for phase in xrange(targetphase + 1, len(allphases)):
-        # filter nodes that are not in a compatible phase already
-        # XXX rev phase cache might have been invalidated by a previous loop
-        # XXX we need to be smarter here
-        nodes = [n for n in nodes if repo[n].phase() >= phase]
-        if not nodes:
-            break # no roots to move anymore
-        roots = repo._phaseroots[phase]
-        olds = roots.copy()
-        ctxs = list(repo.set('roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
-        roots.clear()
-        roots.update(ctx.node() for ctx in ctxs)
-        if olds != roots:
-            # invalidate cache (we probably could be smarter here
-            if '_phaserev' in vars(repo):
-                del repo._phaserev
-            repo._dirtyphases = True
-            # some roots may need to be declared for lower phases
-            delroots.extend(olds - roots)
-        # declare deleted root in the target phase
-        if targetphase != 0:
-            retractboundary(repo, targetphase, delroots)
-
+    phcache = repo._phasecache.copy()
+    phcache.advanceboundary(repo, targetphase, nodes)
+    repo._phasecache.replace(phcache)
 
 def retractboundary(repo, targetphase, nodes):
     """Set nodes back to a phase changing other nodes phases if necessary.
@@ -193,22 +277,15 @@
     in the target phase or kept in a *higher* phase.
 
     Simplify boundary to contains phase roots only."""
-    currentroots = repo._phaseroots[targetphase]
-    newroots = [n for n in nodes if repo[n].phase() < targetphase]
-    if newroots:
-        currentroots.update(newroots)
-        ctxs = repo.set('roots(%ln::)', currentroots)
-        currentroots.intersection_update(ctx.node() for ctx in ctxs)
-        if '_phaserev' in vars(repo):
-            del repo._phaserev
-        repo._dirtyphases = True
-
+    phcache = repo._phasecache.copy()
+    phcache.retractboundary(repo, targetphase, nodes)
+    repo._phasecache.replace(phcache)
 
 def listphases(repo):
     """List phases root for serialisation over pushkey"""
     keys = {}
     value = '%i' % draft
-    for root in repo._phaseroots[draft]:
+    for root in repo._phasecache.phaseroots[draft]:
         keys[hex(root)] = value
 
     if repo.ui.configbool('phases', 'publish', True):
@@ -251,7 +328,7 @@
 def visibleheads(repo):
     """return the set of visible head of this repo"""
     # XXX we want a cache on this
-    sroots = repo._phaseroots[secret]
+    sroots = repo._phasecache.phaseroots[secret]
     if sroots:
         # XXX very slow revset. storing heads or secret "boundary" would help.
         revset = repo.set('heads(not (%ln::))', sroots)
@@ -267,7 +344,7 @@
     """return a branchmap for the visible set"""
     # XXX Recomputing this data on the fly is very slow.  We should build a
     # XXX cached version while computin the standard branchmap version.
-    sroots = repo._phaseroots[secret]
+    sroots = repo._phasecache.phaseroots[secret]
     if sroots:
         vbranchmap = {}
         for branch, nodes in  repo.branchmap().iteritems():
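
(Editor's note, not part of the changeset: a minimal sketch of what a
phasedefaults callable, as described in the _readroots() docstring above,
could look like. The name alldraft and its registration on
repo._phasedefaults are assumptions for illustration only.)

    from mercurial import phases

    def alldraft(repo, roots):
        # roots is a list of sets, one per phase; seeding the root
        # changeset as a draft root makes every existing changeset start
        # out as draft instead of public when phases are first initialized
        if len(repo):
            roots[phases.draft].add(repo[0].node())
        return roots

    # hypothetical registration; _readroots()/phasecache only runs it when
    # .hg/store/phaseroots does not exist yet
    # repo._phasedefaults.append(alldraft)
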
--- a/mercurial/pure/mpatch.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/pure/mpatch.py	Sun May 13 12:52:24 2012 +0200
@@ -85,10 +85,10 @@
             p1, p2, l = struct.unpack(">lll", m.read(12))
             pull(new, frags, p1 - last) # what didn't change
             pull([], frags, p2 - p1)    # what got deleted
-            new.append((l, pos + 12))        # what got added
+            new.append((l, pos + 12))   # what got added
             pos += l + 12
             last = p2
-        frags.extend(reversed(new))                    # what was left at the end
+        frags.extend(reversed(new))     # what was left at the end
 
     t = collect(b2, frags)
 
--- a/mercurial/pure/osutil.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/pure/osutil.py	Sun May 13 12:52:24 2012 +0200
@@ -119,7 +119,7 @@
                 flags = _O_TEXT
 
             m0 = mode[0]
-            if m0 == 'r' and not '+' in mode:
+            if m0 == 'r' and '+' not in mode:
                 flags |= _O_RDONLY
                 access = _GENERIC_READ
             else:
--- a/mercurial/repair.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/repair.py	Sun May 13 12:52:24 2012 +0200
@@ -6,7 +6,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from mercurial import changegroup, bookmarks, phases
+from mercurial import changegroup, bookmarks
 from mercurial.node import short
 from mercurial.i18n import _
 import os
@@ -38,14 +38,14 @@
     """return the changesets which will be broken by the truncation"""
     s = set()
     def collectone(revlog):
-        links = (revlog.linkrev(i) for i in revlog)
+        linkgen = (revlog.linkrev(i) for i in revlog)
         # find the truncation point of the revlog
-        for lrev in links:
+        for lrev in linkgen:
             if lrev >= striprev:
                 break
         # see if any revision after this point has a linkrev
         # less than striprev (those will be broken by strip)
-        for lrev in links:
+        for lrev in linkgen:
             if lrev < striprev:
                 s.add(lrev)
 
@@ -170,7 +170,3 @@
         raise
 
     repo.destroyed()
-
-    # remove potential unknown phase
-    # XXX using to_strip data would be faster
-    phases.filterunknown(repo)
--- a/mercurial/revlog.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/revlog.py	Sun May 13 12:52:24 2012 +0200
@@ -756,6 +756,15 @@
                 pass
 
     def _partialmatch(self, id):
+        try:
+            return self.index.partialmatch(id)
+        except RevlogError:
+            # parsers.c radix tree lookup gave multiple matches
+            raise LookupError(id, self.indexfile, _("ambiguous identifier"))
+        except (AttributeError, ValueError):
+            # we are pure python, or key was too short to search radix tree
+            pass
+
         if id in self._pcache:
             return self._pcache[id]
 
@@ -1199,7 +1208,7 @@
                     continue
 
                 for p in (p1, p2):
-                    if not p in self.nodemap:
+                    if p not in self.nodemap:
                         raise LookupError(p, self.indexfile,
                                           _('unknown parent'))
 
--- a/mercurial/revset.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/revset.py	Sun May 13 12:52:24 2012 +0200
@@ -108,7 +108,8 @@
                 pos += 1
             else:
                 raise error.ParseError(_("unterminated string"), s)
-        elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
+        # gather up a symbol/keyword
+        elif c.isalnum() or c in '._' or ord(c) > 127:
             s = pos
             pos += 1
             while pos < l: # find end of symbol
@@ -257,7 +258,8 @@
 
 def ancestorspec(repo, subset, x, n):
     """``set~n``
-    Changesets that are the Nth ancestor (first parents only) of a changeset in set.
+    Changesets that are the Nth ancestor (first parents only) of a changeset
+    in set.
     """
     try:
         n = int(n[1])
@@ -289,6 +291,7 @@
     - ``pruned``             : csets that are goods, bads or skipped
     - ``untested``           : csets whose fate is yet unknown
     - ``ignored``            : csets ignored due to DAG topology
+    - ``current``            : the cset currently being bisected
     """
     status = getstring(x, _("bisect requires a string")).lower()
     state = set(hbisect.get(repo, status))
@@ -462,7 +465,26 @@
     """``draft()``
     Changeset in draft phase."""
     getargs(x, 0, 0, _("draft takes no arguments"))
-    return [r for r in subset if repo._phaserev[r] == phases.draft]
+    pc = repo._phasecache
+    return [r for r in subset if pc.phase(repo, r) == phases.draft]
+
+def extra(repo, subset, x):
+    """``extra(label, [value])``
+    Changesets with the given label in the extra metadata, with the given
+    optional value."""
+
+    l = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
+    label = getstring(l[0], _('first argument to extra must be a string'))
+    value = None
+
+    if len(l) > 1:
+        value = getstring(l[1], _('second argument to extra must be a string'))
+
+    def _matchvalue(r):
+        extra = repo[r].extra()
+        return label in extra and (value is None or value == extra[label])
+
+    return [r for r in subset if _matchvalue(r)]
 
 def filelog(repo, subset, x):
     """``filelog(pattern)``
@@ -851,7 +873,8 @@
     """``public()``
     Changeset in public phase."""
     getargs(x, 0, 0, _("public takes no arguments"))
-    return [r for r in subset if repo._phaserev[r] == phases.public]
+    pc = repo._phasecache
+    return [r for r in subset if pc.phase(repo, r) == phases.public]
 
 def remote(repo, subset, x):
     """``remote([id [,path]])``
@@ -1030,7 +1053,8 @@
     """``secret()``
     Changeset in secret phase."""
     getargs(x, 0, 0, _("secret takes no arguments"))
-    return [r for r in subset if repo._phaserev[r] == phases.secret]
+    pc = repo._phasecache
+    return [r for r in subset if pc.phase(repo, r) == phases.secret]
 
 def sort(repo, subset, x):
     """``sort(set[, [-]key...])``
@@ -1143,6 +1167,7 @@
     "descendants": descendants,
     "_firstdescendants": _firstdescendants,
     "draft": draft,
+    "extra": extra,
     "file": hasfile,
     "filelog": filelog,
     "first": first,
--- a/mercurial/scmutil.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/scmutil.py	Sun May 13 12:52:24 2012 +0200
@@ -141,8 +141,9 @@
                 elif (stat.S_ISDIR(st.st_mode) and
                       os.path.isdir(os.path.join(curpath, '.hg'))):
                     if not self.callback or not self.callback(curpath):
-                        raise util.Abort(_("path '%s' is inside nested repo %r") %
-                                         (path, prefix))
+                        raise util.Abort(_("path '%s' is inside nested "
+                                           "repo %r")
+                                         % (path, prefix))
             prefixes.append(normprefix)
             parts.pop()
             normparts.pop()
@@ -654,8 +655,9 @@
             unknown.append(abs)
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
-        elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
-            or (os.path.isdir(target) and not os.path.islink(target))):
+        elif (repo.dirstate[abs] != 'r' and
+              (not good or not os.path.lexists(target) or
+               (os.path.isdir(target) and not os.path.islink(target)))):
             deleted.append(abs)
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
@@ -764,8 +766,9 @@
             missings.append(r)
     missings.sort()
     if missings:
-        raise error.RequirementError(_("unknown repository format: "
-            "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
+        raise error.RequirementError(
+            _("unknown repository format: requires features '%s' (upgrade "
+              "Mercurial)") % "', '".join(missings))
     return requirements
 
 class filecacheentry(object):
--- a/mercurial/setdiscovery.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/setdiscovery.py	Sun May 13 12:52:24 2012 +0200
@@ -134,11 +134,16 @@
         return (ownheadhashes, True, srvheadhashes,)
 
     # full blown discovery
-    undecided = dag.nodeset() # own nodes where I don't know if remote knows them
-    common = set() # own nodes I know we both know
-    missing = set() # own nodes I know remote lacks
 
-    # treat remote heads (and maybe own heads) as a first implicit sample response
+    # own nodes where I don't know if remote knows them
+    undecided = dag.nodeset()
+    # own nodes I know we both know
+    common = set()
+    # own nodes I know remote lacks
+    missing = set()
+
+    # treat remote heads (and maybe own heads) as a first implicit sample
+    # response
     common.update(dag.ancestorset(srvheads))
     undecided.difference_update(common)
 
--- a/mercurial/similar.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/similar.py	Sun May 13 12:52:24 2012 +0200
@@ -44,7 +44,8 @@
     '''
     copies = {}
     for i, r in enumerate(removed):
-        repo.ui.progress(_('searching for similar files'), i, total=len(removed))
+        repo.ui.progress(_('searching for similar files'), i,
+                         total=len(removed))
 
         # lazily load text
         @util.cachefunc
--- a/mercurial/simplemerge.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/simplemerge.py	Sun May 13 12:52:24 2012 +0200
@@ -94,7 +94,7 @@
             elif self.a[0].endswith('\r'):
                 newline = '\r'
         if base_marker and reprocess:
-            raise CantReprocessAndShowBase()
+            raise CantReprocessAndShowBase
         if name_a:
             start_marker = start_marker + ' ' + name_a
         if name_b:
@@ -222,7 +222,8 @@
         # section a[0:ia] has been disposed of, etc
         iz = ia = ib = 0
 
-        for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
+        for region in self.find_sync_regions():
+            zmatch, zend, amatch, aend, bmatch, bend = region
             #print 'match base [%d:%d]' % (zmatch, zend)
 
             matchlen = zend - zmatch
--- a/mercurial/sshrepo.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/sshrepo.py	Sun May 13 12:52:24 2012 +0200
@@ -29,6 +29,7 @@
     def __init__(self, ui, path, create=False):
         self._url = path
         self.ui = ui
+        self.pipeo = self.pipei = self.pipee = None
 
         u = util.url(path, parsequery=False, parsefragment=False)
         if u.scheme != 'ssh' or not u.host or u.path is None:
@@ -86,7 +87,8 @@
             lines.append(l)
             max_noise -= 1
         else:
-            self._abort(error.RepoError(_("no suitable response from remote hg")))
+            self._abort(error.RepoError(_('no suitable response from '
+                                          'remote hg')))
 
         self.capabilities = set()
         for l in reversed(lines):
@@ -110,15 +112,17 @@
         raise exception
 
     def cleanup(self):
+        if self.pipeo is None:
+            return
+        self.pipeo.close()
+        self.pipei.close()
         try:
-            self.pipeo.close()
-            self.pipei.close()
             # read the error descriptor until EOF
             for l in self.pipee:
                 self.ui.status(_("remote: "), l)
-            self.pipee.close()
-        except:
+        except (IOError, ValueError):
             pass
+        self.pipee.close()
 
     __del__ = cleanup
 
--- a/mercurial/subrepo.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/subrepo.py	Sun May 13 12:52:24 2012 +0200
@@ -200,7 +200,8 @@
                  'use (l)ocal source (%s) or (r)emote source (%s)?\n')
                % (subrelpath(sub), local, remote))
     else:
-        msg = (_(' subrepository sources for %s differ (in checked out version)\n'
+        msg = (_(' subrepository sources for %s differ (in checked out '
+                 'version)\n'
                  'use (l)ocal source (%s) or (r)emote source (%s)?\n')
                % (subrelpath(sub), local, remote))
     return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
@@ -498,8 +499,9 @@
                                      % (subrelpath(self), srcurl))
                 parentrepo = self._repo._subparent
                 shutil.rmtree(self._repo.path)
-                other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
-                                         self._repo.root, update=False)
+                other, self._repo = hg.clone(self._repo._subparent.ui, {},
+                                             other, self._repo.root,
+                                             update=False)
                 self._initrepo(parentrepo, source, create=True)
             else:
                 self._repo.ui.status(_('pulling subrepo %s from %s\n')
--- a/mercurial/tags.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/tags.py	Sun May 13 12:52:24 2012 +0200
@@ -181,7 +181,7 @@
             for line in cachelines:
                 if line == "\n":
                     break
-                line = line.rstrip().split()
+                line = line.split()
                 cacherevs.append(int(line[0]))
                 headnode = bin(line[1])
                 cacheheads.append(headnode)
--- a/mercurial/templatefilters.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/templatefilters.py	Sun May 13 12:52:24 2012 +0200
@@ -260,7 +260,7 @@
     >>> person('"Foo Bar <foo@bar>')
     'Foo Bar'
     """
-    if not '@' in author:
+    if '@' not in author:
         return author
     f = author.find('<')
     if f != -1:
--- a/mercurial/templater.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/templater.py	Sun May 13 12:52:24 2012 +0200
@@ -312,7 +312,7 @@
 
     def load(self, t):
         '''Get the template for the given template name. Use a local cache.'''
-        if not t in self.cache:
+        if t not in self.cache:
             try:
                 self.cache[t] = util.readfile(self.map[t][1])
             except KeyError, inst:
--- a/mercurial/transaction.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/transaction.py	Sun May 13 12:52:24 2012 +0200
@@ -164,7 +164,7 @@
                 _playback(self.journal, self.report, self.opener,
                           self.entries, False)
                 self.report(_("rollback completed\n"))
-            except:
+            except Exception:
                 self.report(_("rollback failed - please run hg recover\n"))
         finally:
             self.journal = None
--- a/mercurial/ui.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/ui.py	Sun May 13 12:52:24 2012 +0200
@@ -680,7 +680,8 @@
         printed.'''
         if self.tracebackflag:
             if exc:
-                traceback.print_exception(exc[0], exc[1], exc[2], file=self.ferr)
+                traceback.print_exception(exc[0], exc[1], exc[2],
+                                          file=self.ferr)
             else:
                 traceback.print_exc(file=self.ferr)
         return self.tracebackflag
--- a/mercurial/url.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/url.py	Sun May 13 12:52:24 2012 +0200
@@ -377,7 +377,8 @@
                 keyfile = self.auth['key']
                 certfile = self.auth['cert']
 
-            conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
+            conn = httpsconnection(host, port, keyfile, certfile, *args,
+                                   **kwargs)
             conn.ui = self.ui
             return conn
 
--- a/mercurial/util.h	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/util.h	Sun May 13 12:52:24 2012 +0200
@@ -109,6 +109,7 @@
 typedef int Py_ssize_t;
 typedef Py_ssize_t (*lenfunc)(PyObject *);
 typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
+#define PyInt_FromSsize_t PyInt_FromLong
 
 #if !defined(PY_SSIZE_T_MIN)
 #define PY_SSIZE_T_MAX INT_MAX
--- a/mercurial/util.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/util.py	Sun May 13 12:52:24 2012 +0200
@@ -1079,7 +1079,7 @@
             try:
                 d["d"] = days
                 return parsedate(date, extendeddateformats, d)[0]
-            except:
+            except Abort:
                 pass
         d["d"] = "28"
         return parsedate(date, extendeddateformats, d)[0]
--- a/mercurial/verify.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/verify.py	Sun May 13 12:52:24 2012 +0200
@@ -87,7 +87,7 @@
                         # attempt to filter down to real linkrevs
                         linkrevs = [l for l in linkrevs
                                     if lrugetctx(l)[f].filenode() == node]
-                    except:
+                    except Exception:
                         pass
                 warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
             lr = None # can't be trusted
@@ -189,7 +189,7 @@
                 try:
                     fl = repo.file(f)
                     lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
-                except:
+                except Exception:
                     lr = None
                 err(lr, _("in manifest but not in changeset"), f)
 
--- a/mercurial/win32.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/win32.py	Sun May 13 12:52:24 2012 +0200
@@ -305,7 +305,7 @@
     buf = ctypes.create_string_buffer(size + 1)
     len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size)
     if len == 0:
-        raise ctypes.WinError()
+        raise ctypes.WinError
     elif len == size:
         raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER)
     return buf.value
@@ -315,7 +315,7 @@
     size = _DWORD(300)
     buf = ctypes.create_string_buffer(size.value + 1)
     if not _advapi32.GetUserNameA(ctypes.byref(buf), ctypes.byref(size)):
-        raise ctypes.WinError()
+        raise ctypes.WinError
     return buf.value
 
 _signalhandler = []
@@ -333,7 +333,7 @@
     h = _SIGNAL_HANDLER(handler)
     _signalhandler.append(h) # needed to prevent garbage collection
     if not _kernel32.SetConsoleCtrlHandler(h, True):
-        raise ctypes.WinError()
+        raise ctypes.WinError
 
 def hidewindow():
 
@@ -396,7 +396,7 @@
         None, args, None, None, False, _DETACHED_PROCESS,
         env, os.getcwd(), ctypes.byref(si), ctypes.byref(pi))
     if not res:
-        raise ctypes.WinError()
+        raise ctypes.WinError
 
     return pi.dwProcessId
 
--- a/mercurial/windows.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/windows.py	Sun May 13 12:52:24 2012 +0200
@@ -304,7 +304,7 @@
 
 def groupmembers(name):
     # Don't support groups on Windows for now
-    raise KeyError()
+    raise KeyError
 
 def isexec(f):
     return False
--- a/mercurial/wireproto.py	Sun May 13 11:19:48 2012 +0200
+++ b/mercurial/wireproto.py	Sun May 13 12:52:24 2012 +0200
@@ -24,9 +24,9 @@
 class batcher(object):
     '''base class for batches of commands submittable in a single request
 
-    All methods invoked on instances of this class are simply queued and return a
-    a future for the result. Once you call submit(), all the queued calls are
-    performed and the results set in their respective futures.
+    All methods invoked on instances of this class are simply queued and
+    return a future for the result. Once you call submit(), all the queued
+    calls are performed and the results set in their respective futures.
     '''
     def __init__(self):
         self.calls = []
@@ -51,7 +51,8 @@
 class remotebatch(batcher):
     '''batches the queued calls; uses as few roundtrips as possible'''
     def __init__(self, remote):
-        '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
+        '''remote must support _submitbatch(encbatch) and
+        _submitone(op, encargs)'''
         batcher.__init__(self)
         self.remote = remote
     def submit(self):
@@ -97,14 +98,14 @@
         encresref = future()
         # Return encoded arguments and future:
         yield encargs, encresref
-        # Assuming the future to be filled with the result from the batched request
-        # now. Decode it:
+        # Assuming the future to be filled with the result from the batched
+        # request now. Decode it:
         yield decode(encresref.value)
 
-    The decorator returns a function which wraps this coroutine as a plain method,
-    but adds the original method as an attribute called "batchable", which is
-    used by remotebatch to split the call into separate encoding and decoding
-    phases.
+    The decorator returns a function which wraps this coroutine as a plain
+    method, but adds the original method as an attribute called "batchable",
+    which is used by remotebatch to split the call into separate encoding and
+    decoding phases.
     '''
     def plain(*args, **opts):
         batchable = f(*args, **opts)
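
(Editor's note, not part of the changeset: a hedged sketch of how the
batcher machinery documented above is typically driven by a caller.
'remote' is assumed to be a wire peer obtained with something like
hg.repository(ui, 'ssh://...'), and the commands shown are assumed to be
among those marked @batchable in this module.)

    # every call on the batch object is queued and returns a future
    batch = remote.batch()
    fheads = batch.heads()
    fbooks = batch.listkeys('bookmarks')
    batch.submit()             # one round-trip performs both commands
    print fheads.value         # list of head nodes
    print fbooks.value         # bookmark name -> hex node mapping
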
--- a/setup.py	Sun May 13 11:19:48 2012 +0200
+++ b/setup.py	Sun May 13 12:52:24 2012 +0200
@@ -23,24 +23,25 @@
 try:
     import hashlib
     sha = hashlib.sha1()
-except:
+except ImportError:
     try:
         import sha
-    except:
+    except ImportError:
         raise SystemExit(
             "Couldn't import standard hashlib (incomplete Python install).")
 
 try:
     import zlib
-except:
+except ImportError:
     raise SystemExit(
         "Couldn't import standard zlib (incomplete Python install).")
 
 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
 isironpython = False
 try:
-    isironpython = platform.python_implementation().lower().find("ironpython") != -1
-except:
+    isironpython = (platform.python_implementation()
+                    .lower().find("ironpython") != -1)
+except AttributeError:
     pass
 
 if isironpython:
@@ -48,7 +49,7 @@
 else:
     try:
         import bz2
-    except:
+    except ImportError:
         raise SystemExit(
             "Couldn't import standard bz2 (incomplete Python install).")
 
@@ -107,7 +108,7 @@
             os.dup2(devnull.fileno(), sys.stderr.fileno())
             objects = cc.compile([fname], output_dir=tmpdir)
             cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
-        except:
+        except Exception:
             return False
         return True
     finally:
@@ -211,10 +212,12 @@
     # Insert hgbuildmo first so that files in mercurial/locale/ are found
     # when build_py is run next.
     sub_commands = [('build_mo', None),
-    # We also need build_ext before build_py. Otherwise, when 2to3 is called (in
-    # build_py), it will not find osutil & friends, thinking that those modules are
-    # global and, consequently, making a mess, now that all module imports are
-    # global.
+
+    # We also need build_ext before build_py. Otherwise, when 2to3 is
+    # called (in build_py), it will not find osutil & friends,
+    # thinking that those modules are global and, consequently, making
+    # a mess, now that all module imports are global.
+
                     ('build_ext', build.has_ext_modules),
                    ] + build.sub_commands
 
@@ -292,7 +295,8 @@
             self.distribution.ext_modules = []
         else:
             if not os.path.exists(os.path.join(get_python_inc(), 'Python.h')):
-                raise SystemExit("Python headers are required to build Mercurial")
+                raise SystemExit('Python headers are required to build '
+                                 'Mercurial')
 
     def find_modules(self):
         modules = build_py.find_modules(self)
--- a/tests/hghave	Sun May 13 11:19:48 2012 +0200
+++ b/tests/hghave	Sun May 13 12:52:24 2012 +0200
@@ -31,14 +31,14 @@
 def has_bzr():
     try:
         import bzrlib
-        return bzrlib.__doc__ != None
+        return bzrlib.__doc__ is not None
     except ImportError:
         return False
 
 def has_bzr114():
     try:
         import bzrlib
-        return (bzrlib.__doc__ != None
+        return (bzrlib.__doc__ is not None
                 and bzrlib.version_info[:2] >= (1, 14))
     except ImportError:
         return False
@@ -60,7 +60,7 @@
         os.close(fd)
         os.remove(path)
         return True
-    except:
+    except (IOError, OSError):
         return False
 
 def has_executablebit():
@@ -93,7 +93,7 @@
         try:
             s2 = os.stat(p2)
             return s2 == s1
-        except:
+        except OSError:
             return False
     finally:
         os.remove(path)
@@ -106,7 +106,7 @@
         return False
 
 def has_fifo():
-    return hasattr(os, "mkfifo")
+    return getattr(os, "mkfifo", None) is not None
 
 def has_cacheable_fs():
     from mercurial import util
@@ -165,10 +165,11 @@
         return False
 
 def has_p4():
-    return matchoutput('p4 -V', r'Rev\. P4/') and matchoutput('p4d -V', r'Rev\. P4D/')
+    return (matchoutput('p4 -V', r'Rev\. P4/') and
+            matchoutput('p4d -V', r'Rev\. P4D/'))
 
 def has_symlink():
-    if not hasattr(os, "symlink"):
+    if getattr(os, "symlink", None) is None:
         return False
     name = tempfile.mktemp(dir=".", prefix='hg-checklink-')
     try:
--- a/tests/run-tests.py	Sun May 13 11:19:48 2012 +0200
+++ b/tests/run-tests.py	Sun May 13 12:52:24 2012 +0200
@@ -860,7 +860,7 @@
         tf = open(testpath)
         firstline = tf.readline().rstrip()
         tf.close()
-    except:
+    except IOError:
         firstline = ''
     lctest = test.lower()
 
@@ -1187,6 +1187,7 @@
     os.environ['http_proxy'] = ''
     os.environ['no_proxy'] = ''
     os.environ['NO_PROXY'] = ''
+    os.environ['TERM'] = 'xterm'
 
     # unset env related to hooks
     for k in os.environ.keys():
--- a/tests/test-bisect.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-bisect.t	Sun May 13 12:52:24 2012 +0200
@@ -224,6 +224,7 @@
   Testing changeset 12:1941b52820a5 (23 changesets remaining, ~4 tests)
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cat .hg/bisect.state
+  current 1941b52820a544549596820a8ae006842b0e2c64
   skip 9d7d07bc967ca98ad0600c24953fd289ad5fa991
   skip ce8f0998e922c179e80819d5066fbe46e2998784
   skip e7fa0811edb063f6319531f0d0a865882138e180
@@ -396,6 +397,12 @@
   date:        Thu Jan 01 00:00:06 1970 +0000
   summary:     msg 6
   
+  $ hg log -r "bisect(current)"
+  changeset:   5:7874a09ea728
+  user:        test
+  date:        Thu Jan 01 00:00:05 1970 +0000
+  summary:     msg 5
+  
   $ hg log -r "bisect(skip)"
   changeset:   1:5cd978ea5149
   user:        test
@@ -466,3 +473,40 @@
   date:        Thu Jan 01 00:00:06 1970 +0000
   summary:     msg 6
   
+
+
+test bisecting via a command without updating the working dir, and
+ensure that the bisect state file is updated before running a test
+command
+
+  $ hg update null
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ cat > script.sh <<'EOF'
+  > #!/bin/sh
+  > test -n "$HG_NODE" || (echo HG_NODE missing; exit 127)
+  > current="`hg log -r \"bisect(current)\" --template {node}`"
+  > test "$current" = "$HG_NODE" || (echo current is bad: $current; exit 127)
+  > rev="`hg log -r $HG_NODE --template {rev}`"
+  > test "$rev" -ge 6
+  > EOF
+  $ chmod +x script.sh
+  $ hg bisect -r
+  $ hg bisect --good tip --noupdate
+  $ hg bisect --bad 0 --noupdate
+  Testing changeset 15:e7fa0811edb0 (31 changesets remaining, ~4 tests)
+  $ hg bisect --command "'`pwd`/script.sh' and some params" --noupdate
+  Changeset 15:e7fa0811edb0: good
+  Changeset 7:03750880c6b5: good
+  Changeset 3:b53bea5e2fcb: bad
+  Changeset 5:7874a09ea728: bad
+  Changeset 6:a3d5c6fdf0d3: good
+  The first good revision is:
+  changeset:   6:a3d5c6fdf0d3
+  user:        test
+  date:        Thu Jan 01 00:00:06 1970 +0000
+  summary:     msg 6
+  
+
+ensure that we still don't have a working dir
+
+  $ hg parents
--- a/tests/test-bookmarks-pushpull.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-bookmarks-pushpull.t	Sun May 13 12:52:24 2012 +0200
@@ -29,9 +29,13 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files
   updating bookmark Y
+  adding remote bookmark X
+  adding remote bookmark Z
   (run 'hg update' to get a working copy)
   $ hg bookmarks
+     X                         0:4e3505fd9583
      Y                         0:4e3505fd9583
+     Z                         0:4e3505fd9583
   $ hg debugpushkey ../a namespaces
   bookmarks	
   phases	
@@ -47,6 +51,7 @@
   $ hg bookmark
      X                         0:4e3505fd9583
      Y                         0:4e3505fd9583
+     Z                         0:4e3505fd9583
 
 export bookmark by name
 
@@ -111,6 +116,7 @@
   $ hg book
    * X                         1:9b140be10808
      Y                         0:4e3505fd9583
+     Z                         0:4e3505fd9583
      foo                       -1:000000000000
      foobar                    1:9b140be10808
 
@@ -122,11 +128,13 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files (+1 heads)
   divergent bookmark X stored as X@foo
+  updating bookmark Z
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg book
    * X                         1:9b140be10808
      X@foo                     2:0d2164f0ce0d
      Y                         0:4e3505fd9583
+     Z                         2:0d2164f0ce0d
      foo                       -1:000000000000
      foobar                    1:9b140be10808
   $ hg push -f ../a
@@ -159,13 +167,15 @@
   namespaces	
   $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
   Y	4e3505fd95835d721066b76e75dbb8cc554d7f77
-  X	9b140be1080824d768c5a4691a564088eede71f9
+  foobar	9b140be1080824d768c5a4691a564088eede71f9
+  Z	0d2164f0ce0d8f1d6f94351eba04b794909be66c
   foo	0000000000000000000000000000000000000000
-  foobar	9b140be1080824d768c5a4691a564088eede71f9
+  X	9b140be1080824d768c5a4691a564088eede71f9
   $ hg out -B http://localhost:$HGPORT/
   comparing with http://localhost:$HGPORT/
   searching for changed bookmarks
-     Z                         0d2164f0ce0d
+  no changed bookmarks found
+  [1]
   $ hg push -B Z http://localhost:$HGPORT/
   pushing to http://localhost:$HGPORT/
   searching for changes
@@ -182,6 +192,9 @@
   $ hg pull -B Z http://localhost:$HGPORT/
   pulling from http://localhost:$HGPORT/
   no changes found
+  adding remote bookmark foobar
+  adding remote bookmark Z
+  adding remote bookmark foo
   divergent bookmark X stored as X@1
   importing bookmark Z
   $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
--- a/tests/test-branches.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-branches.t	Sun May 13 12:52:24 2012 +0200
@@ -241,6 +241,11 @@
   default                        0:19709c5a4e75 (inactive)
   $ hg branches -a
   a branch name much longer than the default justification used by branches 7:10ff5895aa57
+  $ hg branches -q
+  a branch name much longer than the default justification used by branches
+  c
+  a
+  default
   $ hg heads b
   no open branch heads found on branches b
   [1]
--- a/tests/test-check-code-hg.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-check-code-hg.t	Sun May 13 12:52:24 2012 +0200
@@ -8,63 +8,9 @@
   $ hg manifest | xargs "$check_code" || echo 'FAILURE IS NOT AN OPTION!!!'
 
   $ hg manifest | xargs "$check_code" --warnings --nolineno --per-file=0 || true
-  contrib/check-code.py:0:
-   > #    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', "don't use underbars in identifiers"),
-   warning: line over 80 characters
-  contrib/perf.py:0:
-   >         except:
-   warning: naked except clause
-  contrib/perf.py:0:
-   >     #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, False))))
-   warning: line over 80 characters
-  contrib/perf.py:0:
-   >     except:
-   warning: naked except clause
-  contrib/setup3k.py:0:
-   >         except:
-   warning: naked except clause
-  contrib/setup3k.py:0:
-   >     except:
-   warning: naked except clause
-  contrib/setup3k.py:0:
-   > except:
-   warning: naked except clause
-   warning: naked except clause
-   warning: naked except clause
   contrib/shrink-revlog.py:0:
    >         except:
    warning: naked except clause
-  doc/gendoc.py:0:
-   >                "together with Mercurial. Help for other extensions is available "
-   warning: line over 80 characters
-  hgext/bugzilla.py:0:
-   >                 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
-   warning: line over 80 characters
-  hgext/bugzilla.py:0:
-   >             bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
-   warning: line over 80 characters
-  hgext/convert/__init__.py:0:
-   >           ('', 'ancestors', '', _('show current changeset in ancestor branches')),
-   warning: line over 80 characters
-  hgext/convert/bzr.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/common.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/common.py:0:
-   >         except:
-   warning: naked except clause
-   warning: naked except clause
-  hgext/convert/convcmd.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/cvs.py:0:
-   >                                 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
-   warning: line over 80 characters
-  hgext/convert/cvsps.py:0:
-   >                     assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
-   warning: line over 80 characters
   hgext/convert/cvsps.py:0:
    >                     ui.write('Ancestors: %s\n' % (','.join(r)))
    warning: unwrapped ui message
@@ -75,9 +21,6 @@
    >                     ui.write('Parents: %s\n' %
    warning: unwrapped ui message
   hgext/convert/cvsps.py:0:
-   >                 except:
-   warning: naked except clause
-  hgext/convert/cvsps.py:0:
    >                 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
    warning: unwrapped ui message
   hgext/convert/cvsps.py:0:
@@ -101,59 +44,6 @@
   hgext/convert/cvsps.py:0:
    >             ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
    warning: unwrapped ui message
-  hgext/convert/git.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/git.py:0:
-   >             fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
-   warning: line over 80 characters
-  hgext/convert/hg.py:0:
-   >             # detect missing revlogs and abort on errors or populate self.ignored
-   warning: line over 80 characters
-  hgext/convert/hg.py:0:
-   >             except:
-   warning: naked except clause
-   warning: naked except clause
-  hgext/convert/hg.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/monotone.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/monotone.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/convert/subversion.py:0:
-   >                 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
-   warning: line over 80 characters
-  hgext/convert/subversion.py:0:
-   >             except:
-   warning: naked except clause
-  hgext/convert/subversion.py:0:
-   >         args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
-   warning: line over 80 characters
-  hgext/convert/subversion.py:0:
-   >         self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
-   warning: line over 80 characters
-  hgext/convert/subversion.py:0:
-   >     except:
-   warning: naked except clause
-  hgext/convert/subversion.py:0:
-   > def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
-   warning: line over 80 characters
-  hgext/eol.py:0:
-   >     if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
-   warning: line over 80 characters
-   warning: line over 80 characters
-  hgext/gpg.py:0:
-   >                 except:
-   warning: naked except clause
-  hgext/hgcia.py:0:
-   > except:
-   warning: naked except clause
-  hgext/hgk.py:0:
-   >         ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
-   warning: line over 80 characters
   hgext/hgk.py:0:
    >         ui.write("parent %s\n" % p)
    warning: unwrapped ui message
@@ -173,40 +63,14 @@
    >     ui.write("revision %d\n" % ctx.rev())
    warning: unwrapped ui message
   hgext/hgk.py:0:
-   >     ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
-   warning: line over 80 characters
+   >     ui.write("tree %s\n" % short(ctx.changeset()[0]))
    warning: unwrapped ui message
-  hgext/highlight/__init__.py:0:
-   >     extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
-   warning: line over 80 characters
-  hgext/highlight/__init__.py:0:
-   >     return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
-   warning: line over 80 characters
-  hgext/inotify/__init__.py:0:
-   >             if self._inotifyon and not ignored and not subrepos and not self._dirty:
-   warning: line over 80 characters
-  hgext/inotify/server.py:0:
-   >                     except:
-   warning: naked except clause
-  hgext/inotify/server.py:0:
-   >             except:
-   warning: naked except clause
   hgext/keyword.py:0:
    >     ui.note("hg ci -m '%s'\n" % msg)
    warning: unwrapped ui message
   hgext/mq.py:0:
-   >                     raise util.Abort(_("cannot push --exact with applied patches"))
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >                     raise util.Abort(_("cannot use --exact and --move together"))
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >                     self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
-   warning: line over 80 characters
-  hgext/mq.py:0:
    >                 except:
    warning: naked except clause
-   warning: naked except clause
   hgext/mq.py:0:
    >             except:
    warning: naked except clause
@@ -214,72 +78,14 @@
    warning: naked except clause
    warning: naked except clause
   hgext/mq.py:0:
-   >             raise util.Abort(_('cannot mix -l/--list with options or arguments'))
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >             raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >           ('U', 'noupdate', None, _('do not update the new working directories')),
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >           ('e', 'exact', None, _('apply the target patch to its recorded parent')),
-   warning: line over 80 characters
-  hgext/mq.py:0:
-   >         except:
-   warning: naked except clause
-  hgext/mq.py:0:
    >         ui.write("mq:     %s\n" % ', '.join(m))
    warning: unwrapped ui message
-  hgext/mq.py:0:
-   >     repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
-   warning: line over 80 characters
-  hgext/notify.py:0:
-   >                 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
-   warning: line over 80 characters
-  hgext/patchbomb.py:0:
-   >                                                   binnode, seqno=idx, total=total)
-   warning: line over 80 characters
-  hgext/patchbomb.py:0:
-   >             except:
-   warning: naked except clause
   hgext/patchbomb.py:0:
    >             ui.write('Subject: %s\n' % subj)
    warning: unwrapped ui message
   hgext/patchbomb.py:0:
-   >         p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
-   warning: line over 80 characters
-  hgext/patchbomb.py:0:
    >         ui.write('From: %s\n' % sender)
    warning: unwrapped ui message
-  hgext/record.py:0:
-   >                                   ignoreblanklines=opts.get('ignore_blank_lines'))
-   warning: line over 80 characters
-  hgext/record.py:0:
-   >                                   ignorewsamount=opts.get('ignore_space_change'),
-   warning: line over 80 characters
-  hgext/zeroconf/__init__.py:0:
-   >             publish(name, desc, path, util.getport(u.config("web", "port", 8000)))
-   warning: line over 80 characters
-  hgext/zeroconf/__init__.py:0:
-   >     except:
-   warning: naked except clause
-   warning: naked except clause
-  mercurial/bundlerepo.py:0:
-   >       is a bundlerepo for the obtained bundle when the original "other" is remote.
-   warning: line over 80 characters
-  mercurial/bundlerepo.py:0:
-   >     "local" is a local repo from which to obtain the actual incoming changesets; it
-   warning: line over 80 characters
-  mercurial/bundlerepo.py:0:
-   >     tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force)
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >                  "     size " + basehdr + "   link     p1     p2       nodeid\n")
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >                 raise util.Abort('cannot use localheads with old style discovery')
-   warning: line over 80 characters
   mercurial/commands.py:0:
    >                 ui.note('branch %s\n' % data)
    warning: unwrapped ui message
@@ -293,18 +99,6 @@
    >                 ui.write("unpruned common: %s\n" % " ".join([short(n)
    warning: unwrapped ui message
   mercurial/commands.py:0:
-   >                 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >                 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             except:
-   warning: naked except clause
-  mercurial/commands.py:0:
-   >             ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
    >             ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
    warning: unwrapped ui message
   mercurial/commands.py:0:
@@ -314,17 +108,7 @@
    >             ui.write("remote is subset\n")
    warning: unwrapped ui message
   mercurial/commands.py:0:
-   >             ui.write('    other            : ' + fmt2 % pcfmt(numoprev, numprev))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             ui.write('    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             ui.write('    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >             ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
-   warning: line over 80 characters
+   >             ui.write('deltas against other : ' + fmt % pcfmt(numother,
    warning: unwrapped ui message
   mercurial/commands.py:0:
    >             ui.write('deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas))
@@ -336,9 +120,6 @@
    >         except:
    warning: naked except clause
   mercurial/commands.py:0:
-   >         revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
    >         ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
    warning: unwrapped ui message
   mercurial/commands.py:0:
@@ -354,12 +135,6 @@
    >         ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
    warning: unwrapped ui message
   mercurial/commands.py:0:
-   >     Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
-   warning: line over 80 characters
-  mercurial/commands.py:0:
-   >     remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
-   warning: line over 80 characters
-  mercurial/commands.py:0:
    >     ui.write("digraph G {\n")
    warning: unwrapped ui message
   mercurial/commands.py:0:
@@ -402,229 +177,52 @@
   mercurial/commands.py:0:
    >     ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
    warning: unwrapped ui message
-  mercurial/commandserver.py:0:
-   >         # the ui here is really the repo ui so take its baseui so we don't end up
-   warning: line over 80 characters
-  mercurial/context.py:0:
-   >                 return self._manifestdelta[path], self._manifestdelta.flags(path)
-   warning: line over 80 characters
-  mercurial/dagparser.py:0:
-   >             raise util.Abort(_("invalid character in dag description: %s...") % s)
-   warning: line over 80 characters
-  mercurial/dagparser.py:0:
-   >         >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))])
-   warning: line over 80 characters
-  mercurial/dirstate.py:0:
-   >                 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
-   warning: line over 80 characters
-  mercurial/discovery.py:0:
-   >     If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
-   warning: line over 80 characters
-  mercurial/discovery.py:0:
-   > def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None):
-   warning: line over 80 characters
-  mercurial/dispatch.py:0:
-   >                                                 " (.hg not found)") % os.getcwd())
-   warning: line over 80 characters
   mercurial/dispatch.py:0:
    >         except:
    warning: naked except clause
   mercurial/dispatch.py:0:
-   >         return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
-   warning: line over 80 characters
-  mercurial/dispatch.py:0:
-   >     def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None):
-   warning: line over 80 characters
-  mercurial/dispatch.py:0:
    >     except:
    warning: naked except clause
   mercurial/hg.py:0:
    >     except:
    warning: naked except clause
-  mercurial/hgweb/hgweb_mod.py:0:
-   >             self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
-   warning: line over 80 characters
   mercurial/keepalive.py:0:
    >         except:
    warning: naked except clause
-  mercurial/keepalive.py:0:
-   >     except:
-   warning: naked except clause
-  mercurial/localrepo.py:0:
-   >                         # we return an integer indicating remote head count change
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >                     raise util.Abort(_("empty or missing revlog for %s") % fname)
-   warning: line over 80 characters
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >                 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >                 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >             # new requirements = old non-format requirements + new format-related
-   warning: line over 80 characters
   mercurial/localrepo.py:0:
    >             except:
    warning: naked except clause
-  mercurial/localrepo.py:0:
-   >         """return status of files between two nodes or node and working directory
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
-   >         '''Returns a tagscache object that contains various tags related caches.'''
-   warning: line over 80 characters
-  mercurial/manifest.py:0:
-   >             return "".join(struct.pack(">lll", start, end, len(content)) + content
-   warning: line over 80 characters
-  mercurial/merge.py:0:
-   >                 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
-   warning: line over 80 characters
-  mercurial/patch.py:0:
-   >                  modified, added, removed, copy, getfilectx, opts, losedata, prefix)
-   warning: line over 80 characters
-  mercurial/patch.py:0:
-   >         diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
-   warning: line over 80 characters
-  mercurial/patch.py:0:
-   >         output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
-   warning: line over 80 characters
   mercurial/patch.py:0:
    >     except:
    warning: naked except clause
-  mercurial/pure/mpatch.py:0:
-   >         frags.extend(reversed(new))                    # what was left at the end
-   warning: line over 80 characters
   mercurial/repair.py:0:
    >         except:
    warning: naked except clause
   mercurial/repair.py:0:
    >     except:
    warning: naked except clause
-  mercurial/revset.py:0:
-   >         elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
-   warning: line over 80 characters
-  mercurial/revset.py:0:
-   >     Changesets that are the Nth ancestor (first parents only) of a changeset in set.
-   warning: line over 80 characters
-  mercurial/scmutil.py:0:
-   >                         raise util.Abort(_("path '%s' is inside nested repo %r") %
-   warning: line over 80 characters
-  mercurial/scmutil.py:0:
-   >             "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
-   warning: line over 80 characters
-  mercurial/scmutil.py:0:
-   >         elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
-   warning: line over 80 characters
-  mercurial/setdiscovery.py:0:
-   >     # treat remote heads (and maybe own heads) as a first implicit sample response
-   warning: line over 80 characters
-  mercurial/setdiscovery.py:0:
-   >     undecided = dag.nodeset() # own nodes where I don't know if remote knows them
-   warning: line over 80 characters
-  mercurial/similar.py:0:
-   >         repo.ui.progress(_('searching for similar files'), i, total=len(removed))
-   warning: line over 80 characters
-  mercurial/simplemerge.py:0:
-   >         for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
-   warning: line over 80 characters
-  mercurial/sshrepo.py:0:
-   >             self._abort(error.RepoError(_("no suitable response from remote hg")))
-   warning: line over 80 characters
-  mercurial/sshrepo.py:0:
-   >         except:
-   warning: naked except clause
-  mercurial/subrepo.py:0:
-   >                 other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
-   warning: line over 80 characters
-  mercurial/subrepo.py:0:
-   >         msg = (_(' subrepository sources for %s differ (in checked out version)\n'
-   warning: line over 80 characters
-  mercurial/transaction.py:0:
-   >             except:
-   warning: naked except clause
-  mercurial/ui.py:0:
-   >                 traceback.print_exception(exc[0], exc[1], exc[2], file=self.ferr)
-   warning: line over 80 characters
-  mercurial/url.py:0:
-   >             conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
-   warning: line over 80 characters
-  mercurial/util.py:0:
-   >             except:
-   warning: naked except clause
   mercurial/util.py:0:
    >     except:
    warning: naked except clause
-  mercurial/verify.py:0:
-   >                     except:
-   warning: naked except clause
-  mercurial/verify.py:0:
-   >                 except:
-   warning: naked except clause
-  mercurial/wireproto.py:0:
-   >         # Assuming the future to be filled with the result from the batched request
-   warning: line over 80 characters
-  mercurial/wireproto.py:0:
-   >         '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
-   warning: line over 80 characters
-  mercurial/wireproto.py:0:
-   >     All methods invoked on instances of this class are simply queued and return a
-   warning: line over 80 characters
-  mercurial/wireproto.py:0:
-   >     The decorator returns a function which wraps this coroutine as a plain method,
-   warning: line over 80 characters
-  setup.py:0:
-   >                 raise SystemExit("Python headers are required to build Mercurial")
-   warning: line over 80 characters
-  setup.py:0:
-   >         except:
-   warning: naked except clause
-  setup.py:0:
-   >     # build_py), it will not find osutil & friends, thinking that those modules are
-   warning: line over 80 characters
-  setup.py:0:
-   >     except:
-   warning: naked except clause
-   warning: naked except clause
-  setup.py:0:
-   >     isironpython = platform.python_implementation().lower().find("ironpython") != -1
-   warning: line over 80 characters
-  setup.py:0:
-   > except:
-   warning: naked except clause
-   warning: naked except clause
-   warning: naked except clause
   tests/autodiff.py:0:
    >         ui.write('data lost for: %s\n' % fn)
    warning: unwrapped ui message
-  tests/run-tests.py:0:
-   >     except:
-   warning: naked except clause
-  tests/test-commandserver.py:0:
-   >                         'hooks.pre-identify=python:test-commandserver.hook', 'id'],
-   warning: line over 80 characters
-  tests/test-commandserver.py:0:
-   >     # the cached repo local hgrc contains ui.foo=bar, so showconfig should show it
-   warning: line over 80 characters
-  tests/test-commandserver.py:0:
-   >     print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***', data))
-   warning: line over 80 characters
-  tests/test-filecache.py:0:
-   >     except:
-   warning: naked except clause
-  tests/test-filecache.py:0:
-   > if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']):
-   warning: line over 80 characters
+  tests/test-convert-mtn.t:0:
+   >   > function get_passphrase(keypair_id)
+   don't use 'function', use old style
+  tests/test-import-git.t:0:
+   >   > Mc\${NkU|\`?^000jF3jhEB
+   ^ must be quoted
+  tests/test-import.t:0:
+   >   > diff -Naur proj-orig/foo proj-new/foo
+   don't use 'diff -N'
+   don't use 'diff -N'
+  tests/test-schemes.t:0:
+   >   > z = file:\$PWD/
+   don't use $PWD, use `pwd`
   tests/test-ui-color.py:0:
    > testui.warn('warning\n')
    warning: unwrapped ui message
   tests/test-ui-color.py:0:
    > testui.write('buffered\n')
    warning: unwrapped ui message
-  tests/test-walkrepo.py:0:
-   >         print "Found %d repositories when I should have found 2" % (len(reposet),)
-   warning: line over 80 characters
-  tests/test-walkrepo.py:0:
-   >         print "Found %d repositories when I should have found 3" % (len(reposet),)
-   warning: line over 80 characters
--- a/tests/test-commandserver.py	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-commandserver.py	Sun May 13 12:52:24 2012 +0200
@@ -18,7 +18,7 @@
 def readchannel(server):
     data = server.stdout.read(5)
     if not data:
-        raise EOFError()
+        raise EOFError
     channel, length = struct.unpack('>cI', data)
     if channel in 'IL':
         return channel, length
@@ -71,7 +71,8 @@
 def hellomessage(server):
     ch, data = readchannel(server)
     # escaping python tests output not supported
-    print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***', data))
+    print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***',
+                                 data))
 
     # run an arbitrary command to make sure the next thing the server sends
     # isn't part of the hello message
@@ -142,7 +143,8 @@
     is used """
     readchannel(server)
 
-    # the cached repo local hgrc contains ui.foo=bar, so showconfig should show it
+    # the cached repo local hgrc contains ui.foo=bar, so showconfig should
+    # show it
     runcommand(server, ['showconfig'])
 
     # but not for this repo
@@ -157,7 +159,8 @@
 def hookoutput(server):
     readchannel(server)
     runcommand(server, ['--config',
-                        'hooks.pre-identify=python:test-commandserver.hook', 'id'],
+                        'hooks.pre-identify=python:test-commandserver.hook',
+                        'id'],
                input=cStringIO.StringIO('some input'))
 
 def outsidechanges(server):
--- a/tests/test-convert-baz	Sun May 13 11:19:48 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,79 +0,0 @@
-#!/bin/sh
-
-"$TESTDIR/hghave" baz || exit 80
-
-baz my-id "mercurial <mercurial@selenic.com>"
-
-echo "[extensions]" >> $HGRCPATH
-echo "convert=" >> $HGRCPATH
-echo 'graphlog =' >> $HGRCPATH
-
-echo % create baz archive
-baz make-archive baz@mercurial--convert hg-test-convert-baz
-
-echo % initialize baz repo
-mkdir baz-repo
-cd baz-repo/
-baz init-tree baz@mercurial--convert/baz--test--0
-baz import
-
-echo % create initial files
-echo 'this is a file' > a
-baz add a
-mkdir src
-baz add src
-cd src
-dd count=1 if=/dev/zero of=b > /dev/null 2> /dev/null
-baz add b
-# HACK: hide GNU tar-1.22 "tar: The --preserve option is deprecated, use --preserve-permissions --preserve-order instead"
-baz commit -s "added a file, src and src/b (binary)" 2>&1 | grep -v '^tar'
-
-echo % create link file and modify a
-ln -s ../a a-link
-baz add a-link
-echo 'this a modification to a' >> ../a
-baz commit -s "added link to a and modify a"
-
-echo % create second link and modify b
-ln -s ../a a-link-2
-baz add a-link-2
-dd count=1 seek=1 if=/dev/zero of=b > /dev/null 2> /dev/null
-baz commit -s "added second link and modify b"
-
-echo % b file to link and a-link-2 to regular file
-rm -f a-link-2
-echo 'this is now a regular file' > a-link-2
-ln -sf ../a b
-baz commit -s "file to link and link to file test"
-
-echo % move a-link-2 file and src directory
-cd ..
-baz mv src/a-link-2 c
-baz mv src test
-baz commit -s "move and rename a-link-2 file and src directory"
-
-echo % move and add the moved file again
-echo e > e
-baz add e
-baz commit -s "add e"
-baz mv e f
-echo ee > e
-baz add e
-baz commit -s "move e and recreate it again"
-cd ..
-
-echo % converting baz repo to Mercurial
-hg convert baz-repo baz-repo-hg
-
-baz register-archive -d baz@mercurial--convert
-
-glog()
-{
-    hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@"
-}
-
-echo % show graph log
-glog -R baz-repo-hg
-hg up -q -R baz-repo-hg
-hg -R baz-repo-hg manifest --debug
-hg -R baz-repo-hg log -r 5 -r 7 -C --debug | grep copies
--- a/tests/test-convert-baz.out	Sun May 13 11:19:48 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,96 +0,0 @@
-% create baz archive
-% initialize baz repo
-* creating version baz@mercurial--convert/baz--test--0
-* imported baz@mercurial--convert/baz--test--0
-% create initial files
-* build pristine tree for baz@mercurial--convert/baz--test--0--base-0
-* Scanning for full-tree revision: .
-* from import revision: baz@mercurial--convert/baz--test--0--base-0
-A/ .arch-ids
-A/ src
-A/ src/.arch-ids
-A  .arch-ids/a.id
-A  a
-A  src/.arch-ids/=id
-A  src/.arch-ids/b.id
-A  src/b
-* update pristine tree (baz@mercurial--convert/baz--test--0--base-0 => baz--test--0--patch-1)
-* committed baz@mercurial--convert/baz--test--0--patch-1
-% create link file and modify a
-A  src/.arch-ids/a-link.id
-A  src/a-link
-M  a
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-1 => baz--test--0--patch-2)
-* committed baz@mercurial--convert/baz--test--0--patch-2
-% create second link and modify b
-A  src/.arch-ids/a-link-2.id
-A  src/a-link-2
-Mb src/b
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-2 => baz--test--0--patch-3)
-* committed baz@mercurial--convert/baz--test--0--patch-3
-% b file to link and a-link-2 to regular file
-fl src/b
-lf src/a-link-2
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-3 => baz--test--0--patch-4)
-* committed baz@mercurial--convert/baz--test--0--patch-4
-% move a-link-2 file and src directory
-D/ src/.arch-ids
-A/ test/.arch-ids
-/> src	test
-=> src/.arch-ids/a-link-2.id	.arch-ids/c.id
-=> src/a-link-2	c
-=> src/.arch-ids/=id	test/.arch-ids/=id
-=> src/.arch-ids/a-link.id	test/.arch-ids/a-link.id
-=> src/.arch-ids/b.id	test/.arch-ids/b.id
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-4 => baz--test--0--patch-5)
-* committed baz@mercurial--convert/baz--test--0--patch-5
-% move and add the moved file again
-A  .arch-ids/e.id
-A  e
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-5 => baz--test--0--patch-6)
-* committed baz@mercurial--convert/baz--test--0--patch-6
-A  .arch-ids/e.id
-A  e
-=> .arch-ids/e.id	.arch-ids/f.id
-=> e	f
-* update pristine tree (baz@mercurial--convert/baz--test--0--patch-6 => baz--test--0--patch-7)
-* committed baz@mercurial--convert/baz--test--0--patch-7
-% converting baz repo to Mercurial
-initializing destination baz-repo-hg repository
-analyzing tree version baz@mercurial--convert/baz--test--0...
-scanning source...
-sorting...
-converting...
-7 initial import
-6 added a file, src and src/b (binary)
-5 added link to a and modify a
-4 added second link and modify b
-3 file to link and link to file test
-2 move and rename a-link-2 file and src directory
-1 add e
-0 move e and recreate it again
-% show graph log
-o  7 "move e and recreate it again" files: e f
-|
-o  6 "add e" files: e
-|
-o  5 "move and rename a-link-2 file and src directory" files: c src/a-link src/a-link-2 src/b test/a-link test/b
-|
-o  4 "file to link and link to file test" files: src/a-link-2 src/b
-|
-o  3 "added second link and modify b" files: src/a-link-2 src/b
-|
-o  2 "added link to a and modify a" files: a src/a-link
-|
-o  1 "added a file, src and src/b (binary)" files: a src/b
-|
-o  0 "initial import" files:
-
-c4072c4b72e1cabace081888efa148ee80ca3cbb 644   a
-0201ac32a3a8e86e303dff60366382a54b48a72e 644   c
-1a4a864db0073705a11b1439f563bfa4b46d9246 644   e
-09e0222742fc3f75777fa9d68a5d8af7294cb5e7 644   f
-c0067ba5ff0b7c9a3eb17270839d04614c435623 644 @ test/a-link
-375f4263d86feacdea7e3c27100abd1560f2a973 644 @ test/b
-copies:      c (src/a-link-2) test/a-link (src/a-link) test/b (src/b)
-copies:      f (e)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-baz.t	Sun May 13 12:52:24 2012 +0200
@@ -0,0 +1,163 @@
+  $ "$TESTDIR/hghave" baz || exit 80
+
+  $ baz my-id "mercurial <mercurial@selenic.com>"
+
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "convert=" >> $HGRCPATH
+  $ echo 'graphlog =' >> $HGRCPATH
+
+create baz archive
+  $ baz make-archive baz@mercurial--convert hg-test-convert-baz
+
+initialize baz repo
+  $ mkdir baz-repo
+  $ cd baz-repo/
+  $ baz init-tree baz@mercurial--convert/baz--test--0
+  $ baz import
+  * creating version baz@mercurial--convert/baz--test--0
+  * imported baz@mercurial--convert/baz--test--0
+
+create initial files
+  $ echo 'this is a file' > a
+  $ baz add a
+  $ mkdir src
+  $ baz add src
+  $ cd src
+  $ dd count=1 if=/dev/zero of=b > /dev/null 2> /dev/null
+  $ baz add b
+HACK: hide GNU tar-1.22 "tar: The --preserve option is deprecated, use --preserve-permissions --preserve-order instead"
+  $ baz commit -s "added a file, src and src/b (binary)" 2>&1 | grep -v '^tar'
+  * build pristine tree for baz@mercurial--convert/baz--test--0--base-0
+  * Scanning for full-tree revision: .
+  * from import revision: baz@mercurial--convert/baz--test--0--base-0
+  A/ .arch-ids
+  A/ src
+  A/ src/.arch-ids
+  A  .arch-ids/a.id
+  A  a
+  A  src/.arch-ids/=id
+  A  src/.arch-ids/b.id
+  A  src/b
+  * update pristine tree (baz@mercurial--convert/baz--test--0--base-0 => baz--test--0--patch-1)
+  * committed baz@mercurial--convert/baz--test--0--patch-1
+
+create link file and modify a
+  $ ln -s ../a a-link
+  $ baz add a-link
+  $ echo 'this a modification to a' >> ../a
+  $ baz commit -s "added link to a and modify a"
+  A  src/.arch-ids/a-link.id
+  A  src/a-link
+  M  a
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-1 => baz--test--0--patch-2)
+  * committed baz@mercurial--convert/baz--test--0--patch-2
+
+create second link and modify b
+  $ ln -s ../a a-link-2
+  $ baz add a-link-2
+  $ dd count=1 seek=1 if=/dev/zero of=b > /dev/null 2> /dev/null
+  $ baz commit -s "added second link and modify b"
+  A  src/.arch-ids/a-link-2.id
+  A  src/a-link-2
+  Mb src/b
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-2 => baz--test--0--patch-3)
+  * committed baz@mercurial--convert/baz--test--0--patch-3
+
+b file to link and a-link-2 to regular file
+  $ rm -f a-link-2
+  $ echo 'this is now a regular file' > a-link-2
+  $ ln -sf ../a b
+  $ baz commit -s "file to link and link to file test"
+  fl src/b
+  lf src/a-link-2
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-3 => baz--test--0--patch-4)
+  * committed baz@mercurial--convert/baz--test--0--patch-4
+
+move a-link-2 file and src directory
+  $ cd ..
+  $ baz mv src/a-link-2 c
+  $ baz mv src test
+  $ baz commit -s "move and rename a-link-2 file and src directory"
+  D/ src/.arch-ids
+  A/ test/.arch-ids
+  /> src	test
+  => src/.arch-ids/a-link-2.id	.arch-ids/c.id
+  => src/a-link-2	c
+  => src/.arch-ids/=id	test/.arch-ids/=id
+  => src/.arch-ids/a-link.id	test/.arch-ids/a-link.id
+  => src/.arch-ids/b.id	test/.arch-ids/b.id
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-4 => baz--test--0--patch-5)
+  * committed baz@mercurial--convert/baz--test--0--patch-5
+
+move and add the moved file again
+  $ echo e > e
+  $ baz add e
+  $ baz commit -s "add e"
+  A  .arch-ids/e.id
+  A  e
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-5 => baz--test--0--patch-6)
+  * committed baz@mercurial--convert/baz--test--0--patch-6
+  $ baz mv e f
+  $ echo ee > e
+  $ baz add e
+  $ baz commit -s "move e and recreate it again"
+  A  .arch-ids/e.id
+  A  e
+  => .arch-ids/e.id	.arch-ids/f.id
+  => e	f
+  * update pristine tree (baz@mercurial--convert/baz--test--0--patch-6 => baz--test--0--patch-7)
+  * committed baz@mercurial--convert/baz--test--0--patch-7
+  $ cd ..
+
+converting baz repo to Mercurial
+  $ hg convert baz-repo baz-repo-hg
+  initializing destination baz-repo-hg repository
+  analyzing tree version baz@mercurial--convert/baz--test--0...
+  scanning source...
+  sorting...
+  converting...
+  7 initial import
+  6 added a file, src and src/b (binary)
+  5 added link to a and modify a
+  4 added second link and modify b
+  3 file to link and link to file test
+  2 move and rename a-link-2 file and src directory
+  1 add e
+  0 move e and recreate it again
+
+  $ baz register-archive -d baz@mercurial--convert
+
+  $ glog()
+  > {
+  >     hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@"
+  > }
+
+show graph log
+  $ glog -R baz-repo-hg
+  o  7 "move e and recreate it again" files: e f
+  |
+  o  6 "add e" files: e
+  |
+  o  5 "move and rename a-link-2 file and src directory" files: c src/a-link src/a-link-2 src/b test/a-link test/b
+  |
+  o  4 "file to link and link to file test" files: src/a-link-2 src/b
+  |
+  o  3 "added second link and modify b" files: src/a-link-2 src/b
+  |
+  o  2 "added link to a and modify a" files: a src/a-link
+  |
+  o  1 "added a file, src and src/b (binary)" files: a src/b
+  |
+  o  0 "initial import" files:
+  
+  $ hg up -q -R baz-repo-hg
+  $ hg -R baz-repo-hg manifest --debug
+  c4072c4b72e1cabace081888efa148ee80ca3cbb 644   a
+  0201ac32a3a8e86e303dff60366382a54b48a72e 644   c
+  1a4a864db0073705a11b1439f563bfa4b46d9246 644   e
+  09e0222742fc3f75777fa9d68a5d8af7294cb5e7 644   f
+  c0067ba5ff0b7c9a3eb17270839d04614c435623 644 @ test/a-link
+  375f4263d86feacdea7e3c27100abd1560f2a973 644 @ test/b
+  $ hg -R baz-repo-hg log -r 5 -r 7 -C --debug | grep copies
+  copies:      c (src/a-link-2) test/a-link (src/a-link) test/b (src/b)
+  copies:      f (e)
--- a/tests/test-convert-darcs.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-convert-darcs.t	Sun May 13 12:52:24 2012 +0200
@@ -32,7 +32,7 @@
 
 branch and update
 
-  $ darcs get darcs-repo darcs-clone >/dev/null
+  $ darcs get -q darcs-repo darcs-clone >/dev/null
   $ cd darcs-clone
   $ echo c >> a
   $ echo c > c
@@ -48,11 +48,10 @@
   $ darcs record -a -l -m p1.2
   Finished recording patch 'p1.2'
 
-  $ darcs pull -a --no-set-default ../darcs-clone
-  Backing up ./a(-darcs-backup0)
+  $ darcs pull -q -a --no-set-default ../darcs-clone
+  Backing up ./a(*) (glob)
   We have conflicts in the following files:
   ./a
-  Finished pulling and applying.
   $ sleep 1
   $ echo e > a
   $ echo f > f
--- a/tests/test-convert-p4	Sun May 13 11:19:48 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,75 +0,0 @@
-#!/bin/sh
-
-"$TESTDIR/hghave" p4 || exit 80
-
-echo "[extensions]" >> $HGRCPATH
-echo "convert = " >> $HGRCPATH
-
-echo % create p4 depot
-P4ROOT=`pwd`/depot; export P4ROOT
-P4AUDIT=$P4ROOT/audit; export P4AUDIT
-P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
-P4LOG=$P4ROOT/log; export P4LOG
-P4PORT=localhost:16661; export P4PORT
-P4DEBUG=1; export P4DEBUG
-
-echo % start the p4 server
-[ ! -d $P4ROOT ] && mkdir $P4ROOT
-p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
-trap "echo % stop the p4 server ; p4 admin stop" EXIT
-
-# wait for the server to initialize
-while ! p4 ; do
-   sleep 1
-done >/dev/null 2>/dev/null
-
-echo % create a client spec
-P4CLIENT=hg-p4-import; export P4CLIENT
-DEPOTPATH=//depot/test-mercurial-import/...
-p4 client -o | sed '/^View:/,$ d' >p4client
-echo View: >>p4client
-echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
-p4 client -i <p4client
-
-echo % populate the depot
-echo a > a
-mkdir b
-echo c > b/c
-p4 add a b/c
-p4 submit -d initial
-
-echo % change some files
-p4 edit a
-echo aa >> a
-p4 submit -d "change a"
-
-p4 edit b/c
-echo cc >> b/c
-p4 submit -d "change b/c"
-
-echo % convert
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
-
-echo % change some files
-p4 edit a b/c
-echo aaa >> a
-echo ccc >> b/c
-p4 submit -d "change a b/c"
-
-echo % convert again
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
-
-echo % interesting names
-echo dddd > "d d"
-mkdir " e"
-echo fff >" e/ f"
-p4 add "d d" " e/ f"
-p4 submit -d "add d e f"
-
-echo % convert again
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
-
-
--- a/tests/test-convert-p4-filetypes	Sun May 13 11:19:48 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,104 +0,0 @@
-#!/bin/sh
-
-"$TESTDIR/hghave" p4 execbit symlink || exit 80
-
-echo "[extensions]" >> $HGRCPATH
-echo "convert = " >> $HGRCPATH
-
-echo % create p4 depot
-P4ROOT=`pwd`/depot; export P4ROOT
-P4AUDIT=$P4ROOT/audit; export P4AUDIT
-P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
-P4LOG=$P4ROOT/log; export P4LOG
-P4PORT=localhost:16661; export P4PORT
-P4DEBUG=1; export P4DEBUG
-P4CHARSET=utf8; export P4CHARSET
-
-echo % start the p4 server
-[ ! -d $P4ROOT ] && mkdir $P4ROOT
-p4d -f -J off -xi >$P4ROOT/stdout 2>$P4ROOT/stderr
-p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
-trap "echo % stop the p4 server ; p4 admin stop" EXIT
-
-# wait for the server to initialize
-while ! p4 ; do
-   sleep 1
-done >/dev/null 2>/dev/null
-
-echo % create a client spec
-P4CLIENT=hg-p4-import; export P4CLIENT
-DEPOTPATH=//depot/test-mercurial-import/...
-p4 client -o | sed '/^View:/,$ d' >p4client
-echo View: >>p4client
-echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
-p4 client -i <p4client
-
-echo % populate the depot
-TYPES="text binary symlink"
-TYPES="$TYPES text+m text+w text+x text+k text+kx text+ko text+l text+C text+D text+F text+S text+S2"
-TYPES="$TYPES binary+k binary+x binary+kx symlink+k"
-TYPES="$TYPES ctext cxtext ktext kxtext ltext tempobj ubinary uxbinary xbinary xltext xtempobj xtext"
-# not testing these
-#TYPES="$TYPES apple resource unicode utf16 uresource xunicode xutf16"
-for T in $TYPES ; do
-   T2=`echo $T | tr [:upper:] [:lower:]`
-   case $T in
-      apple)
-         ;;
-      symlink*)
-         echo "this is target $T" >target_$T2
-         ln -s target_$T file_$T2
-         p4 add target_$T2
-         p4 add -t $T file_$T2
-         ;;
-      binary*)
-         python -c "file('file_$T2', 'wb').write('this is $T')"
-         p4 add -t $T file_$T2
-         ;;
-      *)
-         echo "this is $T" >file_$T2
-         p4 add -t $T file_$T2
-         ;;
-   esac
-done
-p4 submit -d initial
-
-echo % test keyword expansion
-p4 edit file_* target_*
-for T in $TYPES ; do
-   T2=`echo $T | tr [:upper:] [:lower:]`
-   echo '$Id$'       >>file_$T2
-   echo '$Header$'   >>file_$T2
-   echo '$Date$'     >>file_$T2
-   echo '$DateTime$' >>file_$T2
-   echo '$Change$'   >>file_$T2
-   echo '$File$'     >>file_$T2
-   echo '$Revision$' >>file_$T2
-   echo '$Header$$Header$Header$' >>file_$T2
-done
-
-ln -s 'target_$Header$' crazy_symlink+k
-p4 add -t symlink+k crazy_symlink+k
-
-p4 submit -d keywords
-
-echo % check keywords in p4
-grep -H Header file_*
-
-echo % convert
-hg convert -s p4 $DEPOTPATH dst
-hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'g
-
-echo % revision 0
-hg -R dst update 0
-head dst/file_* | cat -v
-
-echo
-echo % revision 1
-hg -R dst update 1
-head dst/file_* | cat -v
-echo
-echo % crazy_symlink
-readlink crazy_symlink+k
-readlink dst/crazy_symlink+k
-
--- a/tests/test-convert-p4-filetypes.out	Sun May 13 11:19:48 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,642 +0,0 @@
-% create p4 depot
-% start the p4 server
-% create a client spec
-Client hg-p4-import saved.
-% populate the depot
-//depot/test-mercurial-import/file_text#1 - opened for add
-//depot/test-mercurial-import/file_binary#1 - opened for add
-//depot/test-mercurial-import/target_symlink#1 - opened for add
-//depot/test-mercurial-import/file_symlink#1 - opened for add
-//depot/test-mercurial-import/file_text+m#1 - opened for add
-//depot/test-mercurial-import/file_text+w#1 - opened for add
-//depot/test-mercurial-import/file_text+x#1 - opened for add
-//depot/test-mercurial-import/file_text+k#1 - opened for add
-//depot/test-mercurial-import/file_text+kx#1 - opened for add
-//depot/test-mercurial-import/file_text+ko#1 - opened for add
-//depot/test-mercurial-import/file_text+l#1 - opened for add
-//depot/test-mercurial-import/file_text+c#1 - opened for add
-//depot/test-mercurial-import/file_text+d#1 - opened for add
-//depot/test-mercurial-import/file_text+f#1 - opened for add
-//depot/test-mercurial-import/file_text+s#1 - opened for add
-//depot/test-mercurial-import/file_text+s2#1 - opened for add
-//depot/test-mercurial-import/file_binary+k#1 - opened for add
-//depot/test-mercurial-import/file_binary+x#1 - opened for add
-//depot/test-mercurial-import/file_binary+kx#1 - opened for add
-//depot/test-mercurial-import/target_symlink+k#1 - opened for add
-//depot/test-mercurial-import/file_symlink+k#1 - opened for add
-//depot/test-mercurial-import/file_ctext#1 - opened for add
-//depot/test-mercurial-import/file_cxtext#1 - opened for add
-//depot/test-mercurial-import/file_ktext#1 - opened for add
-//depot/test-mercurial-import/file_kxtext#1 - opened for add
-//depot/test-mercurial-import/file_ltext#1 - opened for add
-//depot/test-mercurial-import/file_tempobj#1 - opened for add
-//depot/test-mercurial-import/file_ubinary#1 - opened for add
-//depot/test-mercurial-import/file_uxbinary#1 - opened for add
-//depot/test-mercurial-import/file_xbinary#1 - opened for add
-//depot/test-mercurial-import/file_xltext#1 - opened for add
-//depot/test-mercurial-import/file_xtempobj#1 - opened for add
-//depot/test-mercurial-import/file_xtext#1 - opened for add
-Submitting change 1.
-Locking 33 files ...
-add //depot/test-mercurial-import/file_binary#1
-add //depot/test-mercurial-import/file_binary+k#1
-add //depot/test-mercurial-import/file_binary+kx#1
-add //depot/test-mercurial-import/file_binary+x#1
-add //depot/test-mercurial-import/file_ctext#1
-add //depot/test-mercurial-import/file_cxtext#1
-add //depot/test-mercurial-import/file_ktext#1
-add //depot/test-mercurial-import/file_kxtext#1
-add //depot/test-mercurial-import/file_ltext#1
-add //depot/test-mercurial-import/file_symlink#1
-add //depot/test-mercurial-import/file_symlink+k#1
-add //depot/test-mercurial-import/file_tempobj#1
-add //depot/test-mercurial-import/file_text#1
-add //depot/test-mercurial-import/file_text+c#1
-add //depot/test-mercurial-import/file_text+d#1
-add //depot/test-mercurial-import/file_text+f#1
-add //depot/test-mercurial-import/file_text+k#1
-add //depot/test-mercurial-import/file_text+ko#1
-add //depot/test-mercurial-import/file_text+kx#1
-add //depot/test-mercurial-import/file_text+l#1
-add //depot/test-mercurial-import/file_text+m#1
-add //depot/test-mercurial-import/file_text+s#1
-add //depot/test-mercurial-import/file_text+s2#1
-add //depot/test-mercurial-import/file_text+w#1
-add //depot/test-mercurial-import/file_text+x#1
-add //depot/test-mercurial-import/file_ubinary#1
-add //depot/test-mercurial-import/file_uxbinary#1
-add //depot/test-mercurial-import/file_xbinary#1
-add //depot/test-mercurial-import/file_xltext#1
-add //depot/test-mercurial-import/file_xtempobj#1
-add //depot/test-mercurial-import/file_xtext#1
-add //depot/test-mercurial-import/target_symlink#1
-add //depot/test-mercurial-import/target_symlink+k#1
-Change 1 submitted.
-//depot/test-mercurial-import/file_binary+k#1 - refreshing
-//depot/test-mercurial-import/file_binary+kx#1 - refreshing
-//depot/test-mercurial-import/file_ktext#1 - refreshing
-//depot/test-mercurial-import/file_kxtext#1 - refreshing
-//depot/test-mercurial-import/file_symlink+k#1 - refreshing
-//depot/test-mercurial-import/file_text+k#1 - refreshing
-//depot/test-mercurial-import/file_text+ko#1 - refreshing
-//depot/test-mercurial-import/file_text+kx#1 - refreshing
-% test keyword expansion
-//depot/test-mercurial-import/file_binary#1 - opened for edit
-//depot/test-mercurial-import/file_binary+k#1 - opened for edit
-//depot/test-mercurial-import/file_binary+kx#1 - opened for edit
-//depot/test-mercurial-import/file_binary+x#1 - opened for edit
-//depot/test-mercurial-import/file_ctext#1 - opened for edit
-//depot/test-mercurial-import/file_cxtext#1 - opened for edit
-//depot/test-mercurial-import/file_ktext#1 - opened for edit
-//depot/test-mercurial-import/file_kxtext#1 - opened for edit
-//depot/test-mercurial-import/file_ltext#1 - opened for edit
-//depot/test-mercurial-import/file_symlink#1 - opened for edit
-//depot/test-mercurial-import/file_symlink+k#1 - opened for edit
-//depot/test-mercurial-import/file_tempobj#1 - opened for edit
-//depot/test-mercurial-import/file_text#1 - opened for edit
-//depot/test-mercurial-import/file_text+c#1 - opened for edit
-//depot/test-mercurial-import/file_text+d#1 - opened for edit
-//depot/test-mercurial-import/file_text+f#1 - opened for edit
-//depot/test-mercurial-import/file_text+k#1 - opened for edit
-//depot/test-mercurial-import/file_text+ko#1 - opened for edit
-//depot/test-mercurial-import/file_text+kx#1 - opened for edit
-//depot/test-mercurial-import/file_text+l#1 - opened for edit
-//depot/test-mercurial-import/file_text+m#1 - opened for edit
-//depot/test-mercurial-import/file_text+s#1 - opened for edit
-//depot/test-mercurial-import/file_text+s2#1 - opened for edit
-//depot/test-mercurial-import/file_text+w#1 - opened for edit
-//depot/test-mercurial-import/file_text+x#1 - opened for edit
-//depot/test-mercurial-import/file_ubinary#1 - opened for edit
-//depot/test-mercurial-import/file_uxbinary#1 - opened for edit
-//depot/test-mercurial-import/file_xbinary#1 - opened for edit
-//depot/test-mercurial-import/file_xltext#1 - opened for edit
-//depot/test-mercurial-import/file_xtempobj#1 - opened for edit
-//depot/test-mercurial-import/file_xtext#1 - opened for edit
-//depot/test-mercurial-import/target_symlink#1 - opened for edit
-//depot/test-mercurial-import/target_symlink+k#1 - opened for edit
-//depot/test-mercurial-import/crazy_symlink+k#1 - opened for add
-Submitting change 2.
-Locking 34 files ...
-add //depot/test-mercurial-import/crazy_symlink+k#1
-edit //depot/test-mercurial-import/file_binary#2
-edit //depot/test-mercurial-import/file_binary+k#2
-edit //depot/test-mercurial-import/file_binary+kx#2
-edit //depot/test-mercurial-import/file_binary+x#2
-edit //depot/test-mercurial-import/file_ctext#2
-edit //depot/test-mercurial-import/file_cxtext#2
-edit //depot/test-mercurial-import/file_ktext#2
-edit //depot/test-mercurial-import/file_kxtext#2
-edit //depot/test-mercurial-import/file_ltext#2
-edit //depot/test-mercurial-import/file_symlink#2
-edit //depot/test-mercurial-import/file_symlink+k#2
-edit //depot/test-mercurial-import/file_tempobj#2
-edit //depot/test-mercurial-import/file_text#2
-edit //depot/test-mercurial-import/file_text+c#2
-edit //depot/test-mercurial-import/file_text+d#2
-edit //depot/test-mercurial-import/file_text+f#2
-edit //depot/test-mercurial-import/file_text+k#2
-edit //depot/test-mercurial-import/file_text+ko#2
-edit //depot/test-mercurial-import/file_text+kx#2
-edit //depot/test-mercurial-import/file_text+l#2
-edit //depot/test-mercurial-import/file_text+m#2
-edit //depot/test-mercurial-import/file_text+s#2
-edit //depot/test-mercurial-import/file_text+s2#2
-edit //depot/test-mercurial-import/file_text+w#2
-edit //depot/test-mercurial-import/file_text+x#2
-edit //depot/test-mercurial-import/file_ubinary#2
-edit //depot/test-mercurial-import/file_uxbinary#2
-edit //depot/test-mercurial-import/file_xbinary#2
-edit //depot/test-mercurial-import/file_xltext#2
-edit //depot/test-mercurial-import/file_xtempobj#2
-edit //depot/test-mercurial-import/file_xtext#2
-edit //depot/test-mercurial-import/target_symlink#2
-edit //depot/test-mercurial-import/target_symlink+k#2
-Change 2 submitted.
-//depot/test-mercurial-import/crazy_symlink+k#1 - refreshing
-//depot/test-mercurial-import/file_binary+k#2 - refreshing
-//depot/test-mercurial-import/file_binary+kx#2 - refreshing
-//depot/test-mercurial-import/file_ktext#2 - refreshing
-//depot/test-mercurial-import/file_kxtext#2 - refreshing
-//depot/test-mercurial-import/file_symlink+k#2 - refreshing
-//depot/test-mercurial-import/file_text+k#2 - refreshing
-//depot/test-mercurial-import/file_text+ko#2 - refreshing
-//depot/test-mercurial-import/file_text+kx#2 - refreshing
-% check keywords in p4
-file_binary:$Header$
-file_binary:$Header$$Header$Header$
-file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $
-file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $$Header: //depot/test-mercurial-import/file_binary+k#2 $Header$
-file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $
-file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $$Header: //depot/test-mercurial-import/file_binary+kx#2 $Header$
-file_binary+x:$Header$
-file_binary+x:$Header$$Header$Header$
-file_ctext:$Header$
-file_ctext:$Header$$Header$Header$
-file_cxtext:$Header$
-file_cxtext:$Header$$Header$Header$
-file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $
-file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $$Header: //depot/test-mercurial-import/file_ktext#2 $Header$
-file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $
-file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $$Header: //depot/test-mercurial-import/file_kxtext#2 $Header$
-file_ltext:$Header$
-file_ltext:$Header$$Header$Header$
-file_symlink:$Header$
-file_symlink:$Header$$Header$Header$
-file_symlink+k:$Header$
-file_symlink+k:$Header$$Header$Header$
-file_tempobj:$Header$
-file_tempobj:$Header$$Header$Header$
-file_text:$Header$
-file_text:$Header$$Header$Header$
-file_text+c:$Header$
-file_text+c:$Header$$Header$Header$
-file_text+d:$Header$
-file_text+d:$Header$$Header$Header$
-file_text+f:$Header$
-file_text+f:$Header$$Header$Header$
-file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $
-file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $$Header: //depot/test-mercurial-import/file_text+k#2 $Header$
-file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $
-file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $$Header: //depot/test-mercurial-import/file_text+ko#2 $Header$
-file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $
-file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $$Header: //depot/test-mercurial-import/file_text+kx#2 $Header$
-file_text+l:$Header$
-file_text+l:$Header$$Header$Header$
-file_text+m:$Header$
-file_text+m:$Header$$Header$Header$
-file_text+s:$Header$
-file_text+s:$Header$$Header$Header$
-file_text+s2:$Header$
-file_text+s2:$Header$$Header$Header$
-file_text+w:$Header$
-file_text+w:$Header$$Header$Header$
-file_text+x:$Header$
-file_text+x:$Header$$Header$Header$
-file_ubinary:$Header$
-file_ubinary:$Header$$Header$Header$
-file_uxbinary:$Header$
-file_uxbinary:$Header$$Header$Header$
-file_xbinary:$Header$
-file_xbinary:$Header$$Header$Header$
-file_xltext:$Header$
-file_xltext:$Header$$Header$Header$
-file_xtempobj:$Header$
-file_xtempobj:$Header$$Header$Header$
-file_xtext:$Header$
-file_xtext:$Header$$Header$Header$
-% convert
-initializing destination dst repository
-reading p4 views
-collecting p4 changelists
-1 initial
-2 keywords
-scanning source...
-sorting...
-converting...
-1 initial
-0 keywords
-rev=1 desc="keywords" tags="tip" files="crazy_symlink+k file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
-grev=0 desc="initial" tags="" files="file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_symlink file_symlink+k file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
-g% revision 0
-30 files updated, 0 files merged, 0 files removed, 0 files unresolved
-==> dst/file_binary <==
-this is binary
-==> dst/file_binary+k <==
-this is binary+k
-==> dst/file_binary+kx <==
-this is binary+kx
-==> dst/file_binary+x <==
-this is binary+x
-==> dst/file_ctext <==
-this is ctext
-
-==> dst/file_cxtext <==
-this is cxtext
-
-==> dst/file_ktext <==
-this is ktext
-
-==> dst/file_kxtext <==
-this is kxtext
-
-==> dst/file_ltext <==
-this is ltext
-
-==> dst/file_symlink <==
-this is target symlink
-
-==> dst/file_symlink+k <==
-this is target symlink+k
-
-==> dst/file_text <==
-this is text
-
-==> dst/file_text+c <==
-this is text+C
-
-==> dst/file_text+d <==
-this is text+D
-
-==> dst/file_text+f <==
-this is text+F
-
-==> dst/file_text+k <==
-this is text+k
-
-==> dst/file_text+ko <==
-this is text+ko
-
-==> dst/file_text+kx <==
-this is text+kx
-
-==> dst/file_text+l <==
-this is text+l
-
-==> dst/file_text+m <==
-this is text+m
-
-==> dst/file_text+s2 <==
-this is text+S2
-
-==> dst/file_text+w <==
-this is text+w
-
-==> dst/file_text+x <==
-this is text+x
-
-==> dst/file_ubinary <==
-this is ubinary
-
-==> dst/file_uxbinary <==
-this is uxbinary
-
-==> dst/file_xbinary <==
-this is xbinary
-
-==> dst/file_xltext <==
-this is xltext
-
-==> dst/file_xtext <==
-this is xtext
-
-% revision 1
-30 files updated, 0 files merged, 0 files removed, 0 files unresolved
-==> dst/file_binary <==
-this is binary$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_binary+k <==
-this is binary+k$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_binary+kx <==
-this is binary+kx$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_binary+x <==
-this is binary+x$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ctext <==
-this is ctext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_cxtext <==
-this is cxtext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ktext <==
-this is ktext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_kxtext <==
-this is kxtext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ltext <==
-this is ltext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_symlink <==
-this is target symlink
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_symlink+k <==
-this is target symlink+k
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text <==
-this is text
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+c <==
-this is text+C
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+d <==
-this is text+D
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+f <==
-this is text+F
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+k <==
-this is text+k
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+ko <==
-this is text+ko
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+kx <==
-this is text+kx
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+l <==
-this is text+l
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+m <==
-this is text+m
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+s <==
-this is text+S
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+s2 <==
-this is text+S2
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+w <==
-this is text+w
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_text+x <==
-this is text+x
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_ubinary <==
-this is ubinary
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_uxbinary <==
-this is uxbinary
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_xbinary <==
-this is xbinary
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_xltext <==
-this is xltext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-==> dst/file_xtext <==
-this is xtext
-$Id$
-$Header$
-$Date$
-$DateTime$
-$Change$
-$File$
-$Revision$
-$Header$$Header$Header$
-
-% crazy_symlink
-target_$Header: //depot/test-mercurial-import/crazy_symlink+k#1 $
-target_$Header$
-% stop the p4 server
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-p4-filetypes.t	Sun May 13 12:52:24 2012 +0200
@@ -0,0 +1,733 @@
+  $ "$TESTDIR/hghave" p4 execbit symlink || exit 80
+
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "convert = " >> $HGRCPATH
+
+create p4 depot
+  $ P4ROOT=`pwd`/depot; export P4ROOT
+  $ P4AUDIT=$P4ROOT/audit; export P4AUDIT
+  $ P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
+  $ P4LOG=$P4ROOT/log; export P4LOG
+  $ P4PORT=localhost:16661; export P4PORT
+  $ P4DEBUG=1; export P4DEBUG
+  $ P4CHARSET=utf8; export P4CHARSET
+
+start the p4 server
+  $ [ ! -d $P4ROOT ] && mkdir $P4ROOT
+  $ p4d -f -J off -xi >$P4ROOT/stdout 2>$P4ROOT/stderr
+  $ p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
+  $ echo $! >> $DAEMON_PIDS
+  $ trap "echo stopping the p4 server ; p4 admin stop" EXIT
+
+wait for the server to initialize
+  $ while ! p4 ; do
+  >    sleep 1
+  > done >/dev/null 2>/dev/null
+
+create a client spec
+  $ P4CLIENT=hg-p4-import; export P4CLIENT
+  $ DEPOTPATH=//depot/test-mercurial-import/...
+  $ p4 client -o | sed '/^View:/,$ d' >p4client
+  $ echo View: >>p4client
+  $ echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
+  $ p4 client -i <p4client
+  Client hg-p4-import saved.
+
+populate the depot
+  $ TYPES="text binary symlink"
+  $ TYPES="$TYPES text+m text+w text+x text+k text+kx text+ko text+l text+C text+D text+F text+S text+S2"
+  $ TYPES="$TYPES binary+k binary+x binary+kx symlink+k"
+  $ TYPES="$TYPES ctext cxtext ktext kxtext ltext tempobj ubinary uxbinary xbinary xltext xtempobj xtext"
+not testing these
+  $ #TYPES="$TYPES apple resource unicode utf16 uresource xunicode xutf16"
+  $ for T in $TYPES ; do
+  >    T2=`echo $T | tr [:upper:] [:lower:]`
+  >    case $T in
+  >       apple)
+  >          ;;
+  >       symlink*)
+  >          echo "this is target $T" >target_$T2
+  >          ln -s target_$T file_$T2
+  >          p4 add target_$T2
+  >          p4 add -t $T file_$T2
+  >          ;;
+  >       binary*)
+  >          python -c "file('file_$T2', 'wb').write('this is $T')"
+  >          p4 add -t $T file_$T2
+  >          ;;
+  >       *)
+  >          echo "this is $T" >file_$T2
+  >          p4 add -t $T file_$T2
+  >          ;;
+  >    esac
+  > done
+  //depot/test-mercurial-import/file_text#1 - opened for add
+  //depot/test-mercurial-import/file_binary#1 - opened for add
+  //depot/test-mercurial-import/target_symlink#1 - opened for add
+  //depot/test-mercurial-import/file_symlink#1 - opened for add
+  //depot/test-mercurial-import/file_text+m#1 - opened for add
+  //depot/test-mercurial-import/file_text+w#1 - opened for add
+  //depot/test-mercurial-import/file_text+x#1 - opened for add
+  //depot/test-mercurial-import/file_text+k#1 - opened for add
+  //depot/test-mercurial-import/file_text+kx#1 - opened for add
+  //depot/test-mercurial-import/file_text+ko#1 - opened for add
+  //depot/test-mercurial-import/file_text+l#1 - opened for add
+  //depot/test-mercurial-import/file_text+c#1 - opened for add
+  //depot/test-mercurial-import/file_text+d#1 - opened for add
+  //depot/test-mercurial-import/file_text+f#1 - opened for add
+  //depot/test-mercurial-import/file_text+s#1 - opened for add
+  //depot/test-mercurial-import/file_text+s2#1 - opened for add
+  //depot/test-mercurial-import/file_binary+k#1 - opened for add
+  //depot/test-mercurial-import/file_binary+x#1 - opened for add
+  //depot/test-mercurial-import/file_binary+kx#1 - opened for add
+  //depot/test-mercurial-import/target_symlink+k#1 - opened for add
+  //depot/test-mercurial-import/file_symlink+k#1 - opened for add
+  //depot/test-mercurial-import/file_ctext#1 - opened for add
+  //depot/test-mercurial-import/file_cxtext#1 - opened for add
+  //depot/test-mercurial-import/file_ktext#1 - opened for add
+  //depot/test-mercurial-import/file_kxtext#1 - opened for add
+  //depot/test-mercurial-import/file_ltext#1 - opened for add
+  //depot/test-mercurial-import/file_tempobj#1 - opened for add
+  //depot/test-mercurial-import/file_ubinary#1 - opened for add
+  //depot/test-mercurial-import/file_uxbinary#1 - opened for add
+  //depot/test-mercurial-import/file_xbinary#1 - opened for add
+  //depot/test-mercurial-import/file_xltext#1 - opened for add
+  //depot/test-mercurial-import/file_xtempobj#1 - opened for add
+  //depot/test-mercurial-import/file_xtext#1 - opened for add
+  $ p4 submit -d initial
+  Submitting change 1.
+  Locking 33 files ...
+  add //depot/test-mercurial-import/file_binary#1
+  add //depot/test-mercurial-import/file_binary+k#1
+  add //depot/test-mercurial-import/file_binary+kx#1
+  add //depot/test-mercurial-import/file_binary+x#1
+  add //depot/test-mercurial-import/file_ctext#1
+  add //depot/test-mercurial-import/file_cxtext#1
+  add //depot/test-mercurial-import/file_ktext#1
+  add //depot/test-mercurial-import/file_kxtext#1
+  add //depot/test-mercurial-import/file_ltext#1
+  add //depot/test-mercurial-import/file_symlink#1
+  add //depot/test-mercurial-import/file_symlink+k#1
+  add //depot/test-mercurial-import/file_tempobj#1
+  add //depot/test-mercurial-import/file_text#1
+  add //depot/test-mercurial-import/file_text+c#1
+  add //depot/test-mercurial-import/file_text+d#1
+  add //depot/test-mercurial-import/file_text+f#1
+  add //depot/test-mercurial-import/file_text+k#1
+  add //depot/test-mercurial-import/file_text+ko#1
+  add //depot/test-mercurial-import/file_text+kx#1
+  add //depot/test-mercurial-import/file_text+l#1
+  add //depot/test-mercurial-import/file_text+m#1
+  add //depot/test-mercurial-import/file_text+s#1
+  add //depot/test-mercurial-import/file_text+s2#1
+  add //depot/test-mercurial-import/file_text+w#1
+  add //depot/test-mercurial-import/file_text+x#1
+  add //depot/test-mercurial-import/file_ubinary#1
+  add //depot/test-mercurial-import/file_uxbinary#1
+  add //depot/test-mercurial-import/file_xbinary#1
+  add //depot/test-mercurial-import/file_xltext#1
+  add //depot/test-mercurial-import/file_xtempobj#1
+  add //depot/test-mercurial-import/file_xtext#1
+  add //depot/test-mercurial-import/target_symlink#1
+  add //depot/test-mercurial-import/target_symlink+k#1
+  Change 1 submitted.
+  //depot/test-mercurial-import/file_binary+k#1 - refreshing
+  //depot/test-mercurial-import/file_binary+kx#1 - refreshing
+  //depot/test-mercurial-import/file_ktext#1 - refreshing
+  //depot/test-mercurial-import/file_kxtext#1 - refreshing
+  //depot/test-mercurial-import/file_symlink+k#1 - refreshing
+  //depot/test-mercurial-import/file_text+k#1 - refreshing
+  //depot/test-mercurial-import/file_text+ko#1 - refreshing
+  //depot/test-mercurial-import/file_text+kx#1 - refreshing
+
+test keyword expansion
+  $ p4 edit file_* target_*
+  //depot/test-mercurial-import/file_binary#1 - opened for edit
+  //depot/test-mercurial-import/file_binary+k#1 - opened for edit
+  //depot/test-mercurial-import/file_binary+kx#1 - opened for edit
+  //depot/test-mercurial-import/file_binary+x#1 - opened for edit
+  //depot/test-mercurial-import/file_ctext#1 - opened for edit
+  //depot/test-mercurial-import/file_cxtext#1 - opened for edit
+  //depot/test-mercurial-import/file_ktext#1 - opened for edit
+  //depot/test-mercurial-import/file_kxtext#1 - opened for edit
+  //depot/test-mercurial-import/file_ltext#1 - opened for edit
+  //depot/test-mercurial-import/file_symlink#1 - opened for edit
+  //depot/test-mercurial-import/file_symlink+k#1 - opened for edit
+  //depot/test-mercurial-import/file_tempobj#1 - opened for edit
+  //depot/test-mercurial-import/file_text#1 - opened for edit
+  //depot/test-mercurial-import/file_text+c#1 - opened for edit
+  //depot/test-mercurial-import/file_text+d#1 - opened for edit
+  //depot/test-mercurial-import/file_text+f#1 - opened for edit
+  //depot/test-mercurial-import/file_text+k#1 - opened for edit
+  //depot/test-mercurial-import/file_text+ko#1 - opened for edit
+  //depot/test-mercurial-import/file_text+kx#1 - opened for edit
+  //depot/test-mercurial-import/file_text+l#1 - opened for edit
+  //depot/test-mercurial-import/file_text+m#1 - opened for edit
+  //depot/test-mercurial-import/file_text+s#1 - opened for edit
+  //depot/test-mercurial-import/file_text+s2#1 - opened for edit
+  //depot/test-mercurial-import/file_text+w#1 - opened for edit
+  //depot/test-mercurial-import/file_text+x#1 - opened for edit
+  //depot/test-mercurial-import/file_ubinary#1 - opened for edit
+  //depot/test-mercurial-import/file_uxbinary#1 - opened for edit
+  //depot/test-mercurial-import/file_xbinary#1 - opened for edit
+  //depot/test-mercurial-import/file_xltext#1 - opened for edit
+  //depot/test-mercurial-import/file_xtempobj#1 - opened for edit
+  //depot/test-mercurial-import/file_xtext#1 - opened for edit
+  //depot/test-mercurial-import/target_symlink#1 - opened for edit
+  //depot/test-mercurial-import/target_symlink+k#1 - opened for edit
+  $ for T in $TYPES ; do
+  >    T2=`echo $T | tr [:upper:] [:lower:]`
+  >    echo '$Id$'       >>file_$T2
+  >    echo '$Header$'   >>file_$T2
+  >    echo '$Date$'     >>file_$T2
+  >    echo '$DateTime$' >>file_$T2
+  >    echo '$Change$'   >>file_$T2
+  >    echo '$File$'     >>file_$T2
+  >    echo '$Revision$' >>file_$T2
+  >    echo '$Header$$Header$Header$' >>file_$T2
+  > done
+
+  $ ln -s 'target_$Header$' crazy_symlink+k
+  $ p4 add -t symlink+k crazy_symlink+k
+  //depot/test-mercurial-import/crazy_symlink+k#1 - opened for add
+
+  $ p4 submit -d keywords
+  Submitting change 2.
+  Locking 34 files ...
+  add //depot/test-mercurial-import/crazy_symlink+k#1
+  edit //depot/test-mercurial-import/file_binary#2
+  edit //depot/test-mercurial-import/file_binary+k#2
+  edit //depot/test-mercurial-import/file_binary+kx#2
+  edit //depot/test-mercurial-import/file_binary+x#2
+  edit //depot/test-mercurial-import/file_ctext#2
+  edit //depot/test-mercurial-import/file_cxtext#2
+  edit //depot/test-mercurial-import/file_ktext#2
+  edit //depot/test-mercurial-import/file_kxtext#2
+  edit //depot/test-mercurial-import/file_ltext#2
+  edit //depot/test-mercurial-import/file_symlink#2
+  edit //depot/test-mercurial-import/file_symlink+k#2
+  edit //depot/test-mercurial-import/file_tempobj#2
+  edit //depot/test-mercurial-import/file_text#2
+  edit //depot/test-mercurial-import/file_text+c#2
+  edit //depot/test-mercurial-import/file_text+d#2
+  edit //depot/test-mercurial-import/file_text+f#2
+  edit //depot/test-mercurial-import/file_text+k#2
+  edit //depot/test-mercurial-import/file_text+ko#2
+  edit //depot/test-mercurial-import/file_text+kx#2
+  edit //depot/test-mercurial-import/file_text+l#2
+  edit //depot/test-mercurial-import/file_text+m#2
+  edit //depot/test-mercurial-import/file_text+s#2
+  edit //depot/test-mercurial-import/file_text+s2#2
+  edit //depot/test-mercurial-import/file_text+w#2
+  edit //depot/test-mercurial-import/file_text+x#2
+  edit //depot/test-mercurial-import/file_ubinary#2
+  edit //depot/test-mercurial-import/file_uxbinary#2
+  edit //depot/test-mercurial-import/file_xbinary#2
+  edit //depot/test-mercurial-import/file_xltext#2
+  edit //depot/test-mercurial-import/file_xtempobj#2
+  edit //depot/test-mercurial-import/file_xtext#2
+  edit //depot/test-mercurial-import/target_symlink#2
+  edit //depot/test-mercurial-import/target_symlink+k#2
+  Change 2 submitted.
+  //depot/test-mercurial-import/crazy_symlink+k#1 - refreshing
+  //depot/test-mercurial-import/file_binary+k#2 - refreshing
+  //depot/test-mercurial-import/file_binary+kx#2 - refreshing
+  //depot/test-mercurial-import/file_ktext#2 - refreshing
+  //depot/test-mercurial-import/file_kxtext#2 - refreshing
+  //depot/test-mercurial-import/file_symlink+k#2 - refreshing
+  //depot/test-mercurial-import/file_text+k#2 - refreshing
+  //depot/test-mercurial-import/file_text+ko#2 - refreshing
+  //depot/test-mercurial-import/file_text+kx#2 - refreshing
+
+check keywords in p4
+  $ grep -H Header file_*
+  file_binary:$Header$
+  file_binary:$Header$$Header$Header$
+  file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $
+  file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $$Header: //depot/test-mercurial-import/file_binary+k#2 $Header$
+  file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $
+  file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $$Header: //depot/test-mercurial-import/file_binary+kx#2 $Header$
+  file_binary+x:$Header$
+  file_binary+x:$Header$$Header$Header$
+  file_ctext:$Header$
+  file_ctext:$Header$$Header$Header$
+  file_cxtext:$Header$
+  file_cxtext:$Header$$Header$Header$
+  file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $
+  file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $$Header: //depot/test-mercurial-import/file_ktext#2 $Header$
+  file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $
+  file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $$Header: //depot/test-mercurial-import/file_kxtext#2 $Header$
+  file_ltext:$Header$
+  file_ltext:$Header$$Header$Header$
+  file_symlink:$Header$
+  file_symlink:$Header$$Header$Header$
+  file_symlink+k:$Header$
+  file_symlink+k:$Header$$Header$Header$
+  file_tempobj:$Header$
+  file_tempobj:$Header$$Header$Header$
+  file_text:$Header$
+  file_text:$Header$$Header$Header$
+  file_text+c:$Header$
+  file_text+c:$Header$$Header$Header$
+  file_text+d:$Header$
+  file_text+d:$Header$$Header$Header$
+  file_text+f:$Header$
+  file_text+f:$Header$$Header$Header$
+  file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $
+  file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $$Header: //depot/test-mercurial-import/file_text+k#2 $Header$
+  file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $
+  file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $$Header: //depot/test-mercurial-import/file_text+ko#2 $Header$
+  file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $
+  file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $$Header: //depot/test-mercurial-import/file_text+kx#2 $Header$
+  file_text+l:$Header$
+  file_text+l:$Header$$Header$Header$
+  file_text+m:$Header$
+  file_text+m:$Header$$Header$Header$
+  file_text+s:$Header$
+  file_text+s:$Header$$Header$Header$
+  file_text+s2:$Header$
+  file_text+s2:$Header$$Header$Header$
+  file_text+w:$Header$
+  file_text+w:$Header$$Header$Header$
+  file_text+x:$Header$
+  file_text+x:$Header$$Header$Header$
+  file_ubinary:$Header$
+  file_ubinary:$Header$$Header$Header$
+  file_uxbinary:$Header$
+  file_uxbinary:$Header$$Header$Header$
+  file_xbinary:$Header$
+  file_xbinary:$Header$$Header$Header$
+  file_xltext:$Header$
+  file_xltext:$Header$$Header$Header$
+  file_xtempobj:$Header$
+  file_xtempobj:$Header$$Header$Header$
+  file_xtext:$Header$
+  file_xtext:$Header$$Header$Header$
+
+convert
+  $ hg convert -s p4 $DEPOTPATH dst
+  initializing destination dst repository
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 keywords
+  scanning source...
+  sorting...
+  converting...
+  1 initial
+  0 keywords
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=1 desc="keywords" tags="tip" files="crazy_symlink+k file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
+  rev=0 desc="initial" tags="" files="file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_symlink file_symlink+k file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k"
+
+revision 0
+  $ hg -R dst update 0
+  30 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ head dst/file_* | cat -v
+  ==> dst/file_binary <==
+  this is binary
+  ==> dst/file_binary+k <==
+  this is binary+k
+  ==> dst/file_binary+kx <==
+  this is binary+kx
+  ==> dst/file_binary+x <==
+  this is binary+x
+  ==> dst/file_ctext <==
+  this is ctext
+  
+  ==> dst/file_cxtext <==
+  this is cxtext
+  
+  ==> dst/file_ktext <==
+  this is ktext
+  
+  ==> dst/file_kxtext <==
+  this is kxtext
+  
+  ==> dst/file_ltext <==
+  this is ltext
+  
+  ==> dst/file_symlink <==
+  this is target symlink
+  
+  ==> dst/file_symlink+k <==
+  this is target symlink+k
+  
+  ==> dst/file_text <==
+  this is text
+  
+  ==> dst/file_text+c <==
+  this is text+C
+  
+  ==> dst/file_text+d <==
+  this is text+D
+  
+  ==> dst/file_text+f <==
+  this is text+F
+  
+  ==> dst/file_text+k <==
+  this is text+k
+  
+  ==> dst/file_text+ko <==
+  this is text+ko
+  
+  ==> dst/file_text+kx <==
+  this is text+kx
+  
+  ==> dst/file_text+l <==
+  this is text+l
+  
+  ==> dst/file_text+m <==
+  this is text+m
+  
+  ==> dst/file_text+s2 <==
+  this is text+S2
+  
+  ==> dst/file_text+w <==
+  this is text+w
+  
+  ==> dst/file_text+x <==
+  this is text+x
+  
+  ==> dst/file_ubinary <==
+  this is ubinary
+  
+  ==> dst/file_uxbinary <==
+  this is uxbinary
+  
+  ==> dst/file_xbinary <==
+  this is xbinary
+  
+  ==> dst/file_xltext <==
+  this is xltext
+  
+  ==> dst/file_xtext <==
+  this is xtext
+
+revision 1
+  $ hg -R dst update 1
+  30 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ head dst/file_* | cat -v
+  ==> dst/file_binary <==
+  this is binary$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_binary+k <==
+  this is binary+k$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_binary+kx <==
+  this is binary+kx$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_binary+x <==
+  this is binary+x$Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ctext <==
+  this is ctext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_cxtext <==
+  this is cxtext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ktext <==
+  this is ktext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_kxtext <==
+  this is kxtext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ltext <==
+  this is ltext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_symlink <==
+  this is target symlink
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_symlink+k <==
+  this is target symlink+k
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text <==
+  this is text
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+c <==
+  this is text+C
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+d <==
+  this is text+D
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+f <==
+  this is text+F
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+k <==
+  this is text+k
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+ko <==
+  this is text+ko
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+kx <==
+  this is text+kx
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+l <==
+  this is text+l
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+m <==
+  this is text+m
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+s <==
+  this is text+S
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+s2 <==
+  this is text+S2
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+w <==
+  this is text+w
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_text+x <==
+  this is text+x
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_ubinary <==
+  this is ubinary
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_uxbinary <==
+  this is uxbinary
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_xbinary <==
+  this is xbinary
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_xltext <==
+  this is xltext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+  
+  ==> dst/file_xtext <==
+  this is xtext
+  $Id$
+  $Header$
+  $Date$
+  $DateTime$
+  $Change$
+  $File$
+  $Revision$
+  $Header$$Header$Header$
+
+crazy_symlink
+  $ readlink crazy_symlink+k
+  target_$Header: //depot/test-mercurial-import/crazy_symlink+k#1 $
+  $ readlink dst/crazy_symlink+k
+  target_$Header$
+
+exit trap:
+  stopping the p4 server
--- a/tests/test-convert-p4.out	Sun May 13 11:19:48 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,88 +0,0 @@
-% create p4 depot
-% start the p4 server
-% create a client spec
-Client hg-p4-import saved.
-% populate the depot
-//depot/test-mercurial-import/a#1 - opened for add
-//depot/test-mercurial-import/b/c#1 - opened for add
-Submitting change 1.
-Locking 2 files ...
-add //depot/test-mercurial-import/a#1
-add //depot/test-mercurial-import/b/c#1
-Change 1 submitted.
-% change some files
-//depot/test-mercurial-import/a#1 - opened for edit
-Submitting change 2.
-Locking 1 files ...
-edit //depot/test-mercurial-import/a#2
-Change 2 submitted.
-//depot/test-mercurial-import/b/c#1 - opened for edit
-Submitting change 3.
-Locking 1 files ...
-edit //depot/test-mercurial-import/b/c#2
-Change 3 submitted.
-% convert
-initializing destination dst repository
-reading p4 views
-collecting p4 changelists
-1 initial
-2 change a
-3 change b/c
-scanning source...
-sorting...
-converting...
-2 initial
-1 change a
-0 change b/c
-rev=2 desc="change b/c" tags="tip" files="b/c"
-rev=1 desc="change a" tags="" files="a"
-rev=0 desc="initial" tags="" files="a b/c"
-% change some files
-//depot/test-mercurial-import/a#2 - opened for edit
-//depot/test-mercurial-import/b/c#2 - opened for edit
-Submitting change 4.
-Locking 2 files ...
-edit //depot/test-mercurial-import/a#3
-edit //depot/test-mercurial-import/b/c#3
-Change 4 submitted.
-% convert again
-reading p4 views
-collecting p4 changelists
-1 initial
-2 change a
-3 change b/c
-4 change a b/c
-scanning source...
-sorting...
-converting...
-0 change a b/c
-rev=3 desc="change a b/c" tags="tip" files="a b/c"
-rev=2 desc="change b/c" tags="" files="b/c"
-rev=1 desc="change a" tags="" files="a"
-rev=0 desc="initial" tags="" files="a b/c"
-% interesting names
-//depot/test-mercurial-import/d d#1 - opened for add
-//depot/test-mercurial-import/ e/ f#1 - opened for add
-Submitting change 5.
-Locking 2 files ...
-add //depot/test-mercurial-import/ e/ f#1
-add //depot/test-mercurial-import/d d#1
-Change 5 submitted.
-% convert again
-reading p4 views
-collecting p4 changelists
-1 initial
-2 change a
-3 change b/c
-4 change a b/c
-5 add d e f
-scanning source...
-sorting...
-converting...
-0 add d e f
-rev=4 desc="add d e f" tags="tip" files=" e/ f d d"
-rev=3 desc="change a b/c" tags="" files="a b/c"
-rev=2 desc="change b/c" tags="" files="b/c"
-rev=1 desc="change a" tags="" files="a"
-rev=0 desc="initial" tags="" files="a b/c"
-% stop the p4 server
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-convert-p4.t	Sun May 13 12:52:24 2012 +0200
@@ -0,0 +1,152 @@
+  $ "$TESTDIR/hghave" p4 || exit 80
+
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "convert = " >> $HGRCPATH
+
+create p4 depot
+  $ P4ROOT=`pwd`/depot; export P4ROOT
+  $ P4AUDIT=$P4ROOT/audit; export P4AUDIT
+  $ P4JOURNAL=$P4ROOT/journal; export P4JOURNAL
+  $ P4LOG=$P4ROOT/log; export P4LOG
+  $ P4PORT=localhost:16661; export P4PORT
+  $ P4DEBUG=1; export P4DEBUG
+
+start the p4 server
+  $ [ ! -d $P4ROOT ] && mkdir $P4ROOT
+  $ p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr &
+  $ echo $! >> $DAEMON_PIDS
+  $ trap "echo stopping the p4 server ; p4 admin stop" EXIT
+
+  $ # wait for the server to initialize
+  $ while ! p4 ; do
+  >    sleep 1
+  > done >/dev/null 2>/dev/null
+
+create a client spec
+  $ P4CLIENT=hg-p4-import; export P4CLIENT
+  $ DEPOTPATH=//depot/test-mercurial-import/...
+  $ p4 client -o | sed '/^View:/,$ d' >p4client
+  $ echo View: >>p4client
+  $ echo " $DEPOTPATH //$P4CLIENT/..." >>p4client
+  $ p4 client -i <p4client
+  Client hg-p4-import saved.
+
+populate the depot
+  $ echo a > a
+  $ mkdir b
+  $ echo c > b/c
+  $ p4 add a b/c
+  //depot/test-mercurial-import/a#1 - opened for add
+  //depot/test-mercurial-import/b/c#1 - opened for add
+  $ p4 submit -d initial
+  Submitting change 1.
+  Locking 2 files ...
+  add //depot/test-mercurial-import/a#1
+  add //depot/test-mercurial-import/b/c#1
+  Change 1 submitted.
+
+change some files
+  $ p4 edit a
+  //depot/test-mercurial-import/a#1 - opened for edit
+  $ echo aa >> a
+  $ p4 submit -d "change a"
+  Submitting change 2.
+  Locking 1 files ...
+  edit //depot/test-mercurial-import/a#2
+  Change 2 submitted.
+
+  $ p4 edit b/c
+  //depot/test-mercurial-import/b/c#1 - opened for edit
+  $ echo cc >> b/c
+  $ p4 submit -d "change b/c"
+  Submitting change 3.
+  Locking 1 files ...
+  edit //depot/test-mercurial-import/b/c#2
+  Change 3 submitted.
+
+convert
+  $ hg convert -s p4 $DEPOTPATH dst
+  initializing destination dst repository
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 change a
+  3 change b/c
+  scanning source...
+  sorting...
+  converting...
+  2 initial
+  1 change a
+  0 change b/c
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=2 desc="change b/c" tags="tip" files="b/c"
+  rev=1 desc="change a" tags="" files="a"
+  rev=0 desc="initial" tags="" files="a b/c"
+
+change some files
+  $ p4 edit a b/c
+  //depot/test-mercurial-import/a#2 - opened for edit
+  //depot/test-mercurial-import/b/c#2 - opened for edit
+  $ echo aaa >> a
+  $ echo ccc >> b/c
+  $ p4 submit -d "change a b/c"
+  Submitting change 4.
+  Locking 2 files ...
+  edit //depot/test-mercurial-import/a#3
+  edit //depot/test-mercurial-import/b/c#3
+  Change 4 submitted.
+
+convert again
+  $ hg convert -s p4 $DEPOTPATH dst
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 change a
+  3 change b/c
+  4 change a b/c
+  scanning source...
+  sorting...
+  converting...
+  0 change a b/c
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=3 desc="change a b/c" tags="tip" files="a b/c"
+  rev=2 desc="change b/c" tags="" files="b/c"
+  rev=1 desc="change a" tags="" files="a"
+  rev=0 desc="initial" tags="" files="a b/c"
+
+interesting names
+  $ echo dddd > "d d"
+  $ mkdir " e"
+  $ echo fff >" e/ f"
+  $ p4 add "d d" " e/ f"
+  //depot/test-mercurial-import/d d#1 - opened for add
+  //depot/test-mercurial-import/ e/ f#1 - opened for add
+  $ p4 submit -d "add d e f"
+  Submitting change 5.
+  Locking 2 files ...
+  add //depot/test-mercurial-import/ e/ f#1
+  add //depot/test-mercurial-import/d d#1
+  Change 5 submitted.
+
+convert again
+  $ hg convert -s p4 $DEPOTPATH dst
+  reading p4 views
+  collecting p4 changelists
+  1 initial
+  2 change a
+  3 change b/c
+  4 change a b/c
+  5 add d e f
+  scanning source...
+  sorting...
+  converting...
+  0 add d e f
+  $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n'
+  rev=4 desc="add d e f" tags="tip" files=" e/ f d d"
+  rev=3 desc="change a b/c" tags="" files="a b/c"
+  rev=2 desc="change b/c" tags="" files="b/c"
+  rev=1 desc="change a" tags="" files="a"
+  rev=0 desc="initial" tags="" files="a b/c"
+
+exit trap:
+  stopping the p4 server
--- a/tests/test-debugcomplete.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-debugcomplete.t	Sun May 13 12:52:24 2012 +0200
@@ -247,7 +247,7 @@
   debugsub: rev
   debugwalk: include, exclude
   debugwireargs: three, four, five, ssh, remotecmd, insecure
-  graft: continue, edit, currentdate, currentuser, date, user, tool, dry-run
+  graft: continue, edit, log, currentdate, currentuser, date, user, tool, dry-run
   grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
   heads: rev, topo, active, closed, style, template
   help: extension, command
--- a/tests/test-filecache.py	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-filecache.py	Sun May 13 12:52:24 2012 +0200
@@ -1,6 +1,7 @@
 import sys, os, subprocess
 
-if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']):
+if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
+                    'cacheable']):
     sys.exit(80)
 
 from mercurial import util, scmutil, extensions
@@ -77,7 +78,7 @@
 
     try:
         os.remove('x')
-    except:
+    except OSError:
         pass
 
     basic(fakerepo())
--- a/tests/test-graft.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-graft.t	Sun May 13 12:52:24 2012 +0200
@@ -72,28 +72,23 @@
   # HG changeset patch
   # User foo
   # Date 0 0
-  # Node ID d2e44c99fd3f31c176ea4efb9eca9f6306c81756
+  # Node ID ef0ef43d49e79e81ddafdc7997401ba0041efc82
   # Parent  68795b066622ca79a25816a662041d8f78f3cd9e
   2
   
   diff --git a/a b/b
   rename from a
   rename to b
-  --- a/a
-  +++ b/b
-  @@ -1,1 +1,1 @@
-  -a
-  +b
 
 Look for extra:source
 
   $ hg log --debug -r tip
-  changeset:   7:d2e44c99fd3f31c176ea4efb9eca9f6306c81756
+  changeset:   7:ef0ef43d49e79e81ddafdc7997401ba0041efc82
   tag:         tip
   phase:       draft
   parent:      0:68795b066622ca79a25816a662041d8f78f3cd9e
   parent:      -1:0000000000000000000000000000000000000000
-  manifest:    7:5d59766436fd8fbcd38e7bebef0f6eaf3eebe637
+  manifest:    7:e59b6b228f9cbf9903d5e9abf996e083a1f533eb
   user:        foo
   date:        Thu Jan 01 00:00:00 1970 +0000
   files+:      b
@@ -128,15 +123,20 @@
     checking for directory renames
   resolving manifests
    overwrite: False, partial: False
-   ancestor: 68795b066622, local: d2e44c99fd3f+, remote: 5d205f8b35b6
+   ancestor: 68795b066622, local: ef0ef43d49e7+, remote: 5d205f8b35b6
    b: local copied/moved to a -> m
   preserving b for resolve of b
   updating: b 1/1 files (100.00%)
+  picked tool 'internal:merge' for b (binary False symlink False)
+  merging b and a to b
+  my b@ef0ef43d49e7+ other a@5d205f8b35b6 ancestor a@68795b066622
+   premerge successful
+  b
   grafting revision 5
     searching for copies back to rev 1
   resolving manifests
    overwrite: False, partial: False
-   ancestor: 4c60f11aa304, local: d2e44c99fd3f+, remote: 97f8bfe72746
+   ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746
    e: remote is newer -> g
   updating: e 1/1 files (100.00%)
   getting e
@@ -145,7 +145,7 @@
     searching for copies back to rev 1
   resolving manifests
    overwrite: False, partial: False
-   ancestor: 4c60f11aa304, local: 839a7e8fcf80+, remote: 9c233e8e184d
+   ancestor: 4c60f11aa304, local: 1905859650ec+, remote: 9c233e8e184d
    e: versions differ -> m
    d: remote is newer -> g
   preserving e for resolve of e
@@ -154,7 +154,7 @@
   updating: e 2/2 files (100.00%)
   picked tool 'internal:merge' for e (binary False symlink False)
   merging e
-  my e@839a7e8fcf80+ other e@9c233e8e184d ancestor e@68795b066622
+  my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622
   warning: conflicts during merge.
   merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: unresolved conflicts, can't continue
@@ -200,11 +200,13 @@
 View graph:
 
   $ hg --config extensions.graphlog= log -G --template '{author}@{rev}.{phase}: {desc}\n'
-  @  test@10.draft: 3
+  @  test@11.draft: 3
+  |
+  o  test@10.draft: 4
   |
-  o  test@9.draft: 4
+  o  test@9.draft: 5
   |
-  o  test@8.draft: 5
+  o  bar@8.draft: 1
   |
   o  foo@7.draft: 2
   |
@@ -232,17 +234,17 @@
   grafting revision 7
 
   $ hg log -r 7 --template '{rev}:{node}\n'
-  7:d2e44c99fd3f31c176ea4efb9eca9f6306c81756
+  7:ef0ef43d49e79e81ddafdc7997401ba0041efc82
   $ hg log -r 2 --template '{rev}:{node}\n'
   2:5c095ad7e90f871700f02dd1fa5012cb4498a2d4
 
   $ hg log --debug -r tip
-  changeset:   12:95adbe5de6b10f376b699ece9ed5a57cd7b4b0f6
+  changeset:   13:9db0f28fd3747e92c57d015f53b5593aeec53c2d
   tag:         tip
   phase:       draft
-  parent:      11:b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f
+  parent:      12:b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f
   parent:      -1:0000000000000000000000000000000000000000
-  manifest:    12:9944044f82a462bbaccc9bdf7e0ac5b811db7d1b
+  manifest:    13:dc313617b8c32457c0d589e0dbbedfe71f3cd637
   user:        foo
   date:        Thu Jan 01 00:00:00 1970 +0000
   files+:      b
@@ -260,7 +262,7 @@
   [255]
 
 Disallow grafting already grafted csets with the same origin onto each other
-  $ hg up -q 12
+  $ hg up -q 13
   $ hg graft 2
   skipping already grafted revision 2
   [255]
@@ -273,5 +275,15 @@
   skipping already grafted revision 2
   [255]
   $ hg graft tip
-  skipping already grafted revision 12 (same origin 2)
+  skipping already grafted revision 13 (same origin 2)
   [255]
+
+Graft with --log
+
+  $ hg up -Cq 1
+  $ hg graft 3 --log -u foo
+  grafting revision 3
+  warning: can't find ancestor for 'c' copied from 'b'!
+  $ hg log --template '{rev} {parents} {desc}\n' -r tip
+  14 1:5d205f8b35b6  3
+  (grafted from 4c60f11aa304a54ae1c199feb94e7fc771e51ed8)
--- a/tests/test-hook.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-hook.t	Sun May 13 12:52:24 2012 +0200
@@ -195,6 +195,7 @@
   no changes found
   listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} 
   listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} 
+  adding remote bookmark bar
   importing bookmark bar
   $ cd ../a
 
@@ -279,6 +280,7 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
+  adding remote bookmark quux
   (run 'hg update' to get a working copy)
   $ hg rollback
   repository tip rolled back to revision 3 (undo pull)
@@ -447,6 +449,7 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
+  adding remote bookmark quux
   (run 'hg update' to get a working copy)
 
 make sure --traceback works
--- a/tests/test-keyword.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-keyword.t	Sun May 13 12:52:24 2012 +0200
@@ -558,6 +558,7 @@
   $ hg --debug commit -ma2c -d '1 0' -u 'User Name <user@example.com>'
   c
    c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292
+  removing unknown node 40a904bbbe4c from 1-phase boundary
   overwriting c expanding keywords
   committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d
   $ cat a c
@@ -722,6 +723,7 @@
 
   $ hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>'
   a
+  removing unknown node 40a904bbbe4c from 1-phase boundary
   overwriting a expanding keywords
   committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
   $ rm log
--- a/tests/test-largefiles.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-largefiles.t	Sun May 13 12:52:24 2012 +0200
@@ -432,11 +432,48 @@
   large11
   $ cat sub/large2
   large22
+  $ cd ..
+
+Test cloning with --all-largefiles flag
+
+  $ rm -Rf ${USERCACHE}/*
+  $ hg clone --all-largefiles a a-backup
+  updating to branch default
+  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  getting changed largefiles
+  3 largefiles updated, 0 removed
+  8 additional largefiles cached
+
+Test pulling with --all-largefiles flag
+
+  $ rm -Rf a-backup
+  $ hg clone -r 1 a a-backup
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 8 changes to 4 files
+  updating to branch default
+  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  getting changed largefiles
+  2 largefiles updated, 0 removed
+  $ rm -Rf ${USERCACHE}/*
+  $ cd a-backup
+  $ hg pull --all-largefiles
+  pulling from $TESTTMP/a
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 6 changesets with 16 changes to 8 files
+  (run 'hg update' to get a working copy)
+  caching new largefiles
+  3 largefiles cached
+  3 additional largefiles cached
+  $ cd ..
 
 Rebasing between two repositories does not revert largefiles to old
 revisions (this was a very bad bug that took a lot of work to fix).
 
-  $ cd ..
   $ hg clone a d
   updating to branch default
   5 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -1136,4 +1173,37 @@
   abort: uncommitted changes in subrepo subrepo
   (use --subrepos for recursive commit)
   [255]
+
+Add a normal file to the subrepo, then test archiving
+
+  $ echo 'normal file' > subrepo/normal.txt
+  $ hg -R subrepo add subrepo/normal.txt
+
+Lock in subrepo, otherwise the change isn't archived
+
+  $ hg ci -S -m "add normal file to top level"
+  committing subrepository subrepo
+  Invoking status precommit hook
+  M large.txt
+  A normal.txt
+  Invoking status precommit hook
+  M .hgsubstate
+  $ hg archive -S lf_subrepo_archive
+  $ find lf_subrepo_archive | sort
+  lf_subrepo_archive
+  lf_subrepo_archive/.hg_archival.txt
+  lf_subrepo_archive/.hgsub
+  lf_subrepo_archive/.hgsubstate
+  lf_subrepo_archive/a
+  lf_subrepo_archive/a/b
+  lf_subrepo_archive/a/b/c
+  lf_subrepo_archive/a/b/c/d
+  lf_subrepo_archive/a/b/c/d/e.large.txt
+  lf_subrepo_archive/a/b/c/d/e.normal.txt
+  lf_subrepo_archive/a/b/c/x
+  lf_subrepo_archive/a/b/c/x/y.normal.txt
+  lf_subrepo_archive/subrepo
+  lf_subrepo_archive/subrepo/large.txt
+  lf_subrepo_archive/subrepo/normal.txt
+
   $ cd ..
--- a/tests/test-mq-qpush-fail.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-mq-qpush-fail.t	Sun May 13 12:52:24 2012 +0200
@@ -202,6 +202,42 @@
   $ test -f a.orig && echo 'error: backup with --no-backup'
   [1]
 
+test qpop --check
+
+  $ hg qpush
+  applying p1
+  now at: p1
+  $ hg qpop --check --force
+  abort: cannot use both --force and --check
+  [255]
+  $ echo a >> a
+  $ hg qpop --check
+  abort: local changes found, refresh first
+  [255]
+  $ hg revert -qa a
+  $ rm a
+  $ hg qpop --check
+  abort: local changes found, refresh first
+  [255]
+  $ hg rm -A a
+  $ hg qpop --check
+  abort: local changes found, refresh first
+  [255]
+  $ hg revert -qa a
+  $ echo b > b
+  $ hg add b
+  $ hg qpop --check
+  abort: local changes found, refresh first
+  [255]
+  $ hg forget b
+  $ echo d > d
+  $ hg add d
+  $ hg qpop --check
+  popping p1
+  patch queue now empty
+  $ hg forget d
+  $ rm d
+
 test qpush --force and backup files
 
   $ echo a >> a
@@ -281,3 +317,108 @@
   now at: p2
   $ test -f a.orig && echo 'error: backup with --no-backup'
   [1]
+
+test qpush --check
+
+  $ hg qpush --check --force
+  abort: cannot use both --force and --check
+  [255]
+  $ hg qpush --check --exact
+  abort: cannot use --exact and --check together
+  [255]
+  $ echo b >> b
+  $ hg qpush --check
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ rm b
+  $ hg qpush --check
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg rm -A b
+  $ hg qpush --check
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg revert -aq b
+  $ echo d > d
+  $ hg add d
+  $ hg qpush --check
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg forget d
+  $ rm d
+  $ hg qpop
+  popping p2
+  patch queue now empty
+  $ echo b >> b
+  $ hg qpush -a --check
+  applying p2
+  applying p3
+  errors during apply, please fix and refresh p2
+  [2]
+  $ hg qtop
+  p2
+  $ hg parents --template "{rev} {desc}\n"
+  2 imported patch p2
+  $ hg st b
+  M b
+  $ cat b
+  b
+  b
+
+test qgoto --check
+
+  $ hg revert -aq b
+  $ rm e
+  $ hg qgoto --check --force p3
+  abort: cannot use both --force and --check
+  [255]
+  $ echo a >> a
+  $ hg qgoto --check p3
+  applying p3
+  now at: p3
+  $ hg st a
+  M a
+  $ hg qgoto --check p2
+  popping p3
+  now at: p2
+  $ hg st a
+  M a
+
+test mq.check setting
+
+  $ hg --config mq.check=1 qpush
+  applying p3
+  now at: p3
+  $ hg st a
+  M a
+  $ hg --config mq.check=1 qpop
+  popping p3
+  now at: p2
+  $ hg st a
+  M a
+  $ hg --config mq.check=1 qgoto p3
+  applying p3
+  now at: p3
+  $ hg st a
+  M a
+  $ echo b >> b
+  $ hg --config mq.check=1 qpop --force
+  popping p3
+  now at: p2
+  $ hg st b
+  $ hg --config mq.check=1 qpush --exact
+  abort: local changes found, refresh first
+  [255]
+  $ hg revert -qa a
+  $ hg qpop
+  popping p2
+  patch queue now empty
+  $ echo a >> a
+  $ hg --config mq.check=1 qpush --force
+  applying p2
+  now at: p2
+  $ hg st a
--- a/tests/test-mq.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-mq.t	Sun May 13 12:52:24 2012 +0200
@@ -59,6 +59,15 @@
   You will by default be managing a patch queue named "patches". You can create
   other, independent patch queues with the "hg qqueue" command.
   
+  If the working directory contains uncommitted files, qpush, qpop and qgoto
+  abort immediately. If -f/--force is used, the changes are discarded. Setting:
+  
+    [mq] check = True
+  
+  makes them behave as if -c/--check were passed, and non-conflicting local
+  changes will be tolerated and preserved. If incompatible options such as
+  -f/--force or --exact are passed, this setting is ignored.
+  
   list of commands:
   
    qapplied      print the patches already applied
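
A minimal sketch of the workflow that help text describes — assuming mq is
enabled, a patch is currently applied, and "somefile" is a tracked file (the
file name and the trailing comments are illustrative assumptions, not output
taken from the test suite):

  $ echo "[mq]" >> $HGRCPATH
  $ echo "check = True" >> $HGRCPATH
  $ echo extra >> somefile       # a non-conflicting local change
  $ hg qpush                     # tolerated, as if -c/--check were passed
  $ hg qpop --force              # --force still discards changes; the setting is ignored
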
--- a/tests/test-phases.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-phases.t	Sun May 13 12:52:24 2012 +0200
@@ -9,6 +9,15 @@
 
   $ hg init initialrepo
   $ cd initialrepo
+
+Cannot change null revision phase
+
+  $ hg phase --force --secret null
+  abort: cannot change null revision phase
+  [255]
+  $ hg phase null
+  -1: public
+
   $ mkcommit A
 
 New commit are draft by default
--- a/tests/test-progress.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-progress.t	Sun May 13 12:52:24 2012 +0200
@@ -40,7 +40,7 @@
   $ echo "progress=" >> $HGRCPATH
   $ echo "loop=`pwd`/loop.py" >> $HGRCPATH
   $ echo "[progress]" >> $HGRCPATH
-  $ echo  "format = topic bar number" >> $HGRCPATH
+  $ echo "format = topic bar number" >> $HGRCPATH
   $ echo "assume-tty=1" >> $HGRCPATH
   $ echo "width=60" >> $HGRCPATH
 
--- a/tests/test-rebase-collapse.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-rebase-collapse.t	Sun May 13 12:52:24 2012 +0200
@@ -589,4 +589,44 @@
   b
   $ hg log -r . --template "{file_copies}\n"
   d (a)g (b)
+
+Test collapsing a middle revision in-place
+
+  $ hg tglog
+  @  2: 'Collapsed revision
+  |  * move1
+  |  * move2'
+  o  1: 'change'
+  |
+  o  0: 'add'
+  
+  $ hg rebase --collapse -r 1 -d 0
+  abort: can't remove original changesets with unrebased descendants
+  (use --keep to keep original changesets)
+  [255]
+
+Test collapsing in place
+
+  $ hg rebase --collapse -b . -d 0
+  saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-backup.hg
+  $ hg st --change . --copies
+  M a
+  M c
+  A d
+    a
+  A g
+    b
+  R b
+  $ cat a
+  a
+  a
+  $ cat c
+  c
+  c
+  $ cat d
+  a
+  a
+  $ cat g
+  b
+  b
   $ cd ..
--- a/tests/test-rebase-parameters.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-rebase-parameters.t	Sun May 13 12:52:24 2012 +0200
@@ -158,12 +158,12 @@
   $ cd ..
 
 
-Rebase with dest == `hg branch` => same as no arguments (from 3 onto 8):
+Rebase with dest == branch(.) => same as no arguments (from 3 onto 8):
 
   $ hg clone -q -u 3 a a3
   $ cd a3
 
-  $ hg rebase --dest `hg branch`
+  $ hg rebase --dest 'branch(.)'
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
 
   $ hg tglog
--- a/tests/test-revset.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-revset.t	Sun May 13 12:52:24 2012 +0200
@@ -32,6 +32,13 @@
   (branches are permanent and global, did you want a bookmark?)
   $ hg ci -Aqm2 -u Bob
 
+  $ hg log -r "extra('branch', 'a-b-c-')" --template '{rev}\n'
+  2
+  $ hg log -r "extra('branch')" --template '{rev}\n'
+  0
+  1
+  2
+
   $ hg co 1
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg branch +a+b+c+
--- a/tests/test-transplant.t	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-transplant.t	Sun May 13 12:52:24 2012 +0200
@@ -120,7 +120,25 @@
   1  r2
   0  r1
 
+test same-parent transplant with --log
 
+  $ hg clone -r 1 ../t ../sameparent
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd ../sameparent
+  $ hg transplant --log -s ../prune 5
+  searching for changes
+  applying e234d668f844
+  e234d668f844 transplanted to e07aea8ecf9c
+  $ hg log --template '{rev} {parents} {desc}\n'
+  2  b1
+  (transplanted from e234d668f844e1b1a765f01db83a32c0c7bfa170)
+  1  r2
+  0  r1
 remote transplant
 
   $ hg clone -r 1 ../t ../remote
--- a/tests/test-walkrepo.py	Sun May 13 11:19:48 2012 +0200
+++ b/tests/test-walkrepo.py	Sun May 13 12:52:24 2012 +0200
@@ -24,10 +24,12 @@
     reposet = frozenset(walkrepos('.', followsym=True))
     if sym and (len(reposet) != 3):
         print "reposet = %r" % (reposet,)
-        print "Found %d repositories when I should have found 3" % (len(reposet),)
+        print ("Found %d repositories when I should have found 3"
+               % (len(reposet),))
     if (not sym) and (len(reposet) != 2):
         print "reposet = %r" % (reposet,)
-        print "Found %d repositories when I should have found 2" % (len(reposet),)
+        print ("Found %d repositories when I should have found 2"
+               % (len(reposet),))
     sub1set = frozenset((pjoin('.', 'sub1'),
                          pjoin('.', 'circle', 'subdir', 'sub1')))
     if len(sub1set & reposet) != 1:
@@ -41,7 +43,7 @@
         print "reposet = %r" % (reposet,)
         print "sub1set and reposet should have exactly one path in common."
     sub3 = pjoin('.', 'circle', 'top1')
-    if sym and not (sub3 in reposet):
+    if sym and sub3 not in reposet:
         print "reposet = %r" % (reposet,)
         print "Symbolic links are supported and %s is not in reposet" % (sub3,)