changeset 8602:b26aaf13f29c

merge with crew
author Benoit Boissinot <benoit.boissinot@ens-lyon.org>
date Sun, 24 May 2009 16:33:22 +0200
parents 5edb2a8e29ea (diff) 021de2d12355 (current diff)
children b60617a9cd3c
files
diffstat 30 files changed, 722 insertions(+), 326 deletions(-) [+]
line wrap: on
line diff
--- a/hgext/acl.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/hgext/acl.py	Sun May 24 16:33:22 2009 +0200
@@ -46,7 +46,7 @@
 #   ** = user6
 
 from mercurial.i18n import _
-from mercurial import util
+from mercurial import util, match
 import getpass
 
 def buildmatch(ui, repo, user, key):
@@ -60,8 +60,9 @@
     ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
              (key, len(pats), user))
     if pats:
-        return util.matcher(repo.root, names=pats)[1]
-    return util.never
+        return match.match(repo.root, '', pats)
+    return match.never(repo.root, '')
+
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     if hooktype != 'pretxnchangegroup':
--- a/hgext/convert/__init__.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/hgext/convert/__init__.py	Sun May 24 16:33:22 2009 +0200
@@ -128,7 +128,7 @@
     source uses its internal changeset merging code by default but can
     be configured to call the external 'cvsps' program by setting:
         --config convert.cvsps='cvsps -A -u --cvs-direct -q'
-    This is a legacy option and may be removed in future.
+    This option is deprecated and will be removed in Mercurial 1.4.
 
     The options shown are the defaults.
 
--- a/hgext/convert/cvs.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/hgext/convert/cvs.py	Sun May 24 16:33:22 2009 +0200
@@ -25,6 +25,9 @@
         self.cmd = ui.config('convert', 'cvsps', 'builtin')
         cvspsexe = self.cmd.split(None, 1)[0]
         self.builtin = cvspsexe == 'builtin'
+        if not self.builtin:
+            ui.warn(_('warning: support for external cvsps is deprecated and '
+                      'will be removed in Mercurial 1.4\n'))
 
         if not self.builtin:
             checktool(cvspsexe)
--- a/hgext/convert/hg.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/hgext/convert/hg.py	Sun May 24 16:33:22 2009 +0200
@@ -12,10 +12,10 @@
 #   those older versions, then converted, may thus have different
 #   hashes for changesets that are otherwise identical.
 #
-# * By default, the source revision is stored in the converted
-#   revision.  This will cause the converted revision to have a
-#   different identity than the source.  To avoid this, use the
-#   following option: "--config convert.hg.saverev=false"
+# * Using "--config convert.hg.saverev=true" will make the source
+#   identifier to be stored in the converted revision. This will cause
+#   the converted revision to have a different identity than the
+#   source.
 
 
 import os, time
--- a/hgext/keyword.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/hgext/keyword.py	Sun May 24 16:33:22 2009 +0200
@@ -81,7 +81,7 @@
 '''
 
 from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
-from mercurial import patch, localrepo, templater, templatefilters, util
+from mercurial import patch, localrepo, templater, templatefilters, util, match
 from mercurial.hgweb import webcommands
 from mercurial.lock import release
 from mercurial.node import nullid, hex
@@ -125,8 +125,8 @@
     def __init__(self, ui, repo):
         self.ui = ui
         self.repo = repo
-        self.matcher = util.matcher(repo.root,
-                                    inc=kwtools['inc'], exc=kwtools['exc'])[1]
+        self.matcher = match.match(repo.root, '', [],
+                                   kwtools['inc'], kwtools['exc'])
         self.restrict = kwtools['hgcmd'] in restricted.split()
 
         kwmaps = self.ui.configitems('keywordmaps')
--- a/i18n/da.po	Tue Mar 31 00:04:07 2009 +0900
+++ b/i18n/da.po	Sun May 24 16:33:22 2009 +0200
@@ -18,7 +18,7 @@
 "Project-Id-Version: Mercurial\n"
 "Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n"
 "POT-Creation-Date: 2009-05-21 23:44+0200\n"
-"PO-Revision-Date: 2009-05-21 23:52+0200\n"
+"PO-Revision-Date: 2009-05-24 15:19+0200\n"
 "Last-Translator:  <mg@daimi.au.dk>\n"
 "Language-Team: Danish\n"
 "MIME-Version: 1.0\n"
@@ -54,7 +54,7 @@
 
 #, python-format
 msgid "acl: %s enabled, %d entries for user %s\n"
-msgstr "acl: %s slået til, %d indgange for bruger %s\n"
+msgstr "acl: %s slået til, %d optegnelser for bruger %s\n"
 
 #, python-format
 msgid "config error - hook type \"%s\" cannot stop incoming changesets"
@@ -4143,7 +4143,7 @@
 msgstr ""
 
 msgid "unknown parent"
-msgstr "ukendt forældre"
+msgstr "ukendt forælder"
 
 #, python-format
 msgid "integrity check failed on %s:%d"
@@ -4279,7 +4279,7 @@
 
 #, python-format
 msgid "parent:      %d:%s\n"
-msgstr "forældre:    %d:%s\n"
+msgstr "forælder:    %d:%s\n"
 
 #, python-format
 msgid "manifest:    %d:%s\n"
@@ -4547,7 +4547,7 @@
 "    sammenføje denne omgjorte ændring med et andet hoved (det\n"
 "    nuværende hoved som standard).\n"
 "\n"
-"    Med --merge tilvalget vil forældren til arbejdskataloget bliver\n"
+"    Med --merge tilvalget vil forælderen til arbejdskataloget blive\n"
 "    husket og det nye hoved vil blive sammenføjet med denne ændring\n"
 "    bagefter. Dette sparer dig for at lave sammenføjningen selv.\n"
 "    Resultatet af denne sammenføjning er ikke arkiveret, som ved en\n"
@@ -4573,7 +4573,7 @@
 
 #, python-format
 msgid "%s is not a parent of %s"
-msgstr "%s er ikke forældre til %s"
+msgstr "%s er ikke forælder til %s"
 
 msgid "cannot use --parent on non-merge changeset"
 msgstr ""
@@ -4813,7 +4813,7 @@
 "\n"
 "    Som udgangspunkt vil clone hente hovedet af 'default' grenen. Hvis\n"
 "    -U/--noupdate tilvalget bruges vil den nye klon kun indeholde et\n"
-"    arkiv (.hg) og intet arbejdskatalog (arbejdskatalogets forældre er\n"
+"    arkiv (.hg) og intet arbejdskatalog (arbejdskatalogets forælder er\n"
 "    sat til nul revisionen).\n"
 "\n"
 "    Se 'hg help urls' for detaljer om gyldige formatter for kilden.\n"
@@ -5111,13 +5111,13 @@
 "\n"
 "    BEMÆRK: diff kan generere overraskende resultater for\n"
 "    sammenføjninger, idet den som udgangspunkt vil sammenligne med\n"
-"    arbejdskatalogets første forældre, hvis der ikke angivet en\n"
+"    arbejdskatalogets første forælder, hvis der ikke er angivet en\n"
 "    revision.\n"
 "\n"
 "    Når der gives to revisioner som argumenter, så vises ændringer\n"
 "    mellem disse. Hvis der kun angives en revision, så sammenlignes\n"
 "    denne revision med arbejdskataloget, og når der ikke angives nogen\n"
-"    revisioner, så sammenlignes arbejdskataloget med dennes forældre.\n"
+"    revisioner, så sammenlignes arbejdskataloget med dennes forælder.\n"
 "\n"
 "    Uden -a/--text tilvalget vil diff undgå at generere ændringer for\n"
 "    filer som den detekterer som binære. Med -a vil diff generere\n"
@@ -5172,7 +5172,7 @@
 "\n"
 "    BEMÆRK: export kan generere uventet diff uddata for\n"
 "    sammenføjningsændringer idet den kun vil sammenligne\n"
-"    sammenføjningsændringen med dennes første forældre.\n"
+"    sammenføjningsændringen med dennes første forælder.\n"
 "\n"
 "    Uddata kan gemmes i en fil, og filnavnet er givet ved en\n"
 "    format-streng. Formatteringsreglerne er som følger:\n"
@@ -5194,7 +5194,7 @@
 "    git diff-format. For mere information, læs hg help diffs.\n"
 "\n"
 "    Med --switch-parent tilvalget vil ændringerne blive beregnet i\n"
-"    forhold til den anden forældre. Dette kan være nyttigt til at\n"
+"    forhold til den anden forælder. Dette kan være nyttigt til at\n"
 "    gennemse en sammenføjning.\n"
 "    "
 
@@ -5545,7 +5545,7 @@
 "    arkiveringen skal laves før yderligere opdateringer er tilladt.\n"
 "    Den næste arkiverede ændring får to forældre.\n"
 "\n"
-"    Hvis ingen revision angives og arbejdskatalogets forældre er en\n"
+"    Hvis ingen revision angives og arbejdskatalogets forælder er en\n"
 "    hovedrevision og den nuværende gren indeholder præcis et andet\n"
 "    hoved, så sammenføjes der med dette hoved som standard. Ellers\n"
 "    skal en eksplicit revision angives.\n"
@@ -6384,7 +6384,7 @@
 msgstr ""
 
 msgid "use command to check changeset state"
-msgstr "brug kommando for at tjekke tilstanden af ændringen"
+msgstr "brug kommando for at kontrollere tilstanden af ændringen"
 
 msgid "do not update to target"
 msgstr "undlad at opdatere til målet"
@@ -8677,15 +8677,15 @@
 
 #, python-format
 msgid "unknown parent 1 %s of %s"
-msgstr "ukendt forældre 1 %s til %s"
+msgstr "ukendt forælder 1 %s til %s"
 
 #, python-format
 msgid "unknown parent 2 %s of %s"
-msgstr "ukendt forældre 2 %s til %s"
+msgstr "ukendt forælder 2 %s til %s"
 
 #, python-format
 msgid "checking parents of %s"
-msgstr "tjekkre forældrene til %s"
+msgstr "kontrollerer forældre til %s"
 
 #, python-format
 msgid "duplicate revision %d (%d)"
@@ -8696,7 +8696,7 @@
 msgstr "arkivet bruger revlog format %d\n"
 
 msgid "checking changesets\n"
-msgstr "tjekker ændringer\n"
+msgstr "kontrollerer ændringer\n"
 
 #, python-format
 msgid "unpacking changeset %s"
@@ -8730,7 +8730,7 @@
 msgstr ""
 
 msgid "checking files\n"
-msgstr "tjekker filer\n"
+msgstr "kontrollerer filer\n"
 
 #, python-format
 msgid "cannot decode filename '%s'"
@@ -8765,7 +8765,7 @@
 
 #, python-format
 msgid "checking rename of %s"
-msgstr "tjekker omdøbning af %s"
+msgstr "kontrollerer omdøbning af %s"
 
 #, python-format
 msgid "%s in manifests not found"
--- a/mercurial/cmdutil.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/cmdutil.py	Sun May 24 16:33:22 2009 +0200
@@ -447,7 +447,7 @@
     # srcs: list of (hgsep, hgsep, ossep, bool)
     # return: function that takes hgsep and returns ossep
     def targetpathafterfn(pat, dest, srcs):
-        if util.patkind(pat, None)[0]:
+        if _match.patkind(pat):
             # a mercurial pattern
             res = lambda p: os.path.join(dest,
                                          os.path.basename(util.localpath(p)))
@@ -495,7 +495,7 @@
     dest = pats.pop()
     destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
     if not destdirexists:
-        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
+        if len(pats) > 1 or _match.patkind(pats[0]):
             raise util.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
         if util.endswithsep(dest):
--- a/mercurial/dirstate.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/dirstate.py	Sun May 24 16:33:22 2009 +0200
@@ -461,6 +461,23 @@
         work = []
         wadd = work.append
 
+        if match.anypats():
+            #match.match with patterns
+            dostep3 = True
+            nomatches = False
+        elif not match.files():
+            #match.always or match.never
+            dostep3 = matchfn('')
+            nomatches = not dostep3
+        else:
+            #match.exact or match.match without pattern
+            dostep3 = False
+            nomatches = matchfn == match.exact
+
+        if nomatches:
+            #skip step 2
+            dirignore = util.always
+
         files = set(match.files())
         if not files or '.' in files:
             files = ['']
@@ -476,6 +493,10 @@
                 st = lstat(join(nf))
                 kind = getkind(st.st_mode)
                 if kind == dirkind:
+                    dostep3 = True
+                    if nf in dmap:
+                        #file deleted on disk but still in dirstate
+                        results[nf] = None
                     if not dirignore(nf):
                         wadd(nf)
                 elif kind == regkind or kind == lnkkind:
@@ -488,14 +509,20 @@
                 keep = False
                 prefix = nf + "/"
                 for fn in dmap:
-                    if nf == fn or fn.startswith(prefix):
+                    if nf == fn:
+                        if matchfn(nf):
+                            results[nf] = None
+                        keep = True
+                        break
+                    elif fn.startswith(prefix):
+                        dostep3 = True
                         keep = True
                         break
                 if not keep:
                     if inst.errno != errno.ENOENT:
                         fwarn(ff, inst.strerror)
                     elif badfn(ff, inst.strerror):
-                        if (nf in dmap or not ignore(nf)) and matchfn(nf):
+                        if nf not in results and not ignore(nf) and matchfn(nf):
                             results[nf] = None
 
         # step 2: visit subdirectories
@@ -533,11 +560,12 @@
                         results[nf] = None
 
         # step 3: report unseen items in the dmap hash
-        visit = sorted([f for f in dmap if f not in results and matchfn(f)])
-        for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
-            if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
-                st = None
-            results[nf] = st
+        if dostep3 and not nomatches:
+            visit = sorted([f for f in dmap if f not in results and matchfn(f)])
+            for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
+                if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
+                    st = None
+                results[nf] = st
 
         del results['.hg']
         return results
--- a/mercurial/filemerge.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/filemerge.py	Sun May 24 16:33:22 2009 +0200
@@ -7,7 +7,7 @@
 
 from node import short
 from i18n import _
-import util, simplemerge
+import util, simplemerge, match
 import os, tempfile, re, filecmp
 
 def _toolstr(ui, tool, part, default=""):
@@ -55,7 +55,7 @@
 
     # then patterns
     for pat, tool in ui.configitems("merge-patterns"):
-        mf = util.matcher(repo.root, "", [pat], [], [])[1]
+        mf = match.match(repo.root, '', [pat])
         if mf(path) and check(tool, pat, symlink, False):
                 toolpath = _findtool(ui, tool)
                 return (tool, '"' + toolpath + '"')
--- a/mercurial/help.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/help.py	Sun May 24 16:33:22 2009 +0200
@@ -341,6 +341,7 @@
     - hgdate: Date. Returns the date as a pair of numbers:
           "1157407993 25200" (Unix timestamp, timezone offset).
     - isodate: Date. Returns the date in ISO 8601 format.
+    - localdate: Date. Converts a date to local date.
     - obfuscate: Any text. Returns the input text rendered as a
           sequence of XML entities.
     - person: Any text. Returns the text before an email address.
--- a/mercurial/hgweb/protocol.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/hgweb/protocol.py	Sun May 24 16:33:22 2009 +0200
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
-import cStringIO, zlib, tempfile, errno, os, sys
+import cStringIO, zlib, tempfile, errno, os, sys, urllib
 from mercurial import util, streamclone
 from mercurial.node import bin, hex
 from mercurial import changegroup as changegroupmod
@@ -17,6 +17,7 @@
 __all__ = [
    'lookup', 'heads', 'branches', 'between', 'changegroup',
    'changegroupsubset', 'capabilities', 'unbundle', 'stream_out',
+   'branchmap',
 ]
 
 HGTYPE = 'application/mercurial-0.1'
@@ -37,6 +38,17 @@
     req.respond(HTTP_OK, HGTYPE, length=len(resp))
     yield resp
 
+def branchmap(repo, req):
+    branches = repo.branchmap()
+    heads = []
+    for branch, nodes in branches.iteritems():
+        branchname = urllib.quote(branch)
+        branchnodes = [hex(node) for node in nodes]
+        heads.append('%s %s' % (branchname, ' '.join(branchnodes)))
+    resp = '\n'.join(heads)
+    req.respond(HTTP_OK, HGTYPE, length=len(resp))
+    yield resp
+
 def branches(repo, req):
     nodes = []
     if 'nodes' in req.form:
@@ -97,7 +109,7 @@
     yield z.flush()
 
 def capabilities(repo, req):
-    caps = ['lookup', 'changegroupsubset']
+    caps = ['lookup', 'changegroupsubset', 'branchmap']
     if repo.ui.configbool('server', 'uncompressed', untrusted=True):
         caps.append('stream=%d' % repo.changelog.version)
     if changegroupmod.bundlepriority:
--- a/mercurial/httprepo.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/httprepo.py	Sun May 24 16:33:22 2009 +0200
@@ -145,6 +145,19 @@
         except:
             raise error.ResponseError(_("unexpected response:"), d)
 
+    def branchmap(self):
+        d = self.do_read("branchmap")
+        try:
+            branchmap = {}
+            for branchpart in d.splitlines():
+                branchheads = branchpart.split(' ')
+                branchname = urllib.unquote(branchheads[0])
+                branchheads = [bin(x) for x in branchheads[1:]]
+                branchmap[branchname] = branchheads
+            return branchmap
+        except:
+            raise error.ResponseError(_("unexpected response:"), d)
+
     def branches(self, nodes):
         n = " ".join(map(hex, nodes))
         d = self.do_read("branches", nodes=n)
--- a/mercurial/ignore.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/ignore.py	Sun May 24 16:33:22 2009 +0200
@@ -6,7 +6,7 @@
 # GNU General Public License version 2, incorporated herein by reference.
 
 from i18n import _
-import util
+import util, match
 import re
 
 _commentre = None
@@ -80,12 +80,13 @@
         return util.never
 
     try:
-        files, ignorefunc, anypats = (
-            util.matcher(root, inc=allpats, src='.hgignore'))
+        ignorefunc = match.match(root, '', [], allpats)
     except util.Abort:
         # Re-raise an exception where the src is the right file
         for f, patlist in pats.iteritems():
-            files, ignorefunc, anypats = (
-                util.matcher(root, inc=patlist, src=f))
+            try:
+                match.match(root, '', [], patlist)
+            except util.Abort, inst:
+                raise util.Abort('%s: %s' % (f, inst[0]))
 
     return ignorefunc
--- a/mercurial/localrepo.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/localrepo.py	Sun May 24 16:33:22 2009 +0200
@@ -18,7 +18,7 @@
 propertycache = util.propertycache
 
 class localrepository(repo.repository):
-    capabilities = set(('lookup', 'changegroupsubset'))
+    capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
     supported = set('revlogv1 store fncache'.split())
 
     def __init__(self, baseui, path=None, create=0):
@@ -360,7 +360,7 @@
 
         return partial
 
-    def _branchheads(self):
+    def branchmap(self):
         tip = self.changelog.tip()
         if self.branchcache is not None and self._branchcachetip == tip:
             return self.branchcache
@@ -392,7 +392,7 @@
         '''return a dict where branch names map to the tipmost head of
         the branch, open heads come before closed'''
         bt = {}
-        for bn, heads in self._branchheads().iteritems():
+        for bn, heads in self.branchmap().iteritems():
             head = None
             for i in range(len(heads)-1, -1, -1):
                 h = heads[i]
@@ -528,7 +528,7 @@
             for pat, cmd in self.ui.configitems(filter):
                 if cmd == '!':
                     continue
-                mf = util.matcher(self.root, "", [pat], [], [])[1]
+                mf = match_.match(self.root, '', [pat])
                 fn = None
                 params = cmd
                 for name, filterfn in self._datafilters.iteritems():
@@ -1125,7 +1125,7 @@
     def branchheads(self, branch=None, start=None, closed=True):
         if branch is None:
             branch = self[None].branch()
-        branches = self._branchheads()
+        branches = self.branchmap()
         if branch not in branches:
             return []
         bheads = branches[branch]
@@ -1429,42 +1429,97 @@
         else:
             bases, heads = update, self.changelog.heads()
 
+        def checkbranch(lheads, rheads, updatelh):
+            '''
+            check whether there are more local heads than remote heads on
+            a specific branch.
+
+            lheads: local branch heads
+            rheads: remote branch heads
+            updatelh: outgoing local branch heads
+            '''
+
+            warn = 0
+
+            if not revs and len(lheads) > len(rheads):
+                warn = 1
+            else:
+                updatelheads = [self.changelog.heads(x, lheads)
+                                for x in updatelh]
+                newheads = set(sum(updatelheads, [])) & set(lheads)
+
+                if not newheads:
+                    return True
+
+                for r in rheads:
+                    if r in self.changelog.nodemap:
+                        desc = self.changelog.heads(r, heads)
+                        l = [h for h in heads if h in desc]
+                        if not l:
+                            newheads.add(r)
+                    else:
+                        newheads.add(r)
+                if len(newheads) > len(rheads):
+                    warn = 1
+
+            if warn:
+                if not rheads: # new branch requires --force
+                    self.ui.warn(_("abort: push creates new"
+                                   " remote branch '%s'!\n") %
+                                 self[updatelh[0]].branch())
+                else:
+                    self.ui.warn(_("abort: push creates new remote heads!\n"))
+
+                self.ui.status(_("(did you forget to merge?"
+                                 " use push -f to force)\n"))
+                return False
+            return True
+
         if not bases:
             self.ui.status(_("no changes found\n"))
             return None, 1
         elif not force:
-            # check if we're creating new remote heads
-            # to be a remote head after push, node must be either
+            # Check for each named branch if we're creating new remote heads.
+            # To be a remote head after push, node must be either:
             # - unknown locally
             # - a local outgoing head descended from update
             # - a remote head that's known locally and not
             #   ancestral to an outgoing head
+            #
+            # New named branches cannot be created without --force.
 
-            warn = 0
+            if remote_heads != [nullid]:
+                if remote.capable('branchmap'):
+                    localhds = {}
+                    if not revs:
+                        localhds = self.branchmap()
+                    else:
+                        for n in heads:
+                            branch = self[n].branch()
+                            if branch in localhds:
+                                localhds[branch].append(n)
+                            else:
+                                localhds[branch] = [n]
+
+                    remotehds = remote.branchmap()
 
-            if remote_heads == [nullid]:
-                warn = 0
-            elif not revs and len(heads) > len(remote_heads):
-                warn = 1
-            else:
-                newheads = list(heads)
-                for r in remote_heads:
-                    if r in self.changelog.nodemap:
-                        desc = self.changelog.heads(r, heads)
-                        l = [h for h in heads if h in desc]
-                        if not l:
-                            newheads.append(r)
-                    else:
-                        newheads.append(r)
-                if len(newheads) > len(remote_heads):
-                    warn = 1
+                    for lh in localhds:
+                        if lh in remotehds:
+                            rheads = remotehds[lh]
+                        else:
+                            rheads = []
+                        lheads = localhds[lh]
+                        updatelh = [upd for upd in update
+                                    if self[upd].branch() == lh]
+                        if not updatelh:
+                            continue
+                        if not checkbranch(lheads, rheads, updatelh):
+                            return None, 0
+                else:
+                    if not checkbranch(heads, remote_heads, update):
+                        return None, 0
 
-            if warn:
-                self.ui.warn(_("abort: push creates new remote heads!\n"))
-                self.ui.status(_("(did you forget to merge?"
-                                 " use push -f to force)\n"))
-                return None, 0
-            elif inc:
+            if inc:
                 self.ui.warn(_("note: unsynced remote changes!\n"))
 
 
--- a/mercurial/match.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/match.py	Sun May 24 16:33:22 2009 +0200
@@ -5,16 +5,76 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
-import util
+import util, re
+
+class match(object):
+    def __init__(self, root, cwd, patterns, include=[], exclude=[],
+                 default='glob', exact=False):
+        """build an object to match a set of file patterns
 
-class _match(object):
-    def __init__(self, root, cwd, files, mf, ap):
+        arguments:
+        root - the canonical root of the tree you're matching against
+        cwd - the current working directory, if relevant
+        patterns - patterns to find
+        include - patterns to include
+        exclude - patterns to exclude
+        default - if a pattern in names has no explicit type, assume this one
+        exact - patterns are actually literals
+
+        a pattern is one of:
+        'glob:<glob>' - a glob relative to cwd
+        're:<regexp>' - a regular expression
+        'path:<path>' - a path relative to canonroot
+        'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
+        'relpath:<path>' - a path relative to cwd
+        'relre:<regexp>' - a regexp that needn't match the start of a name
+        '<something>' - a pattern of the specified default type
+        """
+
         self._root = root
         self._cwd = cwd
-        self._files = files
-        self._fmap = set(files)
-        self.matchfn = mf
-        self._anypats = ap
+        self._files = []
+        self._anypats = bool(include or exclude)
+
+        if include:
+            im = _buildmatch(_normalize(include, 'glob', root, cwd), '(?:/|$)')
+        if exclude:
+            em = _buildmatch(_normalize(exclude, 'glob', root, cwd), '(?:/|$)')
+        if exact:
+            self._files = patterns
+            pm = self.exact
+        elif patterns:
+            pats = _normalize(patterns, default, root, cwd)
+            self._files = _roots(pats)
+            self._anypats = self._anypats or _anypats(pats)
+            pm = _buildmatch(pats, '$')
+
+        if patterns or exact:
+            if include:
+                if exclude:
+                    m = lambda f: im(f) and not em(f) and pm(f)
+                else:
+                    m = lambda f: im(f) and pm(f)
+            else:
+                if exclude:
+                    m = lambda f: not em(f) and pm(f)
+                else:
+                    m = pm
+        else:
+            if include:
+                if exclude:
+                    m = lambda f: im(f) and not em(f)
+                else:
+                    m = im
+            else:
+                if exclude:
+                    m = lambda f: not em(f)
+                else:
+                    m = lambda f: True
+
+        self.matchfn = m
+        self._fmap = set(self._files)
+
     def __call__(self, fn):
         return self.matchfn(fn)
     def __iter__(self):
@@ -35,20 +95,155 @@
     def anypats(self):
         return self._anypats
 
-class always(_match):
+class exact(match):
+    def __init__(self, root, cwd, files):
+        match.__init__(self, root, cwd, files, exact = True)
+
+class always(match):
     def __init__(self, root, cwd):
-        _match.__init__(self, root, cwd, [], lambda f: True, False)
+        match.__init__(self, root, cwd, [])
+
+class never(match):
+    def __init__(self, root, cwd):
+        match.__init__(self, root, cwd, [], exact = True)
 
-class never(_match):
-    def __init__(self, root, cwd):
-        _match.__init__(self, root, cwd, [], lambda f: False, False)
+def patkind(pat):
+    return _patsplit(pat, None)[0]
+
+def _patsplit(pat, default):
+    """Split a string into an optional pattern kind prefix and the
+    actual pattern."""
+    if ':' in pat:
+        pat, val = pat.split(':', 1)
+        if pat in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'):
+            return pat, val
+    return default, pat
 
-class exact(_match):
-    def __init__(self, root, cwd, files):
-        _match.__init__(self, root, cwd, files, self.exact, False)
+def _globre(pat):
+    "convert a glob pattern into a regexp"
+    i, n = 0, len(pat)
+    res = ''
+    group = 0
+    escape = re.escape
+    def peek(): return i < n and pat[i]
+    while i < n:
+        c = pat[i]
+        i = i+1
+        if c not in '*?[{},\\':
+            res += escape(c)
+        elif c == '*':
+            if peek() == '*':
+                i += 1
+                res += '.*'
+            else:
+                res += '[^/]*'
+        elif c == '?':
+            res += '.'
+        elif c == '[':
+            j = i
+            if j < n and pat[j] in '!]':
+                j += 1
+            while j < n and pat[j] != ']':
+                j += 1
+            if j >= n:
+                res += '\\['
+            else:
+                stuff = pat[i:j].replace('\\','\\\\')
+                i = j + 1
+                if stuff[0] == '!':
+                    stuff = '^' + stuff[1:]
+                elif stuff[0] == '^':
+                    stuff = '\\' + stuff
+                res = '%s[%s]' % (res, stuff)
+        elif c == '{':
+            group += 1
+            res += '(?:'
+        elif c == '}' and group:
+            res += ')'
+            group -= 1
+        elif c == ',' and group:
+            res += '|'
+        elif c == '\\':
+            p = peek()
+            if p:
+                i += 1
+                res += escape(p)
+            else:
+                res += escape(c)
+        else:
+            res += escape(c)
+    return res
 
-class match(_match):
-    def __init__(self, root, cwd, patterns, include, exclude, default):
-        f, mf, ap = util.matcher(root, cwd, patterns, include, exclude,
-                                 None, default)
-        _match.__init__(self, root, cwd, f, mf, ap)
+def _regex(kind, name, tail):
+    '''convert a pattern into a regular expression'''
+    if not name:
+        return ''
+    if kind == 're':
+        return name
+    elif kind == 'path':
+        return '^' + re.escape(name) + '(?:/|$)'
+    elif kind == 'relglob':
+        return '(?:|.*/)' + _globre(name) + tail
+    elif kind == 'relpath':
+        return re.escape(name) + '(?:/|$)'
+    elif kind == 'relre':
+        if name.startswith('^'):
+            return name
+        return '.*' + name
+    return _globre(name) + tail
+
+def _buildmatch(pats, tail):
+    """build a matching function from a set of patterns"""
+    try:
+        pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
+        if len(pat) > 20000:
+            raise OverflowError()
+        return re.compile(pat).match
+    except OverflowError:
+        # We're using a Python with a tiny regex engine and we
+        # made it explode, so we'll divide the pattern list in two
+        # until it works
+        l = len(pats)
+        if l < 2:
+            raise
+        a, b = _buildmatch(pats[:l//2], tail), _buildmatch(pats[l//2:], tail)
+        return lambda s: a(s) or b(s)
+    except re.error:
+        for k, p in pats:
+            try:
+                re.compile('(?:%s)' % _regex(k, p, tail))
+            except re.error:
+                raise util.Abort("invalid pattern (%s): %s" % (k, p))
+        raise util.Abort("invalid pattern")
+
+def _normalize(names, default, root, cwd):
+    pats = []
+    for kind, name in [_patsplit(p, default) for p in names]:
+        if kind in ('glob', 'relpath'):
+            name = util.canonpath(root, cwd, name)
+        elif kind in ('relglob', 'path'):
+            name = util.normpath(name)
+
+        pats.append((kind, name))
+    return pats
+
+def _roots(patterns):
+    r = []
+    for kind, name in patterns:
+        if kind == 'glob': # find the non-glob prefix
+            root = []
+            for p in name.split('/'):
+                if '[' in p or '{' in p or '*' in p or '?' in p:
+                    break
+                root.append(p)
+            r.append('/'.join(root) or '.')
+        elif kind in ('relpath', 'path'):
+            r.append(name or '.')
+        elif kind == 'relglob':
+            r.append('.')
+    return r
+
+def _anypats(patterns):
+    for kind, name in patterns:
+        if kind in ('glob', 're', 'relglob', 'relre'):
+            return True
--- a/mercurial/osutil.c	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/osutil.c	Sun May 24 16:33:22 2009 +0200
@@ -402,7 +402,7 @@
 	PyObject *file_obj = NULL;
 	char *name = NULL;
 	char *mode = "rb";
-	DWORD access;
+	DWORD access = 0;
 	DWORD creation;
 	HANDLE handle;
 	int fd, flags = 0;
--- a/mercurial/revlog.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/revlog.py	Sun May 24 16:33:22 2009 +0200
@@ -322,7 +322,7 @@
         index = []
         nodemap =  {nullid: nullrev}
         n = off = 0
-        if len(data) < _prereadsize:
+        if len(data) == _prereadsize:
             data += fp.read() # read the rest
         l = len(data)
         while off + s <= l:
@@ -362,23 +362,19 @@
         self.size = struct.calcsize(indexformatng)
 
     def parseindex(self, fp, data, inline):
-        try:
-            size = len(data)
-            if size == _prereadsize:
-                size = util.fstat(fp).st_size
-        except AttributeError:
-            size = 0
-
-        if util.openhardlinks() and not inline and size > _prereadsize:
-            # big index, let's parse it on demand
-            parser = lazyparser(fp, size)
-            index = lazyindex(parser)
-            nodemap = lazymap(parser)
-            e = list(index[0])
-            type = gettype(e[0])
-            e[0] = offset_type(0, type)
-            index[0] = e
-            return index, nodemap, None
+        if len(data) == _prereadsize:
+            if util.openhardlinks() and not inline:
+                # big index, let's parse it on demand
+                parser = lazyparser(fp, size)
+                index = lazyindex(parser)
+                nodemap = lazymap(parser)
+                e = list(index[0])
+                type = gettype(e[0])
+                e[0] = offset_type(0, type)
+                index[0] = e
+                return index, nodemap, None
+            else:
+                data += fp.read()
 
         # call the C implementation to parse the index data
         index, nodemap, cache = parsers.parse_index(data, inline)
--- a/mercurial/sshrepo.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/sshrepo.py	Sun May 24 16:33:22 2009 +0200
@@ -8,7 +8,7 @@
 from node import bin, hex
 from i18n import _
 import repo, util, error
-import re
+import re, urllib
 
 class remotelock(object):
     def __init__(self, repo):
@@ -166,6 +166,19 @@
         except:
             self.abort(error.ResponseError(_("unexpected response:"), d))
 
+    def branchmap(self):
+        d = self.call("branchmap")
+        try:
+            branchmap = {}
+            for branchpart in d.splitlines():
+                branchheads = branchpart.split(' ')
+                branchname = urllib.unquote(branchheads[0])
+                branchheads = [bin(x) for x in branchheads[1:]]
+                branchmap[branchname] = branchheads
+            return branchmap
+        except:
+            raise error.ResponseError(_("unexpected response:"), d)
+
     def branches(self, nodes):
         n = " ".join(map(hex, nodes))
         d = self.call("branches", nodes=n)
--- a/mercurial/sshserver.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/sshserver.py	Sun May 24 16:33:22 2009 +0200
@@ -9,7 +9,7 @@
 from i18n import _
 from node import bin, hex
 import streamclone, util, hook
-import os, sys, tempfile
+import os, sys, tempfile, urllib
 
 class sshserver(object):
     def __init__(self, ui, repo):
@@ -64,6 +64,15 @@
             success = 0
         self.respond("%s %s\n" % (success, r))
 
+    def do_branchmap(self):
+        branchmap = self.repo.branchmap()
+        heads = []
+        for branch, nodes in branchmap.iteritems():
+            branchname = urllib.quote(branch)
+            branchnodes = [hex(node) for node in nodes]
+            heads.append('%s %s' % (branchname, ' '.join(branchnodes)))
+        self.respond('\n'.join(heads))
+
     def do_heads(self):
         h = self.repo.heads()
         self.respond(" ".join(map(hex, h)) + "\n")
@@ -77,7 +86,7 @@
         capabilities: space separated list of tokens
         '''
 
-        caps = ['unbundle', 'lookup', 'changegroupsubset']
+        caps = ['unbundle', 'lookup', 'changegroupsubset', 'branchmap']
         if self.ui.configbool('server', 'uncompressed'):
             caps.append('stream=%d' % self.repo.changelog.version)
         self.respond("capabilities: %s\n" % (' '.join(caps),))
--- a/mercurial/templatefilters.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/templatefilters.py	Sun May 24 16:33:22 2009 +0200
@@ -192,6 +192,7 @@
     "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'),
     "json": json,
     "jsonescape": jsonescape,
+    "localdate": lambda x: (x[0], util.makedate()[1]),
     "nonempty": nonempty,
     "obfuscate": obfuscate,
     "permissions": permissions,
--- a/mercurial/url.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/url.py	Sun May 24 16:33:22 2009 +0200
@@ -7,7 +7,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
-import urllib, urllib2, urlparse, httplib, os, re
+import urllib, urllib2, urlparse, httplib, os, re, socket, cStringIO
 from i18n import _
 import keepalive, util
 
@@ -245,18 +245,165 @@
             connection.send(self, data)
     return _sendfile
 
+has_https = hasattr(urllib2, 'HTTPSHandler')
+if has_https:
+    try:
+        # avoid using deprecated/broken FakeSocket in python 2.6
+        import ssl
+        _ssl_wrap_socket = ssl.wrap_socket
+    except ImportError:
+        def _ssl_wrap_socket(sock, key_file, cert_file):
+            ssl = socket.ssl(sock, key_file, cert_file)
+            return httplib.FakeSocket(sock, ssl)
+
 class httpconnection(keepalive.HTTPConnection):
     # must be able to send big bundle as stream.
     send = _gen_sendfile(keepalive.HTTPConnection)
 
+    def _proxytunnel(self):
+        proxyheaders = dict(
+                [(x, self.headers[x]) for x in self.headers
+                 if x.lower().startswith('proxy-')])
+        self._set_hostport(self.host, self.port)
+        self.send('CONNECT %s:%d HTTP/1.0\r\n' % (self.realhost, self.realport))
+        for header in proxyheaders.iteritems():
+            self.send('%s: %s\r\n' % header)
+        self.send('\r\n')
+
+        # majority of the following code is duplicated from
+        # httplib.HTTPConnection as there are no adequate places to
+        # override functions to provide the needed functionality
+        res = self.response_class(self.sock,
+                                  strict=self.strict,
+                                  method=self._method)
+
+        while True:
+            version, status, reason = res._read_status()
+            if status != httplib.CONTINUE:
+                break
+            while True:
+                skip = res.fp.readline().strip()
+                if not skip:
+                    break
+        res.status = status
+        res.reason = reason.strip()
+
+        if res.status == 200:
+            while True:
+                line = res.fp.readline()
+                if line == '\r\n':
+                    break
+            return True
+
+        if version == 'HTTP/1.0':
+            res.version = 10
+        elif version.startswith('HTTP/1.'):
+            res.version = 11
+        elif version == 'HTTP/0.9':
+            res.version = 9
+        else:
+            raise httplib.UnknownProtocol(version)
+
+        if res.version == 9:
+            res.length = None
+            res.chunked = 0
+            res.will_close = 1
+            res.msg = httplib.HTTPMessage(cStringIO.StringIO())
+            return False
+
+        res.msg = httplib.HTTPMessage(res.fp)
+        res.msg.fp = None
+
+        # are we using the chunked-style of transfer encoding?
+        trenc = res.msg.getheader('transfer-encoding')
+        if trenc and trenc.lower() == "chunked":
+            res.chunked = 1
+            res.chunk_left = None
+        else:
+            res.chunked = 0
+
+        # will the connection close at the end of the response?
+        res.will_close = res._check_close()
+
+        # do we have a Content-Length?
+        # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
+        length = res.msg.getheader('content-length')
+        if length and not res.chunked:
+            try:
+                res.length = int(length)
+            except ValueError:
+                res.length = None
+            else:
+                if res.length < 0:  # ignore nonsensical negative lengths
+                    res.length = None
+        else:
+            res.length = None
+
+        # does the body have a fixed length? (of zero)
+        if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
+            100 <= status < 200 or # 1xx codes
+            res._method == 'HEAD'):
+            res.length = 0
+
+        # if the connection remains open, and we aren't using chunked, and
+        # a content-length was not provided, then assume that the connection
+        # WILL close.
+        if (not res.will_close and
+           not res.chunked and
+           res.length is None):
+            res.will_close = 1
+
+        self.proxyres = res
+
+        return False
+
+    def connect(self):
+        if has_https and self.realhost: # use CONNECT proxy
+            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            self.sock.connect((self.host, self.port))
+            if self._proxytunnel():
+                # we do not support client x509 certificates
+                self.sock = _ssl_wrap_socket(self.sock, None, None)
+        else:
+            keepalive.HTTPConnection.connect(self)
+
+    def getresponse(self):
+        proxyres = getattr(self, 'proxyres', None)
+        if proxyres:
+            if proxyres.will_close:
+                self.close()
+            self.proxyres = None
+            return proxyres
+        return keepalive.HTTPConnection.getresponse(self)
+
 class httphandler(keepalive.HTTPHandler):
     def http_open(self, req):
         return self.do_open(httpconnection, req)
 
+    def _start_transaction(self, h, req):
+        if req.get_selector() == req.get_full_url(): # has proxy
+            urlparts = urlparse.urlparse(req.get_selector())
+            if urlparts[0] == 'https': # only use CONNECT for HTTPS
+                if ':' in urlparts[1]:
+                    realhost, realport = urlparts[1].split(':')
+                else:
+                    realhost = urlparts[1]
+                    realport = 443
+
+                h.realhost = realhost
+                h.realport = realport
+                h.headers = req.headers.copy()
+                h.headers.update(self.parent.addheaders)
+                return keepalive.HTTPHandler._start_transaction(self, h, req)
+
+        h.realhost = None
+        h.realport = None
+        h.headers = None
+        return keepalive.HTTPHandler._start_transaction(self, h, req)
+
     def __del__(self):
         self.close_all()
 
-has_https = hasattr(urllib2, 'HTTPSHandler')
 if has_https:
     class httpsconnection(httplib.HTTPSConnection):
         response_class = keepalive.HTTPResponse
--- a/mercurial/util.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/util.py	Sun May 24 16:33:22 2009 +0200
@@ -207,67 +207,6 @@
 def always(fn): return True
 def never(fn): return False
 
-def patkind(name, default):
-    """Split a string into an optional pattern kind prefix and the
-    actual pattern."""
-    for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
-        if name.startswith(prefix + ':'): return name.split(':', 1)
-    return default, name
-
-def globre(pat, head='^', tail='$'):
-    "convert a glob pattern into a regexp"
-    i, n = 0, len(pat)
-    res = ''
-    group = 0
-    def peek(): return i < n and pat[i]
-    while i < n:
-        c = pat[i]
-        i = i+1
-        if c == '*':
-            if peek() == '*':
-                i += 1
-                res += '.*'
-            else:
-                res += '[^/]*'
-        elif c == '?':
-            res += '.'
-        elif c == '[':
-            j = i
-            if j < n and pat[j] in '!]':
-                j += 1
-            while j < n and pat[j] != ']':
-                j += 1
-            if j >= n:
-                res += '\\['
-            else:
-                stuff = pat[i:j].replace('\\','\\\\')
-                i = j + 1
-                if stuff[0] == '!':
-                    stuff = '^' + stuff[1:]
-                elif stuff[0] == '^':
-                    stuff = '\\' + stuff
-                res = '%s[%s]' % (res, stuff)
-        elif c == '{':
-            group += 1
-            res += '(?:'
-        elif c == '}' and group:
-            res += ')'
-            group -= 1
-        elif c == ',' and group:
-            res += '|'
-        elif c == '\\':
-            p = peek()
-            if p:
-                i += 1
-                res += re.escape(p)
-            else:
-                res += re.escape(c)
-        else:
-            res += re.escape(c)
-    return head + res + tail
-
-_globchars = set('[{*?')
-
 def pathto(root, n1, n2):
     '''return the relative path from one place to another.
     root should use os.sep to separate directories
@@ -342,145 +281,6 @@
 
         raise Abort('%s not under root' % myname)
 
-def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
-    """build a function to match a set of file patterns
-
-    arguments:
-    canonroot - the canonical root of the tree you're matching against
-    cwd - the current working directory, if relevant
-    names - patterns to find
-    inc - patterns to include
-    exc - patterns to exclude
-    dflt_pat - if a pattern in names has no explicit type, assume this one
-    src - where these patterns came from (e.g. .hgignore)
-
-    a pattern is one of:
-    'glob:<glob>' - a glob relative to cwd
-    're:<regexp>' - a regular expression
-    'path:<path>' - a path relative to canonroot
-    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
-    'relpath:<path>' - a path relative to cwd
-    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
-    '<something>' - one of the cases above, selected by the dflt_pat argument
-
-    returns:
-    a 3-tuple containing
-    - list of roots (places where one should start a recursive walk of the fs);
-      this often matches the explicit non-pattern names passed in, but also
-      includes the initial part of glob: patterns that has no glob characters
-    - a bool match(filename) function
-    - a bool indicating if any patterns were passed in
-    """
-
-    # a common case: no patterns at all
-    if not names and not inc and not exc:
-        return [], always, False
-
-    def contains_glob(name):
-        for c in name:
-            if c in _globchars: return True
-        return False
-
-    def regex(kind, name, tail):
-        '''convert a pattern into a regular expression'''
-        if not name:
-            return ''
-        if kind == 're':
-            return name
-        elif kind == 'path':
-            return '^' + re.escape(name) + '(?:/|$)'
-        elif kind == 'relglob':
-            return globre(name, '(?:|.*/)', tail)
-        elif kind == 'relpath':
-            return re.escape(name) + '(?:/|$)'
-        elif kind == 'relre':
-            if name.startswith('^'):
-                return name
-            return '.*' + name
-        return globre(name, '', tail)
-
-    def matchfn(pats, tail):
-        """build a matching function from a set of patterns"""
-        if not pats:
-            return
-        try:
-            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
-            if len(pat) > 20000:
-                raise OverflowError()
-            return re.compile(pat).match
-        except OverflowError:
-            # We're using a Python with a tiny regex engine and we
-            # made it explode, so we'll divide the pattern list in two
-            # until it works
-            l = len(pats)
-            if l < 2:
-                raise
-            a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
-            return lambda s: a(s) or b(s)
-        except re.error:
-            for k, p in pats:
-                try:
-                    re.compile('(?:%s)' % regex(k, p, tail))
-                except re.error:
-                    if src:
-                        raise Abort("%s: invalid pattern (%s): %s" %
-                                    (src, k, p))
-                    else:
-                        raise Abort("invalid pattern (%s): %s" % (k, p))
-            raise Abort("invalid pattern")
-
-    def globprefix(pat):
-        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
-        root = []
-        for p in pat.split('/'):
-            if contains_glob(p): break
-            root.append(p)
-        return '/'.join(root) or '.'
-
-    def normalizepats(names, default):
-        pats = []
-        roots = []
-        anypats = False
-        for kind, name in [patkind(p, default) for p in names]:
-            if kind in ('glob', 'relpath'):
-                name = canonpath(canonroot, cwd, name)
-            elif kind in ('relglob', 'path'):
-                name = normpath(name)
-
-            pats.append((kind, name))
-
-            if kind in ('glob', 're', 'relglob', 'relre'):
-                anypats = True
-
-            if kind == 'glob':
-                root = globprefix(name)
-                roots.append(root)
-            elif kind in ('relpath', 'path'):
-                roots.append(name or '.')
-            elif kind == 'relglob':
-                roots.append('.')
-        return roots, pats, anypats
-
-    roots, pats, anypats = normalizepats(names, dflt_pat)
-
-    patmatch = matchfn(pats, '$') or always
-    incmatch = always
-    if inc:
-        dummy, inckinds, dummy = normalizepats(inc, 'glob')
-        incmatch = matchfn(inckinds, '(?:/|$)')
-    excmatch = never
-    if exc:
-        dummy, exckinds, dummy = normalizepats(exc, 'glob')
-        excmatch = matchfn(exckinds, '(?:/|$)')
-
-    if not names and inc and not exc:
-        # common case: hgignore patterns
-        match = incmatch
-    else:
-        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
-
-    return (roots, match, (inc or exc or anypats) and True)
-
 _hgexecutable = None
 
 def main_is_frozen():
@@ -744,7 +544,7 @@
         '''On Windows, expand the implicit globs in a list of patterns'''
         ret = []
         for p in pats:
-            kind, name = patkind(p, None)
+            kind, name = _patsplit(p, None)
             if kind is None:
                 globbed = glob.glob(name)
                 if globbed:
--- a/mercurial/windows.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/mercurial/windows.py	Sun May 24 16:33:22 2009 +0200
@@ -68,7 +68,7 @@
         return 'command' in os.environ.get('comspec', '')
 
 def openhardlinks():
-    return not _is_win_9x and "win32api" in locals()
+    return not _is_win_9x() and "win32api" in globals()
 
 def system_rcpath():
     try:
--- a/tests/run-tests.py	Tue Mar 31 00:04:07 2009 +0900
+++ b/tests/run-tests.py	Sun May 24 16:33:22 2009 +0200
@@ -688,6 +688,9 @@
             ('.' not in test or test.endswith('.py') or
              test.endswith('.bat'))):
             tests.append(test)
+    if not tests:
+        print "# Ran 0 tests, 0 skipped, 0 failed."
+        return
 
     vlog("# Using TESTDIR", TESTDIR)
     vlog("# Using HGTMP", HGTMP)
--- a/tests/test-acl.out	Tue Mar 31 00:04:07 2009 +0900
+++ b/tests/test-acl.out	Sun May 24 16:33:22 2009 +0200
@@ -42,6 +42,7 @@
 pushing to ../b
 searching for changes
 common changesets up to 6675d58eff77
+invalidating branch cache (tip differs)
 3 changesets found
 list of changesets:
 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -74,6 +75,7 @@
 pushing to ../b
 searching for changes
 common changesets up to 6675d58eff77
+invalidating branch cache (tip differs)
 3 changesets found
 list of changesets:
 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -111,6 +113,7 @@
 pushing to ../b
 searching for changes
 common changesets up to 6675d58eff77
+invalidating branch cache (tip differs)
 3 changesets found
 list of changesets:
 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -408,6 +411,7 @@
 pushing to ../b
 searching for changes
 common changesets up to 6675d58eff77
+invalidating branch cache (tip differs)
 3 changesets found
 list of changesets:
 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
--- a/tests/test-convert-cvs.out	Tue Mar 31 00:04:07 2009 +0900
+++ b/tests/test-convert-cvs.out	Sun May 24 16:33:22 2009 +0200
@@ -12,6 +12,7 @@
 % commit a new revision changing b/c
 checking in src/b/c,v
 % convert fresh repo
+warning: support for external cvsps is deprecated and will be removed in Mercurial 1.4
 initializing destination src-hg repository
 connecting to cvsrepo
 scanning source...
@@ -25,6 +26,7 @@
 c
 c
 % convert fresh repo with --filemap
+warning: support for external cvsps is deprecated and will be removed in Mercurial 1.4
 initializing destination src-filemap repository
 connecting to cvsrepo
 scanning source...
@@ -44,6 +46,7 @@
 checking in src/a,v
 checking in src/b/c,v
 % convert again
+warning: support for external cvsps is deprecated and will be removed in Mercurial 1.4
 connecting to cvsrepo
 scanning source...
 sorting...
@@ -55,6 +58,7 @@
 c
 c
 % convert again with --filemap
+warning: support for external cvsps is deprecated and will be removed in Mercurial 1.4
 connecting to cvsrepo
 scanning source...
 sorting...
@@ -73,6 +77,7 @@
 T b/c
 checking in src/b/c,v
 % convert again
+warning: support for external cvsps is deprecated and will be removed in Mercurial 1.4
 connecting to cvsrepo
 scanning source...
 sorting...
@@ -82,6 +87,7 @@
 c
 d
 % convert again with --filemap
+warning: support for external cvsps is deprecated and will be removed in Mercurial 1.4
 connecting to cvsrepo
 scanning source...
 sorting...
--- a/tests/test-convert.out	Tue Mar 31 00:04:07 2009 +0900
+++ b/tests/test-convert.out	Sun May 24 16:33:22 2009 +0200
@@ -112,7 +112,7 @@
     source uses its internal changeset merging code by default but can
     be configured to call the external 'cvsps' program by setting:
         --config convert.cvsps='cvsps -A -u --cvs-direct -q'
-    This is a legacy option and may be removed in future.
+    This option is deprecated and will be removed in Mercurial 1.4.
 
     The options shown are the defaults.
 
--- a/tests/test-hgweb-commands.out	Tue Mar 31 00:04:07 2009 +0900
+++ b/tests/test-hgweb-commands.out	Sun May 24 16:33:22 2009 +0200
@@ -848,7 +848,7 @@
 % capabilities
 200 Script output follows
 
-lookup changegroupsubset unbundle=HG10GZ,HG10BZ,HG10UN% heads
+lookup changegroupsubset branchmap unbundle=HG10GZ,HG10BZ,HG10UN% heads
 200 Script output follows
 
 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
--- a/tests/test-push-warn	Tue Mar 31 00:04:07 2009 +0900
+++ b/tests/test-push-warn	Sun May 24 16:33:22 2009 +0200
@@ -59,4 +59,68 @@
 hg push -r 0 ../e ; echo $?
 hg push -r 1 ../e ; echo $?
 
+cd ..
+
+# issue 736
+echo % issue 736
+hg init f
+cd f
+hg -q branch a
+echo 0 > foo
+hg -q ci -d "1000000 0" -Am 0
+echo 1 > foo
+hg -q ci -d "1000000 0" -m 1
+hg -q up 0
+echo 2 > foo
+hg -q ci -d "1000000 0" -m 2
+hg -q up 0
+hg -q branch b
+echo 3 > foo
+hg -q ci -d "1000000 0" -m 3
+cd ..
+
+hg -q clone f g
+cd g
+
+echo % push on existing branch and new branch
+hg -q up 1
+echo 4 > foo
+hg -q ci -d "1000000 0" -m 4
+hg -q up 0
+echo 5 > foo
+hg -q branch c
+hg -q ci -d "1000000 0" -m 5
+hg push -r 4 -r 5 ../f; echo $?
+
+echo % fail on multiple head push
+hg -q up 1
+echo 6 > foo
+hg -q ci -d "1000000 0" -m 6
+hg push -r 4 -r 6 ../f; echo $?
+
+echo % push replacement head on existing branches
+hg -q up 3
+echo 7 > foo
+hg -q ci -d "1000000 0" -m 7
+hg push -r 6 -r 7 ../f; echo $?
+
+echo % merge of branch a to other branch b followed by unrelated push on branch a
+hg -q up 6
+HGMERGE=true hg -q merge 7
+hg -q ci -d "1000000 0" -m 8
+hg -q up 7
+echo 9 > foo
+hg -q ci -d "1000000 0" -m 9
+hg push -r 8 ../f; echo $?
+hg push -r 9 ../f; echo $?
+
+echo % cheating the counting algorithm
+hg -q up 8
+HGMERGE=true hg -q merge 2
+hg -q ci -d "1000000 0" -m 10
+hg -q up 1
+echo 11 > foo
+hg -q ci -d "1000000 0" -m 11
+hg push -r 10 -r 11 ../f; echo $?
+
 exit 0
--- a/tests/test-push-warn.out	Tue Mar 31 00:04:07 2009 +0900
+++ b/tests/test-push-warn.out	Sun May 24 16:33:22 2009 +0200
@@ -80,3 +80,47 @@
 adding file changes
 added 1 changesets with 1 changes to 1 files
 0
+% issue 736
+% push on existing branch and new branch
+pushing to ../f
+searching for changes
+abort: push creates new remote branch 'c'!
+(did you forget to merge? use push -f to force)
+1
+% fail on multiple head push
+pushing to ../f
+searching for changes
+abort: push creates new remote heads!
+(did you forget to merge? use push -f to force)
+1
+% push replacement head on existing branches
+pushing to ../f
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+0
+% merge of branch a to other branch b followed by unrelated push on branch a
+pushing to ../f
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (-1 heads)
+0
+pushing to ../f
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+0
+% cheating the counting algorithm
+pushing to ../f
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+0