changeset 13805:0bc7b1661177
merge with stable
author   | Matt Mackall <mpm@selenic.com>
date     | Wed, 30 Mar 2011 13:23:24 -0500
parents  | 7dc2bd4c0dc8 (diff) 5c18a0bca26f (current diff)
children | 8ba08a16e4e0
diffstat | 142 files changed, 4103 insertions(+), 1784 deletions(-)
--- a/.hgignore  Wed Mar 30 02:22:15 2011 +0900
+++ b/.hgignore  Wed Mar 30 13:23:24 2011 -0500
@@ -7,6 +7,7 @@
 *.mergebackup
 *.o
 *.so
+*.dll
 *.pyd
 *.pyc
 *.pyo
--- a/contrib/check-code.py  Wed Mar 30 02:22:15 2011 +0900
+++ b/contrib/check-code.py  Wed Mar 30 13:23:24 2011 -0500
@@ -66,6 +66,7 @@
   (r'^source\b', "don't use 'source', use '.'"),
   (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
   (r'ls\s+[^|-]+\s+-', "options to 'ls' must come before filenames"),
+  (r'[^>]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
 ]
 
 testfilters = [
@@ -176,9 +177,10 @@
   (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
   (r'\S+ (\+\+|--)', "use foo++, not foo ++"),
   (r'\w,\w', "missing whitespace after ,"),
-  (r'\w[+/*]\w', "missing whitespace in expression"),
+  (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
   (r'^#\s+\w', "use #foo, not # foo"),
   (r'[^\n]\Z', "no trailing newline"),
+  (r'^\s*#import\b', "use only #include in standard C code"),
 ]
 
 cfilters = [
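The new test-suite rule is easy to exercise on its own; a minimal sketch (not part of the changeset) using the exact pattern added above:

```python
import re

# rule added to check-code's test patterns: tests must append to
# $HGRCPATH, never truncate it
rule = re.compile(r'[^>]>\s*\$HGRCPATH')

assert not rule.search('echo "[extensions]" >> $HGRCPATH')  # append - allowed
assert rule.search('echo "[extensions]" > $HGRCPATH')       # overwrite - flagged
```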
--- a/contrib/shrink-revlog.py  Wed Mar 30 02:22:15 2011 +0900
+++ b/contrib/shrink-revlog.py  Wed Mar 30 13:23:24 2011 -0500
@@ -102,15 +102,16 @@
 
     ui.status(_('writing revs\n'))
 
-    count = [0]
-    def progress(*args):
-        ui.progress(_('writing'), count[0], total=len(order))
-        count[0] += 1
 
     order = [r1.node(r) for r in order]
 
     # this is a bit ugly, but it works
-    lookup = lambda x: "%020d" % r1.linkrev(r1.rev(x))
+    count = [0]
+    def lookup(x):
+        count[0] += 1
+        ui.progress(_('writing'), count[0], total=len(order))
+        return "%020d" % r1.linkrev(r1.rev(x))
+
     unlookup = lambda x: int(x, 10)
 
     try:
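The refactor folds progress reporting into the lookup callback because the revlog clone invokes it once per revision; a rough standalone sketch of that pattern (ui, r1 and order stand in for the objects used above):

```python
def makelookup(ui, r1, order):
    count = [0]                  # mutable cell shared with the closure
    def lookup(x):
        count[0] += 1            # one call per revision written
        ui.progress('writing', count[0], total=len(order))
        return "%020d" % r1.linkrev(r1.rev(x))
    return lookup
```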
--- a/contrib/zsh_completion  Wed Mar 30 02:22:15 2011 +0900
+++ b/contrib/zsh_completion  Wed Mar 30 13:23:24 2011 -0500
@@ -360,8 +360,8 @@
   '(--help -h)'{-h,--help}'[display help and exit]'
   '--debug[debug mode]'
   '--debugger[start debugger]'
-  '--encoding[set the charset encoding (default: UTF8)]'
-  '--encodingmode[set the charset encoding mode (default: strict)]'
+  '--encoding[set the charset encoding]'
+  '--encodingmode[set the charset encoding mode]'
   '--lsprof[print improved command execution profile]'
   '--traceback[print traceback on exception]'
   '--time[time how long the command takes]'
--- a/doc/hgrc.5.txt  Wed Mar 30 02:22:15 2011 +0900
+++ b/doc/hgrc.5.txt  Wed Mar 30 13:23:24 2011 -0500
@@ -911,9 +911,6 @@
     The conflict resolution program to use during a manual merge. For more
     information on merge tools see :hg:`help merge-tools`. For configuring
     merge tools see the merge-tools_ section.
-``patch``
-    command to use to apply patches. Look for ``gpatch`` or ``patch`` in
-    PATH if unset.
 ``quiet``
     Reduce the amount of output printed. True or False. Default is False.
 ``remotecmd``
--- a/hgext/bugzilla.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/bugzilla.py Wed Mar 30 13:23:24 2011 -0500 @@ -1,6 +1,7 @@ # bugzilla.py - bugzilla integration for mercurial # # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# Copyright 2011 Jim Hague <jim.hague@acm.org> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -8,56 +9,55 @@ '''hooks for integrating with the Bugzilla bug tracker This hook extension adds comments on bugs in Bugzilla when changesets -that refer to bugs by Bugzilla ID are seen. The hook does not change -bug status. +that refer to bugs by Bugzilla ID are seen. The comment is formatted using +the Mercurial template mechanism. + +The hook does not change bug status. -The hook updates the Bugzilla database directly. Only Bugzilla -installations using MySQL are supported. +Three basic modes of access to Bugzilla are provided: + +1. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later. + +2. Check data via the Bugzilla XMLRPC interface and submit bug change + via email to Bugzilla email interface. Requires Bugzilla 3.4 or later. -The hook relies on a Bugzilla script to send bug change notification -emails. That script changes between Bugzilla versions; the -'processmail' script used prior to 2.18 is replaced in 2.18 and -subsequent versions by 'config/sendbugmail.pl'. Note that these will -be run by Mercurial as the user pushing the change; you will need to -ensure the Bugzilla install file permissions are set appropriately. +2. Writing directly to the Bugzilla database. Only Bugzilla installations + using MySQL are supported. Requires Python MySQLdb. -The extension is configured through three different configuration -sections. These keys are recognized in the [bugzilla] section: - -host - Hostname of the MySQL server holding the Bugzilla database. - -db - Name of the Bugzilla database in MySQL. Default 'bugs'. - -user - Username to use to access MySQL server. Default 'bugs'. +Writing directly to the database is susceptible to schema changes, and +relies on a Bugzilla contrib script to send out bug change +notification emails. This script runs as the user running Mercurial, +must be run on the host with the Bugzilla install, and requires +permission to read Bugzilla configuration details and the necessary +MySQL user and password to have full access rights to the Bugzilla +database. For these reasons this access mode is now considered +deprecated, and will not be updated for new Bugzilla versions going +forward. -password - Password to use to access MySQL server. - -timeout - Database connection timeout (seconds). Default 5. - -version - Bugzilla version. Specify '3.0' for Bugzilla versions 3.0 and later, - '2.18' for Bugzilla versions from 2.18 and '2.16' for versions prior - to 2.18. +Access via XMLRPC needs a Bugzilla username and password to be specified +in the configuration. Comments are added under that username. Since the +configuration must be readable by all Mercurial users, it is recommended +that the rights of that user are restricted in Bugzilla to the minimum +necessary to add comments. -bzuser - Fallback Bugzilla user name to record comments with, if changeset - committer cannot be found as a Bugzilla user. +Access via XMLRPC/email behaves uses XMLRPC to query Bugzilla, but sends +email to the Bugzilla email interface to submit comments to bugs. 
+The From: address in the email is set to the email address of the Mercurial +user, so the comment appears to come from the Mercurial user. In the event +that the Mercurial user email is not recognised by Bugzilla as a Bugzilla +user, the Bugzilla username and password used to log into Bugzilla are +used instead as the source of the comment. -bzdir - Bugzilla install directory. Used by default notify. Default - '/var/www/html/bugzilla'. +Configuration items common to all access modes: -notify - The command to run to get Bugzilla to send bug change notification - emails. Substitutes from a map with 3 keys, 'bzdir', 'id' (bug id) - and 'user' (committer bugzilla email). Default depends on version; - from 2.18 it is "cd %(bzdir)s && perl -T contrib/sendbugmail.pl - %(id)s %(user)s". +[bugzilla] +version + This access type to use. Values recognised are: + xmlrpc Bugzilla XMLRPC interface. + xmlrpc+email Bugzilla XMLRPC and email interfaces. + 3.0 MySQL access, Bugzilla 3.0 and later. + 2.18 MySQL access, Bugzilla 2.18 and up to but not including 3.0. + 2.16 MySQL access, Bugzilla 2.16 and up to but not including 2.18. regexp Regular expression to match bug IDs in changeset commit message. @@ -82,23 +82,84 @@ 'to bug {bug}.\\ndetails:\\n\\t{desc|tabindent}' strip - The number of slashes to strip from the front of {root} to produce - {webroot}. Default 0. + The number of path separator characters to strip from the front of the + Mercurial repository path ('{root}' in templates) to produce '{webroot}'. + For example, a repository with '{root}' '/var/local/my-project' with a + strip of 2 gives a value for '{webroot}' of 'my-project'. Default 0. + +[web] +baseurl + Base URL for browsing Mercurial repositories. Referenced from + templates as {hgweb}. + +Configuration items common to XMLRPC+email and MySQL access modes: usermap - Path of file containing Mercurial committer ID to Bugzilla user ID + Path of file containing Mercurial committer email to Bugzilla user email mappings. If specified, the file should contain one mapping per line, "committer"="Bugzilla user". See also the [usermap] section. +[usermap] The [usermap] section is used to specify mappings of Mercurial -committer ID to Bugzilla user ID. See also [bugzilla].usermap. -"committer"="Bugzilla user" +committer email to Bugzilla user email. See also [bugzilla].usermap. +Contains entries of the form "committer"="Bugzilla user". + +XMLRPC access mode configuration: + +[bugzilla] +bzurl + The base URL for the Bugzilla installation. + Default 'http://localhost/bugzilla'. + +user + The username to use to log into Bugzilla via XMLRPC. Default 'bugs'. + +password + The password for Bugzilla login. + +XMLRPC+email access mode uses the XMLRPC access mode configuration items, +and also: + +[bugzilla] +bzemail + The Bugzilla email address. + +In addition, the Mercurial email settings must be configured. See the +documentation for 'hgrc', sections '[email]' and '[smtp]'. + +MySQL access mode configuration: -Finally, the [web] section supports one entry: +[bugzilla] +host + Hostname of the MySQL server holding the Bugzilla database. + Default 'localhost'. + +db + Name of the Bugzilla database in MySQL. Default 'bugs'. + +user + Username to use to access MySQL server. Default 'bugs'. + +password + Password to use to access MySQL server. -baseurl - Base URL for browsing Mercurial repositories. Reference from - templates as {hgweb}. +timeout + Database connection timeout (seconds). Default 5. 
+ +bzuser + Fallback Bugzilla user name to record comments with, if changeset + committer cannot be found as a Bugzilla user. + +bzdir + Bugzilla install directory. Used by default notify. Default + '/var/www/html/bugzilla'. + +notify + The command to run to get Bugzilla to send bug change notification + emails. Substitutes from a map with 3 keys, 'bzdir', 'id' (bug id) + and 'user' (committer bugzilla email). Default depends on version; + from 2.18 it is "cd %(bzdir)s && perl -T contrib/sendbugmail.pl + %(id)s %(user)s". Activating the extension:: @@ -109,11 +170,43 @@ # run bugzilla hook on every change pulled or pushed in here incoming.bugzilla = python:hgext.bugzilla.hook -Example configuration: +Example configurations: + +XMLRPC example configuration. This uses the Bugzilla at +'http://my-project.org/bugzilla', logging in as user 'bugmail@my-project.org' +wityh password 'plugh'. It is used with a collection of Mercurial +repositories in '/var/local/hg/repos/'. :: + + [bugzilla] + bzurl=http://my-project.org/bugzilla + user=bugmail@my-project.org + password=plugh + version=xmlrpc + + [web] + baseurl=http://my-project.org/hg -This example configuration is for a collection of Mercurial -repositories in /var/local/hg/repos/ used with a local Bugzilla 3.2 -installation in /opt/bugzilla-3.2. :: +XMLRPC+email example configuration. This uses the Bugzilla at +'http://my-project.org/bugzilla', logging in as user 'bugmail@my-project.org' +wityh password 'plugh'. It is used with a collection of Mercurial +repositories in '/var/local/hg/repos/'. Bug comments are sent to the +Bugzilla email address 'buzilla@my-project.org'. :: + + [bugzilla] + user=bugmail@my-project.org + password=plugh + version=xmlrpc + bzemail=bugzilla@my-project.org + + [web] + baseurl=https://dev.laicatc.com/hg + bugzillaurl=https://dev.laicatc.com/bugzilla + +MySQL example configuration. This is for a collection of Mercurial +repositories in '/var/local/hg/repos/' used with a local Bugzilla 3.2 +installation in /opt/bugzilla-3.2. The MySQL database is on 'localhost', +the Bugzilla database name is 'bugs' and MySQL is accessed with MySQL +username 'bugs' password 'XYZZY'. :: [bugzilla] host=localhost @@ -132,7 +225,7 @@ [usermap] user@emaildomain.com=user.name@bugzilladomain.com -Commits add a comment to the Bugzilla bug record of the form:: +All the above add a comment to the Bugzilla bug record of the form:: Changeset 3b16791d6642 in repository-name. http://dev.domain.com/hg/repository-name/rev/3b16791d6642 @@ -142,31 +235,83 @@ from mercurial.i18n import _ from mercurial.node import short -from mercurial import cmdutil, templater, util -import re, time - -MySQLdb = None +from mercurial import cmdutil, mail, templater, util +import re, time, xmlrpclib -def buglist(ids): - return '(' + ','.join(map(str, ids)) + ')' - -class bugzilla_2_16(object): - '''support for bugzilla version 2.16.''' +class bzaccess(object): + '''Base class for access to Bugzilla.''' def __init__(self, ui): self.ui = ui + usermap = self.ui.config('bugzilla', 'usermap') + if usermap: + self.ui.readconfig(usermap, sections=['usermap']) + + def map_committer(self, user): + '''map name of committer to Bugzilla user name.''' + for committer, bzuser in self.ui.configitems('usermap'): + if committer.lower() == user.lower(): + return bzuser + return user + + # Methods to be implemented by access classes. 
+ def filter_real_bug_ids(self, ids): + '''remove bug IDs that do not exist in Bugzilla from set.''' + pass + + def filter_cset_known_bug_ids(self, node, ids): + '''remove bug IDs where node occurs in comment text from set.''' + pass + + def add_comment(self, bugid, text, committer): + '''add comment to bug. + + If possible add the comment as being from the committer of + the changeset. Otherwise use the default Bugzilla user. + ''' + pass + + def notify(self, ids, committer): + '''Force sending of Bugzilla notification emails.''' + pass + +# Bugzilla via direct access to MySQL database. +class bzmysql(bzaccess): + '''Support for direct MySQL access to Bugzilla. + + The earliest Bugzilla version this is tested with is version 2.16. + + If your Bugzilla is version 3.2 or above, you are strongly + recommended to use the XMLRPC access method instead. + ''' + + @staticmethod + def sql_buglist(ids): + '''return SQL-friendly list of bug ids''' + return '(' + ','.join(map(str, ids)) + ')' + + _MySQLdb = None + + def __init__(self, ui): + try: + import MySQLdb as mysql + bzmysql._MySQLdb = mysql + except ImportError, err: + raise util.Abort(_('python mysql support not available: %s') % err) + + bzaccess.__init__(self, ui) + host = self.ui.config('bugzilla', 'host', 'localhost') user = self.ui.config('bugzilla', 'user', 'bugs') passwd = self.ui.config('bugzilla', 'password') db = self.ui.config('bugzilla', 'db', 'bugs') timeout = int(self.ui.config('bugzilla', 'timeout', 5)) - usermap = self.ui.config('bugzilla', 'usermap') - if usermap: - self.ui.readconfig(usermap, sections=['usermap']) self.ui.note(_('connecting to %s:%s as %s, password %s\n') % (host, db, user, '*' * len(passwd))) - self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd, - db=db, connect_timeout=timeout) + self.conn = bzmysql._MySQLdb.connect(host=host, + user=user, passwd=passwd, + db=db, + connect_timeout=timeout) self.cursor = self.conn.cursor() self.longdesc_id = self.get_longdesc_id() self.user_ids = {} @@ -177,7 +322,7 @@ self.ui.note(_('query: %s %s\n') % (args, kwargs)) try: self.cursor.execute(*args, **kwargs) - except MySQLdb.MySQLError: + except bzmysql._MySQLdb.MySQLError: self.ui.note(_('failed query: %s %s\n') % (args, kwargs)) raise @@ -190,22 +335,22 @@ return ids[0][0] def filter_real_bug_ids(self, ids): - '''filter not-existing bug ids from list.''' - self.run('select bug_id from bugs where bug_id in %s' % buglist(ids)) - return sorted([c[0] for c in self.cursor.fetchall()]) + '''filter not-existing bug ids from set.''' + self.run('select bug_id from bugs where bug_id in %s' % + bzmysql.sql_buglist(ids)) + return set([c[0] for c in self.cursor.fetchall()]) - def filter_unknown_bug_ids(self, node, ids): - '''filter bug ids from list that already refer to this changeset.''' + def filter_cset_known_bug_ids(self, node, ids): + '''filter bug ids that already refer to this changeset from set.''' self.run('''select bug_id from longdescs where bug_id in %s and thetext like "%%%s%%"''' % - (buglist(ids), short(node))) - unknown = set(ids) + (bzmysql.sql_buglist(ids), short(node))) for (id,) in self.cursor.fetchall(): self.ui.status(_('bug %d already knows about changeset %s\n') % (id, short(node))) - unknown.discard(id) - return sorted(unknown) + ids.discard(id) + return ids def notify(self, ids, committer): '''tell bugzilla to send mail.''' @@ -251,15 +396,8 @@ self.user_ids[user] = userid return userid - def map_committer(self, user): - '''map name of committer to bugzilla user name.''' - for committer, 
bzuser in self.ui.configitems('usermap'): - if committer.lower() == user.lower(): - return bzuser - return user - def get_bugzilla_user(self, committer): - '''see if committer is a registered bugzilla user. Return + '''See if committer is a registered bugzilla user. Return bugzilla username and userid if so. If not, return default bugzilla username and userid.''' user = self.map_committer(committer) @@ -292,19 +430,19 @@ (bugid, userid, now, self.longdesc_id)) self.conn.commit() -class bugzilla_2_18(bugzilla_2_16): +class bzmysql_2_18(bzmysql): '''support for bugzilla 2.18 series.''' def __init__(self, ui): - bugzilla_2_16.__init__(self, ui) + bzmysql.__init__(self, ui) self.default_notify = \ "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s" -class bugzilla_3_0(bugzilla_2_18): +class bzmysql_3_0(bzmysql_2_18): '''support for bugzilla 3.0 series.''' def __init__(self, ui): - bugzilla_2_18.__init__(self, ui) + bzmysql_2_18.__init__(self, ui) def get_longdesc_id(self): '''get identity of longdesc field''' @@ -314,13 +452,176 @@ raise util.Abort(_('unknown database schema')) return ids[0][0] +# Buzgilla via XMLRPC interface. + +class CookieSafeTransport(xmlrpclib.SafeTransport): + """A SafeTransport that retains cookies over its lifetime. + + The regular xmlrpclib transports ignore cookies. Which causes + a bit of a problem when you need a cookie-based login, as with + the Bugzilla XMLRPC interface. + + So this is a SafeTransport which looks for cookies being set + in responses and saves them to add to all future requests. + It appears a SafeTransport can do both HTTP and HTTPS sessions, + which saves us having to do a CookieTransport too. + """ + + # Inspiration drawn from + # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html + # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/ + + cookies = [] + def send_cookies(self, connection): + if self.cookies: + for cookie in self.cookies: + connection.putheader("Cookie", cookie) + + def request(self, host, handler, request_body, verbose=0): + self.verbose = verbose + + # issue XML-RPC request + h = self.make_connection(host) + if verbose: + h.set_debuglevel(1) + + self.send_request(h, handler, request_body) + self.send_host(h, host) + self.send_cookies(h) + self.send_user_agent(h) + self.send_content(h, request_body) + + # Deal with differences between Python 2.4-2.6 and 2.7. + # In the former h is a HTTP(S). In the latter it's a + # HTTP(S)Connection. Luckily, the 2.4-2.6 implementation of + # HTTP(S) has an underlying HTTP(S)Connection, so extract + # that and use it. + try: + response = h.getresponse() + except AttributeError: + response = h._conn.getresponse() + + # Add any cookie definitions to our list. + for header in response.msg.getallmatchingheaders("Set-Cookie"): + val = header.split(": ", 1)[1] + cookie = val.split(";", 1)[0] + self.cookies.append(cookie) + + if response.status != 200: + raise xmlrpclib.ProtocolError(host + handler, response.status, + response.reason, response.msg.headers) + + payload = response.read() + parser, unmarshaller = self.getparser() + parser.feed(payload) + parser.close() + + return unmarshaller.close() + +class bzxmlrpc(bzaccess): + """Support for access to Bugzilla via the Bugzilla XMLRPC API. + + Requires a minimum Bugzilla version 3.4. 
+ """ + + def __init__(self, ui): + bzaccess.__init__(self, ui) + + bzweb = self.ui.config('bugzilla', 'bzurl', + 'http://localhost/bugzilla/') + bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi" + + user = self.ui.config('bugzilla', 'user', 'bugs') + passwd = self.ui.config('bugzilla', 'password') + + self.bzproxy = xmlrpclib.ServerProxy(bzweb, CookieSafeTransport()) + self.bzproxy.User.login(dict(login=user, password=passwd)) + + def get_bug_comments(self, id): + """Return a string with all comment text for a bug.""" + c = self.bzproxy.Bug.comments(dict(ids=[id])) + return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']]) + + def filter_real_bug_ids(self, ids): + res = set() + bugs = self.bzproxy.Bug.get(dict(ids=sorted(ids), permissive=True)) + for bug in bugs['bugs']: + res.add(bug['id']) + return res + + def filter_cset_known_bug_ids(self, node, ids): + for id in sorted(ids): + if self.get_bug_comments(id).find(short(node)) != -1: + self.ui.status(_('bug %d already knows about changeset %s\n') % + (id, short(node))) + ids.discard(id) + return ids + + def add_comment(self, bugid, text, committer): + self.bzproxy.Bug.add_comment(dict(id=bugid, comment=text)) + +class bzxmlrpcemail(bzxmlrpc): + """Read data from Bugzilla via XMLRPC, send updates via email. + + Advantages of sending updates via email: + 1. Comments can be added as any user, not just logged in user. + 2. Bug statuses and other fields not accessible via XMLRPC can + be updated. This is not currently used. + """ + + def __init__(self, ui): + bzxmlrpc.__init__(self, ui) + + self.bzemail = self.ui.config('bugzilla', 'bzemail') + if not self.bzemail: + raise util.Abort(_("configuration 'bzemail' missing")) + mail.validateconfig(self.ui) + + def send_bug_modify_email(self, bugid, commands, comment, committer): + '''send modification message to Bugzilla bug via email. + + The message format is documented in the Bugzilla email_in.pl + specification. commands is a list of command lines, comment is the + comment text. + + To stop users from crafting commit comments with + Bugzilla commands, specify the bug ID via the message body, rather + than the subject line, and leave a blank line after it. + ''' + user = self.map_committer(committer) + matches = self.bzproxy.User.get(dict(match=[user])) + if not matches['users']: + user = self.ui.config('bugzilla', 'user', 'bugs') + matches = self.bzproxy.User.get(dict(match=[user])) + if not matches['users']: + raise util.Abort(_("default bugzilla user %s email not found") % + user) + user = matches['users'][0]['email'] + + text = "\n".join(commands) + "\n@bug_id = %d\n\n" % bugid + comment + + _charsets = mail._charsets(self.ui) + user = mail.addressencode(self.ui, user, _charsets) + bzemail = mail.addressencode(self.ui, self.bzemail, _charsets) + msg = mail.mimeencode(self.ui, text, _charsets) + msg['From'] = user + msg['To'] = bzemail + msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets) + sendmail = mail.connect(self.ui) + sendmail(user, bzemail, msg.as_string()) + + def add_comment(self, bugid, text, committer): + self.send_bug_modify_email(bugid, [], text, committer) + class bugzilla(object): # supported versions of bugzilla. different versions have # different schemas. 
_versions = { - '2.16': bugzilla_2_16, - '2.18': bugzilla_2_18, - '3.0': bugzilla_3_0 + '2.16': bzmysql, + '2.18': bzmysql_2_18, + '3.0': bzmysql_3_0, + 'xmlrpc': bzxmlrpc, + 'xmlrpc+email': bzxmlrpcemail } _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*' @@ -353,10 +654,12 @@ _split_re = None def find_bug_ids(self, ctx): - '''find valid bug ids that are referred to in changeset - comments and that do not already have references to this - changeset.''' + '''return set of integer bug IDs from commit comment. + Extract bug IDs from changeset comments. Filter out any that are + not known to Bugzilla, and any that already have a reference to + the given changeset in their comments. + ''' if bugzilla._bug_re is None: bugzilla._bug_re = re.compile( self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re), @@ -376,7 +679,7 @@ if ids: ids = self.filter_real_bug_ids(ids) if ids: - ids = self.filter_unknown_bug_ids(ctx.node(), ids) + ids = self.filter_cset_known_bug_ids(ctx.node(), ids) return ids def update(self, bugid, ctx): @@ -418,13 +721,6 @@ '''add comment to bugzilla for each changeset that refers to a bugzilla bug id. only add a comment once per bug, so same change seen multiple times does not fill bug with duplicate data.''' - try: - import MySQLdb as mysql - global MySQLdb - MySQLdb = mysql - except ImportError, err: - raise util.Abort(_('python mysql support not available: %s') % err) - if node is None: raise util.Abort(_('hook type %s does not pass a changeset id') % hooktype) @@ -436,6 +732,6 @@ for id in ids: bz.update(id, ctx) bz.notify(ids, util.email(ctx.user())) - except MySQLdb.MySQLError, err: - raise util.Abort(_('database error: %s') % err.args[1]) + except Exception, e: + raise util.Abort(_('Bugzilla error: %s') % e)
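For readers trying the new access mode, a hedged sketch of a login round trip through the cookie-retaining transport defined above; the URL and credentials are placeholders and a reachable Bugzilla 3.4+ install is assumed:

```python
import xmlrpclib
from hgext.bugzilla import CookieSafeTransport

url = 'http://localhost/bugzilla/xmlrpc.cgi'          # placeholder install
proxy = xmlrpclib.ServerProxy(url, CookieSafeTransport())
proxy.User.login(dict(login='bugs', password='secret'))

# the login cookie is replayed on later calls, e.g. fetching comments
comments = proxy.Bug.comments(dict(ids=[1234]))
```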
--- a/hgext/color.py  Wed Mar 30 02:22:15 2011 +0900
+++ b/hgext/color.py  Wed Mar 30 13:23:24 2011 -0500
@@ -18,11 +18,11 @@
 
 '''colorize output from some commands
 
-This extension modifies the status and resolve commands to add color to their
-output to reflect file status, the qseries command to add color to reflect
-patch status (applied, unapplied, missing), and to diff-related
-commands to highlight additions, removals, diff headers, and trailing
-whitespace.
+This extension modifies the status and resolve commands to add color
+to their output to reflect file status, the qseries command to add
+color to reflect patch status (applied, unapplied, missing), and to
+diff-related commands to highlight additions, removals, diff headers,
+and trailing whitespace.
 
 Other effects in addition to color, like bold and underlined text, are
 also available. Effects are rendered with the ECMA-48 SGR control
@@ -107,6 +107,7 @@
            'diff.trailingwhitespace': 'bold red_background',
            'diffstat.deleted': 'red',
            'diffstat.inserted': 'green',
+           'ui.prompt': 'yellow',
            'log.changeset': 'yellow',
            'resolve.resolved': 'green bold',
            'resolve.unresolved': 'red bold',
--- a/hgext/convert/__init__.py  Wed Mar 30 02:22:15 2011 +0900
+++ b/hgext/convert/__init__.py  Wed Mar 30 13:23:24 2011 -0500
@@ -10,7 +10,7 @@
 import convcmd
 import cvsps
 import subversion
-from mercurial import commands
+from mercurial import commands, templatekw
 from mercurial.i18n import _
 
 # Commands definition was moved elsewhere to ease demandload job.
@@ -334,3 +334,34 @@
          ],
          _('hg debugcvsps [OPTION]... [PATH]...')),
 }
+
+def kwconverted(ctx, name):
+    rev = ctx.extra().get('convert_revision', '')
+    if rev.startswith('svn:'):
+        if name == 'svnrev':
+            return str(subversion.revsplit(rev)[2])
+        elif name == 'svnpath':
+            return subversion.revsplit(rev)[1]
+        elif name == 'svnuuid':
+            return subversion.revsplit(rev)[0]
+    return rev
+
+def kwsvnrev(repo, ctx, **args):
+    """:svnrev: String. Converted subversion revision number."""
+    return kwconverted(ctx, 'svnrev')
+
+def kwsvnpath(repo, ctx, **args):
+    """:svnpath: String. Converted subversion revision project path."""
+    return kwconverted(ctx, 'svnpath')
+
+def kwsvnuuid(repo, ctx, **args):
+    """:svnuuid: String. Converted subversion revision repository identifier."""
+    return kwconverted(ctx, 'svnuuid')
+
+def extsetup(ui):
+    templatekw.keywords['svnrev'] = kwsvnrev
+    templatekw.keywords['svnpath'] = kwsvnpath
+    templatekw.keywords['svnuuid'] = kwsvnuuid
+
+# tell hggettext to extract docstrings from these functions:
+i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
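The new keywords read the 'convert_revision' entry that convert stores in each changeset's extras; a small standalone sketch (the UUID and revision number are made up) of how an 'svn:' value decomposes, mirroring subversion.revsplit() introduced later in this changeset:

```python
def revsplit(rev):
    '''Parse "svn:<uuid><module>@<revnum>" into (uuid, module, revnum).'''
    url, revnum = rev.rsplit('@', 1)
    parts = url.split('/', 1)
    mod = ''
    if len(parts) > 1:
        mod = '/' + parts[1]
    return parts[0][4:], mod, int(revnum)

print(revsplit('svn:612f4d9c-0000-0000-0000-000000000000/trunk@2077'))
# -> {svnuuid}, {svnpath}, {svnrev}: ('612f4d9c-...', '/trunk', 2077)
```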
--- a/hgext/convert/common.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/convert/common.py Wed Mar 30 13:23:24 2011 -0500 @@ -151,6 +151,13 @@ """ return None + def getbookmarks(self): + """Return the bookmarks as a dictionary of name: revision + + Bookmark names are to be UTF-8 strings. + """ + return {} + class converter_sink(object): """Conversion sink (target) interface""" @@ -228,6 +235,13 @@ def after(self): pass + def putbookmarks(self, bookmarks): + """Put bookmarks into sink. + + bookmarks: {bookmarkname: sink_rev_id, ...} + where bookmarkname is an UTF-8 string. + """ + pass class commandline(object): def __init__(self, ui, command): @@ -240,7 +254,7 @@ def postrun(self): pass - def _cmdline(self, cmd, *args, **kwargs): + def _cmdline(self, cmd, closestdin, *args, **kwargs): cmdline = [self.command, cmd] + list(args) for k, v in kwargs.iteritems(): if len(k) == 1: @@ -257,16 +271,23 @@ cmdline = [util.shellquote(arg) for arg in cmdline] if not self.ui.debugflag: cmdline += ['2>', util.nulldev] - cmdline += ['<', util.nulldev] + if closestdin: + cmdline += ['<', util.nulldev] cmdline = ' '.join(cmdline) return cmdline def _run(self, cmd, *args, **kwargs): - cmdline = self._cmdline(cmd, *args, **kwargs) + return self._dorun(util.popen, cmd, True, *args, **kwargs) + + def _run2(self, cmd, *args, **kwargs): + return self._dorun(util.popen2, cmd, False, *args, **kwargs) + + def _dorun(self, openfunc, cmd, closestdin, *args, **kwargs): + cmdline = self._cmdline(cmd, closestdin, *args, **kwargs) self.ui.debug('running: %s\n' % (cmdline,)) self.prerun() try: - return util.popen(cmdline) + return openfunc(cmdline) finally: self.postrun() @@ -322,8 +343,9 @@ self._argmax = self._argmax / 2 - 1 return self._argmax - def limit_arglist(self, arglist, cmd, *args, **kwargs): - limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs)) + def limit_arglist(self, arglist, cmd, closestdin, *args, **kwargs): + cmdlen = len(self._cmdline(cmd, closestdin, *args, **kwargs)) + limit = self.getargmax() - cmdlen bytes = 0 fl = [] for fn in arglist: @@ -339,7 +361,7 @@ yield fl def xargs(self, arglist, cmd, *args, **kwargs): - for l in self.limit_arglist(arglist, cmd, *args, **kwargs): + for l in self.limit_arglist(arglist, cmd, True, *args, **kwargs): self.run0(cmd, *(list(args) + l), **kwargs) class mapfile(dict):
--- a/hgext/convert/convcmd.py  Wed Mar 30 02:22:15 2011 +0900
+++ b/hgext/convert/convcmd.py  Wed Mar 30 13:23:24 2011 -0500
@@ -378,6 +378,16 @@
             if tagsparents:
                 self.map[tagsparents[0][0]] = nrev
 
+            bookmarks = self.source.getbookmarks()
+            cbookmarks = {}
+            for k in bookmarks:
+                v = bookmarks[k]
+                if self.map.get(v, SKIPREV) != SKIPREV:
+                    cbookmarks[k] = self.map[v]
+
+            if c and cbookmarks:
+                self.dest.putbookmarks(cbookmarks)
+
             self.writeauthormap()
         finally:
             self.cleanup()
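Stripped of the repository plumbing, the bookmark hand-off above remaps each source revision through the conversion map and drops anything that was skipped; a standalone paraphrase with made-up names (the 'SKIP' sentinel stands in for convert's SKIPREV):

```python
def convertbookmarks(sourcebookmarks, revmap, skipped='SKIP'):
    converted = {}
    for name, srcrev in sourcebookmarks.items():
        sinkrev = revmap.get(srcrev, skipped)
        if sinkrev != skipped:          # bookmark points at a converted rev
            converted[name] = sinkrev
    return converted

print(convertbookmarks({'stable': 'a1', 'wip': 'b2'}, {'a1': 'ff01'}))
# {'stable': 'ff01'} -- 'wip' is dropped, its revision was not converted
```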
--- a/hgext/convert/git.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/convert/git.py Wed Mar 30 13:23:24 2011 -0500 @@ -17,19 +17,27 @@ # cannot remove environment variable. Just assume none have # both issues. if hasattr(os, 'unsetenv'): - def gitopen(self, s): + def gitopen(self, s, noerr=False): prevgitdir = os.environ.get('GIT_DIR') os.environ['GIT_DIR'] = self.path try: - return util.popen(s, 'rb') + if noerr: + (stdin, stdout, stderr) = util.popen3(s) + return stdout + else: + return util.popen(s, 'rb') finally: if prevgitdir is None: del os.environ['GIT_DIR'] else: os.environ['GIT_DIR'] = prevgitdir else: - def gitopen(self, s): - return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb') + def gitopen(self, s, noerr=False): + if noerr: + (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s)) + return stdout + else: + util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb') def gitread(self, s): fh = self.gitopen(s) @@ -168,3 +176,30 @@ raise util.Abort(_('cannot read changes in %s') % version) return changes + + def getbookmarks(self): + bookmarks = {} + + # Interesting references in git are prefixed + prefix = 'refs/heads/' + prefixlen = len(prefix) + + # factor two commands + gitcmd = { 'remote/': 'git ls-remote --heads origin', + '': 'git show-ref'} + + # Origin heads + for reftype in gitcmd: + try: + fh = self.gitopen(gitcmd[reftype], noerr=True) + for line in fh: + line = line.strip() + rev, name = line.split(None, 1) + if not name.startswith(prefix): + continue + name = '%s%s' % (reftype, name[prefixlen:]) + bookmarks[name] = rev + except: + pass + + return bookmarks
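The parsing inside the new getbookmarks() is simple enough to show in isolation; a standalone sketch with fabricated 'git show-ref' output (real hashes and branch names will differ):

```python
sample = (
    'a21d19c1e8e29a1d9a1f8e6a2c3d4e5f60718293 refs/heads/master\n'
    'b1c2d3e4f506172839a4b5c6d7e8f90a1b2c3d4e refs/heads/topic\n')

prefix = 'refs/heads/'
bookmarks = {}
for line in sample.splitlines():
    rev, name = line.strip().split(None, 1)
    if name.startswith(prefix):          # ignore tags, remotes, etc.
        bookmarks[name[len(prefix):]] = rev

print(sorted(bookmarks))                 # ['master', 'topic']
```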
--- a/hgext/convert/hg.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/convert/hg.py Wed Mar 30 13:23:24 2011 -0500 @@ -21,7 +21,7 @@ import os, time, cStringIO from mercurial.i18n import _ from mercurial.node import bin, hex, nullid -from mercurial import hg, util, context, error +from mercurial import hg, util, context, bookmarks, error from common import NoRepo, commit, converter_source, converter_sink @@ -214,6 +214,16 @@ def setfilemapmode(self, active): self.filemapmode = active + def putbookmarks(self, updatedbookmark): + if not len(updatedbookmark): + return + + self.ui.status(_("updating bookmarks\n")) + for bookmark in updatedbookmark: + self.repo._bookmarks[bookmark] = bin(updatedbookmark[bookmark]) + bookmarks.write(self.repo) + + class mercurial_source(converter_source): def __init__(self, ui, path, rev=None): converter_source.__init__(self, ui, path, rev) @@ -374,3 +384,6 @@ return hex(self.repo.lookup(rev)) except error.RepoError: return None + + def getbookmarks(self): + return bookmarks.listbookmarks(self.repo)
--- a/hgext/convert/monotone.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/convert/monotone.py Wed Mar 30 13:23:24 2011 -0500 @@ -19,6 +19,8 @@ self.ui = ui self.path = path + self.automatestdio = False + self.rev = rev norepo = NoRepo(_("%s does not look like a monotone repository") % path) @@ -64,18 +66,103 @@ checktool('mtn', abort=False) - # test if there are any revisions - self.rev = None - try: - self.getheads() - except: - raise norepo - self.rev = rev + def mtnrun(self, *args, **kwargs): + if self.automatestdio: + return self.mtnrunstdio(*args, **kwargs) + else: + return self.mtnrunsingle(*args, **kwargs) - def mtnrun(self, *args, **kwargs): + def mtnrunsingle(self, *args, **kwargs): kwargs['d'] = self.path return self.run0('automate', *args, **kwargs) + def mtnrunstdio(self, *args, **kwargs): + # Prepare the command in automate stdio format + command = [] + for k, v in kwargs.iteritems(): + command.append("%s:%s" % (len(k), k)) + if v: + command.append("%s:%s" % (len(v), v)) + if command: + command.insert(0, 'o') + command.append('e') + + command.append('l') + for arg in args: + command += "%s:%s" % (len(arg), arg) + command.append('e') + command = ''.join(command) + + self.ui.debug("mtn: sending '%s'\n" % command) + self.mtnwritefp.write(command) + self.mtnwritefp.flush() + + return self.mtnstdioreadcommandoutput(command) + + def mtnstdioreadpacket(self): + read = None + commandnbr = '' + while read != ':': + read = self.mtnreadfp.read(1) + if not read: + raise util.Abort(_('bad mtn packet - no end of commandnbr')) + commandnbr += read + commandnbr = commandnbr[:-1] + + stream = self.mtnreadfp.read(1) + if stream not in 'mewptl': + raise util.Abort(_('bad mtn packet - bad stream type %s' % stream)) + + read = self.mtnreadfp.read(1) + if read != ':': + raise util.Abort(_('bad mtn packet - no divider before size')) + + read = None + lengthstr = '' + while read != ':': + read = self.mtnreadfp.read(1) + if not read: + raise util.Abort(_('bad mtn packet - no end of packet size')) + lengthstr += read + try: + length = long(lengthstr[:-1]) + except TypeError: + raise util.Abort(_('bad mtn packet - bad packet size %s') + % lengthstr) + + read = self.mtnreadfp.read(length) + if len(read) != length: + raise util.Abort(_("bad mtn packet - unable to read full packet " + "read %s of %s") % (len(read), length)) + + return (commandnbr, stream, length, read) + + def mtnstdioreadcommandoutput(self, command): + retval = [] + while True: + commandnbr, stream, length, output = self.mtnstdioreadpacket() + self.ui.debug('mtn: read packet %s:%s:%s\n' % + (commandnbr, stream, length)) + + if stream == 'l': + # End of command + if output != '0': + raise util.Abort(_("mtn command '%s' returned %s") % + (command, output)) + break + elif stream in 'ew': + # Error, warning output + self.ui.warn(_('%s error:\n') % self.command) + self.ui.warn(output) + elif stream == 'p': + # Progress messages + self.ui.debug('mtn: ' + output) + elif stream == 'm': + # Main stream - command output + retval.append(output) + + return ''.join(retval) + def mtnloadmanifest(self, rev): if self.manifest_rev == rev: return @@ -204,14 +291,18 @@ return data, attr def getcommit(self, rev): - certs = self.mtngetcerts(rev) + extra = {} + certs = self.mtngetcerts(rev) + if certs.get('suspend') == certs["branch"]: + extra['close'] = '1' return commit( author=certs["author"], date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")), desc=certs["changelog"], rev=rev, parents=self.mtnrun("parents", rev).splitlines(), - 
branch=certs["branch"]) + branch=certs["branch"], + extra=extra) def gettags(self): tags = {} @@ -225,3 +316,43 @@ # This function is only needed to support --filemap # ... and we don't support that raise NotImplementedError() + + def before(self): + # Check if we have a new enough version to use automate stdio + version = 0.0 + try: + versionstr = self.mtnrunsingle("interface_version") + version = float(versionstr) + except Exception: + raise util.Abort(_("unable to determine mtn automate interface " + "version")) + + if version >= 12.0: + self.automatestdio = True + self.ui.debug("mtn automate version %s - using automate stdio\n" % + version) + + # launch the long-running automate stdio process + self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio', + '-d', self.path) + # read the headers + read = self.mtnreadfp.readline() + if read != 'format-version: 2\n': + raise util.Abort(_('mtn automate stdio header unexpected: %s') + % read) + while read != '\n': + read = self.mtnreadfp.readline() + if not read: + raise util.Abort(_("failed to reach end of mtn automate " + "stdio headers")) + else: + self.ui.debug("mtn automate version %s - not using automate stdio " + "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version) + + def after(self): + if self.automatestdio: + self.mtnwritefp.close() + self.mtnwritefp = None + self.mtnreadfp.close() + self.mtnreadfp = None +
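The automate stdio protocol parsed above frames every reply as <command-number>:<stream>:<size>:<payload>, where the stream letter is 'm' for main output, 'e'/'w' for errors and warnings, 'p' for progress and 'l' for the final status; a compact standalone reader for one well-formed packet (no real mtn process involved):

```python
import StringIO

def readpacket(fp):
    commandnbr = ''
    while True:                          # command number, ':'-terminated
        ch = fp.read(1)
        if ch == ':':
            break
        commandnbr += ch
    stream = fp.read(1)                  # one of 'm', 'e', 'w', 'p', 'l'
    fp.read(1)                           # the ':' after the stream letter
    lengthstr = ''
    while True:                          # payload size, ':'-terminated
        ch = fp.read(1)
        if ch == ':':
            break
        lengthstr += ch
    return commandnbr, stream, fp.read(int(lengthstr))

print(readpacket(StringIO.StringIO('0:m:5:hello')))   # ('0', 'm', 'hello')
```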
--- a/hgext/convert/subversion.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/convert/subversion.py Wed Mar 30 13:23:24 2011 -0500 @@ -41,6 +41,15 @@ class SvnPathNotFound(Exception): pass +def revsplit(rev): + """Parse a revision string and return (uuid, path, revnum).""" + url, revnum = rev.rsplit('@', 1) + parts = url.split('/', 1) + mod = '' + if len(parts) > 1: + mod = '/' + parts[1] + return parts[0][4:], mod, int(revnum) + def geturl(path): try: return svn.client.url_from_path(svn.core.svn_path_canonicalize(path)) @@ -259,6 +268,7 @@ except ValueError: raise util.Abort(_('svn: revision %s is not an integer') % rev) + self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/') self.startrev = self.ui.config('convert', 'svn.startrev', default=0) try: self.startrev = int(self.startrev) @@ -285,7 +295,7 @@ def setrevmap(self, revmap): lastrevs = {} for revid in revmap.iterkeys(): - uuid, module, revnum = self.revsplit(revid) + uuid, module, revnum = revsplit(revid) lastrevnum = lastrevs.setdefault(module, revnum) if revnum > lastrevnum: lastrevs[module] = revnum @@ -380,7 +390,7 @@ files, self.removed, copies = self.expandpaths(rev, paths, parents) else: # Perform a full checkout on roots - uuid, module, revnum = self.revsplit(rev) + uuid, module, revnum = revsplit(rev) entries = svn.client.ls(self.baseurl + urllib.quote(module), optrev(revnum), True, self.ctx) files = [n for n, e in entries.iteritems() @@ -402,7 +412,7 @@ def getcommit(self, rev): if rev not in self.commits: - uuid, module, revnum = self.revsplit(rev) + uuid, module, revnum = revsplit(rev) self.module = module self.reparent(module) # We assume that: @@ -529,16 +539,6 @@ def revnum(self, rev): return int(rev.split('@')[-1]) - def revsplit(self, rev): - url, revnum = rev.rsplit('@', 1) - revnum = int(revnum) - parts = url.split('/', 1) - uuid = parts.pop(0)[4:] - mod = '' - if parts: - mod = '/' + parts[0] - return uuid, mod, revnum - def latest(self, path, stop=0): """Find the latest revid affecting path, up to stop. 
It may return a revision in a different module, since a branch may be moved without @@ -605,7 +605,7 @@ changed, removed = set(), set() copies = {} - new_module, revnum = self.revsplit(rev)[1:] + new_module, revnum = revsplit(rev)[1:] if new_module != self.module: self.module = new_module self.reparent(self.module) @@ -622,7 +622,7 @@ continue # Copy sources not in parent revisions cannot be # represented, ignore their origin for now - pmodule, prevnum = self.revsplit(parents[0])[1:] + pmodule, prevnum = revsplit(parents[0])[1:] if ent.copyfrom_rev < prevnum: continue copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule) @@ -633,7 +633,7 @@ copies[self.recode(entrypath)] = self.recode(copyfrom_path) elif kind == 0: # gone, but had better be a deleted *file* self.ui.debug("gone from %s\n" % ent.copyfrom_rev) - pmodule, prevnum = self.revsplit(parents[0])[1:] + pmodule, prevnum = revsplit(parents[0])[1:] parentpath = pmodule + "/" + entrypath fromkind = self._checkpath(entrypath, prevnum, pmodule) @@ -659,7 +659,7 @@ if ent.action == 'R' and parents: # If a directory is replacing a file, mark the previous # file as deleted - pmodule, prevnum = self.revsplit(parents[0])[1:] + pmodule, prevnum = revsplit(parents[0])[1:] pkind = self._checkpath(entrypath, prevnum, pmodule) if pkind == svn.core.svn_node_file: removed.add(self.recode(entrypath)) @@ -681,7 +681,7 @@ continue # Copy sources not in parent revisions cannot be # represented, ignore their origin for now - pmodule, prevnum = self.revsplit(parents[0])[1:] + pmodule, prevnum = revsplit(parents[0])[1:] if ent.copyfrom_rev < prevnum: continue copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule) @@ -736,7 +736,7 @@ # ent.copyfrom_rev may not be the actual last revision previd = self.latest(newpath, ent.copyfrom_rev) if previd is not None: - prevmodule, prevnum = self.revsplit(previd)[1:] + prevmodule, prevnum = revsplit(previd)[1:] if prevnum >= self.startrev: parents = [previd] self.ui.note( @@ -761,9 +761,8 @@ author = author and self.recode(author) or '' try: branch = self.module.split("/")[-1] - trunkname = self.ui.config('convert', 'svn.trunk', 'trunk') - if branch == trunkname.strip('/'): - branch = '' + if branch == self.trunkname: + branch = None except IndexError: branch = None @@ -834,7 +833,7 @@ raise IOError() mode = '' try: - new_module, revnum = self.revsplit(rev)[1:] + new_module, revnum = revsplit(rev)[1:] if self.module != new_module: self.module = new_module self.reparent(self.module) @@ -944,6 +943,7 @@ class svn_sink(converter_sink, commandline): commit_re = re.compile(r'Committed revision (\d+).', re.M) + uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M) def prerun(self): if self.wc: @@ -964,8 +964,6 @@ def __init__(self, ui, path): - if svn is None: - raise MissingTool(_('Could not load Subversion python bindings')) converter_sink.__init__(self, ui, path) commandline.__init__(self, ui, 'svn') self.delete = [] @@ -1012,8 +1010,8 @@ fp.close() util.set_flags(hook, False, True) - xport = transport.SvnRaTransport(url=geturl(path)) - self.uuid = svn.ra.get_uuid(xport.ra) + output = self.run0('info') + self.uuid = self.uuid_re.search(output).group(1).strip() def wjoin(self, *names): return os.path.join(self.wc, *names)
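Dropping the Subversion Python bindings from svn_sink means the repository UUID now comes from scraping 'svn info'; a standalone sketch of the new uuid_re against fabricated output:

```python
import re

uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)

sample = ('Path: .\n'
          'URL: http://svn.example.org/repo/trunk\n'
          'Repository UUID: 612f4d9c-9f9e-4d0e-b0c6-1f9d8a1e2b3c\n'
          'Revision: 2077\n')

print(uuid_re.search(sample).group(1))   # just the UUID token
```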
--- a/hgext/eol.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/eol.py Wed Mar 30 13:23:24 2011 -0500 @@ -73,11 +73,13 @@ only need to these filters until you have prepared a ``.hgeol`` file. The ``win32text.forbid*`` hooks provided by the win32text extension -have been unified into a single hook named ``eol.hook``. The hook will -lookup the expected line endings from the ``.hgeol`` file, which means -you must migrate to a ``.hgeol`` file first before using the hook. -Remember to enable the eol extension in the repository where you -install the hook. +have been unified into a single hook named ``eol.checkheadshook``. The +hook will lookup the expected line endings from the ``.hgeol`` file, +which means you must migrate to a ``.hgeol`` file first before using +the hook. ``eol.checkheadshook`` only checks heads, intermediate +invalid revisions will be pushed. To forbid them completely, use the +``eol.checkallhook`` hook. These hooks are best used as +``pretxnchangegroup`` hooks. See :hg:`help patterns` for more information about the glob patterns used. @@ -127,36 +129,119 @@ 'cleverdecode:': tocrlf } +class eolfile(object): + def __init__(self, ui, root, data): + self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} + self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} -def hook(ui, repo, node, hooktype, **kwargs): - """verify that files have expected EOLs""" + self.cfg = config.config() + # Our files should not be touched. The pattern must be + # inserted first override a '** = native' pattern. + self.cfg.set('patterns', '.hg*', 'BIN') + # We can then parse the user's patterns. + self.cfg.parse('.hgeol', data) + + isrepolf = self.cfg.get('repository', 'native') != 'CRLF' + self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf' + iswdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n') + self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf' + + include = [] + exclude = [] + for pattern, style in self.cfg.items('patterns'): + key = style.upper() + if key == 'BIN': + exclude.append(pattern) + else: + include.append(pattern) + # This will match the files for which we need to care + # about inconsistent newlines. + self.match = match.match(root, '', [], include, exclude) + + def setfilters(self, ui): + for pattern, style in self.cfg.items('patterns'): + key = style.upper() + try: + ui.setconfig('decode', pattern, self._decode[key]) + ui.setconfig('encode', pattern, self._encode[key]) + except KeyError: + ui.warn(_("ignoring unknown EOL style '%s' from %s\n") + % (style, self.cfg.source('patterns', pattern))) + + def checkrev(self, repo, ctx, files): + failed = [] + for f in (files or ctx.files()): + if f not in ctx: + continue + for pattern, style in self.cfg.items('patterns'): + if not match.match(repo.root, '', [pattern])(f): + continue + target = self._encode[style.upper()] + data = ctx[f].data() + if (target == "to-lf" and "\r\n" in data + or target == "to-crlf" and singlelf.search(data)): + failed.append((str(ctx), target, f)) + break + return failed + +def parseeol(ui, repo, nodes): + try: + for node in nodes: + try: + if node is None: + # Cannot use workingctx.data() since it would load + # and cache the filters before we configure them. 
+ data = repo.wfile('.hgeol').read() + else: + data = repo[node]['.hgeol'].data() + return eolfile(ui, repo.root, data) + except (IOError, LookupError): + pass + except error.ParseError, inst: + ui.warn(_("warning: ignoring .hgeol file due to parse error " + "at %s: %s\n") % (inst.args[1], inst.args[0])) + return None + +def _checkhook(ui, repo, node, headsonly): + # Get revisions to check and touched files at the same time files = set() + revs = set() for rev in xrange(repo[node].rev(), len(repo)): - files.update(repo[rev].files()) - tip = repo['tip'] - for f in files: - if f not in tip: - continue - for pattern, target in ui.configitems('encode'): - if match.match(repo.root, '', [pattern])(f): - data = tip[f].data() - if target == "to-lf" and "\r\n" in data: - raise util.Abort(_("%s should not have CRLF line endings") - % f) - elif target == "to-crlf" and singlelf.search(data): - raise util.Abort(_("%s should not have LF line endings") - % f) - # Ignore other rules for this file - break + revs.add(rev) + if headsonly: + ctx = repo[rev] + files.update(ctx.files()) + for pctx in ctx.parents(): + revs.discard(pctx.rev()) + failed = [] + for rev in revs: + ctx = repo[rev] + eol = parseeol(ui, repo, [ctx.node()]) + if eol: + failed.extend(eol.checkrev(repo, ctx, files)) + if failed: + eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'} + msgs = [] + for node, target, f in failed: + msgs.append(_(" %s in %s should not have %s line endings") % + (f, node, eols[target])) + raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs)) + +def checkallhook(ui, repo, node, hooktype, **kwargs): + """verify that files have expected EOLs""" + _checkhook(ui, repo, node, False) + +def checkheadshook(ui, repo, node, hooktype, **kwargs): + """verify that files have expected EOLs""" + _checkhook(ui, repo, node, True) + +# "checkheadshook" used to be called "hook" +hook = checkheadshook def preupdate(ui, repo, hooktype, parent1, parent2): #print "preupdate for %s: %s -> %s" % (repo.root, parent1, parent2) - try: - repo.readhgeol(parent1) - except error.ParseError, inst: - ui.warn(_("warning: ignoring .hgeol file due to parse error " - "at %s: %s\n") % (inst.args[1], inst.args[0])) + repo.loadeol([parent1]) return False def uisetup(ui): @@ -184,66 +269,15 @@ class eolrepo(repo.__class__): - _decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} - _encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} - - def readhgeol(self, node=None, data=None): - if data is None: - try: - if node is None: - data = self.wfile('.hgeol').read() - else: - data = self[node]['.hgeol'].data() - except (IOError, LookupError): - return None - - if self.ui.config('eol', 'native', os.linesep) in ('LF', '\n'): - self._decode['NATIVE'] = 'to-lf' - else: - self._decode['NATIVE'] = 'to-crlf' - - eol = config.config() - # Our files should not be touched. The pattern must be - # inserted first override a '** = native' pattern. - eol.set('patterns', '.hg*', 'BIN') - # We can then parse the user's patterns. 
- eol.parse('.hgeol', data) - - if eol.get('repository', 'native') == 'CRLF': - self._encode['NATIVE'] = 'to-crlf' - else: - self._encode['NATIVE'] = 'to-lf' - - for pattern, style in eol.items('patterns'): - key = style.upper() - try: - self.ui.setconfig('decode', pattern, self._decode[key]) - self.ui.setconfig('encode', pattern, self._encode[key]) - except KeyError: - self.ui.warn(_("ignoring unknown EOL style '%s' from %s\n") - % (style, eol.source('patterns', pattern))) - - include = [] - exclude = [] - for pattern, style in eol.items('patterns'): - key = style.upper() - if key == 'BIN': - exclude.append(pattern) - else: - include.append(pattern) - - # This will match the files for which we need to care - # about inconsistent newlines. - return match.match(self.root, '', [], include, exclude) + def loadeol(self, nodes): + eol = parseeol(self.ui, self, nodes) + if eol is None: + return None + eol.setfilters(self.ui) + return eol.match def _hgcleardirstate(self): - try: - self._eolfile = self.readhgeol() or self.readhgeol('tip') - except error.ParseError, inst: - ui.warn(_("warning: ignoring .hgeol file due to parse error " - "at %s: %s\n") % (inst.args[1], inst.args[0])) - self._eolfile = None - + self._eolfile = self.loadeol([None, 'tip']) if not self._eolfile: self._eolfile = util.never return
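The per-file test behind the new hooks reduces to two string checks; a standalone approximation (the singlelf pattern here is an assumption standing in for the extension's own regex):

```python
import re

singlelf = re.compile('(^|[^\r])\n')     # assumed: bare LF not preceded by CR

def violates(target, data):
    if target == 'to-lf':                # file must be LF-only
        return '\r\n' in data
    if target == 'to-crlf':              # file must be CRLF-only
        return bool(singlelf.search(data))
    return False

print(violates('to-lf', 'a\r\nb\r\n'))   # True  - CRLF in a to-lf file
print(violates('to-crlf', 'a\r\nb\r\n')) # False - consistent CRLF is fine
```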
--- a/hgext/extdiff.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/extdiff.py Wed Mar 30 13:23:24 2011 -0500 @@ -121,7 +121,7 @@ msg = _('cannot specify --rev and --change at the same time') raise util.Abort(msg) elif change: - node2 = repo.lookup(change) + node2 = cmdutil.revsingle(repo, change, None).node() node1a, node1b = repo.changelog.parents(node2) else: node1a, node2 = cmdutil.revpair(repo, revs) @@ -187,14 +187,14 @@ # Handle bogus modifies correctly by checking if the files exist if len(common) == 1: common_file = util.localpath(common.pop()) - dir1a = os.path.join(dir1a, common_file) + dir1a = os.path.join(tmproot, dir1a, common_file) label1a = common_file + rev1a - if not os.path.isfile(os.path.join(tmproot, dir1a)): + if not os.path.isfile(dir1a): dir1a = os.devnull if do3way: - dir1b = os.path.join(dir1b, common_file) + dir1b = os.path.join(tmproot, dir1b, common_file) label1b = common_file + rev1b - if not os.path.isfile(os.path.join(tmproot, dir1b)): + if not os.path.isfile(dir1b): dir1b = os.devnull dir2 = os.path.join(dir2root, dir2, common_file) label2 = common_file + rev2
--- a/hgext/graphlog.py  Wed Mar 30 02:22:15 2011 +0900
+++ b/hgext/graphlog.py  Wed Mar 30 13:23:24 2011 -0500
@@ -324,6 +324,7 @@
         except TypeError, e:
             if len(args) > wrapfn.func_code.co_argcount:
                 raise util.Abort(_('--graph option allows at most one file'))
+            raise
         return orig(*args, **kwargs)
     entry = extensions.wrapcommand(table, cmd, graph)
     entry[1].append(('G', 'graph', None, _("show the revision DAG")))
--- a/hgext/keyword.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/keyword.py Wed Mar 30 13:23:24 2011 -0500 @@ -109,11 +109,26 @@ } # date like in cvs' $Date -utcdate = lambda x: util.datestr((x[0], 0), '%Y/%m/%d %H:%M:%S') +def utcdate(text): + ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13". + ''' + return util.datestr((text[0], 0), '%Y/%m/%d %H:%M:%S') # date like in svn's $Date -svnisodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)') +def svnisodate(text): + ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13 + +0200 (Tue, 18 Aug 2009)". + ''' + return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)') # date like in svn's $Id -svnutcdate = lambda x: util.datestr((x[0], 0), '%Y-%m-%d %H:%M:%SZ') +def svnutcdate(text): + ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18 + 11:00:13Z". + ''' + return util.datestr((text[0], 0), '%Y-%m-%d %H:%M:%SZ') + +templatefilters.filters.update({'utcdate': utcdate, + 'svnisodate': svnisodate, + 'svnutcdate': svnutcdate}) # make keyword tools accessible kwtools = {'templater': None, 'hgcmd': ''} @@ -176,9 +191,6 @@ for k, v in kwmaps) else: self.templates = _defaultkwmaps(self.ui) - templatefilters.filters.update({'utcdate': utcdate, - 'svnisodate': svnisodate, - 'svnutcdate': svnutcdate}) @util.propertycache def escape(self):
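The three filters now carry docstrings giving their output formats; a quick standalone check of the plain strftime patterns for the UTC variants (the real code goes through mercurial.util.datestr, and svnisodate additionally needs the local timezone offset):

```python
import time

ts = 1250593213                        # 2009-08-18 11:00:13 UTC, as in the docstrings
utc = time.gmtime(ts)
print(time.strftime('%Y/%m/%d %H:%M:%S', utc))    # utcdate:    2009/08/18 11:00:13
print(time.strftime('%Y-%m-%d %H:%M:%SZ', utc))   # svnutcdate: 2009-08-18 11:00:13Z
```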
--- a/hgext/mq.py  Wed Mar 30 02:22:15 2011 +0900
+++ b/hgext/mq.py  Wed Mar 30 13:23:24 2011 -0500
@@ -1899,7 +1899,7 @@
     With -g/--git, patches imported with --rev will use the git diff
     format. See the diffs help topic for information on why this is
     important for preserving rename/copy information and permission
-    changes.
+    changes. Use :hg:`qfinish` to remove changesets from mq control.
 
     To import a patch from standard input, pass - as the patch file.
     When importing from standard input, a patch name must be specified
--- a/hgext/rebase.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/rebase.py Wed Mar 30 13:23:24 2011 -0500 @@ -90,7 +90,8 @@ contf = opts.get('continue') abortf = opts.get('abort') collapsef = opts.get('collapse', False) - extrafn = opts.get('extrafn') + collapsemsg = cmdutil.logmessage(opts) + extrafn = opts.get('extrafn') # internal, used by e.g. hgsubversion keepf = opts.get('keep', False) keepbranchesf = opts.get('keepbranches', False) detachf = opts.get('detach', False) @@ -98,6 +99,10 @@ # other extensions keepopen = opts.get('keepopen', False) + if collapsemsg and not collapsef: + raise util.Abort( + _('message can only be specified with collapse')) + if contf or abortf: if contf and abortf: raise util.Abort(_('cannot use both abort and continue')) @@ -138,8 +143,7 @@ external = checkexternal(repo, state, targetancestors) if keepbranchesf: - if extrafn: - raise util.Abort(_('cannot use both keepbranches and extrafn')) + assert not extrafn, 'cannot use both keepbranches and extrafn' def extrafn(ctx, extra): extra['branch'] = ctx.branch() @@ -190,11 +194,14 @@ if collapsef and not keepopen: p1, p2 = defineparents(repo, min(state), target, state, targetancestors) - commitmsg = 'Collapsed revision' - for rebased in state: - if rebased not in skipped and state[rebased] != nullmerge: - commitmsg += '\n* %s' % repo[rebased].description() - commitmsg = ui.edit(commitmsg, repo.ui.username()) + if collapsemsg: + commitmsg = collapsemsg + else: + commitmsg = 'Collapsed revision' + for rebased in state: + if rebased not in skipped and state[rebased] != nullmerge: + commitmsg += '\n* %s' % repo[rebased].description() + commitmsg = ui.edit(commitmsg, repo.ui.username()) newrev = concludenode(repo, rev, p1, external, commitmsg=commitmsg, extrafn=extrafn) @@ -270,7 +277,7 @@ if k in m1: if v in m1 or v in m2: repo.dirstate.copy(v, k) - if v in m2 and v not in m1: + if v in m2 and v not in m1 and k in m2: repo.dirstate.remove(v) def concludenode(repo, rev, p1, p2, commitmsg=None, extrafn=None): @@ -354,6 +361,8 @@ 'Update rebased mq patches - finalize and then import them' mqrebase = {} mq = repo.mq + original_series = mq.full_series[:] + for p in mq.applied: rev = repo[p.node].rev() if rev in state: @@ -371,6 +380,15 @@ repo.ui.debug('import mq patch %d (%s)\n' % (state[rev], name)) mq.qimport(repo, (), patchname=name, git=isgit, rev=[str(state[rev])]) + + # Restore missing guards + for s in original_series: + pname = mq.guard_re.split(s, 1)[0] + if pname in mq.full_series: + repo.ui.debug('restoring guard for patch %s' % (pname)) + mq.full_series.remove(pname) + mq.full_series.append(s) + mq.series_dirty = True mq.save_dirty() def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches, @@ -475,9 +493,10 @@ if src: commonbase = repo[src].ancestor(repo[dest]) + samebranch = repo[src].branch() == repo[dest].branch() if commonbase == repo[src]: raise util.Abort(_('source is ancestor of destination')) - if commonbase == repo[dest]: + if samebranch and commonbase == repo[dest]: raise util.Abort(_('source is descendant of destination')) source = repo[src].rev() if detach: @@ -565,6 +584,10 @@ ('d', 'dest', '', _('rebase onto the specified changeset'), _('REV')), ('', 'collapse', False, _('collapse the rebased changesets')), + ('m', 'message', '', + _('use text as collapse commit message'), _('TEXT')), + ('l', 'logfile', '', + _('read collapse commit message from file'), _('FILE')), ('', 'keep', False, _('keep original changesets')), ('', 'keepbranches', False, _('keep original branch 
names')), ('', 'detach', False, _('force detaching of source from its original '
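The rebase hunk above adds -m/--logfile so a collapsed rebase can take its commit message from the command line instead of always opening an editor, and aborts early when a message is given without --collapse. A minimal, repository-free sketch of the message-selection logic under those assumptions (collapse_message, descriptions and the edit callback are stand-ins, not the extension's API):

    def collapse_message(collapsemsg, descriptions, edit):
        """Pick the commit message for a collapsed rebase.

        collapsemsg  -- text from -m/--logfile, or '' when not given
        descriptions -- descriptions of the changesets being folded
        edit         -- callback letting the user edit a drafted message
        """
        if collapsemsg:
            # an explicit message wins and skips the editor round-trip
            return collapsemsg
        draft = 'Collapsed revision'
        for desc in descriptions:
            draft += '\n* %s' % desc
        return edit(draft)

    print(collapse_message('', ['fix parser', 'add tests'],
                           edit=lambda draft: draft))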
--- a/hgext/record.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/record.py Wed Mar 30 13:23:24 2011 -0500 @@ -324,10 +324,12 @@ for i, chunk in enumerate(h.hunks): if skipfile is None and skipall is None: chunk.pretty(ui) - msg = (total == 1 - and (_('record this change to %r?') % chunk.filename()) - or (_('record change %d/%d to %r?') % - (pos - len(h.hunks) + i, total, chunk.filename()))) + if total == 1: + msg = _('record this change to %r?') % chunk.filename() + else: + idx = pos - len(h.hunks) + i + msg = _('record change %d/%d to %r?') % (idx, total, + chunk.filename()) r, skipfile, skipall = prompt(skipfile, skipall, msg) if r: if fixoffset:
--- a/hgext/relink.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/relink.py Wed Mar 30 13:23:24 2011 -0500 @@ -172,8 +172,8 @@ ui.progress(_('relinking'), None) - ui.status(_('relinked %d files (%d bytes reclaimed)\n') % - (relinked, savedbytes)) + ui.status(_('relinked %d files (%s reclaimed)\n') % + (relinked, util.bytecount(savedbytes))) cmdtable = { 'relink': (
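relink's summary line now reports the reclaimed space through util.bytecount instead of printing a raw byte count. The formatter below is only a hypothetical stand-in for that helper, to show the kind of output the new message produces:

    def bytecount_sketch(nbytes):
        # pick the largest unit that keeps the number readable
        for unit, factor in (('GB', 1 << 30), ('MB', 1 << 20), ('KB', 1 << 10)):
            if nbytes >= factor:
                return '%.2f %s' % (nbytes / float(factor), unit)
        return '%d bytes' % nbytes

    print('relinked %d files (%s reclaimed)' % (3, bytecount_sketch(1536000)))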
--- a/hgext/transplant.py Wed Mar 30 02:22:15 2011 +0900 +++ b/hgext/transplant.py Wed Mar 30 13:23:24 2011 -0500 @@ -17,7 +17,7 @@ import os, tempfile from mercurial import bundlerepo, cmdutil, hg, merge, match from mercurial import patch, revlog, util, error -from mercurial import revset +from mercurial import revset, templatekw class transplantentry(object): def __init__(self, lnode, rnode): @@ -177,12 +177,11 @@ lock.release() wlock.release() - def filter(self, filter, changelog, patchfile): + def filter(self, filter, node, changelog, patchfile): '''arbitrarily rewrite changeset before applying it''' self.ui.status(_('filtering %s\n') % patchfile) user, date, msg = (changelog[1], changelog[2], changelog[4]) - fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-') fp = os.fdopen(fd, 'w') fp.write("# HG changeset patch\n") @@ -194,7 +193,9 @@ try: util.system('%s %s %s' % (filter, util.shellquote(headerfile), util.shellquote(patchfile)), - environ={'HGUSER': changelog[1]}, + environ={'HGUSER': changelog[1], + 'HGREVISION': revlog.hex(node), + }, onerr=util.Abort, errprefix=_('filter failed')) user, date, msg = self.parselog(file(headerfile))[1:4] finally: @@ -209,7 +210,7 @@ date = "%d %d" % (time, timezone) extra = {'transplant_source': node} if filter: - (user, date, message) = self.filter(filter, cl, patchfile) + (user, date, message) = self.filter(filter, node, cl, patchfile) if log: # we don't translate messages inserted into commits @@ -345,6 +346,8 @@ message = [] node = revlog.nullid inmsg = False + user = None + date = None for line in fp.read().splitlines(): if inmsg: message.append(line) @@ -359,6 +362,8 @@ elif not line.startswith('# '): inmsg = True message.append(line) + if None in (user, date): + raise util.Abort(_("filter corrupted changeset (no user or date)")) return (node, user, date, '\n'.join(message), parents) def log(self, user, date, message, p1, p2, merge=False): @@ -547,8 +552,8 @@ if source: sourcerepo = ui.expandpath(source) source = hg.repository(ui, sourcerepo) - source, incoming, bundle = bundlerepo.getremotechanges(ui, repo, source, - force=True) + source, common, incoming, bundle = bundlerepo.getremotechanges(ui, repo, + source, force=True) else: source = repo @@ -607,8 +612,15 @@ cs.add(r) return [r for r in s if r in cs] +def kwtransplanted(repo, ctx, **args): + """:transplanted: String. The node identifier of the transplanted + changeset if any.""" + n = ctx.extra().get('transplant_source') + return n and revlog.hex(n) or '' + def extsetup(ui): revset.symbols['transplanted'] = revsettransplanted + templatekw.keywords['transplanted'] = kwtransplanted cmdtable = { "transplant": @@ -632,4 +644,4 @@ } # tell hggettext to extract docstrings from these functions: -i18nfunctions = [revsettransplanted] +i18nfunctions = [revsettransplanted, kwtransplanted]
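The transplant changes hand the source changeset id to --filter scripts through a new HGREVISION environment variable (alongside HGUSER) and add a transplanted template keyword. The script below is a hypothetical filter showing how such a variable could be used; only the variable name comes from the patch, the rest is illustrative:

    import os
    import sys

    def main(headerfile, patchfile):
        # transplant passes the header file and patch file as arguments;
        # HGREVISION now carries the hex id of the changeset being copied
        source = os.environ.get('HGREVISION', 'unknown')
        with open(headerfile) as fp:
            header = fp.read()
        with open(headerfile, 'w') as fp:
            fp.write(header.rstrip('\n'))
            fp.write('\n\n(transplanted from %s)\n' % source)

    if __name__ == '__main__':
        main(sys.argv[1], sys.argv[2])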
--- a/mercurial/ancestor.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/ancestor.py Wed Mar 30 13:23:24 2011 -0500 @@ -9,9 +9,10 @@ def ancestor(a, b, pfunc): """ - return a minimal-distance ancestor of nodes a and b, or None if there is no - such ancestor. Note that there can be several ancestors with the same - (minimal) distance, and the one returned is arbitrary. + Returns the common ancestor of a and b that is furthest from a + root (as measured by longest path) or None if no ancestor is + found. If there are multiple common ancestors at the same + distance, the first one found is returned. pfunc must return a list of parent vertices for a given vertex """ @@ -22,6 +23,7 @@ a, b = sorted([a, b]) # find depth from root of all ancestors + # depth is stored as a negative for heapq parentcache = {} visit = [a, b] depth = {} @@ -39,6 +41,7 @@ if p not in depth: visit.append(p) if visit[-1] == vertex: + # -(maximum distance of parents + 1) depth[vertex] = min([depth[p] for p in pl]) - 1 visit.pop()
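The reworded ancestor() docstring and the new comments describe depths that are stored negated so the deepest vertex sorts first in a min-heap. The toy example below illustrates that convention on a hand-built DAG; the graph and the recursive depth() are invented for the illustration and are not the module's algorithm:

    import heapq

    # toy DAG: vertex -> list of parents (roots have none)
    parents = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['a'], 'e': ['c', 'd']}

    def depth(v):
        # negated longest path from a root: roots are 0, children are
        # min(parent depths) - 1, matching the "-(maximum distance + 1)" comment
        ps = parents[v]
        if not ps:
            return 0
        return min(depth(p) for p in ps) - 1

    heap = [(depth(v), v) for v in parents]
    heapq.heapify(heap)
    print(heapq.heappop(heap))   # (-3, 'e'): the deepest vertex pops first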
--- a/mercurial/archival.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/archival.py Wed Mar 30 13:23:24 2011 -0500 @@ -9,7 +9,7 @@ from node import hex import cmdutil import util, encoding -import cStringIO, os, stat, tarfile, time, zipfile +import cStringIO, os, tarfile, time, zipfile import zlib, gzip def tidyprefix(dest, kind, prefix): @@ -172,10 +172,10 @@ # unzip will not honor unix file modes unless file creator is # set to unix (id 3). i.create_system = 3 - ftype = stat.S_IFREG + ftype = 0x8000 # UNX_IFREG in unzip source code if islink: mode = 0777 - ftype = stat.S_IFLNK + ftype = 0xa000 # UNX_IFLNK in unzip source code i.external_attr = (mode | ftype) << 16L self.z.writestr(i, data)
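archival.py now encodes the zip entry type with unzip's UNX_IFREG/UNX_IFLNK values rather than importing stat. The snippet below demonstrates how those bits land in ZipInfo.external_attr; the file name and mode are made up for the example:

    import zipfile

    UNX_IFREG = 0x8000   # regular file, as used in the patch
    UNX_IFLNK = 0xa000   # symbolic link

    def zipinfo_for(name, mode, islink=False):
        i = zipfile.ZipInfo(name)
        i.create_system = 3                    # creator "unix" so modes apply
        ftype = islink and UNX_IFLNK or UNX_IFREG
        if islink:
            mode = 0o777                       # links carry full permission bits
        i.external_attr = (mode | ftype) << 16
        return i

    print(hex(zipinfo_for('example.txt', 0o644).external_attr))   # 0x81a40000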
--- a/mercurial/bdiff.c Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/bdiff.c Wed Mar 30 13:23:24 2011 -0500 @@ -49,7 +49,7 @@ #include "util.h" struct line { - int h, len, n, e; + int hash, len, n, e; const char *l; }; @@ -63,9 +63,10 @@ struct hunk *next; }; -int splitlines(const char *a, int len, struct line **lr) +static int splitlines(const char *a, int len, struct line **lr) { - int h, i; + unsigned hash; + int i; const char *p, *b = a; const char * const plast = a + len - 1; struct line *l; @@ -81,14 +82,14 @@ return -1; /* build the line array and calculate hashes */ - h = 0; + hash = 0; for (p = a; p < a + len; p++) { /* Leonid Yuriev's hash */ - h = (h * 1664525) + *p + 1013904223; + hash = (hash * 1664525) + (unsigned char)*p + 1013904223; if (*p == '\n' || p == plast) { - l->h = h; - h = 0; + l->hash = hash; + hash = 0; l->len = p - b + 1; l->l = b; l->n = INT_MAX; @@ -98,14 +99,15 @@ } /* set up a sentinel */ - l->h = l->len = 0; + l->hash = 0; + l->len = 0; l->l = a + len; return i - 1; } -int inline cmp(struct line *a, struct line *b) +static inline int cmp(struct line *a, struct line *b) { - return a->h != b->h || a->len != b->len || memcmp(a->l, b->l, a->len); + return a->hash != b->hash || a->len != b->len || memcmp(a->l, b->l, a->len); } static int equatelines(struct line *a, int an, struct line *b, int bn) @@ -138,7 +140,7 @@ /* add lines to the hash table chains */ for (i = bn - 1; i >= 0; i--) { /* find the equivalence class */ - for (j = b[i].h & buckets; h[j].pos != INT_MAX; + for (j = b[i].hash & buckets; h[j].pos != INT_MAX; j = (j + 1) & buckets) if (!cmp(b + i, b + h[j].pos)) break; @@ -156,7 +158,7 @@ /* match items in a to their equivalence class in b */ for (i = 0; i < an; i++) { /* find the equivalence class */ - for (j = a[i].h & buckets; h[j].pos != INT_MAX; + for (j = a[i].hash & buckets; h[j].pos != INT_MAX; j = (j + 1) & buckets) if (!cmp(a + i, b + h[j].pos)) break;
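The bdiff.c change renames h to hash, makes it unsigned, and casts each byte to unsigned char so that bytes above 0x7f hash the same on every platform. Below is a Python rendering of the same per-line rolling hash; the 32-bit mask mimics C's unsigned wrap-around and is not part of the C file:

    def line_hashes(data):
        """Yield (line, hash) pairs using the multiply-add hash from bdiff.c."""
        h = 0
        start = 0
        for i, byte in enumerate(bytearray(data)):
            # bytearray yields unsigned byte values, which is what the
            # (unsigned char) cast in the patch guarantees in C
            h = (h * 1664525 + byte + 1013904223) & 0xffffffff
            if byte == 0x0a or i == len(data) - 1:    # newline or last byte
                yield data[start:i + 1], h
                h = 0
                start = i + 1

    for line, h in line_hashes(b'one\ntwo\n'):
        print(line, hex(h))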
--- a/mercurial/bookmarks.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/bookmarks.py Wed Mar 30 13:23:24 2011 -0500 @@ -101,13 +101,7 @@ if current == mark: return - refs = repo._bookmarks - - # do not update if we do update to a rev equal to the current bookmark - if (mark and mark not in refs and - current and refs[current] == repo.changectx('.').node()): - return - if mark not in refs: + if mark not in repo._bookmarks: mark = '' if not valid(mark): raise util.Abort(_("bookmark '%s' contains illegal " @@ -122,6 +116,15 @@ wlock.release() repo._bookmarkcurrent = mark +def updatecurrentbookmark(repo, oldnode, curbranch): + try: + update(repo, oldnode, repo.branchtags()[curbranch]) + except KeyError: + if curbranch == "default": # no default branch! + update(repo, oldnode, repo.lookup("tip")) + else: + raise util.Abort(_("branch %s not found") % curbranch) + def update(repo, parents, node): marks = repo._bookmarks update = False @@ -163,6 +166,28 @@ finally: w.release() +def updatefromremote(ui, repo, remote): + ui.debug("checking for updated bookmarks\n") + rb = remote.listkeys('bookmarks') + changed = False + for k in rb.keys(): + if k in repo._bookmarks: + nr, nl = rb[k], repo._bookmarks[k] + if nr in repo: + cr = repo[nr] + cl = repo[nl] + if cl.rev() >= cr.rev(): + continue + if cr in cl.descendants(): + repo._bookmarks[k] = cr.node() + changed = True + ui.status(_("updating bookmark %s\n") % k) + else: + ui.warn(_("not updating divergent" + " bookmark %s\n") % k) + if changed: + write(repo) + def diff(ui, repo, remote): ui.status(_("searching for changed bookmarks\n"))
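updatefromremote() only moves a local bookmark when the remote target is a descendant of the current one, i.e. a fast-forward; anything else is reported as divergent and left alone. A stripped-down version of that decision, with the changectx machinery replaced by a plain descendant test (all names here are stand-ins):

    def choose_bookmark_target(local, remote, is_descendant):
        """Return the new target for a bookmark, or None to keep it.

        is_descendant(a, b) -- True when b descends from a
        """
        if remote == local:
            return None                  # already up to date
        if is_descendant(local, remote):
            return remote                # fast-forward: adopt the remote node
        return None                      # divergent: warn and keep the local one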
--- a/mercurial/bundlerepo.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/bundlerepo.py Wed Mar 30 13:23:24 2011 -0500 @@ -286,15 +286,17 @@ repopath, bundlename = parentpath, path return bundlerepository(ui, repopath, bundlename) -def getremotechanges(ui, repo, other, revs=None, bundlename=None, force=False): - tmp = discovery.findcommonincoming(repo, other, heads=revs, force=force) +def getremotechanges(ui, repo, other, revs=None, bundlename=None, + force=False, usecommon=False): + tmp = discovery.findcommonincoming(repo, other, heads=revs, force=force, + commononly=usecommon) common, incoming, rheads = tmp if not incoming: try: os.unlink(bundlename) except: pass - return other, None, None + return other, None, None, None bundle = None if bundlename or not other.local(): @@ -303,7 +305,9 @@ if revs is None and other.capable('changegroupsubset'): revs = rheads - if revs is None: + if usecommon: + cg = other.getbundle('incoming', common=common, heads=revs) + elif revs is None: cg = other.changegroup(incoming, "incoming") else: cg = other.changegroupsubset(incoming, revs, 'incoming') @@ -315,5 +319,5 @@ if not other.local(): # use the created uncompressed bundlerepo other = bundlerepository(ui, repo.root, fname) - return (other, incoming, bundle) + return (other, common, incoming, bundle)
--- a/mercurial/changegroup.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/changegroup.py Wed Mar 30 13:23:24 2011 -0500 @@ -49,15 +49,6 @@ "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()), } -def collector(cl, mmfs, files): - # Gather information about changeset nodes going out in a bundle. - # We want to gather manifests needed and filelogs affected. - def collect(node): - c = cl.read(node) - files.update(c[3]) - mmfs.setdefault(c[0], node) - return collect - # hgweb uses this list to communicate its preferred type bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
--- a/mercurial/cmdutil.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/cmdutil.py Wed Mar 30 13:23:24 2011 -0500 @@ -230,7 +230,7 @@ def make_file(repo, pat, node=None, total=None, seqno=None, revwidth=None, mode='wb', pathname=None): - writable = 'w' in mode or 'a' in mode + writable = mode not in ('r', 'rb') if not pat or pat == '-': fp = writable and sys.stdout or sys.stdin
--- a/mercurial/commands.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/commands.py Wed Mar 30 13:23:24 2011 -0500 @@ -5,7 +5,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from node import hex, nullid, nullrev, short +from node import hex, bin, nullid, nullrev, short from lock import release from i18n import _, gettext import os, re, sys, difflib, time, tempfile @@ -13,7 +13,7 @@ import patch, help, mdiff, url, encoding, templatekw, discovery import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server import merge as mergemod -import minirst, revset +import minirst, revset, templatefilters import dagparser # Commands start here, listed alphabetically @@ -126,8 +126,12 @@ lastfunc = funcmap[-1] funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) + def bad(x, y): + raise util.Abort("%s: %s" % (x, y)) + ctx = cmdutil.revsingle(repo, opts.get('rev')) m = cmdutil.match(repo, pats, opts) + m.bad = bad follow = not opts.get('no_follow') for abs in ctx.walk(m): fctx = ctx[abs] @@ -303,7 +307,8 @@ return 0 def bisect(ui, repo, rev=None, extra=None, command=None, - reset=None, good=None, bad=None, skip=None, noupdate=None): + reset=None, good=None, bad=None, skip=None, extend=None, + noupdate=None): """subdivision search of changesets This command helps to find changesets which introduce problems. To @@ -326,6 +331,17 @@ Returns 0 on success. """ + def extendbisectrange(nodes, good): + # bisect is incomplete when it ends on a merge node and + # one of the parent was not checked. + parents = repo[nodes[0]].parents() + if len(parents) > 1: + side = good and state['bad'] or state['good'] + num = len(set(i.node() for i in parents) & set(side)) + if num == 1: + return parents[0].ancestor(parents[1]) + return None + def print_result(nodes, good): displayer = cmdutil.show_changeset(ui, repo, {}) if len(nodes) == 1: @@ -336,14 +352,12 @@ ui.write(_("The first bad revision is:\n")) displayer.show(repo[nodes[0]]) parents = repo[nodes[0]].parents() - if len(parents) > 1: - side = good and state['bad'] or state['good'] - num = len(set(i.node() for i in parents) & set(side)) - if num == 1: - common = parents[0].ancestor(parents[1]) - ui.write(_('Not all ancestors of this changeset have been' - ' checked.\nTo check the other ancestors, start' - ' from the common ancestor, %s.\n' % common)) + extendnode = extendbisectrange(nodes, good) + if extendnode is not None: + ui.write(_('Not all ancestors of this changeset have been' + ' checked.\nUse bisect --extend to continue the ' + 'bisection from\nthe common ancestor, %s.\n') + % short(extendnode.node())) else: # multiple possible revisions if good: @@ -376,7 +390,7 @@ bad = True else: reset = True - elif extra or good + bad + skip + reset + bool(command) > 1: + elif extra or good + bad + skip + reset + extend + bool(command) > 1: raise util.Abort(_('incompatible arguments')) if reset: @@ -440,6 +454,18 @@ # actually bisect nodes, changesets, good = hbisect.bisect(repo.changelog, state) + if extend: + if not changesets: + extendnode = extendbisectrange(nodes, good) + if extendnode is not None: + ui.write(_("Extending search to changeset %d:%s\n" + % (extendnode.rev(), short(extendnode.node())))) + if noupdate: + return + cmdutil.bail_if_changed(repo) + return hg.clean(repo, extendnode.node()) + raise util.Abort(_("nothing to extend")) + if changesets == 0: print_result(nodes, good) else: @@ -1175,6 +1201,7 @@ if len(items) > 1 or items and sections: 
raise util.Abort(_('only one config item permitted')) for section, name, value in ui.walkconfig(untrusted=untrusted): + value = str(value).replace('\n', '\\n') sectname = section + '.' + name if values: for v in values: @@ -1191,6 +1218,81 @@ ui.configsource(section, name, untrusted)) ui.write('%s=%s\n' % (sectname, value)) +def debugknown(ui, repopath, *ids, **opts): + """test whether node ids are known to a repo + + Every ID must be a full-length hex node id string. Returns a list of 0s and 1s + indicating unknown/known. + """ + repo = hg.repository(ui, repopath) + if not repo.capable('known'): + raise util.Abort("known() not supported by target repository") + flags = repo.known([bin(s) for s in ids]) + ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags]))) + +def debugbundle(ui, bundlepath, all=None, **opts): + """lists the contents of a bundle""" + f = url.open(ui, bundlepath) + try: + gen = changegroup.readbundle(f, bundlepath) + if all: + ui.write("format: id, p1, p2, cset, len(delta)\n") + + def showchunks(named): + ui.write("\n%s\n" % named) + while 1: + chunkdata = gen.parsechunk() + if not chunkdata: + break + node = chunkdata['node'] + p1 = chunkdata['p1'] + p2 = chunkdata['p2'] + cs = chunkdata['cs'] + delta = chunkdata['data'] + ui.write("%s %s %s %s %s\n" % + (hex(node), hex(p1), hex(p2), + hex(cs), len(delta))) + + showchunks("changelog") + showchunks("manifest") + while 1: + fname = gen.chunk() + if not fname: + break + showchunks(fname) + else: + while 1: + chunkdata = gen.parsechunk() + if not chunkdata: + break + node = chunkdata['node'] + ui.write("%s\n" % hex(node)) + finally: + f.close() + +def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts): + """retrieves a bundle from a repo + + Every ID must be a full-length hex node id string. Saves the bundle to the + given file. 
+ """ + repo = hg.repository(ui, repopath) + if not repo.capable('getbundle'): + raise util.Abort("getbundle() not supported by target repository") + args = {} + if common: + args['common'] = [bin(s) for s in common] + if head: + args['heads'] = [bin(s) for s in head] + bundle = repo.getbundle('debug', **args) + + bundletype = opts.get('type', 'bzip2').lower() + btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'} + bundletype = btypes.get(bundletype) + if bundletype not in changegroup.bundletypes: + raise util.Abort(_('unknown bundle type specified with --type')) + changegroup.writebundle(bundle, bundlepath, bundletype) + def debugpushkey(ui, repopath, namespace, *keyinfo): '''access the pushkey key/value protocol @@ -1214,7 +1316,7 @@ def debugrevspec(ui, repo, expr): '''parse and apply a revision specification''' if ui.verbose: - tree = revset.parse(expr) + tree = revset.parse(expr)[0] ui.note(tree, "\n") func = revset.match(expr) for c in func(repo, range(len(repo))): @@ -1238,11 +1340,15 @@ finally: wlock.release() -def debugstate(ui, repo, nodates=None): +def debugstate(ui, repo, nodates=None, datesort=None): """show the contents of the current dirstate""" timestr = "" showdate = not nodates - for file_, ent in sorted(repo.dirstate._map.iteritems()): + if datesort: + keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename + else: + keyfunc = None # sort by filename + for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc): if showdate: if ent[3] == -1: # Pad or slice to locale representation @@ -1457,45 +1563,6 @@ ui.write(_(" (templates seem to have been installed incorrectly)\n")) problems += 1 - # patch - ui.status(_("Checking patch...\n")) - patchproblems = 0 - a = "1\n2\n3\n4\n" - b = "1\n2\n3\ninsert\n4\n" - fa = writetemp(a) - d = mdiff.unidiff(a, None, b, None, os.path.basename(fa), - os.path.basename(fa)) - fd = writetemp(d) - - files = {} - try: - patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files) - except util.Abort, e: - ui.write(_(" patch call failed:\n")) - ui.write(" " + str(e) + "\n") - patchproblems += 1 - else: - if list(files) != [os.path.basename(fa)]: - ui.write(_(" unexpected patch output!\n")) - patchproblems += 1 - a = open(fa).read() - if a != b: - ui.write(_(" patch test failed!\n")) - patchproblems += 1 - - if patchproblems: - if ui.config('ui', 'patch'): - ui.write(_(" (Current patch tool may be incompatible with patch," - " or misconfigured. 
Please check your configuration" - " file)\n")) - else: - ui.write(_(" Internal patcher failure, please report this error" - " to http://mercurial.selenic.com/wiki/BugTracker\n")) - problems += patchproblems - - os.unlink(fa) - os.unlink(fd) - # editor ui.status(_("Checking commit editor...\n")) editor = ui.geteditor() @@ -1555,6 +1622,21 @@ line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '') ui.write("%s\n" % line.rstrip()) +def debugwireargs(ui, repopath, *vals, **opts): + repo = hg.repository(hg.remoteui(ui, opts), repopath) + for opt in remoteopts: + del opts[opt[1]] + args = {} + for k, v in opts.iteritems(): + if v: + args[k] = v + # run twice to check that we don't mess up the stream for the next command + res1 = repo.debugwireargs(*vals, **args) + res2 = repo.debugwireargs(*vals, **args) + ui.write("%s\n" % res1) + if res1 != res2: + ui.warn("%s\n" % res2) + def diff(ui, repo, *pats, **opts): """diff repository (or selected files) @@ -1595,7 +1677,7 @@ msg = _('cannot specify --rev and --change at the same time') raise util.Abort(msg) elif change: - node2 = repo.lookup(change) + node2 = cmdutil.revsingle(repo, change, None).node() node1 = repo[node2].parents()[0].node() else: node1, node2 = cmdutil.revpair(repo, revs) @@ -1962,7 +2044,7 @@ Returns 0 if successful. """ option_lists = [] - textwidth = ui.termwidth() - 2 + textwidth = min(ui.termwidth(), 80) - 2 def addglobalopts(aliases): if ui.verbose: @@ -2141,6 +2223,8 @@ 'extensions\n')) help.addtopichook('revsets', revset.makedoc) + help.addtopichook('templates', templatekw.makedoc) + help.addtopichook('templates', templatefilters.makedoc) if name and name != 'shortlist': i = None @@ -2267,6 +2351,7 @@ output = [] revs = [] + bms = [] if source: source, branches = hg.parseurl(ui.expandpath(source)) repo = hg.repository(ui, source) @@ -2277,10 +2362,19 @@ rev = revs[0] if not rev: rev = "tip" - if num or branch or tags or bookmarks: - raise util.Abort(_("can't query remote revision number," - " branch, tags, or bookmarks")) - output = [hexfunc(repo.lookup(rev))] + if num or branch or tags: + raise util.Abort( + _("can't query remote revision number, branch, or tags")) + + remoterev = repo.lookup(rev) + if default or id: + output = [hexfunc(remoterev)] + + if 'bookmarks' in repo.listkeys('namespaces'): + hexremoterev = hex(remoterev) + bms = [bm for bm, bmrev in repo.listkeys('bookmarks').iteritems() + if bmrev == hexremoterev] + elif not rev: ctx = repo[None] parents = ctx.parents() @@ -2300,6 +2394,9 @@ if num: output.append(str(ctx.rev())) + if repo.local(): + bms = ctx.bookmarks() + if repo.local() and default and not ui.quiet: b = ctx.branch() if b != 'default': @@ -2310,8 +2407,9 @@ if t: output.append(t) + if default and not ui.quiet: # multiple bookmarks for a single parent separated by '/' - bm = '/'.join(ctx.bookmarks()) + bm = '/'.join(bms) if bm: output.append(bm) @@ -2322,7 +2420,7 @@ output.extend(ctx.tags()) if bookmarks: - output.extend(ctx.bookmarks()) + output.extend(bms) ui.write("%s\n" % ' '.join(output)) @@ -2891,7 +2989,13 @@ else: ui.status(_("not updating, since new heads added\n")) if modheads > 1: - ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) + currentbranchheads = len(repo.branchheads()) + if currentbranchheads == modheads: + ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) + elif currentbranchheads > 1: + ui.status(_("(run 'hg heads .' 
to see heads, 'hg merge' to merge)\n")) + else: + ui.status(_("(run 'hg heads' to see heads)\n")) else: ui.status(_("(run 'hg update' to get a working copy)\n")) @@ -2938,6 +3042,7 @@ raise util.Abort(err) modheads = repo.pull(other, heads=revs, force=opts.get('force')) + bookmarks.updatefromremote(ui, repo, other) if checkout: checkout = str(repo.changelog.rev(other.lookup(checkout))) repo._subtoppath = source @@ -3998,15 +4103,16 @@ fnames = (fname1,) + fnames lock = repo.lock() + wc = repo['.'] try: for fname in fnames: f = url.open(ui, fname) gen = changegroup.readbundle(f, fname) modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname, lock=lock) + bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch()) finally: lock.release() - return postincoming(ui, repo, modheads, opts.get('update'), None) def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False): @@ -4053,7 +4159,7 @@ if rev and node: raise util.Abort(_("please specify just one revision")) - if not rev: + if rev is None or rev == '': rev = node # if we defined a bookmark, we have to remember the original bookmark name @@ -4269,6 +4375,7 @@ ('g', 'good', False, _('mark changeset good')), ('b', 'bad', False, _('mark changeset bad')), ('s', 'skip', False, _('skip testing changeset')), + ('e', 'extend', False, _('extend the bisect range')), ('c', 'command', '', _('use command to check changeset state'), _('CMD')), ('U', 'noupdate', False, _('do not update to target'))], @@ -4359,6 +4466,11 @@ ('n', 'new-file', None, _('add new file at each rev')), ], _('[OPTION]... TEXT')), + "debugbundle": + (debugbundle, + [('a', 'all', None, _('show all details')), + ], + _('FILE')), "debugcheckstate": (debugcheckstate, [], ''), "debugcommands": (debugcommands, [], _('[COMMAND]')), "debugcomplete": @@ -4379,12 +4491,20 @@ _('[-e] DATE [RANGE]')), "debugdata": (debugdata, [], _('FILE REV')), "debugfsinfo": (debugfsinfo, [], _('[PATH]')), + "debuggetbundle": + (debuggetbundle, + [('H', 'head', [], _('id of head node'), _('ID')), + ('C', 'common', [], _('id of common node'), _('ID')), + ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')), + ], + _('REPO FILE [-H|-C ID]...')), "debugignore": (debugignore, [], ''), "debugindex": (debugindex, [('f', 'format', 0, _('revlog format'), _('FORMAT'))], _('FILE')), "debugindexdot": (debugindexdot, [], _('FILE')), "debuginstall": (debuginstall, [], ''), + "debugknown": (debugknown, [], _('REPO ID...')), "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')), "debugrebuildstate": (debugrebuildstate, @@ -4402,7 +4522,8 @@ (debugsetparents, [], _('REV1 [REV2]')), "debugstate": (debugstate, - [('', 'nodates', None, _('do not display the saved mtime'))], + [('', 'nodates', None, _('do not display the saved mtime')), + ('', 'datesort', None, _('sort by saved mtime'))], _('[OPTION]...')), "debugsub": (debugsub, @@ -4410,6 +4531,12 @@ _('revision to check'), _('REV'))], _('[-r REV] [REV]')), "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')), + "debugwireargs": + (debugwireargs, + [('', 'three', '', 'three'), + ('', 'four', '', 'four'), + ] + remoteopts, + _('REPO [OPTIONS]... 
[ONE [TWO]]')), "^diff": (diff, [('r', 'rev', [], @@ -4743,6 +4870,7 @@ } norepo = ("clone init version help debugcommands debugcomplete" - " debugdate debuginstall debugfsinfo debugpushkey") + " debugdate debuginstall debugfsinfo debugpushkey debugwireargs" + " debugknown debuggetbundle debugbundle") optionalrepo = ("identify paths serve showconfig debugancestor debugdag" " debugdata debugindex debugindexdot")
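Several of the new debug commands above exercise the wire protocol additions; debugknown, for instance, feeds full hex node ids to the new known() call and prints the answer as a string of 0s and 1s. A self-contained sketch of that round trip, with the repository replaced by a plain set of nodes:

    def known(nodemap, nodes):
        # stand-in for repo.known(): membership in the changelog's node map
        return [n in nodemap for n in nodes]

    def format_flags(flags):
        # debugknown prints one character per queried id
        return ''.join(f and '1' or '0' for f in flags)

    nodemap = {bytes.fromhex('11' * 20), bytes.fromhex('22' * 20)}
    ids = ['11' * 20, '33' * 20]
    print(format_flags(known(nodemap, [bytes.fromhex(s) for s in ids])))   # 10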
--- a/mercurial/config.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/config.py Wed Mar 30 13:23:24 2011 -0500 @@ -138,5 +138,5 @@ def read(self, path, fp=None, sections=None, remap=None): if not fp: - fp = open(path) + fp = util.posixfile(path) self.parse(path, fp.read(), sections, remap, self.read)
--- a/mercurial/dirstate.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/dirstate.py Wed Mar 30 13:23:24 2011 -0500 @@ -49,6 +49,7 @@ self._rootdir = os.path.join(root, '') self._dirty = False self._dirtypl = False + self._lastnormaltime = None self._ui = ui @propertycache @@ -236,6 +237,7 @@ "_ignore"): if a in self.__dict__: delattr(self, a) + self._lastnormaltime = None self._dirty = False def copy(self, source, dest): @@ -281,9 +283,15 @@ self._dirty = True self._addpath(f) s = os.lstat(self._join(f)) - self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime)) + mtime = int(s.st_mtime) + self._map[f] = ('n', s.st_mode, s.st_size, mtime) if f in self._copymap: del self._copymap[f] + if mtime > self._lastnormaltime: + # Remember the most recent modification timeslot for status(), + # to make sure we won't miss future size-preserving file content + # modifications that happen within the same timeslot. + self._lastnormaltime = mtime def normallookup(self, f): '''Mark a file normal, but possibly dirty.''' @@ -397,6 +405,7 @@ delattr(self, "_dirs") self._copymap = {} self._pl = [nullid, nullid] + self._lastnormaltime = None self._dirty = True def rebuild(self, parent, files): @@ -444,6 +453,7 @@ write(f) st.write(cs.getvalue()) st.rename() + self._lastnormaltime = None self._dirty = self._dirtypl = False def _dirignore(self, f): @@ -680,6 +690,7 @@ # lines are an expansion of "islink => checklink" # where islink means "is this a link?" and checklink # means "can we check links?". + mtime = int(st.st_mtime) if (size >= 0 and (size != st.st_size or ((mode ^ st.st_mode) & 0100 and self._checkexec)) @@ -687,9 +698,15 @@ or size == -2 # other parent or fn in self._copymap): madd(fn) - elif (time != int(st.st_mtime) + elif (mtime != time and (mode & lnkkind != lnkkind or self._checklink)): ladd(fn) + elif mtime == self._lastnormaltime: + # fn may have been changed in the same timeslot without + # changing its size. This can happen if we quickly do + # multiple commits in a single transaction. + # Force lookup, so we don't miss such a racy file change. + ladd(fn) elif listclean: cadd(fn) elif state == 'm':
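The dirstate hunks remember the newest mtime written by normal() (_lastnormaltime) and force a content comparison for any file whose mtime equals it, so an edit that keeps the size and lands in the same second is still noticed. A simplified sketch of the status-side classification under that scheme (the entry layout is reduced to the fields the check needs):

    def classify(size, mode, mtime, st_size, st_mode, st_mtime, lastnormaltime):
        """Classify one 'normal' dirstate entry against a fresh stat."""
        if size != st_size or mode != st_mode:
            return 'modified'
        if mtime != int(st_mtime):
            return 'lookup'              # timestamps differ: compare contents
        if int(st_mtime) == lastnormaltime:
            # recorded in the same timeslot as the last normal(); the file
            # may have changed without changing size, so force a lookup
            return 'lookup'
        return 'clean'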
--- a/mercurial/discovery.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/discovery.py Wed Mar 30 13:23:24 2011 -0500 @@ -9,9 +9,10 @@ from i18n import _ import util, error -def findcommonincoming(repo, remote, heads=None, force=False): - """Return a tuple (common, missing roots, heads) used to identify - missing nodes from remote. +def findcommonincoming(repo, remote, heads=None, force=False, commononly=False): + """Return a tuple (common, missing, heads) used to identify missing nodes + from remote. "missing" is either a boolean indicating if any nodes are missing + (when commononly=True), or else a list of the root nodes of the missing set. If a list of heads is specified, return only nodes which are heads or ancestors of these heads. @@ -36,6 +37,13 @@ # and start by examining the heads repo.ui.status(_("searching for changes\n")) + if commononly: + myheads = repo.heads() + known = remote.known(myheads) + if util.all(known): + hasincoming = set(heads).difference(set(myheads)) and True + return myheads, hasincoming, heads + unknown = [] for h in heads: if h not in m:
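With commononly=True, findcommonincoming first asks the remote whether it already knows every local head via the new known() call; if it does, the local heads are the common set and no further probing is needed. A sketch of that shortcut with the peer reduced to a single callable:

    def quick_common(local_heads, remote_heads, remote_known):
        """Return (common, has_incoming) when the remote knows all local heads.

        remote_known(nodes) -- one boolean per node, like the new wire call
        Returns None when the shortcut does not apply and the old walk is needed.
        """
        if all(remote_known(local_heads)):
            # the remote has everything we have; anything extra on its side
            # means there is something to pull
            has_incoming = bool(set(remote_heads) - set(local_heads))
            return local_heads, has_incoming
        return None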
--- a/mercurial/help.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/help.py Wed Mar 30 13:23:24 2011 -0500 @@ -115,3 +115,19 @@ def addtopichook(topic, rewriter): helphooks.setdefault(topic, []).append(rewriter) + +def makeitemsdoc(topic, doc, marker, items): + """Extract docstring from the items key to function mapping, build a + .single documentation block and use it to overwrite the marker in doc + """ + entries = [] + for name in sorted(items): + text = (items[name].__doc__ or '').rstrip() + if not text: + continue + text = gettext(text) + lines = text.splitlines() + lines[1:] = [(' ' + l.strip()) for l in lines[1:]] + entries.append('\n'.join(lines)) + entries = '\n\n'.join(entries) + return doc.replace(marker, entries)
--- a/mercurial/help/templates.txt Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/help/templates.txt Wed Mar 30 13:23:24 2011 -0500 @@ -23,52 +23,7 @@ keywords depends on the exact context of the templater. These keywords are usually available for templating a log-like command: -:author: String. The unmodified author of the changeset. - -:branch: String. The name of the branch on which the changeset was - committed. - -:branches: List of strings. The name of the branch on which the - changeset was committed. Will be empty if the branch name was - default. - -:children: List of strings. The children of the changeset. - -:date: Date information. The date when the changeset was committed. - -:desc: String. The text of the changeset description. - -:diffstat: String. Statistics of changes with the following format: - "modified files: +added/-removed lines" - -:files: List of strings. All files modified, added, or removed by this - changeset. - -:file_adds: List of strings. Files added by this changeset. - -:file_copies: List of strings. Files copied in this changeset with - their sources. - -:file_copies_switch: List of strings. Like "file_copies" but displayed - only if the --copied switch is set. - -:file_mods: List of strings. Files modified by this changeset. - -:file_dels: List of strings. Files removed by this changeset. - -:node: String. The changeset identification hash, as a 40 hexadecimal - digit string. - -:parents: List of strings. The parents of the changeset. - -:rev: Integer. The repository-local changeset revision number. - -:tags: List of strings. Any tags associated with the changeset. - -:latesttag: String. Most recent global tag in the ancestors of this - changeset. - -:latesttagdistance: Integer. Longest path to the latest tag. +.. keywordsmarker The "date" keyword does not produce human-readable output. If you want to use a date in your output, you can use a filter to process @@ -82,82 +37,4 @@ List of filters: -:addbreaks: Any text. Add an XHTML "<br />" tag before the end of - every line except the last. - -:age: Date. Returns a human-readable date/time difference between the - given date/time and the current date/time. - -:basename: Any text. Treats the text as a path, and returns the last - component of the path after splitting by the path separator - (ignoring trailing separators). For example, "foo/bar/baz" becomes - "baz" and "foo/bar//" becomes "bar". - -:stripdir: Treat the text as path and strip a directory level, if - possible. For example, "foo" and "foo/bar" becomes "foo". - -:date: Date. Returns a date in a Unix date format, including the - timezone: "Mon Sep 04 15:13:13 2006 0700". - -:domain: Any text. Finds the first string that looks like an email - address, and extracts just the domain component. Example: ``User - <user@example.com>`` becomes ``example.com``. - -:email: Any text. Extracts the first string that looks like an email - address. Example: ``User <user@example.com>`` becomes - ``user@example.com``. - -:escape: Any text. Replaces the special XML/XHTML characters "&", "<" - and ">" with XML entities. - -:hex: Any text. Convert a binary Mercurial node identifier into - its long hexadecimal representation. - -:fill68: Any text. Wraps the text to fit in 68 columns. - -:fill76: Any text. Wraps the text to fit in 76 columns. - -:firstline: Any text. Returns the first line of text. - -:nonempty: Any text. Returns '(none)' if the string is empty. - -:hgdate: Date. 
Returns the date as a pair of numbers: "1157407993 - 25200" (Unix timestamp, timezone offset). - -:isodate: Date. Returns the date in ISO 8601 format: "2009-08-18 13:00 - +0200". - -:isodatesec: Date. Returns the date in ISO 8601 format, including - seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date - filter. - -:localdate: Date. Converts a date to local date. - -:obfuscate: Any text. Returns the input text rendered as a sequence of - XML entities. - -:person: Any text. Returns the text before an email address. - -:rfc822date: Date. Returns a date using the same format used in email - headers: "Tue, 18 Aug 2009 13:00:13 +0200". - -:rfc3339date: Date. Returns a date using the Internet date format - specified in RFC 3339: "2009-08-18T13:00:13+02:00". - -:short: Changeset hash. Returns the short form of a changeset hash, - i.e. a 12 hexadecimal digit string. - -:shortdate: Date. Returns a date like "2006-09-18". - -:stringify: Any type. Turns the value into text by converting values into - text and concatenating them. - -:strip: Any text. Strips all leading and trailing whitespace. - -:tabindent: Any text. Returns the text, with every line except the - first starting with a tab character. - -:urlescape: Any text. Escapes all "special" characters. For example, - "foo bar" becomes "foo%20bar". - -:user: Any text. Returns the user portion of an email address. +.. filtersmarker
--- a/mercurial/hg.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/hg.py Wed Mar 30 13:23:24 2011 -0500 @@ -9,7 +9,7 @@ from i18n import _ from lock import release from node import hex, nullid, nullrev, short -import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo +import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks import lock, util, extensions, error, encoding, node import cmdutil, discovery, url import merge as mergemod @@ -143,7 +143,7 @@ if not os.path.isdir(root): os.mkdir(root) - os.mkdir(roothg) + util.makedir(roothg, notindexed=True) requirements = '' try: @@ -281,7 +281,7 @@ dir_cleanup.dir_ = hgdir try: dest_path = hgdir - os.mkdir(dest_path) + util.makedir(dest_path, notindexed=True) except OSError, inst: if inst.errno == errno.EEXIST: dir_cleanup.close() @@ -366,6 +366,21 @@ dest_repo.ui.status(_("updating to branch %s\n") % bn) _update(dest_repo, uprev) + # clone all bookmarks + if dest_repo.local() and src_repo.capable("pushkey"): + rb = src_repo.listkeys('bookmarks') + for k, n in rb.iteritems(): + try: + m = dest_repo.lookup(n) + dest_repo._bookmarks[k] = m + except: + pass + if rb: + bookmarks.write(dest_repo) + elif src_repo.local() and dest_repo.capable("pushkey"): + for k, n in src_repo._bookmarks.iteritems(): + dest_repo.pushkey('bookmarks', k, '', hex(n)) + return src_repo, dest_repo finally: release(src_lock, dest_lock) @@ -421,14 +436,19 @@ if revs: revs = [other.lookup(rev) for rev in revs] - other, incoming, bundle = bundlerepo.getremotechanges(ui, repo, other, revs, - opts["bundle"], opts["force"]) - if incoming is None: + usecommon = other.capable('getbundle') + other, common, incoming, bundle = bundlerepo.getremotechanges(ui, repo, other, + revs, opts["bundle"], opts["force"], + usecommon=usecommon) + if not incoming: ui.status(_("no changes found\n")) return subreporecurse() try: - chlist = other.changelog.nodesbetween(incoming, revs)[0] + if usecommon: + chlist = other.changelog.findmissing(common, revs) + else: + chlist = other.changelog.nodesbetween(incoming, revs)[0] displayer = cmdutil.show_changeset(ui, other, opts, buffered) # XXX once graphlog extension makes it into core,
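clone now propagates bookmarks: with a local destination it resolves each bookmark advertised by the source and writes it into the clone, and when only the destination speaks pushkey it pushes the local bookmarks instead. A repo-free sketch of the first direction (lookup and store are stand-ins for the repository API):

    def copy_bookmarks(remote_bookmarks, lookup, store):
        """Copy listkeys('bookmarks') output into a freshly cloned repo.

        remote_bookmarks -- dict of name -> hex node
        lookup(node)     -- resolve the node in the clone, raising if absent
        store            -- mapping that receives the resolved bookmarks
        """
        for name, node in remote_bookmarks.items():
            try:
                store[name] = lookup(node)
            except LookupError:
                # the bookmarked changeset was not cloned; skip it quietly,
                # as the patch does with its blanket except clause
                pass
        return store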
--- a/mercurial/hgweb/common.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/hgweb/common.py Wed Mar 30 13:23:24 2011 -0500 @@ -73,10 +73,29 @@ def __init__(self, code, message=None, headers=[]): if message is None: message = _statusmessage(code) - Exception.__init__(self, code, message) + Exception.__init__(self) self.code = code self.message = message self.headers = headers + def __str__(self): + return self.message + +class continuereader(object): + def __init__(self, f, write): + self.f = f + self._write = write + self.continued = False + + def read(self, amt=-1): + if not self.continued: + self.continued = True + self._write('HTTP/1.1 100 Continue\r\n\r\n') + return self.f.read(amt) + + def __getattr__(self, attr): + if attr in ('close', 'readline', 'readlines', '__iter__'): + return getattr(self.f, attr) + raise AttributeError() def _statusmessage(code): from BaseHTTPServer import BaseHTTPRequestHandler
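continuereader defers the "HTTP/1.1 100 Continue" interim response until the first read of the request body; the server.py and wsgicgi.py hunks below wrap wsgi.input with it when the client sent an Expect header. The sketch below exercises a wrapper of the same shape against an in-memory stream instead of a socket:

    import io

    class ContinueReaderSketch(object):
        # same shape as the class added above: the interim response is
        # written lazily, on the first read of the body
        def __init__(self, f, write):
            self.f = f
            self._write = write
            self.continued = False

        def read(self, amt=-1):
            if not self.continued:
                self.continued = True
                self._write('HTTP/1.1 100 Continue\r\n\r\n')
            return self.f.read(amt)

    sent = []
    body = ContinueReaderSketch(io.BytesIO(b'payload'), sent.append)
    assert sent == []                    # nothing written until the body is read
    assert body.read(3) == b'pay'
    assert sent == ['HTTP/1.1 100 Continue\r\n\r\n']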
--- a/mercurial/hgweb/hgweb_mod.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/hgweb/hgweb_mod.py Wed Mar 30 13:23:24 2011 -0500 @@ -17,6 +17,7 @@ perms = { 'changegroup': 'pull', 'changegroupsubset': 'pull', + 'getbundle': 'pull', 'stream_out': 'pull', 'listkeys': 'pull', 'unbundle': 'push', @@ -121,7 +122,11 @@ self.check_perm(req, perms[cmd]) return protocol.call(self.repo, req, cmd) except ErrorResponse, inst: - if cmd == 'unbundle': + # A client that sends unbundle without 100-continue will + # break if we respond early. + if (cmd == 'unbundle' and + req.env.get('HTTP_EXPECT', + '').lower() != '100-continue'): req.drain() req.respond(inst, protocol.HGTYPE) return '0\n%s\n' % inst.message
--- a/mercurial/hgweb/hgwebdir_mod.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/hgweb/hgwebdir_mod.py Wed Mar 30 13:23:24 2011 -0500 @@ -40,9 +40,10 @@ def urlrepos(prefix, roothead, paths): """yield url paths and filesystem paths from a list of repo paths - >>> list(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) + >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq] + >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] - >>> list(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) + >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] """ for path in paths: @@ -76,7 +77,10 @@ if not os.path.exists(self.conf): raise util.Abort(_('config file %s not found!') % self.conf) u.readconfig(self.conf, remap=map, trust=True) - paths = u.configitems('hgweb-paths') + paths = [] + for name, ignored in u.configitems('hgweb-paths'): + for path in u.configlist('hgweb-paths', name): + paths.append((name, path)) elif isinstance(self.conf, (list, tuple)): paths = self.conf elif isinstance(self.conf, dict): @@ -247,6 +251,9 @@ # update time with local timezone try: r = hg.repository(self.ui, path) + except IOError: + u.warn(_('error accessing repository at %s\n') % path) + continue except error.RepoError: u.warn(_('error accessing repository at %s\n') % path) continue
--- a/mercurial/hgweb/protocol.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/hgweb/protocol.py Wed Mar 30 13:23:24 2011 -0500 @@ -22,7 +22,7 @@ if k == '*': star = {} for key in self.req.form.keys(): - if key not in keys: + if key != 'cmd' and key not in keys: star[key] = self.req.form[key][0] data['*'] = star else:
--- a/mercurial/hgweb/server.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/hgweb/server.py Wed Mar 30 13:23:24 2011 -0500 @@ -8,6 +8,7 @@ import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback from mercurial import util, error +from mercurial.hgweb import common from mercurial.i18n import _ def _splitURI(uri): @@ -111,6 +112,9 @@ env['SERVER_PROTOCOL'] = self.request_version env['wsgi.version'] = (1, 0) env['wsgi.url_scheme'] = self.url_scheme + if env.get('HTTP_EXPECT', '').lower() == '100-continue': + self.rfile = common.continuereader(self.rfile, self.wfile.write) + env['wsgi.input'] = self.rfile env['wsgi.errors'] = _error_logger(self) env['wsgi.multithread'] = isinstance(self.server,
--- a/mercurial/hgweb/wsgicgi.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/hgweb/wsgicgi.py Wed Mar 30 13:23:24 2011 -0500 @@ -10,6 +10,7 @@ import os, sys from mercurial import util +from mercurial.hgweb import common def launch(application): util.set_binary(sys.stdin) @@ -23,7 +24,11 @@ if environ['PATH_INFO'].startswith(scriptname): environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):] - environ['wsgi.input'] = sys.stdin + stdin = sys.stdin + if environ.get('HTTP_EXPECT', '').lower() == '100-continue': + stdin = common.continuereader(stdin, sys.stdout.write) + + environ['wsgi.input'] = stdin environ['wsgi.errors'] = sys.stderr environ['wsgi.version'] = (1, 0) environ['wsgi.multithread'] = False
--- a/mercurial/httprepo.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/httprepo.py Wed Mar 30 13:23:24 2011 -0500 @@ -52,10 +52,13 @@ # look up capabilities only when needed + def _fetchcaps(self): + self.caps = set(self._call('capabilities').split()) + def get_caps(self): if self.caps is None: try: - self.caps = set(self._call('capabilities').split()) + self._fetchcaps() except error.RepoError: self.caps = set() self.ui.debug('capabilities: %s\n' % @@ -73,8 +76,7 @@ data = args.pop('data', None) headers = args.pop('headers', {}) self.ui.debug("sending %s command\n" % cmd) - q = {"cmd": cmd} - q.update(args) + q = [('cmd', cmd)] + sorted(args.items()) qs = '?%s' % urllib.urlencode(q) cu = "%s%s" % (self._url, qs) req = urllib2.Request(cu, data, headers) @@ -196,7 +198,13 @@ inst = httpsrepository(ui, path) else: inst = httprepository(ui, path) - inst.between([(nullid, nullid)]) + try: + # Try to do useful work when checking compatibility. + # Usually saves a roundtrip since we want the caps anyway. + inst._fetchcaps() + except error.RepoError: + # No luck, try older compatibility check. + inst.between([(nullid, nullid)]) return inst except error.RepoError: ui.note('(falling back to static-http)\n')
--- a/mercurial/localrepo.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/localrepo.py Wed Mar 30 13:23:24 2011 -0500 @@ -20,7 +20,8 @@ propertycache = util.propertycache class localrepository(repo.repository): - capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey')) + capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey', + 'known', 'getbundle')) supportedformats = set(('revlogv1', 'parentdelta')) supported = supportedformats | set(('store', 'fncache', 'shared', 'dotencode')) @@ -46,7 +47,7 @@ if create: if not os.path.exists(path): util.makedirs(path) - os.mkdir(self.path) + util.makedir(self.path, notindexed=True) requirements = ["revlogv1"] if self.ui.configbool('format', 'usestore', True): os.mkdir(os.path.join(self.path, "store")) @@ -558,6 +559,10 @@ repo = (remote and remote.local()) and remote or self return repo[key].branch() + def known(self, nodes): + nm = self.changelog.nodemap + return [(n in nm) for n in nodes] + def local(self): return True @@ -1320,20 +1325,24 @@ def pull(self, remote, heads=None, force=False): lock = self.lock() try: + usecommon = remote.capable('getbundle') tmp = discovery.findcommonincoming(self, remote, heads=heads, - force=force) + force=force, commononly=usecommon) common, fetch, rheads = tmp if not fetch: self.ui.status(_("no changes found\n")) result = 0 else: - if heads is None and fetch == [nullid]: + if heads is None and list(common) == [nullid]: self.ui.status(_("requesting all changes\n")) elif heads is None and remote.capable('changegroupsubset'): # issue1320, avoid a race if remote changed after discovery heads = rheads - if heads is None: + if usecommon: + cg = remote.getbundle('pull', common=common, + heads=heads or rheads) + elif heads is None: cg = remote.changegroup(fetch, 'pull') elif not remote.capable('changegroupsubset'): raise util.Abort(_("partial pull cannot be done because " @@ -1346,27 +1355,6 @@ finally: lock.release() - self.ui.debug("checking for updated bookmarks\n") - rb = remote.listkeys('bookmarks') - changed = False - for k in rb.keys(): - if k in self._bookmarks: - nr, nl = rb[k], self._bookmarks[k] - if nr in self: - cr = self[nr] - cl = self[nl] - if cl.rev() >= cr.rev(): - continue - if cr in cl.descendants(): - self._bookmarks[k] = cr.node() - changed = True - self.ui.status(_("updating bookmark %s\n") % k) - else: - self.ui.warn(_("not updating divergent" - " bookmark %s\n") % k) - if changed: - bookmarks.write(self) - return result def checkpush(self, force, revs): @@ -1446,7 +1434,7 @@ for node in nodes: self.ui.debug("%s\n" % hex(node)) - def changegroupsubset(self, bases, heads, source, extranodes=None): + def changegroupsubset(self, bases, heads, source): """Compute a changegroup consisting of all the nodes that are descendents of any of the bases and ancestors of any of the heads. Return a chunkbuffer object whose read() method will return @@ -1458,177 +1446,108 @@ Another wrinkle is doing the reverse, figuring out which changeset in the changegroup a particular filenode or manifestnode belongs to. - - The caller can specify some nodes that must be included in the - changegroup using the extranodes argument. It should be a dict - where the keys are the filenames (or 1 for the manifest), and the - values are lists of (node, linknode) tuples, where node is a wanted - node and linknode is the changelog node that should be transmitted as - the linkrev. 
""" - - # Set up some initial variables - # Make it easy to refer to self.changelog cl = self.changelog - # Compute the list of changesets in this changegroup. - # Some bases may turn out to be superfluous, and some heads may be - # too. nodesbetween will return the minimal set of bases and heads - # necessary to re-create the changegroup. if not bases: bases = [nullid] - msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads) + csets, bases, heads = cl.nodesbetween(bases, heads) + # We assume that all ancestors of bases are known + common = set(cl.ancestors(*[cl.rev(n) for n in bases])) + return self._changegroupsubset(common, csets, heads, source) + + def getbundle(self, source, heads=None, common=None): + """Like changegroupsubset, but returns the set difference between the + ancestors of heads and the ancestors common. + + If heads is None, use the local heads. If common is None, use [nullid]. - if extranodes is None: - # can we go through the fast path ? - heads.sort() - allheads = self.heads() - allheads.sort() - if heads == allheads: - return self._changegroup(msng_cl_lst, source) + The nodes in common might not all be known locally due to the way the + current discovery protocol works. + """ + cl = self.changelog + if common: + nm = cl.nodemap + common = [n for n in common if n in nm] + else: + common = [nullid] + if not heads: + heads = cl.heads() + common, missing = cl.findcommonmissing(common, heads) + return self._changegroupsubset(common, missing, heads, source) + + def _changegroupsubset(self, commonrevs, csets, heads, source): + + cl = self.changelog + mf = self.manifest + mfs = {} # needed manifests + fnodes = {} # needed file nodes + + # can we go through the fast path ? + heads.sort() + if heads == sorted(self.heads()): + return self._changegroup(csets, source) # slow path self.hook('preoutgoing', throw=True, source=source) - - self.changegroupinfo(msng_cl_lst, source) - - # We assume that all ancestors of bases are known - commonrevs = set(cl.ancestors(*[cl.rev(n) for n in bases])) - - # Make it easy to refer to self.manifest - mnfst = self.manifest - # We don't know which manifests are missing yet - msng_mnfst_set = {} - # Nor do we know which filenodes are missing. - msng_filenode_set = {} - - # A changeset always belongs to itself, so the changenode lookup - # function for a changenode is identity. - def identity(x): - return x - - # A function generating function that sets up the initial environment - # the inner function. - def filenode_collector(changedfiles): - # This gathers information from each manifestnode included in the - # changegroup about which filenodes the manifest node references - # so we can include those in the changegroup too. - # - # It also remembers which changenode each filenode belongs to. It - # does this by assuming the a filenode belongs to the changenode - # the first manifest that references it belongs to. - def collect_msng_filenodes(mnfstnode): - r = mnfst.rev(mnfstnode) - if mnfst.deltaparent(r) in mnfst.parentrevs(r): - # If the previous rev is one of the parents, - # we only need to see a diff. - deltamf = mnfst.readdelta(mnfstnode) - # For each line in the delta - for f, fnode in deltamf.iteritems(): - # And if the file is in the list of files we care - # about. - if f in changedfiles: - # Get the changenode this manifest belongs to - clnode = msng_mnfst_set[mnfstnode] - # Create the set of filenodes for the file if - # there isn't one already. 
- ndset = msng_filenode_set.setdefault(f, {}) - # And set the filenode's changelog node to the - # manifest's if it hasn't been set already. - ndset.setdefault(fnode, clnode) - else: - # Otherwise we need a full manifest. - m = mnfst.read(mnfstnode) - # For every file in we care about. - for f in changedfiles: - fnode = m.get(f, None) - # If it's in the manifest - if fnode is not None: - # See comments above. - clnode = msng_mnfst_set[mnfstnode] - ndset = msng_filenode_set.setdefault(f, {}) - ndset.setdefault(fnode, clnode) - return collect_msng_filenodes + self.changegroupinfo(csets, source) # If we determine that a particular file or manifest node must be a # node that the recipient of the changegroup will already have, we can # also assume the recipient will have all the parents. This function # prunes them from the set of missing nodes. def prune(revlog, missingnodes): - hasset = set() - # If a 'missing' filenode thinks it belongs to a changenode we - # assume the recipient must have, then the recipient must have - # that filenode. + # drop any nodes that claim to be part of a cset in commonrevs + drop = set() for n in missingnodes: - clrev = revlog.linkrev(revlog.rev(n)) - if clrev in commonrevs: - hasset.add(n) - for n in hasset: + if revlog.linkrev(revlog.rev(n)) in commonrevs: + drop.add(n) + for n in drop: missingnodes.pop(n, None) - for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]): - missingnodes.pop(revlog.node(r), None) - - # Add the nodes that were explicitly requested. - def add_extra_nodes(name, nodes): - if not extranodes or name not in extranodes: - return - - for node, linknode in extranodes[name]: - if node not in nodes: - nodes[node] = linknode # Now that we have all theses utility functions to help out and # logically divide up the task, generate the group. def gengroup(): # The set of changed files starts empty. changedfiles = set() - collect = changegroup.collector(cl, msng_mnfst_set, changedfiles) + + count = [0] + def clookup(revlog, x): + c = cl.read(x) + changedfiles.update(c[3]) + mfs.setdefault(c[0], x) + count[0] += 1 + self.ui.progress(_('bundling'), count[0], unit=_('changesets')) + return x # Create a changenode group generator that will call our functions # back to lookup the owning changenode and collect information. - group = cl.group(msng_cl_lst, identity, collect) - for cnt, chnk in enumerate(group): - yield chnk - # revlog.group yields three entries per node, so - # dividing by 3 gives an approximation of how many - # nodes have been processed. - self.ui.progress(_('bundling'), cnt / 3, - unit=_('changesets')) - changecount = cnt / 3 + for chunk in cl.group(csets, clookup): + yield chunk + changecount = count[0] + efiles = len(changedfiles) self.ui.progress(_('bundling'), None) - prune(mnfst, msng_mnfst_set) - add_extra_nodes(1, msng_mnfst_set) - msng_mnfst_lst = msng_mnfst_set.keys() - # Sort the manifestnodes by revision number. - msng_mnfst_lst.sort(key=mnfst.rev) + prune(mf, mfs) # Create a generator for the manifestnodes that calls our lookup # and data collection functions back. 
- group = mnfst.group(msng_mnfst_lst, - lambda mnode: msng_mnfst_set[mnode], - filenode_collector(changedfiles)) - efiles = {} - for cnt, chnk in enumerate(group): - if cnt % 3 == 1: - mnode = chnk[:20] - efiles.update(mnfst.readdelta(mnode)) - yield chnk - # see above comment for why we divide by 3 - self.ui.progress(_('bundling'), cnt / 3, + count = [0] + def mlookup(revlog, x): + clnode = mfs[x] + mdata = mf.readfast(x) + for f in changedfiles: + if f in mdata: + fnodes.setdefault(f, {}).setdefault(mdata[f], clnode) + count[0] += 1 + self.ui.progress(_('bundling'), count[0], unit=_('manifests'), total=changecount) + return mfs[x] + + for chunk in mf.group(mfs, mlookup): + yield chunk self.ui.progress(_('bundling'), None) - efiles = len(efiles) - - # These are no longer needed, dereference and toss the memory for - # them. - msng_mnfst_lst = None - msng_mnfst_set.clear() - if extranodes: - for fname in extranodes: - if isinstance(fname, int): - continue - msng_filenode_set.setdefault(fname, {}) - changedfiles.add(fname) + mfs.clear() + # Go through all our files in order sorted by name. for idx, fname in enumerate(sorted(changedfiles)): filerevlog = self.file(fname) @@ -1636,36 +1555,33 @@ raise util.Abort(_("empty or missing revlog for %s") % fname) # Toss out the filenodes that the recipient isn't really # missing. - missingfnodes = msng_filenode_set.pop(fname, {}) + missingfnodes = fnodes.pop(fname, {}) prune(filerevlog, missingfnodes) - add_extra_nodes(fname, missingfnodes) # If any filenodes are left, generate the group for them, # otherwise don't bother. if missingfnodes: yield changegroup.chunkheader(len(fname)) yield fname - # Sort the filenodes by their revision # (topological order) - nodeiter = list(missingfnodes) - nodeiter.sort(key=filerevlog.rev) # Create a group generator and only pass in a changenode # lookup function as we need to collect no information # from filenodes. - group = filerevlog.group(nodeiter, - lambda fnode: missingfnodes[fnode]) - for chnk in group: + def flookup(revlog, x): # even though we print the same progress on # most loop iterations, put the progress call # here so that time estimates (if any) can be updated self.ui.progress( _('bundling'), idx, item=fname, unit=_('files'), total=efiles) - yield chnk + return missingfnodes[x] + + for chunk in filerevlog.group(missingfnodes, flookup): + yield chunk # Signal that no more groups are left. yield changegroup.closechunk() self.ui.progress(_('bundling'), None) - if msng_cl_lst: - self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source) + if csets: + self.hook('outgoing', node=hex(csets[0]), source=source) return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN') @@ -1689,48 +1605,43 @@ revset = set([cl.rev(n) for n in nodes]) self.changegroupinfo(nodes, source) - def identity(x): - return x - def gennodelst(log): for r in log: if log.linkrev(r) in revset: yield log.node(r) - def lookuplinkrev_func(revlog): - def lookuplinkrev(n): - return cl.node(revlog.linkrev(revlog.rev(n))) - return lookuplinkrev - def gengroup(): '''yield a sequence of changegroup chunks (strings)''' # construct a list of all changed files changedfiles = set() mmfs = {} - collect = changegroup.collector(cl, mmfs, changedfiles) - for cnt, chnk in enumerate(cl.group(nodes, identity, collect)): - # revlog.group yields three entries per node, so - # dividing by 3 gives an approximation of how many - # nodes have been processed. 
- self.ui.progress(_('bundling'), cnt / 3, unit=_('changesets')) - yield chnk - changecount = cnt / 3 + count = [0] + def clookup(revlog, x): + c = cl.read(x) + changedfiles.update(c[3]) + mmfs.setdefault(c[0], x) + count[0] += 1 + self.ui.progress(_('bundling'), count[0], unit=_('changesets')) + return x + + for chunk in cl.group(nodes, clookup): + yield chunk + efiles = len(changedfiles) + changecount = count[0] self.ui.progress(_('bundling'), None) mnfst = self.manifest nodeiter = gennodelst(mnfst) - efiles = {} - for cnt, chnk in enumerate(mnfst.group(nodeiter, - lookuplinkrev_func(mnfst))): - if cnt % 3 == 1: - mnode = chnk[:20] - efiles.update(mnfst.readdelta(mnode)) - # see above comment for why we divide by 3 - self.ui.progress(_('bundling'), cnt / 3, + count = [0] + def mlookup(revlog, x): + count[0] += 1 + self.ui.progress(_('bundling'), count[0], unit=_('manifests'), total=changecount) - yield chnk - efiles = len(efiles) + return cl.node(revlog.linkrev(revlog.rev(x))) + + for chunk in mnfst.group(nodeiter, mlookup): + yield chunk self.ui.progress(_('bundling'), None) for idx, fname in enumerate(sorted(changedfiles)): @@ -1742,12 +1653,14 @@ if nodeiter: yield changegroup.chunkheader(len(fname)) yield fname - lookup = lookuplinkrev_func(filerevlog) - for chnk in filerevlog.group(nodeiter, lookup): + def flookup(revlog, x): self.ui.progress( _('bundling'), idx, item=fname, total=efiles, unit=_('files')) - yield chnk + return cl.node(revlog.linkrev(revlog.rev(x))) + + for chunk in filerevlog.group(nodeiter, flookup): + yield chunk self.ui.progress(_('bundling'), None) yield changegroup.closechunk() @@ -1915,10 +1828,6 @@ self.hook("incoming", node=hex(cl.node(i)), source=srctype, url=url) - # FIXME - why does this care about tip? - if newheads == oldheads: - bookmarks.update(self, self.dirstate.parents(), self['tip'].node()) - # never return 0 here: if newheads < oldheads: return newheads - oldheads - 1 @@ -2019,6 +1928,10 @@ def listkeys(self, namespace): return pushkey.list(self, namespace) + def debugwireargs(self, one, two, three=None, four=None): + '''used to test argument passing over the wire''' + return "%s %s %s %s" % (one, two, three, four) + # used to avoid circular references so destructors work def aftertrans(files): renamefiles = [tuple(t) for t in files]
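The localrepo changegroup rewrite above leans on two ideas: per-revlog lookup callbacks that record progress as a side effect, and a prune() helper that drops nodes whose owning changeset is already common with the receiver. The pruning step is easier to see with toy data; the sketch below is illustrative only, with a plain dict (linkrev_of) standing in for a revlog's node-to-linkrev mapping, and is not the Mercurial API.

    # Illustrative sketch of the prune() idea from the changegroup rewrite above.
    # A node whose linkrev points at a changeset the receiver already has
    # (a member of commonrevs) cannot be missing on the remote side, so it is
    # dropped from the "missing" map before bundling.
    def prune(linkrev_of, missingnodes, commonrevs):
        drop = [n for n in missingnodes if linkrev_of[n] in commonrevs]
        for n in drop:
            missingnodes.pop(n, None)
        return missingnodes

    linkrev_of = {'n1': 2, 'n2': 5, 'n3': 7}   # filenode -> changelog rev
    missing = {'n1': 'c2', 'n2': 'c5', 'n3': 'c7'}
    commonrevs = {0, 1, 2, 5}                  # revs the receiver already has
    print(prune(linkrev_of, missing, commonrevs))   # only 'n3' survives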
--- a/mercurial/manifest.py Wed Mar 30 02:22:15 2011 +0900
+++ b/mercurial/manifest.py Wed Mar 30 13:23:24 2011 -0500
@@ -38,6 +38,13 @@
         r = self.rev(node)
         return self.parse(mdiff.patchtext(self.revdiff(self.deltaparent(r), r)))

+    def readfast(self, node):
+        '''use the faster of readdelta or read'''
+        r = self.rev(node)
+        if self.deltaparent(r) in self.parentrevs(r):
+            return self.readdelta(node)
+        return self.read(node)
+
     def read(self, node):
         if node == revlog.nullid:
             return manifestdict() # don't upset local cache
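The readfast() method added above picks the cheaper of two reads: when a manifest revision is stored as a delta against one of its own parents, the delta alone describes the change, otherwise the full text has to be reconstructed. A standalone sketch of that decision, with plain dicts standing in for revlog storage (deltaparent, parentrevs and the two readers are hypothetical stand-ins, not the revlog API):

    def readfast(rev, deltaparent, parentrevs, readdelta, read):
        # delta stored against an actual parent: the delta is enough
        if deltaparent[rev] in parentrevs[rev]:
            return readdelta(rev)
        # otherwise reconstruct the full manifest text
        return read(rev)

    deltaparent = {3: 2}
    parentrevs = {3: (2, -1)}
    readdelta = lambda r: {'changed-file': 'newnode'}
    read = lambda r: {'a': 'n1', 'changed-file': 'newnode'}
    print(readfast(3, deltaparent, parentrevs, readdelta, read))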
--- a/mercurial/merge.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/merge.py Wed Mar 30 13:23:24 2011 -0500 @@ -494,7 +494,6 @@ p1, p2 = pl[0], repo[node] pa = p1.ancestor(p2) fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2) - fastforward = False ### check phase if not overwrite and len(pl) > 1: @@ -504,9 +503,7 @@ raise util.Abort(_("merging with a working directory ancestor" " has no effect")) elif pa == p1: - if p1.branch() != p2.branch(): - fastforward = True - else: + if p1.branch() == p2.branch(): raise util.Abort(_("nothing to merge (use 'hg update'" " or check 'hg heads')")) if not force and (wc.files() or wc.deleted()): @@ -551,7 +548,7 @@ if not partial: repo.dirstate.setparents(fp1, fp2) recordupdates(repo, action, branchmerge) - if not branchmerge and not fastforward: + if not branchmerge: repo.dirstate.setbranch(p2.branch()) finally: wlock.release()
--- a/mercurial/osutil.c Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/osutil.c Wed Mar 30 13:23:24 2011 -0500 @@ -514,6 +514,22 @@ } #endif +#ifdef __APPLE__ +#include <ApplicationServices/ApplicationServices.h> + +static PyObject *isgui(PyObject *self) +{ + CFDictionaryRef dict = CGSessionCopyCurrentDictionary(); + + if (dict != NULL) { + CFRelease(dict); + return Py_True; + } else { + return Py_False; + } +} +#endif + static char osutil_doc[] = "Native operating system services."; static PyMethodDef methods[] = { @@ -524,6 +540,12 @@ "Open a file with POSIX-like semantics.\n" "On error, this function may raise either a WindowsError or an IOError."}, #endif +#ifdef __APPLE__ + { + "isgui", (PyCFunction)isgui, METH_NOARGS, + "Is a CoreGraphics session available?" + }, +#endif {NULL, NULL} };
--- a/mercurial/parser.py Wed Mar 30 02:22:15 2011 +0900
+++ b/mercurial/parser.py Wed Mar 30 13:23:24 2011 -0500
@@ -78,7 +78,9 @@
         'generate a parse tree from a message'
         self._iter = self._tokenizer(message)
         self._advance()
-        return self._parse()
+        res = self._parse()
+        token, value, pos = self.current
+        return res, pos
     def eval(self, tree):
         'recursively evaluate a parse tree using node methods'
         if not isinstance(tree, tuple):
--- a/mercurial/patch.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/patch.py Wed Mar 30 13:23:24 2011 -0500 @@ -488,11 +488,6 @@ cand.sort(key=lambda x: abs(x - linenum)) return cand - def hashlines(self): - self.hash = {} - for x, s in enumerate(self.lines): - self.hash.setdefault(s, []).append(x) - def makerejlines(self, fname): base = os.path.basename(fname) yield "--- %s\n+++ %s\n" % (base, base) @@ -574,8 +569,10 @@ self.dirty = 1 return 0 - # ok, we couldn't match the hunk. Lets look for offsets and fuzz it - self.hashlines() + # ok, we couldn't match the hunk. Lets look for offsets and fuzz it + self.hash = {} + for x, s in enumerate(self.lines): + self.hash.setdefault(s, []).append(x) if h.hunk[-1][0] != ' ': # if the hunk tried to put something at the bottom of the file # override the start line and use eof here @@ -613,6 +610,12 @@ self.rej.append(horig) return -1 + def close(self): + if self.dirty: + self.writelines(self.fname, self.lines) + self.write_rej() + return len(self.rej) + class hunk(object): def __init__(self, desc, num, lr, context, create=False, remove=False): self.number = num @@ -680,6 +683,7 @@ del self.b[-1] self.lena -= 1 self.lenb -= 1 + self._fixnewline(lr) def read_context_hunk(self, lr): self.desc = lr.readline() @@ -782,9 +786,14 @@ self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena, self.startb, self.lenb) self.hunk[0] = self.desc + self._fixnewline(lr) - def fix_newline(self): - diffhelpers.fix_newline(self.hunk, self.a, self.b) + def _fixnewline(self, lr): + l = lr.readline() + if l.startswith('\ '): + diffhelpers.fix_newline(self.hunk, self.a, self.b) + else: + lr.push(l) def complete(self): return len(self.a) == self.lena and len(self.b) == self.lenb @@ -993,7 +1002,6 @@ maps filenames to gitpatch records. Unique event. """ changed = {} - current_hunk = None afile = "" bfile = "" state = None @@ -1011,11 +1019,6 @@ x = lr.readline() if not x: break - if current_hunk: - if x.startswith('\ '): - current_hunk.fix_newline() - yield 'hunk', current_hunk - current_hunk = None if (state == BFILE and ((not context and x[0] == '@') or ((context is not False) and x.startswith('***************')))): if context is None and x.startswith('***************'): @@ -1023,18 +1026,20 @@ gpatch = changed.get(bfile) create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD' remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE' - current_hunk = hunk(x, hunknum + 1, lr, context, create, remove) + h = hunk(x, hunknum + 1, lr, context, create, remove) hunknum += 1 if emitfile: emitfile = False - yield 'file', (afile, bfile, current_hunk) + yield 'file', (afile, bfile, h) + yield 'hunk', h elif state == BFILE and x.startswith('GIT binary patch'): - current_hunk = binhunk(changed[bfile]) + h = binhunk(changed[bfile]) hunknum += 1 if emitfile: emitfile = False - yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk) - current_hunk.extract(lr) + yield 'file', ('a/' + afile, 'b/' + bfile, h) + h.extract(lr) + yield 'hunk', h elif x.startswith('diff --git'): # check for git diff, scanning the whole patch file if needed m = gitre.match(x) @@ -1083,12 +1088,6 @@ emitfile = True state = BFILE hunknum = 0 - if current_hunk: - if current_hunk.complete(): - yield 'hunk', current_hunk - else: - raise PatchError(_("malformed patch %s %s") % (afile, - current_hunk.desc)) def applydiff(ui, fp, changed, strip=1, eolmode='strict'): """Reads a patch from fp and tries to apply it. 
@@ -1114,14 +1113,6 @@ cwd = os.getcwd() opener = util.opener(cwd) - def closefile(): - if not current_file: - return 0 - if current_file.dirty: - current_file.writelines(current_file.fname, current_file.lines) - current_file.write_rej() - return len(current_file.rej) - for state, values in iterhunks(ui, fp): if state == 'hunk': if not current_file: @@ -1132,7 +1123,8 @@ if ret > 0: err = 1 elif state == 'file': - rejects += closefile() + if current_file: + rejects += current_file.close() afile, bfile, first_hunk = values try: current_file, missing = selectfile(afile, bfile, @@ -1157,13 +1149,14 @@ else: raise util.Abort(_('unsupported parser state: %s') % state) - rejects += closefile() + if current_file: + rejects += current_file.close() if rejects: return -1 return err -def externalpatch(patcher, patchname, ui, strip, cwd, files): +def _externalpatch(patcher, patchname, ui, strip, cwd, files): """use <patcher> to apply <patchname> to the working directory. returns whether patch was applied with fuzz factor.""" @@ -1247,7 +1240,7 @@ files = {} try: if patcher: - return externalpatch(patcher, patchname, ui, strip, cwd, files) + return _externalpatch(patcher, patchname, ui, strip, cwd, files) return internalpatch(patchname, ui, strip, cwd, files, eolmode) except PatchError, err: raise util.Abort(str(err))
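When a hunk does not apply at its recorded position, the patching code above indexes the target file's lines by content and tries the candidate positions closest to where the hunk expected to land. A self-contained sketch of that offset search (hypothetical names; the real patchfile class also handles fuzz, EOF hunks and reject files):

    def candidates(lines, wanted, linenum):
        byline = {}
        for i, s in enumerate(lines):
            byline.setdefault(s, []).append(i)
        cand = byline.get(wanted, [])
        # positions closest to the original location are tried first
        return sorted(cand, key=lambda x: abs(x - linenum))

    filelines = ['a\n', 'b\n', 'c\n', 'b\n']
    print(candidates(filelines, 'b\n', linenum=3))   # [3, 1]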
--- a/mercurial/repair.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/repair.py Wed Mar 30 13:23:24 2011 -0500 @@ -11,9 +11,9 @@ from i18n import _ import os -def _bundle(repo, bases, heads, node, suffix, extranodes=None, compress=True): +def _bundle(repo, bases, heads, node, suffix, compress=True): """create a bundle with the specified revisions as a backup""" - cg = repo.changegroupsubset(bases, heads, 'strip', extranodes) + cg = repo.changegroupsubset(bases, heads, 'strip') backupdir = repo.join("strip-backup") if not os.path.isdir(backupdir): os.mkdir(backupdir) @@ -33,40 +33,26 @@ return sorted(files) -def _collectextranodes(repo, files, link): - """return the nodes that have to be saved before the strip""" - def collectone(cl, revlog): - extra = [] - startrev = count = len(revlog) +def _collectbrokencsets(repo, files, striprev): + """return the changesets which will be broken by the truncation""" + s = set() + def collectone(revlog): + links = (revlog.linkrev(i) for i in revlog) # find the truncation point of the revlog - for i in xrange(count): - lrev = revlog.linkrev(i) - if lrev >= link: - startrev = i + 1 + for lrev in links: + if lrev >= striprev: break + # see if any revision after this point has a linkrev + # less than striprev (those will be broken by strip) + for lrev in links: + if lrev < striprev: + s.add(lrev) - # see if any revision after that point has a linkrev less than link - # (we have to manually save these guys) - for i in xrange(startrev, count): - node = revlog.node(i) - lrev = revlog.linkrev(i) - if lrev < link: - extra.append((node, cl.node(lrev))) - - return extra + collectone(repo.manifest) + for fname in files: + collectone(repo.file(fname)) - extranodes = {} - cl = repo.changelog - extra = collectone(cl, repo.manifest) - if extra: - extranodes[1] = extra - for fname in files: - f = repo.file(fname) - extra = collectone(cl, f) - if extra: - extranodes[fname] = extra - - return extranodes + return s def strip(ui, repo, node, backup="all"): cl = repo.changelog @@ -82,28 +68,26 @@ # the list of heads and bases of the set of interesting revisions. # (head = revision in the set that has no descendant in the set; # base = revision in the set that has no ancestor in the set) - tostrip = set((striprev,)) - saveheads = set() - savebases = [] + tostrip = set(cl.descendants(striprev)) + tostrip.add(striprev) + + files = _collectfiles(repo, striprev) + saverevs = _collectbrokencsets(repo, files, striprev) + + # compute heads + saveheads = set(saverevs) for r in xrange(striprev + 1, len(cl)): - parents = cl.parentrevs(r) - if parents[0] in tostrip or parents[1] in tostrip: - # r is a descendant of striprev - tostrip.add(r) - # if this is a merge and one of the parents does not descend - # from striprev, mark that parent as a savehead. 
- if parents[1] != nullrev: - for p in parents: - if p not in tostrip and p > striprev: - saveheads.add(p) - else: - # if no parents of this revision will be stripped, mark it as - # a savebase - if parents[0] < striprev and parents[1] < striprev: - savebases.append(cl.node(r)) + if r not in tostrip: + saverevs.add(r) + saveheads.difference_update(cl.parentrevs(r)) + saveheads.add(r) + saveheads = [cl.node(r) for r in saveheads] - saveheads.difference_update(parents) - saveheads.add(r) + # compute base nodes + if saverevs: + descendants = set(cl.descendants(*saverevs)) + saverevs.difference_update(descendants) + savebases = [cl.node(r) for r in saverevs] bm = repo._bookmarks updatebm = [] @@ -112,20 +96,15 @@ if rev in tostrip: updatebm.append(m) - saveheads = [cl.node(r) for r in saveheads] - files = _collectfiles(repo, striprev) - - extranodes = _collectextranodes(repo, files, striprev) - # create a changegroup for all the branches we need to keep backupfile = None if backup == "all": backupfile = _bundle(repo, [node], cl.heads(), node, 'backup') repo.ui.status(_("saved backup bundle to %s\n") % backupfile) - if saveheads or extranodes: + if saveheads or savebases: # do not compress partial bundle if we remove it from disk later chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp', - extranodes=extranodes, compress=keeppartialbundle) + compress=keeppartialbundle) mfst = repo.manifest @@ -149,7 +128,7 @@ tr.abort() raise - if saveheads or extranodes: + if saveheads or savebases: ui.note(_("adding branch\n")) f = open(chgrpfile, "rb") gen = changegroup.readbundle(f, chgrpfile)
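The new _collectbrokencsets() replaces the old "extra nodes" bookkeeping: it walks a revlog's linkrevs in storage order, finds where truncation will happen, and records the changesets owning revisions past that point whose linkrev is still below striprev (those would be lost by the strip). A toy version with a plain list standing in for a revlog:

    def brokencsets(linkrevs, striprev):
        broken = set()
        links = iter(linkrevs)
        # find the truncation point of the revlog
        for lrev in links:
            if lrev >= striprev:
                break
        # anything after it that links below striprev gets broken by the strip
        for lrev in links:
            if lrev < striprev:
                broken.add(lrev)
        return broken

    print(brokencsets([0, 1, 4, 2, 5, 3], striprev=4))   # {2, 3}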
--- a/mercurial/revlog.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/revlog.py Wed Mar 30 13:23:24 2011 -0500 @@ -399,11 +399,12 @@ yield i break - def findmissing(self, common=None, heads=None): - """Return the ancestors of heads that are not ancestors of common. + def findcommonmissing(self, common=None, heads=None): + """Return a tuple of the ancestors of common and the ancestors of heads + that are not ancestors of common. - More specifically, return a list of nodes N such that every N - satisfies the following constraints: + More specifically, the second element is a list of nodes N such that + every N satisfies the following constraints: 1. N is an ancestor of some node in 'heads' 2. N is not an ancestor of any node in 'common' @@ -441,7 +442,25 @@ visit.append(p) missing = list(missing) missing.sort() - return [self.node(r) for r in missing] + return has, [self.node(r) for r in missing] + + def findmissing(self, common=None, heads=None): + """Return the ancestors of heads that are not ancestors of common. + + More specifically, return a list of nodes N such that every N + satisfies the following constraints: + + 1. N is an ancestor of some node in 'heads' + 2. N is not an ancestor of any node in 'common' + + The list is sorted by revision number, meaning it is + topologically sorted. + + 'heads' and 'common' are both lists of node IDs. If heads is + not supplied, uses all of the revlog's heads. If common is not + supplied, uses nullid.""" + _common, missing = self.findcommonmissing(common, heads) + return missing def nodesbetween(self, roots=None, heads=None): """Return a topological path from 'roots' to 'heads'. @@ -1039,7 +1058,7 @@ self._cache = (node, curr, text) return node - def group(self, nodelist, lookup, infocollect=None, fullrev=False): + def group(self, nodelist, lookup): """Calculate a delta group, yielding a sequence of changegroup chunks (strings). @@ -1049,12 +1068,9 @@ guaranteed to have this parent as it has all history before these changesets. In the case firstparent is nullrev the changegroup starts with a full revision. - fullrev forces the insertion of the full revision, necessary - in the case of shallow clones where the first parent might - not exist at the reciever. """ - revs = [self.rev(n) for n in nodelist] + revs = sorted([self.rev(n) for n in nodelist]) # if we don't have any revisions touched by these changesets, bail if not revs: @@ -1064,23 +1080,17 @@ # add the parent of the first rev p = self.parentrevs(revs[0])[0] revs.insert(0, p) - if p == nullrev: - fullrev = True # build deltas - for d in xrange(len(revs) - 1): - a, b = revs[d], revs[d + 1] + for r in xrange(len(revs) - 1): + a, b = revs[r], revs[r + 1] nb = self.node(b) - if infocollect is not None: - infocollect(nb) - p = self.parents(nb) - meta = nb + p[0] + p[1] + lookup(nb) - if fullrev: + meta = nb + p[0] + p[1] + lookup(self, nb) + if a == nullrev: d = self.revision(nb) meta += mdiff.trivialdiffheader(len(d)) - fullrev = False else: d = self.revdiff(a, b) yield changegroup.chunkheader(len(meta) + len(d))
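The simplified group() above sorts the requested revisions, prepends the parent of the first one, and emits each revision as a delta against the revision that precedes it in the group, falling back to a full text when that predecessor is the null revision. A loose standalone sketch of that chaining (a toy text store instead of real revlog deltas; nullrev is -1 as in Mercurial):

    nullrev = -1

    def group(revs, parent_of_first, texts):
        revs = sorted(revs)
        chain = [parent_of_first] + revs
        for a, b in zip(chain, chain[1:]):
            if a == nullrev:
                yield (b, 'full', texts[b])          # no base: send full text
            else:
                yield (b, 'delta-against-%d' % a, texts[b])

    texts = {0: 'base', 1: 'base+1', 2: 'base+1+2'}
    for entry in group([2, 1, 0], parent_of_first=nullrev, texts=texts):
        print(entry)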
--- a/mercurial/revset.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/revset.py Wed Mar 30 13:23:24 2011 -0500 @@ -6,10 +6,10 @@ # GNU General Public License version 2 or any later version. import re -import parser, util, error, discovery +import parser, util, error, discovery, help, hbisect import bookmarks as bookmarksmod import match as matchmod -from i18n import _, gettext +from i18n import _ elements = { "(": (20, ("group", 1, ")"), ("func", 1, ")")), @@ -298,9 +298,18 @@ return [r for r in subset if r in cs] def branch(repo, subset, x): - """``branch(set)`` - All changesets belonging to the branches of changesets in set. + """``branch(string or set)`` + All changesets belonging to the given branch or the branches of the given + changesets. """ + try: + b = getstring(x, '') + if b in repo.branchmap(): + return [r for r in subset if repo[r].branch() == b] + except error.ParseError: + # not a string, but another revspec, e.g. tip() + pass + s = getset(repo, range(len(repo)), x) b = set() for r in s: @@ -394,7 +403,7 @@ for e in c.files() + [c.user(), c.description()]: if gr.search(e): l.append(r) - continue + break return l def author(repo, subset, x): @@ -423,7 +432,7 @@ for f in repo[r].files(): if m(f): s.append(r) - continue + break return s def contains(repo, subset, x): @@ -438,13 +447,12 @@ for r in subset: if pat in repo[r]: s.append(r) - continue else: for r in subset: for f in repo[r].manifest(): if m(f): s.append(r) - continue + break return s def checkstatus(repo, subset, pat, field): @@ -466,12 +474,11 @@ if fast: if pat in files: s.append(r) - continue else: for f in files: if m(f): s.append(r) - continue + break return s def modifies(repo, subset, x): @@ -683,12 +690,27 @@ for r in bookmarksmod.listbookmarks(repo).values()]) return [r for r in subset if r in bms] +def bisected(repo, subset, x): + """``bisected(string)`` + Changesets marked in the specified bisect state (good, bad, skip). + """ + state = getstring(x, _("bisect requires a string")).lower() + if state not in ('good', 'bad', 'skip', 'unknown'): + raise ParseError(_('invalid bisect state')) + marked = set(repo.changelog.rev(n) for n in hbisect.load_state(repo)[state]) + l = [] + for r in subset: + if r in marked: + l.append(r) + return l + symbols = { "adds": adds, "all": getall, "ancestor": ancestor, "ancestors": ancestors, "author": author, + "bisected": bisected, "bookmark": bookmark, "branch": branch, "children": children, @@ -808,26 +830,16 @@ def match(spec): if not spec: raise error.ParseError(_("empty query")) - tree = parse(spec) + tree, pos = parse(spec) + if (pos != len(spec)): + raise error.ParseError("invalid token", pos) weight, tree = optimize(tree, True) def mfunc(repo, subset): return getset(repo, subset, tree) return mfunc def makedoc(topic, doc): - """Generate and include predicates help in revsets topic.""" - predicates = [] - for name in sorted(symbols): - text = symbols[name].__doc__ - if not text: - continue - text = gettext(text.rstrip()) - lines = text.splitlines() - lines[1:] = [(' ' + l.strip()) for l in lines[1:]] - predicates.append('\n'.join(lines)) - predicates = '\n\n'.join(predicates) - doc = doc.replace('.. predicatesmarker', predicates) - return doc + return help.makeitemsdoc(topic, doc, '.. predicatesmarker', symbols) # tell hggettext to extract docstrings from these functions: i18nfunctions = symbols.values()
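Two things worth noting in the revset changes above: the file-matching loops now break after the first hit instead of continuing to scan, and branch() first tries its argument as a literal branch name, only falling back to evaluating it as a nested revset when that fails. A standalone sketch of the same try-string-first dispatch (all names here are made up for illustration):

    class NotAString(Exception):
        pass

    def getstring(x):
        if isinstance(x, str):
            return x
        raise NotAString()

    def branchfilter(subset, x, branch_of, known_branches, evalset):
        try:
            b = getstring(x)
            if b in known_branches:
                return [r for r in subset if branch_of[r] == b]
        except NotAString:
            pass   # not a literal name: treat it as another revset
        wanted = {branch_of[r] for r in evalset(x)}
        return [r for r in subset if branch_of[r] in wanted]

    branch_of = {0: 'default', 1: 'stable', 2: 'stable'}
    print(branchfilter([0, 1, 2], 'stable', branch_of,
                       {'default', 'stable'}, evalset=lambda x: []))   # [1, 2]
    print(branchfilter([0, 1, 2], ['rev', 1], branch_of,
                       {'default', 'stable'}, evalset=lambda x: [1]))  # [1, 2]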
--- a/mercurial/sshrepo.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/sshrepo.py Wed Mar 30 13:23:24 2011 -0500 @@ -119,9 +119,24 @@ def _callstream(self, cmd, **args): self.ui.debug("sending %s command\n" % cmd) self.pipeo.write("%s\n" % cmd) - for k, v in sorted(args.iteritems()): + _func, names = wireproto.commands[cmd] + keys = names.split() + wireargs = {} + for k in keys: + if k == '*': + wireargs['*'] = args + break + else: + wireargs[k] = args[k] + del args[k] + for k, v in sorted(wireargs.iteritems()): self.pipeo.write("%s %d\n" % (k, len(v))) - self.pipeo.write(v) + if isinstance(v, dict): + for dk, dv in v.iteritems(): + self.pipeo.write("%s %d\n" % (dk, len(dv))) + self.pipeo.write(dv) + else: + self.pipeo.write(v) self.pipeo.flush() return self.pipei
--- a/mercurial/sshserver.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/sshserver.py Wed Mar 30 13:23:24 2011 -0500 @@ -27,21 +27,21 @@ def getargs(self, args): data = {} keys = args.split() - count = len(keys) for n in xrange(len(keys)): argline = self.fin.readline()[:-1] arg, l = argline.split() - val = self.fin.read(int(l)) if arg not in keys: raise util.Abort("unexpected parameter %r" % arg) if arg == '*': star = {} - for n in xrange(int(l)): + for k in xrange(int(l)): + argline = self.fin.readline()[:-1] arg, l = argline.split() val = self.fin.read(int(l)) star[arg] = val data['*'] = star else: + val = self.fin.read(int(l)) data[arg] = val return [data[k] for k in keys]
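The sshrepo and sshserver changes above agree on a framing for command arguments: each argument goes over the wire as "name length\n" followed by exactly that many bytes of value, and a dict-valued catch-all argument ('*') is sent as "* count\n" followed by nested name/length/value triples. The round-trip below is a standalone toy that mirrors _callstream and getargs, not the real protocol code:

    def encode(args):
        out = []
        for k, v in sorted(args.items()):
            if isinstance(v, dict):
                out.append("%s %d\n" % (k, len(v)))     # '*' carries a count
                for dk, dv in sorted(v.items()):
                    out.append("%s %d\n" % (dk, len(dv)))
                    out.append(dv)
            else:
                out.append("%s %d\n" % (k, len(v)))
                out.append(v)
        return "".join(out)

    def decode(wire, nargs):
        pos = [0]
        def readline():
            nl = wire.index('\n', pos[0])
            line, pos[0] = wire[pos[0]:nl], nl + 1
            return line
        def read(n):
            s, pos[0] = wire[pos[0]:pos[0] + n], pos[0] + n
            return s
        data = {}
        for _ in range(nargs):
            name, l = readline().split()
            if name == '*':
                star = {}
                for _ in range(int(l)):
                    dk, dl = readline().split()
                    star[dk] = read(int(dl))
                data[name] = star
            else:
                data[name] = read(int(l))
        return data

    wire = encode({'one': 'foo', '*': {'three': 'baz'}})
    print(decode(wire, 2))   # {'*': {'three': 'baz'}, 'one': 'foo'}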
--- a/mercurial/statichttprepo.py Wed Mar 30 02:22:15 2011 +0900
+++ b/mercurial/statichttprepo.py Wed Mar 30 13:23:24 2011 -0500
@@ -71,7 +71,7 @@
         """return a function that opens files over http"""
         p = base
         def o(path, mode="r", atomictemp=None):
-            if 'a' in mode or 'w' in mode:
+            if mode not in ('r', 'rb'):
                 raise IOError('Permission denied')
             f = "/".join((p, urllib.quote(path)))
             return httprangereader(f, urlopener)
--- a/mercurial/subrepo.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/subrepo.py Wed Mar 30 13:23:24 2011 -0500 @@ -5,10 +5,10 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath +import errno, os, re, xml.dom.minidom, shutil, posixpath import stat, subprocess, tarfile from i18n import _ -import config, util, node, error, cmdutil +import config, util, node, error, cmdutil, url, bookmarks hg = None nullstate = ('', '', 'empty') @@ -193,21 +193,16 @@ """return pull/push path of repo - either based on parent repo .hgsub info or on the top repo config. Abort or return None if no source found.""" if hasattr(repo, '_subparent'): - source = repo._subsource - if source.startswith('/') or '://' in source: - return source + source = url.url(repo._subsource) + source.path = posixpath.normpath(source.path) + if posixpath.isabs(source.path) or source.scheme: + return str(source) parent = _abssource(repo._subparent, push, abort=False) if parent: - if '://' in parent: - if parent[-1] == '/': - parent = parent[:-1] - r = urlparse.urlparse(parent + '/' + source) - r = urlparse.urlunparse((r[0], r[1], - posixpath.normpath(r[2]), - r[3], r[4], r[5])) - return r - else: # plain file system path - return posixpath.normpath(os.path.join(parent, repo._subsource)) + parent = url.url(parent) + parent.path = posixpath.join(parent.path, source.path) + parent.path = posixpath.normpath(parent.path) + return str(parent) else: # recursion reached top repo if hasattr(repo, '_subtoppath'): return repo._subtoppath @@ -432,15 +427,14 @@ def _get(self, state): source, revision, kind = state - try: - self._repo.lookup(revision) - except error.RepoError: + if revision not in self._repo: self._repo._subsource = source srcurl = _abssource(self._repo) self._repo.ui.status(_('pulling subrepo %s from %s\n') % (subrelpath(self), srcurl)) other = hg.repository(self._repo.ui, srcurl) self._repo.pull(other) + bookmarks.updatefromremote(self._repo.ui, self._repo, other) def get(self, state, overwrite=False): self._get(state) @@ -714,6 +708,12 @@ current = None return current + def _gitremote(self, remote): + out = self._gitcommand(['remote', 'show', '-n', remote]) + line = out.split('\n')[1] + i = line.index('URL: ') + len('URL: ') + return line[i:] + def _githavelocally(self, revision): out, code = self._gitdir(['cat-file', '-e', revision]) return code == 0 @@ -767,11 +767,14 @@ def _fetch(self, source, revision): if self._gitmissing(): - self._ui.status(_('cloning subrepo %s\n') % self._relpath) - self._gitnodir(['clone', self._abssource(source), self._abspath]) + source = self._abssource(source) + self._ui.status(_('cloning subrepo %s from %s\n') % + (self._relpath, source)) + self._gitnodir(['clone', source, self._abspath]) if self._githavelocally(revision): return - self._ui.status(_('pulling subrepo %s\n') % self._relpath) + self._ui.status(_('pulling subrepo %s from %s\n') % + (self._relpath, self._gitremote('origin'))) # try only origin: the originally cloned repo self._gitcommand(['fetch']) if not self._githavelocally(revision):
--- a/mercurial/templatefilters.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/templatefilters.py Wed Mar 30 13:23:24 2011 -0500 @@ -6,13 +6,13 @@ # GNU General Public License version 2 or any later version. import cgi, re, os, time, urllib -import encoding, node, util +import encoding, node, util, help -def stringify(thing): - '''turn nested template iterator into string.''' - if hasattr(thing, '__iter__') and not isinstance(thing, str): - return "".join([stringify(t) for t in thing if t is not None]) - return str(thing) +def addbreaks(text): + """:addbreaks: Any text. Add an XHTML "<br />" tag before the end of + every line except the last. + """ + return text.replace('\n', '<br/>\n') agescales = [("year", 3600 * 24 * 365), ("month", 3600 * 24 * 30), @@ -23,7 +23,9 @@ ("second", 1)] def age(date): - '''turn a (timestamp, tzoff) tuple into an age string.''' + """:age: Date. Returns a human-readable date/time difference between the + given date/time and the current date/time. + """ def plural(t, c): if c == 1: @@ -34,18 +36,65 @@ now = time.time() then = date[0] + future = False if then > now: - return 'in the future' - - delta = max(1, int(now - then)) - if delta > agescales[0][1] * 2: - return util.shortdate(date) + future = True + delta = max(1, int(then - now)) + if delta > agescales[0][1] * 30: + return 'in the distant future' + else: + delta = max(1, int(now - then)) + if delta > agescales[0][1] * 2: + return util.shortdate(date) for t, s in agescales: n = delta // s if n >= 2 or s == 1: + if future: + return '%s from now' % fmt(t, n) return '%s ago' % fmt(t, n) +def basename(path): + """:basename: Any text. Treats the text as a path, and returns the last + component of the path after splitting by the path separator + (ignoring trailing separators). For example, "foo/bar/baz" becomes + "baz" and "foo/bar//" becomes "bar". + """ + return os.path.basename(path) + +def datefilter(text): + """:date: Date. Returns a date in a Unix date format, including the + timezone: "Mon Sep 04 15:13:13 2006 0700". + """ + return util.datestr(text) + +def domain(author): + """:domain: Any text. Finds the first string that looks like an email + address, and extracts just the domain component. Example: ``User + <user@example.com>`` becomes ``example.com``. + """ + f = author.find('@') + if f == -1: + return '' + author = author[f + 1:] + f = author.find('>') + if f >= 0: + author = author[:f] + return author + +def email(text): + """:email: Any text. Extracts the first string that looks like an email + address. Example: ``User <user@example.com>`` becomes + ``user@example.com``. + """ + return util.email(text) + +def escape(text): + """:escape: Any text. Replaces the special XML/XHTML characters "&", "<" + and ">" with XML entities. + """ + return cgi.escape(text, True) + para_re = None space_re = None @@ -74,40 +123,45 @@ return "".join([space_re.sub(' ', util.wrap(para, width=width)) + rest for para, rest in findparas()]) +def fill68(text): + """:fill68: Any text. Wraps the text to fit in 68 columns.""" + return fill(text, 68) + +def fill76(text): + """:fill76: Any text. Wraps the text to fit in 76 columns.""" + return fill(text, 76) + def firstline(text): - '''return the first line of text''' + """:firstline: Any text. Returns the first line of text.""" try: return text.splitlines(True)[0].rstrip('\r\n') except IndexError: return '' -def nl2br(text): - '''replace raw newlines with xhtml line breaks.''' - return text.replace('\n', '<br/>\n') +def hexfilter(text): + """:hex: Any text. 
Convert a binary Mercurial node identifier into + its long hexadecimal representation. + """ + return node.hex(text) -def obfuscate(text): - text = unicode(text, encoding.encoding, 'replace') - return ''.join(['&#%d;' % ord(c) for c in text]) +def hgdate(text): + """:hgdate: Date. Returns the date as a pair of numbers: "1157407993 + 25200" (Unix timestamp, timezone offset). + """ + return "%d %d" % text -def domain(author): - '''get domain of author, or empty string if none.''' - f = author.find('@') - if f == -1: - return '' - author = author[f + 1:] - f = author.find('>') - if f >= 0: - author = author[:f] - return author +def isodate(text): + """:isodate: Date. Returns the date in ISO 8601 format: "2009-08-18 13:00 + +0200". + """ + return util.datestr(text, '%Y-%m-%d %H:%M %1%2') -def person(author): - '''get name of author, or else username.''' - if not '@' in author: - return author - f = author.find('<') - if f == -1: - return util.shortuser(author) - return author[:f].rstrip() +def isodatesec(text): + """:isodatesec: Date. Returns the date in ISO 8601 format, including + seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date + filter. + """ + return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2') def indent(text, prefix): '''indent each non-empty line of text after first with prefix.''' @@ -124,38 +178,6 @@ yield '\n' return "".join(indenter()) -def permissions(flags): - if "l" in flags: - return "lrwxrwxrwx" - if "x" in flags: - return "-rwxr-xr-x" - return "-rw-r--r--" - -def xmlescape(text): - text = (text - .replace('&', '&') - .replace('<', '<') - .replace('>', '>') - .replace('"', '"') - .replace("'", ''')) # ' invalid in HTML - return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) - -def uescape(c): - if ord(c) < 0x80: - return c - else: - return '\\u%04x' % ord(c) - -_escapes = [ - ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'), - ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'), -] - -def jsonescape(s): - for k, v in _escapes: - s = s.replace(k, v) - return ''.join(uescape(c) for c in s) - def json(obj): if obj is None or obj is False or obj is True: return {None: 'null', False: 'false', True: 'true'}[obj] @@ -180,49 +202,163 @@ else: raise TypeError('cannot encode type %s' % obj.__class__.__name__) +def _uescape(c): + if ord(c) < 0x80: + return c + else: + return '\\u%04x' % ord(c) + +_escapes = [ + ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'), + ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'), +] + +def jsonescape(s): + for k, v in _escapes: + s = s.replace(k, v) + return ''.join(_uescape(c) for c in s) + +def localdate(text): + """:localdate: Date. Converts a date to local date.""" + return (text[0], util.makedate()[1]) + +def nonempty(str): + """:nonempty: Any text. Returns '(none)' if the string is empty.""" + return str or "(none)" + +def obfuscate(text): + """:obfuscate: Any text. Returns the input text rendered as a sequence of + XML entities. + """ + text = unicode(text, encoding.encoding, 'replace') + return ''.join(['&#%d;' % ord(c) for c in text]) + +def permissions(flags): + if "l" in flags: + return "lrwxrwxrwx" + if "x" in flags: + return "-rwxr-xr-x" + return "-rw-r--r--" + +def person(author): + """:person: Any text. Returns the text before an email address.""" + if not '@' in author: + return author + f = author.find('<') + if f == -1: + return util.shortuser(author) + return author[:f].rstrip() + +def rfc3339date(text): + """:rfc3339date: Date. 
Returns a date using the Internet date format + specified in RFC 3339: "2009-08-18T13:00:13+02:00". + """ + return util.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2") + +def rfc822date(text): + """:rfc822date: Date. Returns a date using the same format used in email + headers: "Tue, 18 Aug 2009 13:00:13 +0200". + """ + return util.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2") + +def short(text): + """:short: Changeset hash. Returns the short form of a changeset hash, + i.e. a 12 hexadecimal digit string. + """ + return text[:12] + +def shortdate(text): + """:shortdate: Date. Returns a date like "2006-09-18".""" + return util.shortdate(text) + +def stringescape(text): + return text.encode('string_escape') + +def stringify(thing): + """:stringify: Any type. Turns the value into text by converting values into + text and concatenating them. + """ + if hasattr(thing, '__iter__') and not isinstance(thing, str): + return "".join([stringify(t) for t in thing if t is not None]) + return str(thing) + +def strip(text): + """:strip: Any text. Strips all leading and trailing whitespace.""" + return text.strip() + def stripdir(text): - '''Treat the text as path and strip a directory level, if possible.''' + """:stripdir: Treat the text as path and strip a directory level, if + possible. For example, "foo" and "foo/bar" becomes "foo". + """ dir = os.path.dirname(text) if dir == "": return os.path.basename(text) else: return dir -def nonempty(str): - return str or "(none)" +def tabindent(text): + """:tabindent: Any text. Returns the text, with every line except the + first starting with a tab character. + """ + return indent(text, '\t') + +def urlescape(text): + """:urlescape: Any text. Escapes all "special" characters. For example, + "foo bar" becomes "foo%20bar". + """ + return urllib.quote(text) + +def userfilter(text): + """:user: Any text. 
Returns the user portion of an email address.""" + return util.shortuser(text) + +def xmlescape(text): + text = (text + .replace('&', '&') + .replace('<', '<') + .replace('>', '>') + .replace('"', '"') + .replace("'", ''')) # ' invalid in HTML + return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) filters = { - "addbreaks": nl2br, - "basename": os.path.basename, - "stripdir": stripdir, + "addbreaks": addbreaks, "age": age, - "date": lambda x: util.datestr(x), + "basename": basename, + "date": datefilter, "domain": domain, - "email": util.email, - "escape": lambda x: cgi.escape(x, True), - "fill68": lambda x: fill(x, width=68), - "fill76": lambda x: fill(x, width=76), + "email": email, + "escape": escape, + "fill68": fill68, + "fill76": fill76, "firstline": firstline, - "tabindent": lambda x: indent(x, '\t'), - "hgdate": lambda x: "%d %d" % x, - "isodate": lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2'), - "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'), + "hex": hexfilter, + "hgdate": hgdate, + "isodate": isodate, + "isodatesec": isodatesec, "json": json, "jsonescape": jsonescape, - "localdate": lambda x: (x[0], util.makedate()[1]), + "localdate": localdate, "nonempty": nonempty, "obfuscate": obfuscate, "permissions": permissions, "person": person, - "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2"), - "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2"), - "hex": node.hex, - "short": lambda x: x[:12], - "shortdate": util.shortdate, + "rfc3339date": rfc3339date, + "rfc822date": rfc822date, + "short": short, + "shortdate": shortdate, + "stringescape": stringescape, "stringify": stringify, - "strip": lambda x: x.strip(), - "urlescape": lambda x: urllib.quote(x), - "user": lambda x: util.shortuser(x), - "stringescape": lambda x: x.encode('string_escape'), + "strip": strip, + "stripdir": stripdir, + "tabindent": tabindent, + "urlescape": urlescape, + "user": userfilter, "xmlescape": xmlescape, } + +def makedoc(topic, doc): + return help.makeitemsdoc(topic, doc, '.. filtersmarker', filters) + +# tell hggettext to extract docstrings from these functions: +i18nfunctions = filters.values()
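The filters above now carry ":name: description" docstrings, and the descriptions double as the behaviour contract. A quick demonstration of the documented person/domain behaviour, using standalone copies of the same string handling so the snippet runs without Mercurial on the path (the shortuser fallback is approximated):

    def domain(author):
        f = author.find('@')
        if f == -1:
            return ''
        author = author[f + 1:]
        f = author.find('>')
        if f >= 0:
            author = author[:f]
        return author

    def person(author):
        if '@' not in author:
            return author
        f = author.find('<')
        if f == -1:
            return author.split('@')[0]   # stand-in for util.shortuser
        return author[:f].rstrip()

    author = 'User Name <user@example.com>'
    print(domain(author))   # example.com
    print(person(author))   # User Name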
--- a/mercurial/templatekw.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/templatekw.py Wed Mar 30 13:23:24 2011 -0500 @@ -6,7 +6,7 @@ # GNU General Public License version 2 or any later version. from node import hex -import encoding, patch, util, error +import encoding, patch, util, error, help def showlist(name, values, plural=None, **args): '''expand set of values. @@ -143,32 +143,49 @@ def showauthor(repo, ctx, templ, **args): + """:author: String. The unmodified author of the changeset.""" return ctx.user() def showbranch(**args): + """:branch: String. The name of the branch on which the changeset was + committed. + """ return args['ctx'].branch() def showbranches(**args): + """:branches: List of strings. The name of the branch on which the + changeset was committed. Will be empty if the branch name was + default. + """ branch = args['ctx'].branch() if branch != 'default': return showlist('branch', [branch], plural='branches', **args) def showbookmarks(**args): + """:bookmarks: List of strings. Any bookmarks associated with the + changeset. + """ bookmarks = args['ctx'].bookmarks() return showlist('bookmark', bookmarks, **args) def showchildren(**args): + """:children: List of strings. The children of the changeset.""" ctx = args['ctx'] childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()] return showlist('children', childrevs, **args) def showdate(repo, ctx, templ, **args): + """:date: Date information. The date when the changeset was committed.""" return ctx.date() def showdescription(repo, ctx, templ, **args): + """:desc: String. The text of the changeset description.""" return ctx.description().strip() def showdiffstat(repo, ctx, templ, **args): + """:diffstat: String. Statistics of changes with the following format: + "modified files: +added/-removed lines" + """ files, adds, removes = 0, 0, 0 for i in patch.diffstatdata(util.iterlines(ctx.diff())): files += 1 @@ -184,10 +201,14 @@ yield templ('extra', **args) def showfileadds(**args): + """:file_adds: List of strings. Files added by this changeset.""" repo, ctx, revcache = args['repo'], args['ctx'], args['revcache'] return showlist('file_add', getfiles(repo, ctx, revcache)[1], **args) def showfilecopies(**args): + """:file_copies: List of strings. Files copied in this changeset with + their sources. + """ cache, ctx = args['cache'], args['ctx'] copies = args['revcache'].get('copies') if copies is None: @@ -207,25 +228,37 @@ # provided before calling the templater, usually with a --copies # command line switch. def showfilecopiesswitch(**args): + """:file_copies_switch: List of strings. Like "file_copies" but displayed + only if the --copied switch is set. + """ copies = args['revcache'].get('copies') or [] c = [{'name': x[0], 'source': x[1]} for x in copies] return showlist('file_copy', c, plural='file_copies', **args) def showfiledels(**args): + """:file_dels: List of strings. Files removed by this changeset.""" repo, ctx, revcache = args['repo'], args['ctx'], args['revcache'] return showlist('file_del', getfiles(repo, ctx, revcache)[2], **args) def showfilemods(**args): + """:file_mods: List of strings. Files modified by this changeset.""" repo, ctx, revcache = args['repo'], args['ctx'], args['revcache'] return showlist('file_mod', getfiles(repo, ctx, revcache)[0], **args) def showfiles(**args): + """:files: List of strings. All files modified, added, or removed by this + changeset. 
+ """ return showlist('file', args['ctx'].files(), **args) def showlatesttag(repo, ctx, templ, cache, **args): + """:latesttag: String. Most recent global tag in the ancestors of this + changeset. + """ return getlatesttags(repo, ctx, cache)[2] def showlatesttagdistance(repo, ctx, templ, cache, **args): + """:latesttagdistance: Integer. Longest path to the latest tag.""" return getlatesttags(repo, ctx, cache)[1] def showmanifest(**args): @@ -236,12 +269,17 @@ return templ('manifest', **args) def shownode(repo, ctx, templ, **args): + """:node: String. The changeset identification hash, as a 40 hexadecimal + digit string. + """ return ctx.hex() def showrev(repo, ctx, templ, **args): + """:rev: Integer. The repository-local changeset revision number.""" return ctx.rev() def showtags(**args): + """:tags: List of strings. Any tags associated with the changeset.""" return showlist('tag', args['ctx'].tags(), **args) # keywords are callables like: @@ -276,3 +314,8 @@ 'tags': showtags, } +def makedoc(topic, doc): + return help.makeitemsdoc(topic, doc, '.. keywordsmarker', keywords) + +# tell hggettext to extract docstrings from these functions: +i18nfunctions = keywords.values()
--- a/mercurial/templater.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/templater.py Wed Mar 30 13:23:24 2011 -0500 @@ -69,7 +69,6 @@ else: raise error.ParseError(_("syntax error"), pos) pos += 1 - data[2] = pos yield ('end', None, pos) def compiletemplate(tmpl, context): @@ -91,8 +90,8 @@ parsed.append(("string", tmpl[pos:n])) pd = [tmpl, n + 1, stop] - parsed.append(p.parse(pd)) - pos = pd[2] + parseres, pos = p.parse(pd) + parsed.append(parseres) return [compileexp(e, context) for e in parsed]
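The templater change above is the main consumer of the new parse() return value: because the parser now reports where it stopped, compiletemplate can resume scanning right after the expression it just parsed instead of relying on the parser mutating the shared pd list. A toy compiler showing the same resume-at-pos pattern (the "parser" here just consumes up to the closing brace):

    def parseexpr(tmpl, start, stop):
        end = tmpl.index('}', start, stop)     # toy: expression ends at '}'
        return ('expr', tmpl[start:end]), end + 1

    def compiletemplate(tmpl):
        parsed, pos, stop = [], 0, len(tmpl)
        while pos < stop:
            n = tmpl.find('{', pos)
            if n < 0:
                parsed.append(('string', tmpl[pos:]))
                break
            if n > pos:
                parsed.append(('string', tmpl[pos:n]))
            parseres, pos = parseexpr(tmpl, n + 1, stop)   # resume after '}'
            parsed.append(parseres)
        return parsed

    print(compiletemplate('rev {rev} of {desc}\n'))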
--- a/mercurial/ui.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/ui.py Wed Mar 30 13:23:24 2011 -0500 @@ -273,7 +273,7 @@ cfg = self._data(untrusted) for section in cfg.sections(): for name, value in self.configitems(section, untrusted): - yield section, name, str(value).replace('\n', '\\n') + yield section, name, value def plain(self): '''is plain mode active? @@ -483,7 +483,7 @@ self.write(msg, ' ', default, "\n") return default try: - r = self._readline(msg + ' ') + r = self._readline(self.label(msg, 'ui.prompt') + ' ') if not r: return default return r
--- a/mercurial/url.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/url.py Wed Mar 30 13:23:24 2011 -0500 @@ -12,28 +12,210 @@ from i18n import _ import keepalive, util -def _urlunparse(scheme, netloc, path, params, query, fragment, url): - '''Handle cases where urlunparse(urlparse(x://)) doesn't preserve the "//"''' - result = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) - if (scheme and - result.startswith(scheme + ':') and - not result.startswith(scheme + '://') and - url.startswith(scheme + '://') - ): - result = scheme + '://' + result[len(scheme + ':'):] - return result +class url(object): + """Reliable URL parser. + + This parses URLs and provides attributes for the following + components: + + <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment> + + Missing components are set to None. The only exception is + fragment, which is set to '' if present but empty. + + If parse_fragment is False, fragment is included in query. If + parse_query is False, query is included in path. If both are + False, both fragment and query are included in path. + + See http://www.ietf.org/rfc/rfc2396.txt for more information. + + Examples: + + >>> url('http://www.ietf.org/rfc/rfc2396.txt') + <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'> + >>> url('ssh://[::1]:2200//home/joe/repo') + <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'> + >>> url('file:///home/joe/repo') + <url scheme: 'file', path: '/home/joe/repo'> + >>> url('bundle:foo') + <url scheme: 'bundle', path: 'foo'> + + Authentication credentials: + + >>> url('ssh://joe:xyz@x/repo') + <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'> + >>> url('ssh://joe@x/repo') + <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'> + + Query strings and fragments: + + >>> url('http://host/a?b#c') + <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'> + >>> url('http://host/a?b#c', parse_query=False, parse_fragment=False) + <url scheme: 'http', host: 'host', path: 'a?b#c'> + """ + + _safechars = "!~*'()+" + _safepchars = "/!~*'()+" + + def __init__(self, path, parse_query=True, parse_fragment=True): + # We slowly chomp away at path until we have only the path left + self.scheme = self.user = self.passwd = self.host = None + self.port = self.path = self.query = self.fragment = None + self._localpath = True + + if not path.startswith('/') and ':' in path: + parts = path.split(':', 1) + if parts[0]: + self.scheme, path = parts + self._localpath = False + + if not path: + path = None + if self._localpath: + self.path = '' + return + else: + if parse_fragment and '#' in path: + path, self.fragment = path.split('#', 1) + if not path: + path = None + if self._localpath: + self.path = path + return + + if parse_query and '?' 
in path: + path, self.query = path.split('?', 1) + if not path: + path = None + if not self.query: + self.query = None + + # // is required to specify a host/authority + if path and path.startswith('//'): + parts = path[2:].split('/', 1) + if len(parts) > 1: + self.host, path = parts + path = path + else: + self.host = parts[0] + path = None + if not self.host: + self.host = None + if path: + path = '/' + path + + if self.host and '@' in self.host: + self.user, self.host = self.host.rsplit('@', 1) + if ':' in self.user: + self.user, self.passwd = self.user.split(':', 1) + if not self.host: + self.host = None -def hidepassword(url): - '''hide user credential in a url string''' - scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) - netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc) - return _urlunparse(scheme, netloc, path, params, query, fragment, url) + # Don't split on colons in IPv6 addresses without ports + if (self.host and ':' in self.host and + not (self.host.startswith('[') and self.host.endswith(']'))): + self.host, self.port = self.host.rsplit(':', 1) + if not self.host: + self.host = None + self.path = path + + for a in ('user', 'passwd', 'host', 'port', + 'path', 'query', 'fragment'): + v = getattr(self, a) + if v is not None: + setattr(self, a, urllib.unquote(v)) + + def __repr__(self): + attrs = [] + for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path', + 'query', 'fragment'): + v = getattr(self, a) + if v is not None: + attrs.append('%s: %r' % (a, v)) + return '<url %s>' % ', '.join(attrs) + + def __str__(self): + """Join the URL's components back into a URL string. + + Examples: + + >>> str(url('http://user:pw@host:80/?foo#bar')) + 'http://user:pw@host:80/?foo#bar' + >>> str(url('ssh://user:pw@[::1]:2200//home/joe#')) + 'ssh://user:pw@[::1]:2200//home/joe#' + >>> str(url('http://localhost:80//')) + 'http://localhost:80//' + >>> str(url('http://localhost:80/')) + 'http://localhost:80/' + >>> str(url('http://localhost:80')) + 'http://localhost:80' + >>> str(url('bundle:foo')) + 'bundle:foo' + >>> str(url('path')) + 'path' + """ + if self._localpath: + s = self.path + if self.fragment: + s += '#' + self.fragment + return s -def removeauth(url): + s = self.scheme + ':' + if (self.user or self.passwd or self.host or + self.scheme and not self.path): + s += '//' + if self.user: + s += urllib.quote(self.user, safe=self._safechars) + if self.passwd: + s += ':' + urllib.quote(self.passwd, safe=self._safechars) + if self.user or self.passwd: + s += '@' + if self.host: + if not (self.host.startswith('[') and self.host.endswith(']')): + s += urllib.quote(self.host) + else: + s += self.host + if self.port: + s += ':' + urllib.quote(self.port) + if ((self.host and self.path is not None) or + (self.host and self.query or self.fragment)): + s += '/' + if self.path: + s += urllib.quote(self.path, safe=self._safepchars) + if self.query: + s += '?' 
+ urllib.quote(self.query, safe=self._safepchars) + if self.fragment is not None: + s += '#' + urllib.quote(self.fragment, safe=self._safepchars) + return s + + def authinfo(self): + user, passwd = self.user, self.passwd + try: + self.user, self.passwd = None, None + s = str(self) + finally: + self.user, self.passwd = user, passwd + if not self.user: + return (s, None) + return (s, (None, (str(self), self.host), + self.user, self.passwd or '')) + +def has_scheme(path): + return bool(url(path).scheme) + +def hidepassword(u): + '''hide user credential in a url string''' + u = url(u) + if u.passwd: + u.passwd = '***' + return str(u) + +def removeauth(u): '''remove all authentication information from a url string''' - scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) - netloc = netloc[netloc.find('@')+1:] - return _urlunparse(scheme, netloc, path, params, query, fragment, url) + u = url(u) + u.user = u.passwd = None + return str(u) def netlocsplit(netloc): '''split [user[:passwd]@]host[:port] into 4-tuple.'''
--- a/mercurial/util.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/util.py Wed Mar 30 13:23:24 2011 -0500 @@ -438,6 +438,9 @@ return check +def makedir(path, notindexed): + os.mkdir(path) + def unlinkpath(f): """unlink and remove the directory if it is empty""" os.unlink(f) @@ -769,7 +772,18 @@ def gui(): '''Are we running in a GUI?''' - return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY") + if sys.platform == 'darwin': + if 'SSH_CONNECTION' in os.environ: + # handle SSH access to a box where the user is logged in + return False + elif getattr(osutil, 'isgui', None): + # check if a CoreGraphics session is available + return osutil.isgui() + else: + # pure build; use a safe default + return True + else: + return os.name == "nt" or os.environ.get("DISPLAY") def mktempcopy(name, emptyok=False, createmode=None): """Create a temporary file with the same contents from name @@ -1204,7 +1218,10 @@ return parsedate(date, extendeddateformats, d)[0] date = date.strip() - if date[0] == "<": + + if not date: + raise Abort(_("dates cannot consist entirely of whitespace")) + elif date[0] == "<": when = upper(date[1:]) return lambda x: x <= when elif date[0] == ">":
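The new gui() logic above distinguishes three macOS cases: an SSH session has no window server, a native build can ask CoreGraphics through the new osutil.isgui(), and a pure-Python build assumes a GUI. Restated as a standalone sketch with an injectable isgui so it runs anywhere (isgui stands in for the optional CoreGraphics check added to osutil.c in this changeset):

    import os, sys

    def gui(platform=sys.platform, environ=os.environ, isgui=None):
        if platform == 'darwin':
            if 'SSH_CONNECTION' in environ:
                # logged in over ssh: no window server for this session
                return False
            elif isgui is not None:
                # native build: ask CoreGraphics whether a session exists
                return isgui()
            else:
                # pure-Python build: assume a GUI is available
                return True
        return os.name == 'nt' or bool(environ.get('DISPLAY'))

    print(gui(platform='darwin', environ={'SSH_CONNECTION': '...'}))  # False
    print(gui(platform='darwin', environ={}, isgui=lambda: True))     # True
    print(gui(platform='linux2', environ={'DISPLAY': ':0'}))          # True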
--- a/mercurial/win32.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/win32.py Wed Mar 30 13:23:24 2011 -0500 @@ -6,7 +6,7 @@ # GNU General Public License version 2 or any later version. import encoding -import ctypes, errno, os, struct, subprocess +import ctypes, errno, os, struct, subprocess, random _kernel32 = ctypes.windll.kernel32 @@ -56,6 +56,10 @@ _OPEN_EXISTING = 3 +# SetFileAttributes +_FILE_ATTRIBUTE_NORMAL = 0x80 +_FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000 + # Process Security and Access Rights _PROCESS_QUERY_INFORMATION = 0x0400 @@ -316,3 +320,54 @@ raise ctypes.WinError() return pi.dwProcessId + +def unlink(f): + '''try to implement POSIX' unlink semantics on Windows''' + + # POSIX allows to unlink and rename open files. Windows has serious + # problems with doing that: + # - Calling os.unlink (or os.rename) on a file f fails if f or any + # hardlinked copy of f has been opened with Python's open(). There is no + # way such a file can be deleted or renamed on Windows (other than + # scheduling the delete or rename for the next reboot). + # - Calling os.unlink on a file that has been opened with Mercurial's + # posixfile (or comparable methods) will delay the actual deletion of + # the file for as long as the file is held open. The filename is blocked + # during that time and cannot be used for recreating a new file under + # that same name ("zombie file"). Directories containing such zombie files + # cannot be removed or moved. + # A file that has been opened with posixfile can be renamed, so we rename + # f to a random temporary name before calling os.unlink on it. This allows + # callers to recreate f immediately while having other readers do their + # implicit zombie filename blocking on a temporary name. + + for tries in xrange(10): + temp = '%s-%08x' % (f, random.randint(0, 0xffffffff)) + try: + os.rename(f, temp) # raises OSError EEXIST if temp exists + break + except OSError, e: + if e.errno != errno.EEXIST: + raise + else: + raise IOError, (errno.EEXIST, "No usable temporary filename found") + + try: + os.unlink(temp) + except OSError: + # The unlink might have failed because the READONLY attribute may heave + # been set on the original file. Rename works fine with READONLY set, + # but not os.unlink. Reset all attributes and try again. + _kernel32.SetFileAttributesA(temp, _FILE_ATTRIBUTE_NORMAL) + try: + os.unlink(temp) + except OSError: + # The unlink might have failed due to some very rude AV-Scanners. + # Leaking a tempfile is the lesser evil than aborting here and + # leaving some potentially serious inconsistencies. + pass + +def makedir(path, notindexed): + os.mkdir(path) + if notindexed: + _kernel32.SetFileAttributesA(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)
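The long comment above carries the whole strategy: renaming an open file works on Windows even when unlinking it does not, so the file is first renamed to a random temporary name (freeing the original name immediately) and the temporary is then unlinked. A simplified, portable sketch of that rename-then-delete loop; the real version additionally retries with file attributes cleared and tolerates AV-scanner interference:

    import errno, os, random

    def windows_style_unlink(f, tries=10):
        for _ in range(tries):
            temp = '%s-%08x' % (f, random.randint(0, 0xffffffff))
            try:
                # on Windows this fails with EEXIST if temp already exists
                os.rename(f, temp)
                break
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        else:
            raise IOError(errno.EEXIST, "No usable temporary filename found")
        # the original name is free again; readers holding the old file open
        # now block the temporary name instead
        os.unlink(temp)

    open('demo.txt', 'w').close()
    windows_style_unlink('demo.txt')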
--- a/mercurial/windows.py Wed Mar 30 02:22:15 2011 +0900 +++ b/mercurial/windows.py Wed Mar 30 13:23:24 2011 -0500 @@ -7,7 +7,7 @@ from i18n import _ import osutil, error -import errno, msvcrt, os, re, sys, random, subprocess +import errno, msvcrt, os, re, sys, subprocess nulldev = 'NUL:' umask = 002 @@ -286,53 +286,13 @@ def unlinkpath(f): """unlink and remove the directory if it is empty""" - os.unlink(f) + unlink(f) # try removing directories that might now be empty try: _removedirs(os.path.dirname(f)) except OSError: pass -def unlink(f): - '''try to implement POSIX' unlink semantics on Windows''' - - # POSIX allows to unlink and rename open files. Windows has serious - # problems with doing that: - # - Calling os.unlink (or os.rename) on a file f fails if f or any - # hardlinked copy of f has been opened with Python's open(). There is no - # way such a file can be deleted or renamed on Windows (other than - # scheduling the delete or rename for the next reboot). - # - Calling os.unlink on a file that has been opened with Mercurial's - # posixfile (or comparable methods) will delay the actual deletion of - # the file for as long as the file is held open. The filename is blocked - # during that time and cannot be used for recreating a new file under - # that same name ("zombie file"). Directories containing such zombie files - # cannot be removed or moved. - # A file that has been opened with posixfile can be renamed, so we rename - # f to a random temporary name before calling os.unlink on it. This allows - # callers to recreate f immediately while having other readers do their - # implicit zombie filename blocking on a temporary name. - - for tries in xrange(10): - temp = '%s-%08x' % (f, random.randint(0, 0xffffffff)) - try: - os.rename(f, temp) # raises OSError EEXIST if temp exists - break - except OSError, e: - if e.errno != errno.EEXIST: - raise - else: - raise IOError, (errno.EEXIST, "No usable temporary filename found") - - try: - os.unlink(temp) - except: - # Some very rude AV-scanners on Windows may cause this unlink to fail. - # Not aborting here just leaks the temp file, whereas aborting at this - # point may leave serious inconsistencies. Ideally, we would notify - # the user in this case here. - pass - def rename(src, dst): '''atomically rename file src to dst, replacing dst if it exists''' try:
--- a/mercurial/wireproto.py Wed Mar 30 02:22:15 2011 +0900
+++ b/mercurial/wireproto.py Wed Mar 30 13:23:24 2011 -0500
@@ -15,7 +15,9 @@
 # list of nodes encoding / decoding
 
 def decodelist(l, sep=' '):
-    return map(bin, l.split(sep))
+    if l:
+        return map(bin, l.split(sep))
+    return []
 
 def encodelist(l, sep=' '):
     return sep.join(map(hex, l))
@@ -35,7 +37,15 @@
         d = self._call("heads")
         try:
             return decodelist(d[:-1])
-        except:
+        except ValueError:
+            self._abort(error.ResponseError(_("unexpected response:"), d))
+
+    def known(self, nodes):
+        n = encodelist(nodes)
+        d = self._call("known", nodes=n)
+        try:
+            return [bool(int(f)) for f in d]
+        except ValueError:
             self._abort(error.ResponseError(_("unexpected response:"), d))
 
     def branchmap(self):
@@ -57,7 +67,7 @@
         try:
             br = [tuple(decodelist(b)) for b in d.splitlines()]
             return br
-        except:
+        except ValueError:
             self._abort(error.ResponseError(_("unexpected response:"), d))
 
     def between(self, pairs):
@@ -68,7 +78,7 @@
             d = self._call("between", pairs=n)
             try:
                 r.extend(l and decodelist(l) or [] for l in d.splitlines())
-            except:
+            except ValueError:
                 self._abort(error.ResponseError(_("unexpected response:"), d))
         return r
 
@@ -113,6 +123,16 @@
                              bases=bases, heads=heads)
         return changegroupmod.unbundle10(self._decompress(f), 'UN')
 
+    def getbundle(self, source, heads=None, common=None):
+        self.requirecap('getbundle', _('look up remote changes'))
+        opts = {}
+        if heads is not None:
+            opts['heads'] = encodelist(heads)
+        if common is not None:
+            opts['common'] = encodelist(common)
+        f = self._callstream("getbundle", **opts)
+        return changegroupmod.unbundle10(self._decompress(f), 'UN')
+
     def unbundle(self, cg, heads, source):
         '''Send cg (a readable file-like object representing the
         changegroup to push, typically a chunkbuffer object) to the
@@ -133,6 +153,15 @@
             self.ui.status(_('remote: '), l)
         return ret
 
+    def debugwireargs(self, one, two, three=None, four=None):
+        # don't pass optional arguments left at their default value
+        opts = {}
+        if three is not None:
+            opts['three'] = three
+        if four is not None:
+            opts['four'] = four
+        return self._call('debugwireargs', one=one, two=two, **opts)
+
 # server side
 
 class streamres(object):
@@ -152,6 +181,17 @@
     args = proto.getargs(spec)
     return func(repo, proto, *args)
 
+def options(cmd, keys, others):
+    opts = {}
+    for k in keys:
+        if k in others:
+            opts[k] = others[k]
+            del others[k]
+    if others:
+        sys.stderr.write("abort: %s got unexpected arguments %s\n"
+                         % (cmd, ",".join(others)))
+    return opts
+
 def between(repo, proto, pairs):
     pairs = [decodelist(p, '-') for p in pairs.split(" ")]
     r = []
@@ -176,7 +216,7 @@
     return "".join(r)
 
 def capabilities(repo, proto):
-    caps = 'lookup changegroupsubset branchmap pushkey'.split()
+    caps = 'lookup changegroupsubset branchmap pushkey known getbundle'.split()
     if _allowstream(repo.ui):
         requiredformats = repo.requirements & repo.supportedformats
         # if our local revlogs are just revlogv1, add 'stream' cap
@@ -199,6 +239,18 @@
     cg = repo.changegroupsubset(bases, heads, 'serve')
     return streamres(proto.groupchunks(cg))
 
+def debugwireargs(repo, proto, one, two, others):
+    # only accept optional args from the known set
+    opts = options('debugwireargs', ['three', 'four'], others)
+    return repo.debugwireargs(one, two, **opts)
+
+def getbundle(repo, proto, others):
+    opts = options('getbundle', ['heads', 'common'], others)
+    for k, v in opts.iteritems():
+        opts[k] = decodelist(v)
+    cg = repo.getbundle('serve', **opts)
+    return streamres(proto.groupchunks(cg))
+
 def heads(repo, proto):
     h = repo.heads()
     return encodelist(h) + "\n"
@@ -228,6 +280,9 @@
         success = 0
     return "%s %s\n" % (success, r)
 
+def known(repo, proto, nodes):
+    return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
+
 def pushkey(repo, proto, namespace, key, old, new):
     # compatibility with pre-1.8 clients which were accidentally
     # sending raw binary nodes rather than utf-8-encoded hex
@@ -343,8 +398,11 @@
     'capabilities': (capabilities, ''),
     'changegroup': (changegroup, 'roots'),
     'changegroupsubset': (changegroupsubset, 'bases heads'),
+    'debugwireargs': (debugwireargs, 'one two *'),
+    'getbundle': (getbundle, '*'),
     'heads': (heads, ''),
    'hello': (hello, ''),
+    'known': (known, 'nodes'),
    'listkeys': (listkeys, 'namespace'),
    'lookup': (lookup, 'key'),
    'pushkey': (pushkey, 'namespace key old new'),
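The wireproto changes above introduce two new wire commands, 'known' and 'getbundle', advertise them in the capability string, and add a '*' argument spec so a command can receive optional keyword arguments; the new options() helper copies the recognized keys and reports unexpected ones to stderr instead of passing them through. A minimal sketch of how the 'known' payload is encoded and decoded on both ends (hypothetical node values; binascii stands in for mercurial.node.bin/hex):

    from binascii import hexlify as hex, unhexlify as bin

    def encodelist(l, sep=' '):
        return sep.join(map(hex, l))

    def decodelist(l, sep=' '):
        if l:  # an empty reply must decode to an empty list, not ['']
            return map(bin, l.split(sep))
        return []

    # client -> server: hex-encoded, space-separated binary node ids
    nodes = ['\x11' * 20, '\x22' * 20]
    wire_nodes = encodelist(nodes)

    # server -> client: one '1' or '0' character per queried node
    flags = [True, False]                     # what repo.known() would return
    reply = ''.join(f and "1" or "0" for f in flags)

    assert decodelist(wire_nodes) == nodes
    assert [bool(int(c)) for c in reply] == flags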
--- a/setup.py Wed Mar 30 02:22:15 2011 +0900
+++ b/setup.py Wed Mar 30 13:23:24 2011 -0500
@@ -98,24 +98,8 @@
 try:
     import py2exe
     py2exeloaded = True
-
-    # Help py2exe to find win32com.shell
-    try:
-        import modulefinder
-        import win32com
-        for p in win32com.__path__[1:]: # Take the path to win32comext
-            modulefinder.AddPackagePath("win32com", p)
-        pn = "win32com.shell"
-        __import__(pn)
-        m = sys.modules[pn]
-        for p in m.__path__[1:]:
-            modulefinder.AddPackagePath(pn, p)
-    except ImportError:
-        pass
-
 except ImportError:
     py2exeloaded = False
-    pass
 
 def runcmd(cmd, env):
     p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
@@ -330,11 +314,17 @@
     Extension('mercurial.parsers', ['mercurial/parsers.c']),
     ]
 
+osutil_ldflags = []
+
+if sys.platform == 'darwin':
+    osutil_ldflags += ['-framework', 'ApplicationServices']
+
 # disable osutil.c under windows + python 2.4 (issue1364)
 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
     pymodules.append('mercurial.pure.osutil')
 else:
-    extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
+    extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c'],
+                                extra_link_args=osutil_ldflags))
 
 if sys.platform == 'linux2' and os.uname()[2] > '2.6':
     # The inotify extension is only usable with Linux 2.6 kernels.
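The setup.py hunk above links the osutil C extension against the ApplicationServices framework on Mac OS X by passing extra_link_args to distutils. A stripped-down illustration of the same pattern (the demo module name and source file are made up, not part of Mercurial):

    import sys
    from distutils.core import setup, Extension

    ldflags = []
    if sys.platform == 'darwin':
        # assumption for this sketch: the C code calls ApplicationServices APIs
        ldflags += ['-framework', 'ApplicationServices']

    ext = Extension('osutil_demo', ['osutil_demo.c'], extra_link_args=ldflags)
    # setup(name='osutil-demo', version='0.1', ext_modules=[ext])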
--- a/tests/run-tests.py Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/run-tests.py Wed Mar 30 13:23:24 2011 -0500
@@ -227,8 +227,8 @@
             continue
 
         for line in f.readlines():
-            line = line.strip()
-            if line and not line.startswith('#'):
+            line = line.split('#', 1)[0].strip()
+            if line:
                 blacklist[line] = filename
 
         f.close()
@@ -694,7 +694,9 @@
         runner = shtest
 
     # Make a tmp subdirectory to work in
-    testtmp = os.environ["TESTTMP"] = os.path.join(HGTMP, test)
+    testtmp = os.environ["TESTTMP"] = os.environ["HOME"] = \
+        os.path.join(HGTMP, test)
+
     os.mkdir(testtmp)
     os.chdir(testtmp)
 
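Two behaviour changes in run-tests.py are worth noting: blacklist files may now carry '#' comments (everything after the hash is ignored), and $HOME is pointed at the per-test temporary directory, which is why several convert tests further down can drop their do_not_use_HOME_* workarounds. A small sketch of the new blacklist parsing (the sample lines are made up):

    def parse_blacklist(lines):
        entries = []
        for line in lines:
            line = line.split('#', 1)[0].strip()   # strip inline comments
            if line:                               # skip blank/comment-only lines
                entries.append(line)
        return entries

    sample = ["test-foo.t  # broken on this platform",
              "# a full-line comment",
              "test-bar.t"]
    assert parse_blacklist(sample) == ["test-foo.t", "test-bar.t"]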
--- a/tests/test-586.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-586.t Wed Mar 30 13:23:24 2011 -0500
@@ -17,6 +17,7 @@
   pulling from ../a
   searching for changes
   warning: repository is unrelated
+  requesting all changes
   adding changesets
   adding manifests
   adding file changes
@@ -66,6 +67,7 @@
   pulling from ../repob
   searching for changes
   warning: repository is unrelated
+  requesting all changes
   adding changesets
   adding manifests
   adding file changes
--- a/tests/test-acl.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-acl.t Wed Mar 30 13:23:24 2011 -0500 @@ -90,37 +90,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -168,37 +145,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -249,37 +203,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: 
foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -339,37 +270,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -426,37 +334,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -518,37 +403,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - 
bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -607,37 +469,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -701,37 +540,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -792,37 +608,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets 
- bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -885,37 +678,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -982,37 +752,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files 
(66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -1082,37 +829,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -1176,37 +900,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -1282,37 +983,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: 
foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -1380,37 +1058,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -1474,37 +1129,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -1570,37 +1202,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - 
bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 @@ -1665,37 +1274,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 2 changesets - bundling: 2 changesets bundling: 3 changesets - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) - bundling: 0/3 manifests (0.00%) bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 1/3 manifests (33.33%) - bundling: 2/3 manifests (66.67%) - bundling: 2/3 manifests (66.67%) bundling: 2/3 manifests (66.67%) bundling: 3/3 manifests (100.00%) bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) - bundling: foo/Bar/file.txt 0/3 files (0.00%) bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: foo/file.txt 1/3 files (33.33%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) - bundling: quux/file.py 2/3 files (66.67%) bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374
--- a/tests/test-annotate.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-annotate.t Wed Mar 30 13:23:24 2011 -0500
@@ -228,3 +228,8 @@
   $ hg annotate --follow foo
   foo: foo
 
+missing file
+
+  $ hg ann nosuchfile
+  abort: nosuchfile: no such file in rev c8abddb41a00
+  [255]
--- a/tests/test-basic.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-basic.t Wed Mar 30 13:23:24 2011 -0500 @@ -20,6 +20,22 @@ summary: test +Verify that updating to revision 0 via commands.update() works properly + + $ cat <<EOF > update_to_rev0.py + > from mercurial import ui, hg, commands + > myui = ui.ui() + > repo = hg.repository(myui, path='.') + > commands.update(myui, repo, rev=0) + > EOF + $ hg up null + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ python ./update_to_rev0.py + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg identify -n + 0 + + Poke around at hashes: $ hg manifest --debug
--- a/tests/test-bisect.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-bisect.t Wed Mar 30 13:23:24 2011 -0500 @@ -377,6 +377,44 @@ date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 + $ hg log -r "bisected(good)" + changeset: 0:b99c7b9c8e11 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: msg 0 + + changeset: 5:7874a09ea728 + user: test + date: Thu Jan 01 00:00:05 1970 +0000 + summary: msg 5 + + $ hg log -r "bisected(bad)" + changeset: 6:a3d5c6fdf0d3 + user: test + date: Thu Jan 01 00:00:06 1970 +0000 + summary: msg 6 + + $ hg log -r "bisected(skip)" + changeset: 1:5cd978ea5149 + user: test + date: Thu Jan 01 00:00:01 1970 +0000 + summary: msg 1 + + changeset: 2:db07c04beaca + user: test + date: Thu Jan 01 00:00:02 1970 +0000 + summary: msg 2 + + changeset: 3:b53bea5e2fcb + user: test + date: Thu Jan 01 00:00:03 1970 +0000 + summary: msg 3 + + changeset: 4:9b2ba8336a65 + user: test + date: Thu Jan 01 00:00:04 1970 +0000 + summary: msg 4 + $ set +e
--- a/tests/test-bisect2.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-bisect2.t Wed Mar 30 13:23:24 2011 -0500 @@ -416,10 +416,14 @@ summary: merge 10,13 Not all ancestors of this changeset have been checked. - To check the other ancestors, start from the common ancestor, dab8161ac8fc. - $ hg bisect -g 8 # dab8161ac8fc + Use bisect --extend to continue the bisection from + the common ancestor, dab8161ac8fc. + $ hg bisect --extend + Extending search to changeset 8:dab8161ac8fc + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg bisect -g # dab8161ac8fc Testing changeset 9:3c77083deb4a (3 changesets remaining, ~1 tests) - 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b The first bad revision is: changeset: 9:3c77083deb4a
--- a/tests/test-bookmarks-pushpull.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-bookmarks-pushpull.t Wed Mar 30 13:23:24 2011 -0500 @@ -26,6 +26,7 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files + updating bookmark Y (run 'hg update' to get a working copy) $ hg bookmarks Y 0:4e3505fd9583 @@ -176,5 +177,19 @@ no changes found not updating divergent bookmark X importing bookmark Z + $ hg clone http://localhost:$HGPORT/ cloned-bookmarks + requesting all changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 3 files (+1 heads) + updating to branch default + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg -R cloned-bookmarks bookmarks + X 1:9b140be10808 + Y 0:4e3505fd9583 + Z 2:0d2164f0ce0d + foo -1:000000000000 + foobar -1:000000000000 $ kill `cat ../hg.pid`
--- a/tests/test-bookmarks.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-bookmarks.t Wed Mar 30 13:23:24 2011 -0500 @@ -244,3 +244,80 @@ $ hg id db815d6d32e6 tip Y/Z/x y + +test clone + + $ hg bookmarks + X2 1:925d80f479bb + Y 2:db815d6d32e6 + * Z 2:db815d6d32e6 + x y 2:db815d6d32e6 + $ hg clone . cloned-bookmarks + updating to branch default + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg -R cloned-bookmarks bookmarks + X2 1:925d80f479bb + Y 2:db815d6d32e6 + Z 2:db815d6d32e6 + x y 2:db815d6d32e6 + +test clone with pull protocol + + $ hg clone --pull . cloned-bookmarks-pull + requesting all changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 3 files (+1 heads) + updating to branch default + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg -R cloned-bookmarks-pull bookmarks + X2 1:925d80f479bb + Y 2:db815d6d32e6 + Z 2:db815d6d32e6 + x y 2:db815d6d32e6 + +test clone with a specific revision + + $ hg clone -r 925d80 . cloned-bookmarks-rev + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files + updating to branch default + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg -R cloned-bookmarks-rev bookmarks + X2 1:925d80f479bb + +create bundle with two heads + + $ hg clone . tobundle + updating to branch default + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo x > tobundle/x + $ hg -R tobundle add tobundle/x + $ hg -R tobundle commit -m'x' + $ hg -R tobundle update -r -2 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo y > tobundle/y + $ hg -R tobundle branch test + marked working directory as branch test + $ hg -R tobundle add tobundle/y + $ hg -R tobundle commit -m'y' + $ hg -R tobundle bundle tobundle.hg + searching for changes + 2 changesets found + $ hg unbundle tobundle.hg + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + (run 'hg heads' to see heads, 'hg merge' to merge) + $ hg update + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg bookmarks + X2 1:925d80f479bb + Y 2:db815d6d32e6 + * Z 3:125c9a1d6df6 + x y 2:db815d6d32e6 +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-branch-tag-confict.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,62 @@ +Initial setup. + + $ hg init repo + $ cd repo + $ touch thefile + $ hg ci -A -m 'Initial commit.' + adding thefile + +Create a tag. + + $ hg tag branchortag + +Create a branch with the same name as the tag. + + $ hg branch branchortag + marked working directory as branch branchortag + $ hg ci -m 'Create a branch with the same name as a tag.' + +This is what we have: + + $ hg log + changeset: 2:02b1af9b58c2 + branch: branchortag + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Create a branch with the same name as a tag. + + changeset: 1:2635c45ca99b + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag branchortag for changeset f57387372b5d + + changeset: 0:f57387372b5d + tag: branchortag + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Initial commit. + +Update to the tag: + + $ hg up 'tag(branchortag)' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg parents + changeset: 0:f57387372b5d + tag: branchortag + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Initial commit. + +Updating to the branch: + + $ hg up 'branch(branchortag)' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg parents + changeset: 2:02b1af9b58c2 + branch: branchortag + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Create a branch with the same name as a tag. +
--- a/tests/test-bundle.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-bundle.t Wed Mar 30 13:23:24 2011 -0500 @@ -551,26 +551,10 @@ list of changesets: d2ae7f538514cd87c17547b0de4cea71fe1af9fb 5ece8e77363e2b5269e27c66828b72da29e4341a - bundling: 0 changesets - bundling: 0 changesets - bundling: 0 changesets - bundling: 1 changesets - bundling: 1 changesets bundling: 1 changesets bundling: 2 changesets - bundling: 0/2 manifests (0.00%) - bundling: 0/2 manifests (0.00%) - bundling: 0/2 manifests (0.00%) - bundling: 1/2 manifests (50.00%) - bundling: 1/2 manifests (50.00%) bundling: 1/2 manifests (50.00%) bundling: 2/2 manifests (100.00%) bundling: b 0/2 files (0.00%) - bundling: b 0/2 files (0.00%) - bundling: b 0/2 files (0.00%) - bundling: b 0/2 files (0.00%) - bundling: b1 1/2 files (50.00%) - bundling: b1 1/2 files (50.00%) - bundling: b1 1/2 files (50.00%) bundling: b1 1/2 files (50.00%)
--- a/tests/test-command-template.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-command-template.t Wed Mar 30 13:23:24 2011 -0500
@@ -1115,7 +1115,7 @@
 
   $ hg log --template '{date|age}\n' > /dev/null || exit 1
   $ hg log -l1 --template '{date|age}\n'
-  in the future
+  8 years from now
   $ hg log --template '{date|date}\n'
   Wed Jan 01 10:01:00 2020 +0000
   Mon Jan 12 13:46:40 1970 +0000
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-commit-multiple.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,129 @@ +# reproduce issue2264, issue2516 + +create test repo + $ cat <<EOF >> $HGRCPATH + > [extensions] + > transplant = + > graphlog = + > EOF + $ hg init repo + $ cd repo + $ template="{rev} {desc|firstline} [{branch}]\n" + +# we need to start out with two changesets on the default branch +# in order to avoid the cute little optimization where transplant +# pulls rather than transplants +add initial changesets + $ echo feature1 > file1 + $ hg ci -Am"feature 1" + adding file1 + $ echo feature2 >> file2 + $ hg ci -Am"feature 2" + adding file2 + +# The changes to 'bugfix' are enough to show the bug: in fact, with only +# those changes, it's a very noisy crash ("RuntimeError: nothing +# committed after transplant"). But if we modify a second file in the +# transplanted changesets, the bug is much more subtle: transplant +# silently drops the second change to 'bugfix' on the floor, and we only +# see it when we run 'hg status' after transplanting. Subtle data loss +# bugs are worse than crashes, so reproduce the subtle case here. +commit bug fixes on bug fix branch + $ hg branch fixes + marked working directory as branch fixes + $ echo fix1 > bugfix + $ echo fix1 >> file1 + $ hg ci -Am"fix 1" + adding bugfix + $ echo fix2 > bugfix + $ echo fix2 >> file1 + $ hg ci -Am"fix 2" + $ hg glog --template="$template" + @ 3 fix 2 [fixes] + | + o 2 fix 1 [fixes] + | + o 1 feature 2 [default] + | + o 0 feature 1 [default] + +transplant bug fixes onto release branch + $ hg update 0 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg branch release + marked working directory as branch release + $ hg transplant 2 3 + applying [0-9a-f]{12} (re) + [0-9a-f]{12} transplanted to [0-9a-f]{12} (re) + applying [0-9a-f]{12} (re) + [0-9a-f]{12} transplanted to [0-9a-f]{12} (re) + $ hg glog --template="$template" + @ 5 fix 2 [release] + | + o 4 fix 1 [release] + | + | o 3 fix 2 [fixes] + | | + | o 2 fix 1 [fixes] + | | + | o 1 feature 2 [default] + |/ + o 0 feature 1 [default] + + $ hg status + $ hg status --rev 0:4 + M file1 + A bugfix + $ hg status --rev 4:5 + M bugfix + M file1 + +now test that we fixed the bug for all scripts/extensions + $ cat > $TESTTMP/committwice.py <<__EOF__ + > from mercurial import ui, hg, match, node + > from time import sleep + > + > def replacebyte(fn, b): + > f = open(fn, "rb+") + > f.seek(0, 0) + > f.write(b) + > f.close() + > + > def printfiles(repo, rev): + > print "revision %s files: %s" % (rev, repo[rev].files()) + > + > repo = hg.repository(ui.ui(), '.') + > assert len(repo) == 6, \ + > "initial: len(repo): %d, expected: 6" % len(repo) + > + > replacebyte("bugfix", "u") + > sleep(2) + > try: + > print "PRE: len(repo): %d" % len(repo) + > wlock = repo.wlock() + > lock = repo.lock() + > replacebyte("file1", "x") + > repo.commit(text="x", user="test", date=(0, 0)) + > replacebyte("file1", "y") + > repo.commit(text="y", user="test", date=(0, 0)) + > print "POST: len(repo): %d" % len(repo) + > finally: + > lock.release() + > wlock.release() + > printfiles(repo, 6) + > printfiles(repo, 7) + > __EOF__ + $ $PYTHON $TESTTMP/committwice.py + PRE: len(repo): 6 + POST: len(repo): 8 + revision 6 files: ['bugfix', 'file1'] + revision 7 files: ['file1'] + +Do a size-preserving modification outside of that process + $ echo abcd > bugfix + $ hg status + M bugfix + $ hg log --template "{rev} {desc} {files}\n" -r5: + 5 fix 2 bugfix file1 + 6 x bugfix 
file1 + 7 y file1
--- a/tests/test-convert-baz Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-baz Wed Mar 30 13:23:24 2011 -0500
@@ -2,10 +2,6 @@
 
 "$TESTDIR/hghave" baz || exit 80
 
-mkdir do_not_use_HOME_baz
-cd do_not_use_HOME_baz
-HOME=`pwd`; export HOME
-cd ..
 baz my-id "mercurial <mercurial@selenic.com>"
 
 echo "[extensions]" >> $HGRCPATH
--- a/tests/test-convert-darcs.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-darcs.t Wed Mar 30 13:23:24 2011 -0500
@@ -4,7 +4,6 @@
   $ echo "convert=" >> $HGRCPATH
   $ echo 'graphlog =' >> $HGRCPATH
   $ DARCS_EMAIL='test@example.org'; export DARCS_EMAIL
-  $ HOME=`pwd`/do_not_use_HOME_darcs; export HOME
 
 skip if we can't import elementtree
 
--- a/tests/test-convert-git.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-convert-git.t Wed Mar 30 13:23:24 2011 -0500 @@ -57,9 +57,11 @@ 2 t4.1 1 t4.2 0 Merge branch other + updating bookmarks $ hg up -q -R git-repo-hg $ hg -R git-repo-hg tip -v changeset: 5:c78094926be2 + bookmark: master tag: tip parent: 3:f5f5cb45432b parent: 4:4e174f80c67c @@ -217,6 +219,7 @@ sorting... converting... 0 addbinary + updating bookmarks $ cd git-repo3-hg $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -248,8 +251,10 @@ converting... 1 addfoo 0 addfoo2 + updating bookmarks $ hg -R git-repo4-hg log -v changeset: 1:d63e967f93da + bookmark: master tag: tip user: nottest <test@example.org> date: Mon Jan 01 00:00:21 2007 +0000
--- a/tests/test-convert-hg-startrev.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-hg-startrev.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,5 +1,5 @@
 
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > graphlog =
   > convert =
--- a/tests/test-convert-mtn.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-convert-mtn.t Wed Mar 30 13:23:24 2011 -0500 @@ -8,7 +8,6 @@ $ echo "[extensions]" >> $HGRCPATH $ echo "convert=" >> $HGRCPATH $ echo 'graphlog =' >> $HGRCPATH - $ HOME=`pwd`/do_not_use_HOME_mtn; export HOME Windows version of monotone home @@ -208,6 +207,21 @@ $ mtn ci -m divergentdirmove2 mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 4a736634505795f17786fffdf2c9cbf5b11df6f6 + +test large file support (> 32kB) + + $ python -c 'for x in range(10000): print x' > large-file + $ $TESTDIR/md5sum.py large-file + 5d6de8a95c3b6bf9e0ffb808ba5299c1 large-file + $ mtn add large-file + mtn: adding large-file to workspace manifest + $ mtn ci -m largefile + mtn: beginning commit on branch 'com.selenic.test' + mtn: committed revision f0a20fecd10dc4392d18fe69a03f1f4919d3387b + +test suspending (closing a branch) + + $ mtn suspend f0a20fecd10dc4392d18fe69a03f1f4919d3387b 2> /dev/null $ cd .. convert incrementally @@ -217,27 +231,30 @@ scanning source... sorting... converting... - 11 update2 "with" quotes - 10 createdir1 - 9 movedir1 - 8 movedir - 7 emptydir - 6 dropdirectory - 5 dirfilemove - 4 dirfilemove2 - 3 dirdirmove - 2 dirdirmove2 - 1 divergentdirmove - 0 divergentdirmove2 + 12 update2 "with" quotes + 11 createdir1 + 10 movedir1 + 9 movedir + 8 emptydir + 7 dropdirectory + 6 dirfilemove + 5 dirfilemove2 + 4 dirdirmove + 3 dirdirmove2 + 2 divergentdirmove + 1 divergentdirmove2 + 0 largefile $ glog() > { > hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@" > } $ cd repo.mtn-hg $ hg up -C - 11 files updated, 0 files merged, 0 files removed, 0 files unresolved + 12 files updated, 0 files merged, 0 files removed, 0 files unresolved $ glog - @ 13 "divergentdirmove2" files: dir7-2/c dir7/c dir7/dir9/b dir7/dir9/dir8/a dir8-2/a dir9-2/b + @ 14 "largefile" files: large-file + | + o 13 "divergentdirmove2" files: dir7-2/c dir7/c dir7/dir9/b dir7/dir9/dir8/a dir8-2/a dir9-2/b | o 12 "divergentdirmove" files: dir7/c dir7/dir9/b dir7/dir9/dir8/a | @@ -280,6 +297,7 @@ dir8-2/a dir9-2/b e + large-file contents @@ -356,3 +374,15 @@ dir8-2/a dir9-2/b e + +test large file support (> 32kB) + + $ $TESTDIR/md5sum.py large-file + 5d6de8a95c3b6bf9e0ffb808ba5299c1 large-file + +check branch closing + + $ hg branches -a + $ hg branches -c + com.selenic.test 14:* (closed) (glob) +
--- a/tests/test-convert-svn-branches.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-convert-svn-branches.t Wed Mar 30 13:23:24 2011 -0500 @@ -1,7 +1,7 @@ $ "$TESTDIR/hghave" svn svn-bindings || exit 80 - $ cat > $HGRCPATH <<EOF + $ cat >> $HGRCPATH <<EOF > [extensions] > convert = > graphlog = @@ -32,6 +32,21 @@ 1 move back to old 0 last change to a +Test template keywords + + $ hg -R A-hg log --template '{rev} {svnuuid}{svnpath}@{svnrev}\n' + 10 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@10 + 9 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@9 + 8 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old2@8 + 7 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@7 + 6 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@6 + 5 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@6 + 4 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@5 + 3 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@4 + 2 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@3 + 1 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@2 + 0 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@1 + Convert again $ hg convert --branchmap=branchmap --datesort svn-repo A-hg
--- a/tests/test-convert-svn-encoding.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-svn-encoding.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,7 +1,7 @@
 
   $ "$TESTDIR/hghave" svn svn-bindings || exit 80
 
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > convert =
   > graphlog =
--- a/tests/test-convert-svn-move.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-svn-move.t Wed Mar 30 13:23:24 2011 -0500
@@ -5,7 +5,7 @@
   > {
   >     tr '\\' /
   > }
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > convert =
   > graphlog =
--- a/tests/test-convert-svn-sink.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-svn-sink.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,5 +1,5 @@
 
-  $ "$TESTDIR/hghave" svn svn-bindings no-outer-repo || exit 80
+  $ "$TESTDIR/hghave" svn no-outer-repo || exit 80
 
   $ fixpath()
   > {
@@ -22,7 +22,7 @@
   >  )
   > }
 
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > convert =
   > graphlog =
--- a/tests/test-convert-svn-source.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-svn-source.t Wed Mar 30 13:23:24 2011 -0500
@@ -5,7 +5,7 @@
   > {
   >     tr '\\' /
   > }
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > convert =
   > graphlog =
--- a/tests/test-convert-svn-startrev.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-svn-startrev.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,7 +1,7 @@
 
   $ "$TESTDIR/hghave" svn svn-bindings || exit 80
 
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > convert =
   > graphlog =
--- a/tests/test-convert-svn-tags.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-svn-tags.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,7 +1,7 @@
 
   $ "$TESTDIR/hghave" svn svn-bindings || exit 80
 
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > convert =
   > graphlog =
--- a/tests/test-convert-tagsbranch-topology.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-tagsbranch-topology.t Wed Mar 30 13:23:24 2011 -0500
@@ -49,6 +49,7 @@
   converting...
   0 rev1
   updating tags
+  updating bookmarks
 
 Simulate upstream updates after first conversion
 
@@ -67,6 +68,7 @@
   converting...
   0 rev2
   updating tags
+  updating bookmarks
 
 Print the log
 
--- a/tests/test-convert-tla.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-convert-tla.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,9 +1,5 @@
 
   $ "$TESTDIR/hghave" tla || exit 80
-  $ mkdir do_not_use_HOME_tla
-  $ cd do_not_use_HOME_tla
-  $ HOME=`pwd`; export HOME
-  $ cd ..
   $ tla my-id "mercurial <mercurial@selenic.com>"
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "convert=" >> $HGRCPATH
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-debugbundle.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,36 @@ + +Create a test repository: + + $ hg init repo + $ cd repo + $ touch a ; hg add a ; hg ci -ma + $ touch b ; hg add b ; hg ci -mb + $ touch c ; hg add c ; hg ci -mc + $ hg bundle --base 0 --rev tip bundle.hg + 2 changesets found + +Terse output: + + $ hg debugbundle bundle.hg + 0e067c57feba1a5694ca4844f05588bb1bf82342 + 991a3460af53952d10ec8a295d3d2cc2e5fa9690 + +Verbose output: + + $ hg debugbundle --all bundle.hg + format: id, p1, p2, cset, len(delta) + + changelog + 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 80 + 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 80 + + manifest + 686dbf0aeca417636fa26a9121c681eabbb15a20 8515d4bfda768e04af4c13a69a72e28c7effbea7 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 55 + ae25a31b30b3490a981e7b96a3238cc69583fda1 686dbf0aeca417636fa26a9121c681eabbb15a20 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 55 + + b + b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 12 + + c + b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 12 +
--- a/tests/test-debugcomplete.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-debugcomplete.t Wed Mar 30 13:23:24 2011 -0500 @@ -67,6 +67,7 @@ $ hg debugcomplete debug debugancestor debugbuilddag + debugbundle debugcheckstate debugcommands debugcomplete @@ -75,10 +76,12 @@ debugdata debugdate debugfsinfo + debuggetbundle debugignore debugindex debugindexdot debuginstall + debugknown debugpushkey debugrebuildstate debugrename @@ -87,6 +90,7 @@ debugstate debugsub debugwalk + debugwireargs Do not show the alias of a debug command if there are other candidates (this should hide rawcommit) @@ -199,7 +203,7 @@ addremove: similarity, include, exclude, dry-run archive: no-decode, prefix, rev, type, subrepos, include, exclude backout: merge, parent, tool, rev, include, exclude, message, logfile, date, user - bisect: reset, good, bad, skip, command, noupdate + bisect: reset, good, bad, skip, extend, command, noupdate bookmarks: force, rev, delete, rename branch: force, clean branches: active, closed @@ -208,6 +212,7 @@ copy: after, force, include, exclude, dry-run debugancestor: debugbuilddag: mergeable-file, appended-file, overwritten-file, new-file + debugbundle: all debugcheckstate: debugcommands: debugcomplete: options @@ -215,18 +220,21 @@ debugdata: debugdate: extended debugfsinfo: + debuggetbundle: head, common, type debugignore: debugindex: format debugindexdot: debuginstall: + debugknown: debugpushkey: debugrebuildstate: rev debugrename: rev debugrevspec: debugsetparents: - debugstate: nodates + debugstate: nodates, datesort debugsub: rev debugwalk: include, exclude + debugwireargs: three, four, ssh, remotecmd, insecure grep: print0, all, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude heads: rev, topo, active, closed, style, template help:
--- a/tests/test-diff-color.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-diff-color.t Wed Mar 30 13:23:24 2011 -0500 @@ -81,7 +81,7 @@ \x1b[0;36;1mold mode 100644\x1b[0m (esc) \x1b[0;36;1mnew mode 100755\x1b[0m (esc) 1 hunks, 1 lines changed - examine changes to 'a'? [Ynsfdaq?] + \x1b[0;33mexamine changes to 'a'? [Ynsfdaq?]\x1b[0m (esc) \x1b[0;35m@@ -2,7 +2,7 @@\x1b[0m (esc) c a @@ -91,7 +91,7 @@ a a c - record this change to 'a'? [Ynsfdaq?] + \x1b[0;33mrecord this change to 'a'? [Ynsfdaq?]\x1b[0m (esc) $ echo $ echo "[extensions]" >> $HGRCPATH @@ -110,7 +110,7 @@ \x1b[0;36;1mold mode 100644\x1b[0m (esc) \x1b[0;36;1mnew mode 100755\x1b[0m (esc) 1 hunks, 1 lines changed - examine changes to 'a'? [Ynsfdaq?] + \x1b[0;33mexamine changes to 'a'? [Ynsfdaq?]\x1b[0m (esc) \x1b[0;35m@@ -2,7 +2,7 @@\x1b[0m (esc) c a @@ -120,6 +120,6 @@ a a c - record this change to 'a'? [Ynsfdaq?] + \x1b[0;33mrecord this change to 'a'? [Ynsfdaq?]\x1b[0m (esc) $ echo
--- a/tests/test-eol-add.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-eol-add.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,6 +1,6 @@
 Test adding .hgeol
 
-  $ cat > $HGRCPATH <<EOF
+  $ cat >> $HGRCPATH <<EOF
   > [diff]
   > git = 1
   > EOF
--- a/tests/test-eol-clone.t Wed Mar 30 02:22:15 2011 +0900
+++ b/tests/test-eol-clone.t Wed Mar 30 13:23:24 2011 -0500
@@ -1,9 +1,6 @@
 Testing cloning with the EOL extension
 
-  $ cat > $HGRCPATH <<EOF
-  > [diff]
-  > git = True
-  >
+  $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > eol =
   >
--- a/tests/test-eol-hook.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-eol-hook.t Wed Mar 30 13:23:24 2011 -0500 @@ -1,14 +1,7 @@ Test the EOL hook - $ cat > $HGRCPATH <<EOF - > [diff] - > git = True - > EOF $ hg init main $ cat > main/.hg/hgrc <<EOF - > [extensions] - > eol = - > > [hooks] > pretxnchangegroup = python:hgext.eol.hook > EOF @@ -47,10 +40,12 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files - error: pretxnchangegroup hook failed: a.txt should not have CRLF line endings + error: pretxnchangegroup hook failed: end-of-line check failed: + a.txt in a8ee6548cd86 should not have CRLF line endings transaction abort! rollback completed - abort: a.txt should not have CRLF line endings + abort: end-of-line check failed: + a.txt in a8ee6548cd86 should not have CRLF line endings [255] $ printf "first\nsecond\nthird\n" > a.txt @@ -73,10 +68,12 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files - error: pretxnchangegroup hook failed: crlf.txt should not have LF line endings + error: pretxnchangegroup hook failed: end-of-line check failed: + crlf.txt in 004ba2132725 should not have LF line endings transaction abort! rollback completed - abort: crlf.txt should not have LF line endings + abort: end-of-line check failed: + crlf.txt in 004ba2132725 should not have LF line endings [255] $ printf "first\r\nsecond\r\nthird\r\n" > crlf.txt @@ -88,3 +85,133 @@ adding manifests adding file changes added 2 changesets with 2 changes to 1 files + + $ printf "first\r\nsecond" > b.txt + $ hg add b.txt + $ hg commit -m 'CRLF b.txt' + $ hg push ../main + pushing to ../main + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + error: pretxnchangegroup hook failed: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + transaction abort! + rollback completed + abort: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + [255] + + $ hg up -r -2 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ printf "some\nother\nfile" > c.txt + $ hg add c.txt + $ hg commit -m "LF c.txt, b.txt doesn't exist here" + created new head + $ hg push -f ../main + pushing to ../main + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + error: pretxnchangegroup hook failed: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + transaction abort! + rollback completed + abort: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + [255] + +Test checkheadshook alias + + $ cat > ../main/.hg/hgrc <<EOF + > [hooks] + > pretxnchangegroup = python:hgext.eol.checkheadshook + > EOF + $ hg push -f ../main + pushing to ../main + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + error: pretxnchangegroup hook failed: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + transaction abort! 
+ rollback completed + abort: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + [255] + +We can fix the head and push again + + $ hg up 6 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ printf "first\nsecond" > b.txt + $ hg ci -m "remove CRLF from b.txt" + $ hg push -f ../main + pushing to ../main + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files (+1 heads) + $ hg -R ../main rollback + repository tip rolled back to revision 5 (undo push) + working directory now based on revision -1 + +Test it still fails with checkallhook + + $ cat > ../main/.hg/hgrc <<EOF + > [hooks] + > pretxnchangegroup = python:hgext.eol.checkallhook + > EOF + $ hg push -f ../main + pushing to ../main + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files (+1 heads) + error: pretxnchangegroup hook failed: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + transaction abort! + rollback completed + abort: end-of-line check failed: + b.txt in fbcf9b1025f5 should not have CRLF line endings + [255] + +But we can push the clean head + + $ hg push -r7 -f ../main + pushing to ../main + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + +Test multiple files/revisions output + + $ printf "another\r\nbad\r\none" > d.txt + $ hg add d.txt + $ hg ci -m "add d.txt" + $ hg push -f ../main + pushing to ../main + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files (+1 heads) + error: pretxnchangegroup hook failed: end-of-line check failed: + d.txt in a7040e68714f should not have CRLF line endings + b.txt in fbcf9b1025f5 should not have CRLF line endings + transaction abort! + rollback completed + abort: end-of-line check failed: + d.txt in a7040e68714f should not have CRLF line endings + b.txt in fbcf9b1025f5 should not have CRLF line endings + [255]
--- a/tests/test-eol-patch.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-eol-patch.t Wed Mar 30 13:23:24 2011 -0500 @@ -1,6 +1,6 @@ Test EOL patching - $ cat > $HGRCPATH <<EOF + $ cat >> $HGRCPATH <<EOF > [diff] > git = 1 > EOF
--- a/tests/test-eol-tag.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-eol-tag.t Wed Mar 30 13:23:24 2011 -0500 @@ -2,10 +2,7 @@ Testing tagging with the EOL extension - $ cat > $HGRCPATH <<EOF - > [diff] - > git = True - > + $ cat >> $HGRCPATH <<EOF > [extensions] > eol = >
--- a/tests/test-eol-update.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-eol-update.t Wed Mar 30 13:23:24 2011 -0500 @@ -1,6 +1,6 @@ Test EOL update - $ cat > $HGRCPATH <<EOF + $ cat >> $HGRCPATH <<EOF > [diff] > git = 1 > EOF
--- a/tests/test-eol.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-eol.t Wed Mar 30 13:23:24 2011 -0500 @@ -1,6 +1,6 @@ Test EOL extension - $ cat > $HGRCPATH <<EOF + $ cat >> $HGRCPATH <<EOF > [diff] > git = True > EOF
--- a/tests/test-extdiff.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-extdiff.t Wed Mar 30 13:23:24 2011 -0500 @@ -57,7 +57,7 @@ Should diff cloned files directly: $ hg falabala -r 0:1 - diffing a.8a5febb7f867/a a.34eed99112ab/a + diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] Test diff during merge: @@ -75,7 +75,7 @@ Should diff cloned file against wc file: $ hg falabala - diffing a.2a13a4d2da36/a $TESTTMP/a/a + diffing */extdiff.*/a.2a13a4d2da36/a */a/a (glob) [1] @@ -83,13 +83,13 @@ $ hg ci -d '2 0' -mtest3 $ hg falabala -c 1 - diffing a.8a5febb7f867/a a.34eed99112ab/a + diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] Check diff are made from the first parent: $ hg falabala -c 3 || echo "diff-like tools yield a non-zero exit code" - diffing a.2a13a4d2da36/a a.46c0e4daeb72/a + diffing */extdiff.*/a.2a13a4d2da36/a a.46c0e4daeb72/a (glob) diff-like tools yield a non-zero exit code Test extdiff of multiple files in tmp dir: @@ -161,10 +161,19 @@ Test extdiff with --option: $ hg extdiff -p echo -o this -c 1 - this a.8a5febb7f867/a a.34eed99112ab/a + this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] $ hg falabala -o this -c 1 - diffing this a.8a5febb7f867/a a.34eed99112ab/a + diffing this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] +Test with revsets: + + $ hg extdif -p echo -c "rev(1)" + */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) + [1] + + $ hg extdif -p echo -r "0::1" + */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) + [1]
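
The output changes above come from extdiff snapshotting the compared revisions under a temporary "extdiff.*" directory, which is why the printed paths now need globs. A rough sketch of that snapshot-then-run workflow, with a stand-in 'echo' program as in the test and made-up snapshot labels; this illustrates the idea only and is not the extension's implementation.

  import os, subprocess, tempfile

  def extdiff(snapshots, program='echo', options=()):
      """snapshots: (label, {filename: text}) pairs standing in for revisions."""
      tmp = tempfile.mkdtemp(prefix='extdiff.')
      dirs = []
      for label, files in snapshots:
          d = os.path.join(tmp, label)
          os.makedirs(d)
          for name, text in files.items():
              with open(os.path.join(d, name), 'w') as fp:
                  fp.write(text)
          dirs.append(d)
      # runs e.g.: echo this <tmp>/extdiff.xxx/a.8a5febb7f867 <tmp>/extdiff.xxx/a.34eed99112ab
      return subprocess.call([program] + list(options) + dirs)

  extdiff([('a.8a5febb7f867', {'a': 'old\n'}),
           ('a.34eed99112ab', {'a': 'new\n'})], options=['this'])
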
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-getbundle.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,253 @@ + += Test the getbundle() protocol function = + +Enable graphlog extension: + + $ echo "[extensions]" >> $HGRCPATH + $ echo "graphlog=" >> $HGRCPATH + +Create a test repository: + + $ hg init repo + $ cd repo + $ hg debugbuilddag -n -m '+2 :fork +5 :p1 *fork +6 :p2 /p1 :m1 +3' > /dev/null + $ hg glog --template '{node}\n' + @ 2bba2f40f321484159b395a43f20101d4bb7ead0 + | + o d9e5488323c782fe684573f3043369d199038b6f + | + o 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72 + | + o 733bf0910832b26b768a09172f325f995b5476e1 + |\ + | o b5af5d6ea56d73ce24c40bc3cd19a862f74888ac + | | + | o 6b57ee934bb2996050540f84cdfc8dcad1e7267d + | | + | o 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 + | | + | o c1818a9f5977dd4139a48f93f5425c67d44a9368 + | | + | o 6c725a58ad10aea441540bfd06c507f63e8b9cdd + | | + | o 18063366a155bd56b5618229ae2ac3e91849aa5e + | | + | o a21d913c992197a2eb60b298521ec0f045a04799 + | | + o | b6b2b682253df2ffedc10e9415e4114202b303c5 + | | + o | 2114148793524fd045998f71a45b0aaf139f752b + | | + o | 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc + | | + o | ea919464b16e003894c48b6cb68df3cd9411b544 + | | + o | 0f82d97ec2778746743fbc996740d409558fda22 + |/ + o 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4 + | + o 10e64d654571f11577745b4d8372e859d9e4df63 + + $ cd .. + + += Test locally = + +Get everything: + + $ hg debuggetbundle repo bundle + $ hg debugbundle bundle + 10e64d654571f11577745b4d8372e859d9e4df63 + 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4 + 0f82d97ec2778746743fbc996740d409558fda22 + ea919464b16e003894c48b6cb68df3cd9411b544 + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc + 2114148793524fd045998f71a45b0aaf139f752b + b6b2b682253df2ffedc10e9415e4114202b303c5 + a21d913c992197a2eb60b298521ec0f045a04799 + 18063366a155bd56b5618229ae2ac3e91849aa5e + 6c725a58ad10aea441540bfd06c507f63e8b9cdd + c1818a9f5977dd4139a48f93f5425c67d44a9368 + 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 + 6b57ee934bb2996050540f84cdfc8dcad1e7267d + b5af5d6ea56d73ce24c40bc3cd19a862f74888ac + 733bf0910832b26b768a09172f325f995b5476e1 + 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72 + d9e5488323c782fe684573f3043369d199038b6f + 2bba2f40f321484159b395a43f20101d4bb7ead0 + +Get part of linear run: + + $ hg debuggetbundle repo bundle -H d9e5488323c782fe684573f3043369d199038b6f -C 733bf0910832b26b768a09172f325f995b5476e1 + $ hg debugbundle bundle + 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72 + d9e5488323c782fe684573f3043369d199038b6f + +Get missing branch and merge: + + $ hg debuggetbundle repo bundle -H d9e5488323c782fe684573f3043369d199038b6f -C 6b57ee934bb2996050540f84cdfc8dcad1e7267d + $ hg debugbundle bundle + 0f82d97ec2778746743fbc996740d409558fda22 + ea919464b16e003894c48b6cb68df3cd9411b544 + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc + 2114148793524fd045998f71a45b0aaf139f752b + b6b2b682253df2ffedc10e9415e4114202b303c5 + b5af5d6ea56d73ce24c40bc3cd19a862f74888ac + 733bf0910832b26b768a09172f325f995b5476e1 + 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72 + d9e5488323c782fe684573f3043369d199038b6f + +Get from only one head: + + $ hg debuggetbundle repo bundle -H 6c725a58ad10aea441540bfd06c507f63e8b9cdd -C 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4 + $ hg debugbundle bundle + a21d913c992197a2eb60b298521ec0f045a04799 + 18063366a155bd56b5618229ae2ac3e91849aa5e + 6c725a58ad10aea441540bfd06c507f63e8b9cdd + +Get parts of two branches: + + $ hg debuggetbundle repo bundle -H 6b57ee934bb2996050540f84cdfc8dcad1e7267d -C c1818a9f5977dd4139a48f93f5425c67d44a9368 
-H 2114148793524fd045998f71a45b0aaf139f752b -C ea919464b16e003894c48b6cb68df3cd9411b544 + $ hg debugbundle bundle + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc + 2114148793524fd045998f71a45b0aaf139f752b + 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 + 6b57ee934bb2996050540f84cdfc8dcad1e7267d + +Check that we get all needed file changes: + + $ hg debugbundle bundle --all + format: id, p1, p2, cset, len(delta) + + changelog + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc ea919464b16e003894c48b6cb68df3cd9411b544 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 99 + 2114148793524fd045998f71a45b0aaf139f752b 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 99 + 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 c1818a9f5977dd4139a48f93f5425c67d44a9368 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 102 + 6b57ee934bb2996050540f84cdfc8dcad1e7267d 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 102 + + manifest + dac7984588fc4eea7acbf39693a9c1b06f5b175d 591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 113 + 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 113 + eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 295 + b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 114 + + mf + 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 17 + c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 18 + 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 149 + 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 19 + + nf11 + 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 16 + + nf12 + ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 16 + + nf4 + 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 15 + + nf5 + 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 15 + +Get branch and merge: + + $ hg debuggetbundle repo bundle -C 10e64d654571f11577745b4d8372e859d9e4df63 -H 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72 + $ hg debugbundle bundle + 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4 + 0f82d97ec2778746743fbc996740d409558fda22 + ea919464b16e003894c48b6cb68df3cd9411b544 + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc + 
2114148793524fd045998f71a45b0aaf139f752b + b6b2b682253df2ffedc10e9415e4114202b303c5 + a21d913c992197a2eb60b298521ec0f045a04799 + 18063366a155bd56b5618229ae2ac3e91849aa5e + 6c725a58ad10aea441540bfd06c507f63e8b9cdd + c1818a9f5977dd4139a48f93f5425c67d44a9368 + 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 + 6b57ee934bb2996050540f84cdfc8dcad1e7267d + b5af5d6ea56d73ce24c40bc3cd19a862f74888ac + 733bf0910832b26b768a09172f325f995b5476e1 + 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72 + + += Test via HTTP = + +Get everything: + + $ hg serve -R repo -p $HGPORT -d --pid-file=hg.pid -E error.log -A access.log + $ cat hg.pid >> $DAEMON_PIDS + $ hg debuggetbundle http://localhost:$HGPORT/ bundle + $ hg debugbundle bundle + 10e64d654571f11577745b4d8372e859d9e4df63 + 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4 + 0f82d97ec2778746743fbc996740d409558fda22 + ea919464b16e003894c48b6cb68df3cd9411b544 + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc + 2114148793524fd045998f71a45b0aaf139f752b + b6b2b682253df2ffedc10e9415e4114202b303c5 + a21d913c992197a2eb60b298521ec0f045a04799 + 18063366a155bd56b5618229ae2ac3e91849aa5e + 6c725a58ad10aea441540bfd06c507f63e8b9cdd + c1818a9f5977dd4139a48f93f5425c67d44a9368 + 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 + 6b57ee934bb2996050540f84cdfc8dcad1e7267d + b5af5d6ea56d73ce24c40bc3cd19a862f74888ac + 733bf0910832b26b768a09172f325f995b5476e1 + 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72 + d9e5488323c782fe684573f3043369d199038b6f + 2bba2f40f321484159b395a43f20101d4bb7ead0 + +Get parts of two branches: + + $ hg debuggetbundle http://localhost:$HGPORT/ bundle -H 6b57ee934bb2996050540f84cdfc8dcad1e7267d -C c1818a9f5977dd4139a48f93f5425c67d44a9368 -H 2114148793524fd045998f71a45b0aaf139f752b -C ea919464b16e003894c48b6cb68df3cd9411b544 + $ hg debugbundle bundle + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc + 2114148793524fd045998f71a45b0aaf139f752b + 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 + 6b57ee934bb2996050540f84cdfc8dcad1e7267d + +Check that we get all needed file changes: + + $ hg debugbundle bundle --all + format: id, p1, p2, cset, len(delta) + + changelog + 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc ea919464b16e003894c48b6cb68df3cd9411b544 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 99 + 2114148793524fd045998f71a45b0aaf139f752b 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 99 + 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 c1818a9f5977dd4139a48f93f5425c67d44a9368 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 102 + 6b57ee934bb2996050540f84cdfc8dcad1e7267d 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 102 + + manifest + dac7984588fc4eea7acbf39693a9c1b06f5b175d 591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 113 + 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 113 + eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 295 + b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 114 + + mf + 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 
0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 17 + c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 18 + 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 149 + 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 19 + + nf11 + 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 16 + + nf12 + ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 16 + + nf4 + 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 15 + + nf5 + 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 15 + +Verify we hit the HTTP server: + + $ cat access.log + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=getbundle&common=c1818a9f5977dd4139a48f93f5425c67d44a9368+ea919464b16e003894c48b6cb68df3cd9411b544&heads=6b57ee934bb2996050540f84cdfc8dcad1e7267d+2114148793524fd045998f71a45b0aaf139f752b HTTP/1.1" 200 - (glob) + + $ cat error.log +
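
The heads/common arguments seen in these debuggetbundle calls select changesets by DAG position: the bundle should contain the ancestors of the requested heads minus the ancestors of the common nodes. A small self-contained illustration of that set computation over a plain parent map follows; it is not the wire-protocol code itself, and the toy DAG is invented for the example.

  def ancestors(nodes, parents):
      # collect the given nodes plus everything reachable through 'parents'
      seen, stack = set(), list(nodes)
      while stack:
          n = stack.pop()
          if n in seen:
              continue
          seen.add(n)
          stack.extend(parents.get(n, ()))
      return seen

  def outgoing(parents, heads, common):
      # what a getbundle(common, heads) request should return
      return ancestors(heads, parents) - ancestors(common, parents)

  # Tiny DAG: a <- b <- c, plus a <- d.  Asking for head 'c' with common 'a'
  # yields {'b', 'c'}, analogous to "get part of linear run" above.
  parents = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['a']}
  print(sorted(outgoing(parents, ['c'], ['a'])))
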
--- a/tests/test-globalopts.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-globalopts.t Wed Mar 30 13:23:24 2011 -0500 @@ -28,6 +28,7 @@ pulling from ../b searching for changes warning: repository is unrelated + requesting all changes adding changesets adding manifests adding file changes
--- a/tests/test-help.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-help.t Wed Mar 30 13:23:24 2011 -0500 @@ -765,6 +765,14 @@ working directory is checked out, it is equivalent to null. If an uncommitted merge is in progress, "." is the revision of the first parent. +Test templating help + + $ hg help templating | egrep '(desc|diffstat|firstline|nonempty) ' + desc String. The text of the changeset description. + diffstat String. Statistics of changes with the following format: + firstline Any text. Returns the first line of text. + nonempty Any text. Returns '(none)' if the string is empty. + Test help hooks $ cat > helphook1.py <<EOF
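
For reference, the two plain-text filters whose one-line help is checked here, firstline and nonempty, can be sketched as ordinary string functions. The real implementations live in Mercurial's template filter table, so treat this only as a behavioural sketch.

  def firstline(text):
      """Any text. Returns the first line of text."""
      try:
          return text.splitlines()[0]
      except IndexError:
          return ''

  def nonempty(text):
      """Any text. Returns '(none)' if the string is empty."""
      return text or '(none)'

  print(firstline('summary line\nmore detail\n'))   # summary line
  print(nonempty(''))                               # (none)
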
--- a/tests/test-hgrc.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-hgrc.t Wed Mar 30 13:23:24 2011 -0500 @@ -20,12 +20,12 @@ $ cd foobar $ cat .hg/hgrc [paths] - default = */foo%bar (glob) + default = $TESTTMP/foo%bar $ hg paths - default = */foo%bar (glob) + default = $TESTTMP/foo%bar $ hg showconfig - bundle.mainreporoot=*/foobar (glob) - paths.default=*/foo%bar (glob) + bundle.mainreporoot=$TESTTMP/foobar + paths.default=$TESTTMP/foo%bar $ cd .. issue1829: wrong indentation
--- a/tests/test-hgweb-commands.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-hgweb-commands.t Wed Mar 30 13:23:24 2011 -0500 @@ -905,7 +905,7 @@ $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'; echo 200 Script output follows - lookup changegroupsubset branchmap pushkey unbundle=HG10GZ,HG10BZ,HG10UN + lookup changegroupsubset branchmap pushkey known getbundle unbundle=HG10GZ,HG10BZ,HG10UN heads
--- a/tests/test-http-clone-r.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-http-clone-r.t Wed Mar 30 13:23:24 2011 -0500 @@ -214,7 +214,7 @@ adding changesets adding manifests adding file changes - added 1 changesets with 0 changes to 1 files (+1 heads) + added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets @@ -238,7 +238,7 @@ adding changesets adding manifests adding file changes - added 2 changesets with 0 changes to 1 files (+1 heads) + added 2 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets
--- a/tests/test-http-proxy.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-http-proxy.t Wed Mar 30 13:23:24 2011 -0500 @@ -98,27 +98,23 @@ updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat proxy.log - * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) - * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) + * - - [*] "GET 
http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob)
--- a/tests/test-https.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-https.t Wed Mar 30 13:23:24 2011 -0500 @@ -118,9 +118,9 @@ adding manifests adding file changes added 1 changesets with 4 changes to 4 files - warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved + warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) $ hg verify -R copy-pull checking changesets checking manifests
--- a/tests/test-identify.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-identify.t Wed Mar 30 13:23:24 2011 -0500 @@ -65,26 +65,43 @@ remote with rev number? $ hg id -n http://localhost:$HGPORT1/ - abort: can't query remote revision number, branch, tags, or bookmarks + abort: can't query remote revision number, branch, or tags [255] remote with tags? $ hg id -t http://localhost:$HGPORT1/ - abort: can't query remote revision number, branch, tags, or bookmarks + abort: can't query remote revision number, branch, or tags [255] remote with branch? $ hg id -b http://localhost:$HGPORT1/ - abort: can't query remote revision number, branch, tags, or bookmarks + abort: can't query remote revision number, branch, or tags [255] -remote with bookmarks? +test bookmark support - $ hg id -B http://localhost:$HGPORT1/ - abort: can't query remote revision number, branch, tags, or bookmarks - [255] + $ hg bookmark Y + $ hg bookmark Z + $ hg bookmarks + Y 0:cb9a9f314b8b + * Z 0:cb9a9f314b8b + $ hg id + cb9a9f314b8b+ tip Y/Z + $ hg id --bookmarks + Y Z + +test remote identify with bookmarks + + $ hg id http://localhost:$HGPORT1/ + cb9a9f314b8b Y/Z + $ hg id --bookmarks http://localhost:$HGPORT1/ + Y Z + $ hg id -r . http://localhost:$HGPORT1/ + cb9a9f314b8b Y/Z + $ hg id --bookmarks -r . http://localhost:$HGPORT1/ + Y Z Make sure we do not obscure unknown requires file entries (issue2649)
--- a/tests/test-init.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-init.t Wed Mar 30 13:23:24 2011 -0500 @@ -199,3 +199,17 @@ store fncache dotencode + +clone bookmarks + + $ hg -R local bookmark test + $ hg -R local bookmarks + * test 0:08b9e9f63b32 + $ hg clone -e "python ./dummyssh" local ssh://user@dummy/remote-bookmarks + searching for changes + remote: adding changesets + remote: adding manifests + remote: adding file changes + remote: added 1 changesets with 1 changes to 1 files + $ hg -R remote-bookmarks bookmarks + test 0:08b9e9f63b32
--- a/tests/test-install.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-install.t Wed Mar 30 13:23:24 2011 -0500 @@ -3,7 +3,6 @@ Checking encoding (ascii)... Checking installed modules (*/mercurial)... (glob) Checking templates... - Checking patch... Checking commit editor... Checking username... No problems detected @@ -13,7 +12,6 @@ Checking encoding (ascii)... Checking installed modules (*/mercurial)... (glob) Checking templates... - Checking patch... Checking commit editor... Checking username... no username supplied (see "hg help config")
--- a/tests/test-keyword.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-keyword.t Wed Mar 30 13:23:24 2011 -0500 @@ -209,7 +209,7 @@ To: Test changeset a2392c293916 in $TESTTMP/Test - details: *cmd=changeset;node=a2392c293916 (glob) + details: $TESTTMP/Test?cmd=changeset;node=a2392c293916 description: addsym
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-known.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,37 @@ + += Test the known() protocol function = + +Create a test repository: + + $ hg init repo + $ cd repo + $ touch a ; hg add a ; hg ci -ma + $ touch b ; hg add b ; hg ci -mb + $ touch c ; hg add c ; hg ci -mc + $ hg log --template '{node}\n' + 991a3460af53952d10ec8a295d3d2cc2e5fa9690 + 0e067c57feba1a5694ca4844f05588bb1bf82342 + 3903775176ed42b1458a6281db4a0ccf4d9f287a + $ cd .. + +Test locally: + + $ hg debugknown repo 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a + 111 + $ hg debugknown repo 000a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0003775176ed42b1458a6281db4a0ccf4d9f287a + 010 + $ hg debugknown repo + + +Test via HTTP: + + $ hg serve -R repo -p $HGPORT -d --pid-file=hg.pid -E error.log -A access.log + $ cat hg.pid >> $DAEMON_PIDS + $ hg debugknown http://localhost:$HGPORT/ 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a + 111 + $ hg debugknown http://localhost:$HGPORT/ 000a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0003775176ed42b1458a6281db4a0ccf4d9f287a + 010 + $ hg debugknown http://localhost:$HGPORT/ + + $ cat error.log +
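
The known() call tested here has very small semantics: the server answers with one '1' or '0' per queried node, in request order, according to whether it has that changeset. A toy illustration with truncated node IDs; this is not the server code.

  def known(repo_nodes, queried_nodes):
      # one character per queried node, in order
      return ''.join('1' if n in repo_nodes else '0' for n in queried_nodes)

  repo = {'991a3460af53', '0e067c57feba', '3903775176ed'}
  print(known(repo, ['991a3460af53', '0e067c57feba', '3903775176ed']))  # 111
  print(known(repo, ['000a3460af53', '0e067c57feba', '0003775176ed']))  # 010
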
--- a/tests/test-log.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-log.t Wed Mar 30 13:23:24 2011 -0500 @@ -512,7 +512,11 @@ date: Thu Jan 01 00:00:01 1970 +0000 summary: r1 +log -d " " (whitespaces only) + $ hg log -d " " + abort: dates cannot consist entirely of whitespace + [255] log -d -1
--- a/tests/test-mq-strip.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-mq-strip.t Wed Mar 30 13:23:24 2011 -0500 @@ -410,7 +410,7 @@ abort: local changes found [255] $ hg strip tip --keep - saved backup bundle to * (glob) + saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ hg log --graph @ changeset: 0:9ab35a2d17cb tag: tip
--- a/tests/test-paths.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-paths.t Wed Mar 30 13:23:24 2011 -0500 @@ -25,3 +25,23 @@ $ SOMETHING=/foo hg paths dupe = $TESTTMP/b expand = /foo/bar + $ cd .. + +'file:' disables [paths] entries for clone destination + + $ cat >> $HGRCPATH <<EOF + > [paths] + > gpath1 = http://hg.example.com + > EOF + + $ hg clone a gpath1 + abort: cannot create new http repository + [255] + + $ hg clone a file:gpath1 + updating to branch default + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd gpath1 + $ hg -q id + 000000000000 +
--- a/tests/test-pull-branch.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-pull-branch.t Wed Mar 30 13:23:24 2011 -0500 @@ -134,3 +134,77 @@ not updating, since new heads added (run 'hg heads' to see heads, 'hg merge' to merge) +Make changes on new branch on tt + + $ hg branch branchC + marked working directory as branch branchC + $ echo b1 > bar + $ hg ci -Am "commit on branchC on tt" + adding bar + +Make changes on default branch on t + + $ cd ../t + $ hg up -C default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo a1 > bar + $ hg ci -Am "commit on default on t" + adding bar + +Pull branchC from tt + + $ hg pull ../tt + pulling from ../tt + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + (run 'hg heads' to see heads) + +Make changes on default and branchC on tt + + $ cd ../tt + $ hg pull ../t + pulling from ../t + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + (run 'hg heads' to see heads) + $ hg up -C default + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo a1 > bar1 + $ hg ci -Am "commit on default on tt" + adding bar1 + $ hg up branchC + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo a1 > bar2 + $ hg ci -Am "commit on branchC on tt" + adding bar2 + +Make changes on default and branchC on t + + $ cd ../t + $ hg up default + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo a1 > bar3 + $ hg ci -Am "commit on default on t" + adding bar3 + $ hg up branchC + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo a1 > bar4 + $ hg ci -Am "commit on branchC on tt" + adding bar4 + +Pull from tt + + $ hg pull ../tt + pulling from ../tt + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+2 heads) + (run 'hg heads .' to see heads, 'hg merge' to merge)
--- a/tests/test-rebase-collapse.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-rebase-collapse.t Wed Mar 30 13:23:24 2011 -0500 @@ -137,6 +137,40 @@ $ cd .. +Rebasing G onto H with custom message: + + $ hg clone -q -u . a a3 + $ cd a3 + + $ hg rebase --base 6 -m 'custom message' + abort: message can only be specified with collapse + [255] + + $ hg rebase --base 6 --collapse -m 'custom message' + saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob) + + $ hg tglog + @ 6: 'custom message' + | + o 5: 'H' + | + o 4: 'F' + | + | o 3: 'D' + | | + | o 2: 'C' + | | + | o 1: 'B' + |/ + o 0: 'A' + + $ hg manifest + A + E + F + H + + $ cd .. Create repo b:
--- a/tests/test-rebase-mq.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-rebase-mq.t Wed Mar 30 13:23:24 2011 -0500 @@ -235,3 +235,73 @@ -mq1 +mq2 + +Rebase with guards + + $ hg init foo + $ cd foo + $ echo a > a + $ hg ci -Am a + adding a + +Create mq repo with guarded patches foo and bar: + + $ hg qinit + $ hg qnew foo + $ hg qguard foo +baz + $ echo foo > foo + $ hg qref + $ hg qpop + popping foo + patch queue now empty + + $ hg qnew bar + $ hg qguard bar +baz + $ echo bar > bar + $ hg qref + + $ hg qguard -l + bar: +baz + foo: +baz + + $ hg tglog + @ 1:* '[mq]: bar' tags: bar qbase qtip tip (glob) + | + o 0:* 'a' tags: qparent (glob) + +Create new head to rebase bar onto: + + $ hg up -C 0 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo b > b + $ hg add b + $ hg ci -m b + created new head + $ hg up -C 1 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo a >> a + $ hg qref + + $ hg tglog + @ 2:* '[mq]: bar' tags: bar qbase qtip tip (glob) + | + | o 1:* 'b' tags: (glob) + |/ + o 0:* 'a' tags: qparent (glob) + + +Rebase bar: + + $ hg -q rebase -d 1 + + $ hg qguard -l + foo: +baz + bar: +baz + + $ hg tglog + @ 2:* '[mq]: bar' tags: bar qbase qtip tip (glob) + | + o 1:* 'b' tags: qparent (glob) + | + o 0:* 'a' tags: (glob) +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-rebase-named-branches.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,171 @@ + $ cat >> $HGRCPATH <<EOF + > [extensions] + > graphlog= + > rebase= + > + > [alias] + > tglog = log -G --template "{rev}: '{desc}' {branches}\n" + > EOF + + + $ hg init a + $ cd a + + $ echo A > A + $ hg ci -Am A + adding A + + $ echo B > B + $ hg ci -Am B + adding B + + $ hg up -q -C 0 + + $ echo C > C + $ hg ci -Am C + adding C + created new head + + $ hg up -q -C 0 + + $ echo D > D + $ hg ci -Am D + adding D + created new head + + $ hg merge -r 2 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + + $ hg ci -m E + + $ hg up -q -C 3 + + $ echo F > F + $ hg ci -Am F + adding F + created new head + + $ cd .. + + +Rebasing descendant onto ancestor across different named branches + + $ hg clone -q -u . a a1 + + $ cd a1 + + $ hg branch dev + marked working directory as branch dev + + $ echo x > x + + $ hg add x + + $ hg ci -m 'extra named branch' + + $ hg tglog + @ 6: 'extra named branch' dev + | + o 5: 'F' + | + | o 4: 'E' + |/| + o | 3: 'D' + | | + | o 2: 'C' + |/ + | o 1: 'B' + |/ + o 0: 'A' + + $ hg rebase -s 6 -d 5 + saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob) + + $ hg tglog + @ 6: 'extra named branch' + | + o 5: 'F' + | + | o 4: 'E' + |/| + o | 3: 'D' + | | + | o 2: 'C' + |/ + | o 1: 'B' + |/ + o 0: 'A' + + $ cd .. + +Rebasing descendant onto ancestor across the same named branches + + $ hg clone -q -u . a a2 + + $ cd a2 + + $ echo x > x + + $ hg add x + + $ hg ci -m 'G' + + $ hg tglog + @ 6: 'G' + | + o 5: 'F' + | + | o 4: 'E' + |/| + o | 3: 'D' + | | + | o 2: 'C' + |/ + | o 1: 'B' + |/ + o 0: 'A' + + $ hg rebase -s 6 -d 5 + abort: source is descendant of destination + [255] + + $ cd .. + +Rebasing ancestor onto descendant across different named branches + + $ hg clone -q -u . a a3 + + $ cd a3 + + $ hg branch dev + marked working directory as branch dev + + $ echo x > x + + $ hg add x + + $ hg ci -m 'extra named branch' + + $ hg tglog + @ 6: 'extra named branch' dev + | + o 5: 'F' + | + | o 4: 'E' + |/| + o | 3: 'D' + | | + | o 2: 'C' + |/ + | o 1: 'B' + |/ + o 0: 'A' + + $ hg rebase -s 5 -d 6 + abort: source is ancestor of destination + [255] + + $ cd .. + +
--- a/tests/test-rebase-rename.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-rebase-rename.t Wed Mar 30 13:23:24 2011 -0500 @@ -119,3 +119,52 @@ copy from a copy to a-copied + $ cd .. + + +Test rebase across repeating renames: + + $ hg init repo + + $ cd repo + + $ echo testing > file1.txt + $ hg add file1.txt + $ hg ci -m "Adding file1" + + $ hg rename file1.txt file2.txt + $ hg ci -m "Rename file1 to file2" + + $ echo Unrelated change > unrelated.txt + $ hg add unrelated.txt + $ hg ci -m "Unrelated change" + + $ hg rename file2.txt file1.txt + $ hg ci -m "Rename file2 back to file1" + + $ hg update -r -2 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + + $ echo Another unrelated change >> unrelated.txt + $ hg ci -m "Another unrelated change" + created new head + + $ hg tglog + @ 4: 'Another unrelated change' + | + | o 3: 'Rename file2 back to file1' + |/ + o 2: 'Unrelated change' + | + o 1: 'Rename file1 to file2' + | + o 0: 'Adding file1' + + + $ hg rebase -s 4 -d 3 + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-backup.hg (glob) + + $ hg diff --stat -c . + unrelated.txt | 1 + + 1 files changed, 1 insertions(+), 0 deletions(-) +
--- a/tests/test-relink.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-relink.t Wed Mar 30 13:23:24 2011 -0500 @@ -20,23 +20,29 @@ $ hg init repo $ cd repo - $ echo '[ui]' > .hg/hgrc - $ echo 'username= A. Foo <a.foo@bar.com>' >> .hg/hgrc $ echo a > a $ echo b > b $ hg ci -Am addfile adding a adding b - $ echo a >> a - $ echo a >> b + $ cat $TESTDIR/binfile.bin >> a + $ cat $TESTDIR/binfile.bin >> b $ hg ci -Am changefiles +make another commit to create files larger than 1 KB to test +formatting of final byte count + + $ cat $TESTDIR/binfile.bin >> a + $ cat $TESTDIR/binfile.bin >> b + $ hg ci -m anotherchange + don't sit forever trying to double-lock the source repo $ hg relink . relinking $TESTTMP/repo/.hg/store to $TESTTMP/repo/.hg/store there is nothing to relink + Test files are read in binary mode $ python -c "file('.hg/store/data/dummy.i', 'wb').write('a\r\nb\n')" @@ -53,8 +59,6 @@ updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd clone - $ echo '[ui]' >> .hg/hgrc - $ echo 'username= A. Baz <a.baz@bar.com>' >> .hg/hgrc $ hg pull -q $ echo b >> b $ hg ci -m changeb @@ -81,7 +85,7 @@ pruned down to 2 probably relinkable files relinking: data/a.i 1/2 files (50.00%) not linkable: data/dummy.i - relinked 1 files (136 bytes reclaimed) + relinked 1 files (1.37 KB reclaimed) $ cd ..
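
The expected relink output now prints the reclaimed size with units ("1.37 KB") once the test data exceeds 1 KB, hence the extra commit above. A minimal formatter in that spirit is sketched below; the real formatting comes from Mercurial's byte-count helper, which this only approximates.

  def bytecount(nbytes):
      # coarse human-readable byte formatting, largest unit first
      units = [(1 << 30, '%.2f GB'), (1 << 20, '%.2f MB'), (1 << 10, '%.2f KB')]
      for factor, fmt in units:
          if nbytes >= factor:
              return fmt % (float(nbytes) / factor)
      return '%d bytes' % nbytes

  print(bytecount(136))    # 136 bytes
  print(bytecount(1404))   # 1.37 KB
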
--- a/tests/test-rename-merge1.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-rename-merge1.t Wed Mar 30 13:23:24 2011 -0500 @@ -131,27 +131,27 @@ $ hg init repo2089 $ cd repo2089 - $ echo 0 > A - $ hg -q ci -Am 0 + $ echo c0 > f1 + $ hg ci -Aqm0 - $ hg -q up -C null - $ echo 1 > A - $ hg -q ci -Am 1 + $ hg up null -q + $ echo c1 > f1 + $ hg ci -Aqm1 - $ hg -q up -C 0 + $ hg up 0 -q $ hg merge 1 -q --tool internal:local - $ echo 2 > A - $ hg -q ci -m 2 + $ echo c2 > f1 + $ hg ci -qm2 - $ hg -q up -C 1 - $ hg mv A a - $ hg -q ci -Am 3 + $ hg up 1 -q + $ hg mv f1 f2 + $ hg ci -Aqm3 - $ hg -q up -C 2 + $ hg up 2 -q $ hg merge 3 - merging A and a to a + merging f1 and f2 to f2 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ cat a - 2 + $ cat f2 + c2
--- a/tests/test-revset-outgoing.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-revset-outgoing.t Wed Mar 30 13:23:24 2011 -0500 @@ -39,7 +39,7 @@ $ cd b $ cat .hg/hgrc [paths] - default = */a#stable (glob) + default = $TESTTMP/a#stable $ echo red >> a $ hg ci -qm3 @@ -60,7 +60,7 @@ $ hg tout - comparing with */a (glob) + comparing with $TESTTMP/a searching for changes 2:1d4099801a4e: '3' stable @@ -79,11 +79,11 @@ $ cat .hg/hgrc [paths] - default = */a#stable (glob) + default = $TESTTMP/a#stable green = ../a#default $ hg tout green - comparing with */a (glob) + comparing with $TESTTMP/a searching for changes 3:f0461977a3db: '4'
--- a/tests/test-revset.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-revset.t Wed Mar 30 13:23:24 2011 -0500 @@ -356,3 +356,10 @@ 9 $ log 'ancestors(8) and (heads(branch("-a-b-c-")) or heads(branch(é)))' 4 + +issue2654: report a parse error if the revset was not completely parsed + + $ log '1 OR 2' + hg: parse error at 2: invalid token + [255] +
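
issue2654 is about trailing garbage: after parsing a complete revset, any leftover token ('OR' is not a revset operator, the keyword is lowercase 'or') must raise a parse error rather than being silently dropped. A contrived stand-alone illustration of that final check, assuming the input has already been tokenized into (position, text) pairs; Mercurial's actual parser is more involved.

  def parse_revset(tokens):
      """tokens: (position, text) pairs; this toy parser treats only the
      first token as a complete expression, then insists nothing is left."""
      if len(tokens) > 1:
          raise SyntaxError('parse error at %d: invalid token' % tokens[1][0])
      return tokens[0][1]

  try:
      parse_revset([(0, '1'), (2, 'OR'), (5, '2')])   # tokens of '1 OR 2'
  except SyntaxError as err:
      print(err)   # parse error at 2: invalid token
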
--- a/tests/test-schemes.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-schemes.t Wed Mar 30 13:23:24 2011 -0500 @@ -25,10 +25,11 @@ $ hg incoming --debug parts://localhost using http://localhost:$HGPORT/ - sending between command + sending capabilities command comparing with parts://localhost sending heads command searching for changes + sending known command no changes found [1]
--- a/tests/test-serve Wed Mar 30 02:22:15 2011 +0900 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,58 +0,0 @@ -#!/bin/sh - -hgserve() -{ - hg serve -a localhost -d --pid-file=hg.pid -E errors.log -v $@ \ - | sed -e "s/:$HGPORT1\\([^0-9]\\)/:HGPORT1\1/g" \ - -e "s/:$HGPORT2\\([^0-9]\\)/:HGPORT2\1/g" \ - -e 's/http:\/\/[^/]*\//http:\/\/localhost\//' - cat hg.pid >> "$DAEMON_PIDS" - echo % errors - cat errors.log - sleep 1 - if [ "$KILLQUIETLY" = "Y" ]; then - kill `cat hg.pid` 2>/dev/null - else - kill `cat hg.pid` - fi - sleep 1 -} - -hg init test -cd test - -echo '[web]' > .hg/hgrc -echo 'accesslog = access.log' >> .hg/hgrc -echo "port = $HGPORT1" >> .hg/hgrc - -echo % Without -v -hg serve -a localhost -p $HGPORT -d --pid-file=hg.pid -E errors.log -cat hg.pid >> "$DAEMON_PIDS" -if [ -f access.log ]; then - echo 'access log created - .hg/hgrc respected' -fi -echo % errors -cat errors.log - -echo % With -v -hgserve - -echo % With -v and -p HGPORT2 -hgserve -p "$HGPORT2" - -echo '% With -v and -p daytime (should fail because low port)' -KILLQUIETLY=Y -hgserve -p daytime -KILLQUIETLY=N - -echo % With --prefix foo -hgserve --prefix foo - -echo % With --prefix /foo -hgserve --prefix /foo - -echo % With --prefix foo/ -hgserve --prefix foo/ - -echo % With --prefix /foo/ -hgserve --prefix /foo/
--- a/tests/test-serve.out Wed Mar 30 02:22:15 2011 +0900 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,25 +0,0 @@ -% Without -v -access log created - .hg/hgrc respected -% errors -% With -v -listening at http://localhost/ (bound to 127.0.0.1:HGPORT1) -% errors -% With -v and -p HGPORT2 -listening at http://localhost/ (bound to 127.0.0.1:HGPORT2) -% errors -% With -v and -p daytime (should fail because low port) -abort: cannot start server at 'localhost:13': Permission denied -abort: child process failed to start -% errors -% With --prefix foo -listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) -% errors -% With --prefix /foo -listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) -% errors -% With --prefix foo/ -listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) -% errors -% With --prefix /foo/ -listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) -% errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-serve.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,82 @@ + + $ hgserve() + > { + > hg serve -a localhost -d --pid-file=hg.pid -E errors.log -v $@ \ + > | sed -e "s/:$HGPORT1\\([^0-9]\\)/:HGPORT1\1/g" \ + > -e "s/:$HGPORT2\\([^0-9]\\)/:HGPORT2\1/g" \ + > -e 's/http:\/\/[^/]*\//http:\/\/localhost\//' + > cat hg.pid >> "$DAEMON_PIDS" + > echo % errors + > cat errors.log + > sleep 1 + > if [ "$KILLQUIETLY" = "Y" ]; then + > kill `cat hg.pid` 2>/dev/null + > else + > kill `cat hg.pid` + > fi + > sleep 1 + > } + + $ hg init test + $ cd test + $ echo '[web]' > .hg/hgrc + $ echo 'accesslog = access.log' >> .hg/hgrc + $ echo "port = $HGPORT1" >> .hg/hgrc + +Without -v + + $ hg serve -a localhost -p $HGPORT -d --pid-file=hg.pid -E errors.log + $ cat hg.pid >> "$DAEMON_PIDS" + $ if [ -f access.log ]; then + $ echo 'access log created - .hg/hgrc respected' + access log created - .hg/hgrc respected + $ fi + +errors + + $ cat errors.log + +With -v + + $ hgserve + listening at http://localhost/ (bound to 127.0.0.1:HGPORT1) + % errors + +With -v and -p HGPORT2 + + $ hgserve -p "$HGPORT2" + listening at http://localhost/ (bound to 127.0.0.1:HGPORT2) + % errors + +With -v and -p daytime (should fail because low port) + + $ KILLQUIETLY=Y + $ hgserve -p daytime + abort: cannot start server at 'localhost:13': Permission denied + abort: child process failed to start + % errors + $ KILLQUIETLY=N + +With --prefix foo + + $ hgserve --prefix foo + listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) + % errors + +With --prefix /foo + + $ hgserve --prefix /foo + listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) + % errors + +With --prefix foo/ + + $ hgserve --prefix foo/ + listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) + % errors + +With --prefix /foo/ + + $ hgserve --prefix /foo/ + listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) + % errors
--- a/tests/test-ssh-clone-r.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-ssh-clone-r.t Wed Mar 30 13:23:24 2011 -0500 @@ -232,7 +232,7 @@ adding changesets adding manifests adding file changes - added 1 changesets with 0 changes to 1 files (+1 heads) + added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets @@ -256,7 +256,7 @@ adding changesets adding manifests adding file changes - added 2 changesets with 0 changes to 1 files (+1 heads) + added 2 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets
--- a/tests/test-ssh.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-ssh.t Wed Mar 30 13:23:24 2011 -0500 @@ -263,10 +263,28 @@ summary: z +clone bookmarks + + $ hg -R ../remote bookmark test + $ hg -R ../remote bookmarks + * test 2:6c0482d977a3 + $ hg clone -e "python ../dummyssh" ssh://user@dummy/remote local-bookmarks + requesting all changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 5 changes to 4 files (+1 heads) + updating to branch default + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg -R local-bookmarks bookmarks + test 2:6c0482d977a3 + passwords in ssh urls are not supported +(we use a glob here because different Python versions give different +results here) $ hg push ssh://user:erroneouspwd@dummy/remote - pushing to ssh://user:***@dummy/remote + pushing to ssh://user:*@dummy/remote (glob) abort: password in URL not supported! [255] @@ -290,3 +308,4 @@ Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio + Got arguments 1:user@dummy 2:hg -R remote serve --stdio
--- a/tests/test-subrepo-git.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-subrepo-git.t Wed Mar 30 13:23:24 2011 -0500 @@ -73,7 +73,7 @@ $ cd t $ hg clone . ../tc updating to branch default - cloning subrepo s + cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../tc $ hg debugsub @@ -96,7 +96,7 @@ $ cd ../t $ hg clone . ../ta updating to branch default - cloning subrepo s + cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../ta @@ -115,7 +115,7 @@ $ cd ../t $ hg clone . ../tb updating to branch default - cloning subrepo s + cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../tb/s @@ -155,7 +155,7 @@ added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge 2>/dev/null - pulling subrepo s + pulling subrepo s from $TESTTMP/gitroot 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat s/f @@ -199,7 +199,7 @@ $ cd ../t $ hg clone . ../td updating to branch default - cloning subrepo s + cloning subrepo s from $TESTTMP/gitroot checking out detached HEAD in subrepo s check out a git branch if you intend to make changes 3 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -232,7 +232,7 @@ $ cd ../tb $ hg pull -q $ hg update 2>/dev/null - pulling subrepo s + pulling subrepo s from $TESTTMP/gitroot 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugsub path s @@ -262,7 +262,7 @@ $ cd ../tc $ hg pull -q $ hg archive --subrepos -r 5 ../archive 2>/dev/null - pulling subrepo s + pulling subrepo s from $TESTTMP/gitroot $ cd ../archive $ cat s/f f @@ -282,7 +282,7 @@ $ hg clone ../t inner updating to branch default - cloning subrepo s + cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo inner = inner > .hgsub $ hg add .hgsub @@ -311,7 +311,7 @@ $ mkdir d $ hg clone t d/t updating to branch default - cloning subrepo s + cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Don't crash if the subrepo is missing @@ -329,7 +329,7 @@ abort: subrepo s is missing [255] $ hg update -C - cloning subrepo s + cloning subrepo s from $TESTTMP/gitroot 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg sum | grep commit commit: (clean)
--- a/tests/test-subrepo-paths.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-subrepo-paths.t Wed Mar 30 13:23:24 2011 -0500 @@ -21,6 +21,15 @@ source C:\libs\foo-lib\ revision +test cumulative remapping, the $HGRCPATH file is loaded first + + $ echo '[subpaths]' >> $HGRCPATH + $ echo 'libfoo = libbar' >> $HGRCPATH + $ hg debugsub + path sub + source C:\libs\bar-lib\ + revision + test bad subpaths pattern $ cat > .hg/hgrc <<EOF
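
The [subpaths] section remaps subrepository sources with regular expressions, and as the new test shows, rules from $HGRCPATH and the repository hgrc accumulate and are applied in turn. A bare-bones sketch of that rewriting; the pattern and source below are made up for illustration.

  import re

  def remap(source, rules):
      # apply every configured pattern, in configuration order
      for pattern, replacement in rules:
          source = re.sub(pattern, replacement, source)
      return source

  rules = [(r'libfoo', r'libbar')]   # like the rule appended to $HGRCPATH above
  print(remap('http://example.com/libs/libfoo', rules))
  # http://example.com/libs/libbar
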
--- a/tests/test-transplant.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-transplant.t Wed Mar 30 13:23:24 2011 -0500 @@ -68,6 +68,18 @@ $ hg help revsets | grep transplanted "transplanted(set)" +test tranplanted keyword + + $ hg log --template '{rev} {transplanted}\n' + 7 a53251cdf717679d1907b289f991534be05c997a + 6 722f4667af767100cb15b6a79324bf8abbfe1ef4 + 5 37a1297eb21b3ef5c5d2ffac22121a0988ed9f21 + 4 + 3 + 2 + 1 + 0 + $ hg clone ../t ../prune updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -330,6 +342,40 @@ [255] $ cd .. +test environment passed to filter + + $ hg init filter-environment + $ cd filter-environment + $ cat <<'EOF' >test-filter-environment + > #!/bin/sh + > echo "Transplant by $HGUSER" >> $1 + > echo "Transplant from rev $HGREVISION" >> $1 + > EOF + $ chmod +x test-filter-environment + $ hg transplant -s ../t --filter ./test-filter-environment 0 + filtering * (glob) + applying 17ab29e464c6 + 17ab29e464c6 transplanted to 5190e68026a0 + + $ hg log --template '{rev} {parents} {desc}\n' + 0 r1 + Transplant by test + Transplant from rev 17ab29e464c6ca53e329470efe2a9918ac617a6f + $ cd .. + +test transplant with filter handles invalid changelog + + $ hg init filter-invalid-log + $ cd filter-invalid-log + $ cat <<'EOF' >test-filter-invalid-log + > #!/bin/sh + > echo "" > $1 + > EOF + $ chmod +x test-filter-invalid-log + $ hg transplant -s ../t --filter ./test-filter-invalid-log 0 + filtering * (glob) + abort: filter corrupted changeset (no user or date) + [255] test with a win32ext like setup (differing EOLs)
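
The new filter tests rely on transplant exporting HGUSER and HGREVISION into the filter program's environment and passing the changeset file as its first argument, which is exactly what the shell snippets above read. A rough sketch of such an invocation; transplant performs this internally, so this is illustration only.

  import os, subprocess

  def run_filter(filter_cmd, changesetfile, user, revision):
      # the filter sees the changeset file as $1 and the metadata in its env
      env = dict(os.environ, HGUSER=user, HGREVISION=revision)
      return subprocess.call('%s %s' % (filter_cmd, changesetfile),
                             shell=True, env=env)
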
--- a/tests/test-unrelated-pull.t Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-unrelated-pull.t Wed Mar 30 13:23:24 2011 -0500 @@ -23,6 +23,7 @@ pulling from ../a searching for changes warning: repository is unrelated + requesting all changes adding changesets adding manifests adding file changes
--- a/tests/test-url.py Wed Mar 30 02:22:15 2011 +0900 +++ b/tests/test-url.py Wed Mar 30 13:23:24 2011 -0500 @@ -49,6 +49,142 @@ check(_verifycert(None, 'example.com'), 'no certificate received') +import doctest + +def test_url(): + """ + >>> from mercurial.url import url + + This tests for edge cases in url.URL's parsing algorithm. Most of + these aren't useful for documentation purposes, so they aren't + part of the class's doc tests. + + Query strings and fragments: + + >>> url('http://host/a?b#c') + <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'> + >>> url('http://host/a?') + <url scheme: 'http', host: 'host', path: 'a'> + >>> url('http://host/a#b#c') + <url scheme: 'http', host: 'host', path: 'a', fragment: 'b#c'> + >>> url('http://host/a#b?c') + <url scheme: 'http', host: 'host', path: 'a', fragment: 'b?c'> + >>> url('http://host/?a#b') + <url scheme: 'http', host: 'host', path: '', query: 'a', fragment: 'b'> + >>> url('http://host/?a#b', parse_query=False) + <url scheme: 'http', host: 'host', path: '?a', fragment: 'b'> + >>> url('http://host/?a#b', parse_fragment=False) + <url scheme: 'http', host: 'host', path: '', query: 'a#b'> + >>> url('http://host/?a#b', parse_query=False, parse_fragment=False) + <url scheme: 'http', host: 'host', path: '?a#b'> + + IPv6 addresses: + + >>> url('ldap://[2001:db8::7]/c=GB?objectClass?one') + <url scheme: 'ldap', host: '[2001:db8::7]', path: 'c=GB', + query: 'objectClass?one'> + >>> url('ldap://joe:xxx@[2001:db8::7]:80/c=GB?objectClass?one') + <url scheme: 'ldap', user: 'joe', passwd: 'xxx', host: '[2001:db8::7]', + port: '80', path: 'c=GB', query: 'objectClass?one'> + + Missing scheme, host, etc.: + + >>> url('://192.0.2.16:80/') + <url path: '://192.0.2.16:80/'> + >>> url('http://mercurial.selenic.com') + <url scheme: 'http', host: 'mercurial.selenic.com'> + >>> url('/foo') + <url path: '/foo'> + >>> url('bundle:/foo') + <url scheme: 'bundle', path: '/foo'> + >>> url('a?b#c') + <url path: 'a?b', fragment: 'c'> + >>> url('http://x.com?arg=/foo') + <url scheme: 'http', host: 'x.com', query: 'arg=/foo'> + >>> url('http://joe:xxx@/foo') + <url scheme: 'http', user: 'joe', passwd: 'xxx', path: 'foo'> + + Just a scheme and a path: + + >>> url('mailto:John.Doe@example.com') + <url scheme: 'mailto', path: 'John.Doe@example.com'> + >>> url('a:b:c:d') + <url scheme: 'a', path: 'b:c:d'> + + SSH examples: + + >>> url('ssh://joe@host//home/joe') + <url scheme: 'ssh', user: 'joe', host: 'host', path: '/home/joe'> + >>> url('ssh://joe:xxx@host/src') + <url scheme: 'ssh', user: 'joe', passwd: 'xxx', host: 'host', path: 'src'> + >>> url('ssh://joe:xxx@host') + <url scheme: 'ssh', user: 'joe', passwd: 'xxx', host: 'host'> + >>> url('ssh://joe@host') + <url scheme: 'ssh', user: 'joe', host: 'host'> + >>> url('ssh://host') + <url scheme: 'ssh', host: 'host'> + >>> url('ssh://') + <url scheme: 'ssh'> + >>> url('ssh:') + <url scheme: 'ssh'> + + Non-numeric port: + + >>> url('http://example.com:dd') + <url scheme: 'http', host: 'example.com', port: 'dd'> + >>> url('ssh://joe:xxx@host:ssh/foo') + <url scheme: 'ssh', user: 'joe', passwd: 'xxx', host: 'host', port: 'ssh', + path: 'foo'> + + Bad authentication credentials: + + >>> url('http://joe@joeville:123@4:@host/a?b#c') + <url scheme: 'http', user: 'joe@joeville', passwd: '123@4:', + host: 'host', path: 'a', query: 'b', fragment: 'c'> + >>> url('http://!*#?/@!*#?/:@host/a?b#c') + <url scheme: 'http', host: '!*', fragment: '?/@!*#?/:@host/a?b#c'> + >>> url('http://!*#?@!*#?:@host/a?b#c') + 
<url scheme: 'http', host: '!*', fragment: '?@!*#?:@host/a?b#c'> + >>> url('http://!*@:!*@@host/a?b#c') + <url scheme: 'http', user: '!*@', passwd: '!*@', host: 'host', + path: 'a', query: 'b', fragment: 'c'> + + File paths: + + >>> url('a/b/c/d.g.f') + <url path: 'a/b/c/d.g.f'> + >>> url('/x///z/y/') + <url path: '/x///z/y/'> + + Empty URL: + + >>> u = url('') + >>> u + <url path: ''> + >>> str(u) + '' + + Empty path with query string: + + >>> str(url('http://foo/?bar')) + 'http://foo/?bar' + + Invalid path: + + >>> u = url('http://foo/bar') + >>> u.path = 'bar' + >>> str(u) + 'http://foo/bar' + + >>> u = url('file:///foo/bar/baz') + >>> u + <url scheme: 'file', path: '/foo/bar/baz'> + >>> str(u) + 'file:/foo/bar/baz' + """ + +doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE) + # Unicode (IDN) certname isn't supported check(_verifycert(cert(u'\u4f8b.jp'), 'example.jp'), 'IDN in certificate not supported')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-wireproto.t Wed Mar 30 13:23:24 2011 -0500 @@ -0,0 +1,60 @@ + +Test wire protocol argument passing + +Setup repo: + + $ hg init repo + +Local: + + $ hg debugwireargs repo eins zwei --three drei --four vier + eins zwei drei vier + $ hg debugwireargs repo eins zwei --four vier + eins zwei None vier + $ hg debugwireargs repo eins zwei + eins zwei None None + +HTTP: + + $ hg serve -R repo -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log + $ cat hg1.pid >> $DAEMON_PIDS + + $ hg debugwireargs http://localhost:$HGPORT/ un deux trois quatre + un deux trois quatre + $ hg debugwireargs http://localhost:$HGPORT/ eins zwei --four vier + eins zwei None vier + $ hg debugwireargs http://localhost:$HGPORT/ eins zwei + eins zwei None None + $ cat access.log + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - (glob) + * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - (glob) + +SSH (try to exercise the ssh functionality with a dummy script): + + $ cat <<EOF > dummyssh + > import sys + > import os + > os.chdir(os.path.dirname(sys.argv[0])) + > if sys.argv[1] != "user@dummy": + > sys.exit(-1) + > if not os.path.exists("dummyssh"): + > sys.exit(-1) + > os.environ["SSH_CLIENT"] = "127.0.0.1 1 2" + > r = os.system(sys.argv[2]) + > sys.exit(bool(r)) + > EOF + + $ hg debugwireargs --ssh "python ./dummyssh" ssh://user@dummy/repo uno due tre quattro + uno due tre quattro + $ hg debugwireargs --ssh "python ./dummyssh" ssh://user@dummy/repo eins zwei --four vier + eins zwei None vier + $ hg debugwireargs --ssh "python ./dummyssh" ssh://user@dummy/repo eins zwei + eins zwei None None +
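
The access-log lines in the HTTP part of this test show how debugwireargs arguments travel over the wire: each non-None argument becomes a query parameter, and omitted arguments simply do not appear. A tiny sketch of that encoding, with no URL escaping and sorted parameter names; it mirrors the log format above but is not the client implementation.

  def encode_args(cmd, **args):
      pairs = ['cmd=%s' % cmd]
      pairs += ['%s=%s' % (k, v) for k, v in sorted(args.items())
                if v is not None]
      return '?' + '&'.join(pairs)

  print(encode_args('debugwireargs', one='eins', two='zwei', four='vier'))
  # ?cmd=debugwireargs&four=vier&one=eins&two=zwei
  print(encode_args('debugwireargs', one='eins', two='zwei', three=None))
  # ?cmd=debugwireargs&one=eins&two=zwei
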