changeset 6045:012ad48a07fa
merge with crew-stable
author:    Thomas Arendsen Hein <thomas@intevation.de>
date:      Fri, 08 Feb 2008 11:55:17 +0100
parents:   9360a58a09e6 (diff), 6605a03cbf87 (current diff)
children:  588ad9227b63
diffstat:  277 files changed, 12027 insertions(+), 4398 deletions(-)
--- a/.hgignore	Fri Feb 08 11:50:37 2008 +0100
+++ b/.hgignore	Fri Feb 08 11:55:17 2008 +0100
@@ -22,8 +22,10 @@
 MANIFEST
 patches
 mercurial/__version__.py
+Output/Mercurial-*.exe
 .DS_Store
+tags
+cscope.*

 syntax: regexp
 ^\.pc/
-Output/Mercurial-[0-9.]*.exe
--- a/CONTRIBUTORS	Fri Feb 08 11:50:37 2008 +0100
+++ b/CONTRIBUTORS	Fri Feb 08 11:55:17 2008 +0100
@@ -1,4 +1,7 @@
-Andrea Arcangeli <andrea at suse.de>
+[This file is here for historical purposes, all recent contributors
+should appear in the changelog directly]
+
+Andrea Arcangeli <andrea at suse.de>
 Thomas Arendsen Hein <thomas at intevation.de>
 Goffredo Baroncelli <kreijack at libero.it>
 Muli Ben-Yehuda <mulix at mulix.org>
@@ -36,5 +39,3 @@
 Rafael Villar Burke <pachi at mmn-arquitectos.com>
 Tristan Wibberley <tristan at wibberley.org>
 Mark Williamson <mark.williamson at cl.cam.ac.uk>
-
-If you are a contributor and don't see your name here, please let me know.
--- a/contrib/bash_completion	Fri Feb 08 11:50:37 2008 +0100
+++ b/contrib/bash_completion	Fri Feb 08 11:55:17 2008 +0100
@@ -305,6 +305,15 @@
     _hg_ext_mq_patchlist qunapplied
 }

+_hg_cmd_qgoto()
+{
+    if [[ "$prev" = @(-n|--name) ]]; then
+        _hg_ext_mq_queues
+        return
+    fi
+    _hg_ext_mq_patchlist qseries
+}
+
 _hg_cmd_qdelete()
 {
     local qcmd=qunapplied
--- a/contrib/churn.py	Fri Feb 08 11:50:37 2008 +0100
+++ b/contrib/churn.py	Fri Feb 08 11:55:17 2008 +0100
@@ -12,7 +12,7 @@
 # <alias email> <actual email>

 from mercurial.i18n import gettext as _
-from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
+from mercurial import hg, mdiff, cmdutil, ui, util, templatefilters, node
 import os, sys

 def get_tty_width():
@@ -69,7 +69,7 @@
     modified, added, removed, deleted, unknown = changes

     who = repo.changelog.read(node2)[1]
-    who = templater.email(who) # get the email of the person
+    who = util.email(who) # get the email of the person

     mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
     mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
@@ -114,23 +114,24 @@
         who, lines = __gather(ui, repo, node1, node2)

         # remap the owner if possible
-        if amap.has_key(who):
+        if who in amap:
             ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
             who = amap[who]

-        if not stats.has_key(who):
+        if not who in stats:
             stats[who] = 0
         stats[who] += lines

         ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))

         if progress:
+            nr_revs = max(nr_revs, 1)
             if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
-                ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
+                ui.write("\rGenerating stats: %d%%" % (int(100.0*cur_rev/nr_revs),))
                 sys.stdout.flush()

     if progress:
-        ui.write("done\n")
+        ui.write("\r")
         sys.stdout.flush()

     return stats
@@ -144,6 +145,7 @@
     return s[0:l]

 def graph(n, maximum, width, char):
+    maximum = max(1, maximum)
     n = int(n * width / float(maximum))

     return char * (n)
@@ -178,6 +180,8 @@
     ordered = stats.items()
     ordered.sort(lambda x, y: cmp(y[1], x[1]))

+    if not ordered:
+        return
     maximum = ordered[0][1]

     width = get_tty_width()
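The two max() guards added to churn.py above protect the percentage and bar-width
computations against a zero denominator when the repository, or the gathered stats,
are empty. A minimal standalone sketch of the pattern (not part of churn.py itself):

    # Sketch of the guard pattern used above; clamping the denominator to at
    # least 1 keeps the divisions safe when there is nothing to count yet.
    def percent(done, total):
        total = max(total, 1)          # avoid ZeroDivisionError for total == 0
        return int(100.0 * done / total)

    def bar_width(value, maximum, width):
        maximum = max(1, maximum)      # same guard as graph() above
        return int(value * width / float(maximum))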
--- a/contrib/hgk Fri Feb 08 11:50:37 2008 +0100 +++ b/contrib/hgk Fri Feb 08 11:55:17 2008 +0100 @@ -274,6 +274,7 @@ set comname {} set comdate {} set rev {} + set branch {} if {![info exists nchildren($id)]} { set children($id) {} set nchildren($id) 0 @@ -310,6 +311,8 @@ set comname [join [lrange $line 1 [expr {$x - 1}]]] } elseif {$tag == "revision"} { set rev [lindex $line 1] + } elseif {$tag == "branch"} { + set branch [join [lrange $line 1 end]] } } } else { @@ -334,7 +337,7 @@ set comdate [clock format $comdate -format "%Y-%m-%d %H:%M:%S"] } set commitinfo($id) [list $headline $auname $audate \ - $comname $comdate $comment $rev] + $comname $comdate $comment $rev $branch] } proc readrefs {} { @@ -649,7 +652,7 @@ if {$stuffsaved} return if {![winfo viewable .]} return catch { - set f [open "~/.gitk-new" w] + set f [open "~/.hgk-new" w] puts $f [list set mainfont $mainfont] puts $f [list set curidfont $curidfont] puts $f [list set textfont $textfont] @@ -687,7 +690,7 @@ puts $f "#" puts $f "set authorcolors {$authorcolors}" close $f - file rename -force "~/.gitk-new" "~/.gitk" + file rename -force "~/.hgk-new" "~/.hgk" } set stuffsaved 1 } @@ -2286,6 +2289,9 @@ $ctext mark gravity fmark.0 left set info $commitinfo($id) $ctext insert end "Revision: [lindex $info 6]\n" + if {[llength [lindex $info 7]] > 0} { + $ctext insert end "Branch: [lindex $info 7]\n" + } $ctext insert end "Author: [lindex $info 1] [lindex $info 2]\n" $ctext insert end "Committer: [lindex $info 3] [lindex $info 4]\n" if {[info exists idtags($id)]} { @@ -3844,10 +3850,10 @@ set colors {green red blue magenta darkgrey brown orange} set authorcolors { - deeppink mediumorchid blue burlywood4 goldenrod slateblue red2 navy dimgrey + black blue deeppink mediumorchid blue burlywood4 goldenrod slateblue red2 navy dimgrey } -catch {source ~/.gitk} +catch {source ~/.hgk} if {$curidfont == ""} { # initialize late based on current mainfont set curidfont "$mainfont bold italic underline"
--- a/contrib/hgwebdir.fcgi	Fri Feb 08 11:50:37 2008 +0100
+++ b/contrib/hgwebdir.fcgi	Fri Feb 08 11:55:17 2008 +0100
@@ -23,6 +23,7 @@

 from mercurial.hgweb.hgwebdir_mod import hgwebdir
 from mercurial.hgweb.request import wsgiapplication
+from mercurial import dispatch, ui
 from flup.server.fcgi import WSGIServer

 # The config file looks like this.  You can have paths to individual
@@ -44,7 +45,8 @@
 # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples
 # or use a dictionary with entries like 'virtual/path': '/real/path'

-def make_web_app():
-    return hgwebdir("hgweb.config")
+def web_app(ui):
+    return lambda: hgwebdir("hgweb.config", ui)

-WSGIServer(wsgiapplication(make_web_app)).run()
+u = ui.ui(report_untrusted=False, interactive=False)
+dispatch.profiled(u, lambda: WSGIServer(wsgiapplication(web_app(u))).run())
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/mergetools.hgrc	Fri Feb 08 11:55:17 2008 +0100
@@ -0,0 +1,49 @@
+# Some default global settings for common merge tools
+
+[merge-tools]
+kdiff3.args=--auto -L1 base --L2 local --L3 other $base $local $other -o $output
+kdiff3.regkey=Software\KDiff3
+kdiff3.regappend=\kdiff3.exe
+kdiff3.fixeol=True
+kdiff3.gui=True
+
+gvimdiff.args=--nofork -d -g -O $local $other $base
+gvimdiff.regkey=Software\Vim\GVim
+gvimdiff.regname=path
+gvimdiff.priority=-9
+
+merge.checkconflicts=True
+merge.priority=-10
+
+gpyfm.gui=True
+
+meld.gui=True
+
+tkdiff.args=$local $other -a $base -o $output
+tkdiff.gui=True
+tkdiff.priority=-8
+
+xxdiff.args=--show-merged-pane --exit-with-merge-status --title1 local --title2 base --title3 other --merged-filename $output --merge $local $base $other
+xxdiff.gui=True
+xxdiff.priority=-8
+
+diffmerge.args=--nosplash --merge --title1=base --title2=local --title3=other $base $local $other
+diffmerge.gui=True
+
+p4merge.args=$base $local $other $output
+p4merge.regkey=Software\Perforce\Environment
+p4merge.regname=P4INSTROOT
+p4merge.regappend=\p4merge.exe
+p4merge.gui=True
+p4merge.priority=-8
+
+tortoisemerge.args=/base: $output /mine:$local /theirs:$other /merged:$output
+tortoisemerge.regkey=Software\TortoiseSVN
+tortoisemerge.gui=True
+
+ecmerge.args=$base $local $other --mode=merge3 --title0=base --title1=local --title2=other --to=$output
+ecmerge.regkey=Software\Elli\xc3\xa9 Computing\Merge
+ecmerge.gui=True
+
+filemerge.args=-left $other -right $local -ancestor $base -merge $output
+filemerge.gui=True
--- a/contrib/simplemerge Fri Feb 08 11:50:37 2008 +0100 +++ b/contrib/simplemerge Fri Feb 08 11:55:17 2008 +0100 @@ -1,503 +1,11 @@ #!/usr/bin/env python -# Copyright (C) 2004, 2005 Canonical Ltd -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - -# mbp: "you know that thing where cvs gives you conflict markers?" -# s: "i hate that." from mercurial import demandimport demandimport.enable() -from mercurial import util, mdiff, fancyopts +import os, sys from mercurial.i18n import _ - - -class CantReprocessAndShowBase(Exception): - pass - - -def warn(message): - sys.stdout.flush() - sys.stderr.write(message) - sys.stderr.flush() - - -def intersect(ra, rb): - """Given two ranges return the range where they intersect or None. - - >>> intersect((0, 10), (0, 6)) - (0, 6) - >>> intersect((0, 10), (5, 15)) - (5, 10) - >>> intersect((0, 10), (10, 15)) - >>> intersect((0, 9), (10, 15)) - >>> intersect((0, 9), (7, 15)) - (7, 9) - """ - assert ra[0] <= ra[1] - assert rb[0] <= rb[1] - - sa = max(ra[0], rb[0]) - sb = min(ra[1], rb[1]) - if sa < sb: - return sa, sb - else: - return None - - -def compare_range(a, astart, aend, b, bstart, bend): - """Compare a[astart:aend] == b[bstart:bend], without slicing. - """ - if (aend-astart) != (bend-bstart): - return False - for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)): - if a[ia] != b[ib]: - return False - else: - return True - - - - -class Merge3Text(object): - """3-way merge of texts. - - Given strings BASE, OTHER, THIS, tries to produce a combined text - incorporating the changes from both BASE->OTHER and BASE->THIS.""" - def __init__(self, basetext, atext, btext, base=None, a=None, b=None): - self.basetext = basetext - self.atext = atext - self.btext = btext - if base is None: - base = mdiff.splitnewlines(basetext) - if a is None: - a = mdiff.splitnewlines(atext) - if b is None: - b = mdiff.splitnewlines(btext) - self.base = base - self.a = a - self.b = b - - - - def merge_lines(self, - name_a=None, - name_b=None, - name_base=None, - start_marker='<<<<<<<', - mid_marker='=======', - end_marker='>>>>>>>', - base_marker=None, - reprocess=False): - """Return merge in cvs-like form. 
- """ - self.conflicts = False - newline = '\n' - if len(self.a) > 0: - if self.a[0].endswith('\r\n'): - newline = '\r\n' - elif self.a[0].endswith('\r'): - newline = '\r' - if base_marker and reprocess: - raise CantReprocessAndShowBase() - if name_a: - start_marker = start_marker + ' ' + name_a - if name_b: - end_marker = end_marker + ' ' + name_b - if name_base and base_marker: - base_marker = base_marker + ' ' + name_base - merge_regions = self.merge_regions() - if reprocess is True: - merge_regions = self.reprocess_merge_regions(merge_regions) - for t in merge_regions: - what = t[0] - if what == 'unchanged': - for i in range(t[1], t[2]): - yield self.base[i] - elif what == 'a' or what == 'same': - for i in range(t[1], t[2]): - yield self.a[i] - elif what == 'b': - for i in range(t[1], t[2]): - yield self.b[i] - elif what == 'conflict': - self.conflicts = True - yield start_marker + newline - for i in range(t[3], t[4]): - yield self.a[i] - if base_marker is not None: - yield base_marker + newline - for i in range(t[1], t[2]): - yield self.base[i] - yield mid_marker + newline - for i in range(t[5], t[6]): - yield self.b[i] - yield end_marker + newline - else: - raise ValueError(what) - - - - - - def merge_annotated(self): - """Return merge with conflicts, showing origin of lines. - - Most useful for debugging merge. - """ - for t in self.merge_regions(): - what = t[0] - if what == 'unchanged': - for i in range(t[1], t[2]): - yield 'u | ' + self.base[i] - elif what == 'a' or what == 'same': - for i in range(t[1], t[2]): - yield what[0] + ' | ' + self.a[i] - elif what == 'b': - for i in range(t[1], t[2]): - yield 'b | ' + self.b[i] - elif what == 'conflict': - yield '<<<<\n' - for i in range(t[3], t[4]): - yield 'A | ' + self.a[i] - yield '----\n' - for i in range(t[5], t[6]): - yield 'B | ' + self.b[i] - yield '>>>>\n' - else: - raise ValueError(what) - - - - - - def merge_groups(self): - """Yield sequence of line groups. Each one is a tuple: - - 'unchanged', lines - Lines unchanged from base - - 'a', lines - Lines taken from a - - 'same', lines - Lines taken from a (and equal to b) - - 'b', lines - Lines taken from b - - 'conflict', base_lines, a_lines, b_lines - Lines from base were changed to either a or b and conflict. - """ - for t in self.merge_regions(): - what = t[0] - if what == 'unchanged': - yield what, self.base[t[1]:t[2]] - elif what == 'a' or what == 'same': - yield what, self.a[t[1]:t[2]] - elif what == 'b': - yield what, self.b[t[1]:t[2]] - elif what == 'conflict': - yield (what, - self.base[t[1]:t[2]], - self.a[t[3]:t[4]], - self.b[t[5]:t[6]]) - else: - raise ValueError(what) - - - def merge_regions(self): - """Return sequences of matching and conflicting regions. - - This returns tuples, where the first value says what kind we - have: - - 'unchanged', start, end - Take a region of base[start:end] - - 'same', astart, aend - b and a are different from base but give the same result - - 'a', start, end - Non-clashing insertion from a[start:end] - - Method is as follows: - - The two sequences align only on regions which match the base - and both descendents. These are found by doing a two-way diff - of each one against the base, and then finding the - intersections between those regions. These "sync regions" - are by definition unchanged in both and easily dealt with. - - The regions in between can be in any of three cases: - conflicted, or changed on only one side. 
- """ - - # section a[0:ia] has been disposed of, etc - iz = ia = ib = 0 - - for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions(): - #print 'match base [%d:%d]' % (zmatch, zend) - - matchlen = zend - zmatch - assert matchlen >= 0 - assert matchlen == (aend - amatch) - assert matchlen == (bend - bmatch) - - len_a = amatch - ia - len_b = bmatch - ib - len_base = zmatch - iz - assert len_a >= 0 - assert len_b >= 0 - assert len_base >= 0 - - #print 'unmatched a=%d, b=%d' % (len_a, len_b) - - if len_a or len_b: - # try to avoid actually slicing the lists - equal_a = compare_range(self.a, ia, amatch, - self.base, iz, zmatch) - equal_b = compare_range(self.b, ib, bmatch, - self.base, iz, zmatch) - same = compare_range(self.a, ia, amatch, - self.b, ib, bmatch) - - if same: - yield 'same', ia, amatch - elif equal_a and not equal_b: - yield 'b', ib, bmatch - elif equal_b and not equal_a: - yield 'a', ia, amatch - elif not equal_a and not equal_b: - yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch - else: - raise AssertionError("can't handle a=b=base but unmatched") - - ia = amatch - ib = bmatch - iz = zmatch - - # if the same part of the base was deleted on both sides - # that's OK, we can just skip it. - - - if matchlen > 0: - assert ia == amatch - assert ib == bmatch - assert iz == zmatch - - yield 'unchanged', zmatch, zend - iz = zend - ia = aend - ib = bend - - - def reprocess_merge_regions(self, merge_regions): - """Where there are conflict regions, remove the agreed lines. - - Lines where both A and B have made the same changes are - eliminated. - """ - for region in merge_regions: - if region[0] != "conflict": - yield region - continue - type, iz, zmatch, ia, amatch, ib, bmatch = region - a_region = self.a[ia:amatch] - b_region = self.b[ib:bmatch] - matches = mdiff.get_matching_blocks(''.join(a_region), - ''.join(b_region)) - next_a = ia - next_b = ib - for region_ia, region_ib, region_len in matches[:-1]: - region_ia += ia - region_ib += ib - reg = self.mismatch_region(next_a, region_ia, next_b, - region_ib) - if reg is not None: - yield reg - yield 'same', region_ia, region_len+region_ia - next_a = region_ia + region_len - next_b = region_ib + region_len - reg = self.mismatch_region(next_a, amatch, next_b, bmatch) - if reg is not None: - yield reg - - - def mismatch_region(next_a, region_ia, next_b, region_ib): - if next_a < region_ia or next_b < region_ib: - return 'conflict', None, None, next_a, region_ia, next_b, region_ib - mismatch_region = staticmethod(mismatch_region) - - - def find_sync_regions(self): - """Return a list of sync regions, where both descendents match the base. - - Generates a list of (base1, base2, a1, a2, b1, b2). There is - always a zero-length sync region at the end of all the files. - """ - - ia = ib = 0 - amatches = mdiff.get_matching_blocks(self.basetext, self.atext) - bmatches = mdiff.get_matching_blocks(self.basetext, self.btext) - len_a = len(amatches) - len_b = len(bmatches) - - sl = [] - - while ia < len_a and ib < len_b: - abase, amatch, alen = amatches[ia] - bbase, bmatch, blen = bmatches[ib] - - # there is an unconflicted block at i; how long does it - # extend? until whichever one ends earlier. 
- i = intersect((abase, abase+alen), (bbase, bbase+blen)) - if i: - intbase = i[0] - intend = i[1] - intlen = intend - intbase - - # found a match of base[i[0], i[1]]; this may be less than - # the region that matches in either one - assert intlen <= alen - assert intlen <= blen - assert abase <= intbase - assert bbase <= intbase - - asub = amatch + (intbase - abase) - bsub = bmatch + (intbase - bbase) - aend = asub + intlen - bend = bsub + intlen - - assert self.base[intbase:intend] == self.a[asub:aend], \ - (self.base[intbase:intend], self.a[asub:aend]) - - assert self.base[intbase:intend] == self.b[bsub:bend] - - sl.append((intbase, intend, - asub, aend, - bsub, bend)) - - # advance whichever one ends first in the base text - if (abase + alen) < (bbase + blen): - ia += 1 - else: - ib += 1 - - intbase = len(self.base) - abase = len(self.a) - bbase = len(self.b) - sl.append((intbase, intbase, abase, abase, bbase, bbase)) - - return sl - - - - def find_unconflicted(self): - """Return a list of ranges in base that are not conflicted.""" - am = mdiff.get_matching_blocks(self.basetext, self.atext) - bm = mdiff.get_matching_blocks(self.basetext, self.btext) - - unc = [] - - while am and bm: - # there is an unconflicted block at i; how long does it - # extend? until whichever one ends earlier. - a1 = am[0][0] - a2 = a1 + am[0][2] - b1 = bm[0][0] - b2 = b1 + bm[0][2] - i = intersect((a1, a2), (b1, b2)) - if i: - unc.append(i) - - if a2 < b2: - del am[0] - else: - del bm[0] - - return unc - - -# bzr compatible interface, for the tests -class Merge3(Merge3Text): - """3-way merge of texts. - - Given BASE, OTHER, THIS, tries to produce a combined text - incorporating the changes from both BASE->OTHER and BASE->THIS. - All three will typically be sequences of lines.""" - def __init__(self, base, a, b): - basetext = '\n'.join([i.strip('\n') for i in base] + ['']) - atext = '\n'.join([i.strip('\n') for i in a] + ['']) - btext = '\n'.join([i.strip('\n') for i in b] + ['']) - if util.binary(basetext) or util.binary(atext) or util.binary(btext): - raise util.Abort(_("don't know how to merge binary files")) - Merge3Text.__init__(self, basetext, atext, btext, base, a, b) - - -def simplemerge(local, base, other, **opts): - def readfile(filename): - f = open(filename, "rb") - text = f.read() - f.close() - if util.binary(text): - msg = _("%s looks like a binary file.") % filename - if not opts.get('text'): - raise util.Abort(msg) - elif not opts.get('quiet'): - warn(_('warning: %s\n') % msg) - return text - - name_a = local - name_b = other - labels = opts.get('label', []) - if labels: - name_a = labels.pop(0) - if labels: - name_b = labels.pop(0) - if labels: - raise util.Abort(_("can only specify two labels.")) - - localtext = readfile(local) - basetext = readfile(base) - othertext = readfile(other) - - orig = local - local = os.path.realpath(local) - if not opts.get('print'): - opener = util.opener(os.path.dirname(local)) - out = opener(os.path.basename(local), "w", atomictemp=True) - else: - out = sys.stdout - - reprocess = not opts.get('no_minimal') - - m3 = Merge3Text(basetext, localtext, othertext) - for line in m3.merge_lines(name_a=name_a, name_b=name_b, - reprocess=reprocess): - out.write(line) - - if not opts.get('print'): - out.rename() - - if m3.conflicts: - if not opts.get('quiet'): - warn(_("warning: conflicts during merge.\n")) - return 1 +from mercurial import simplemerge, fancyopts, util options = [('L', 'label', [], _('labels to use on conflict markers')), ('a', 'text', None, _('treat all 
files as text')), @@ -517,6 +25,9 @@ By default, LOCAL is overwritten with the results of this operation. ''') +class ParseError(Exception): + """Exception raised on errors in parsing the command line.""" + def showhelp(): sys.stdout.write(usage) sys.stdout.write('\noptions:\n') @@ -530,33 +41,24 @@ for first, second in out_opts: sys.stdout.write(' %-*s %s\n' % (opts_len, first, second)) -class ParseError(Exception): - """Exception raised on errors in parsing the command line.""" - -def main(argv): +try: + opts = {} try: - opts = {} - try: - args = fancyopts.fancyopts(argv[1:], options, opts) - except fancyopts.getopt.GetoptError, e: - raise ParseError(e) - if opts['help']: - showhelp() - return 0 - if len(args) != 3: - raise ParseError(_('wrong number of arguments')) - return simplemerge(*args, **opts) - except ParseError, e: - sys.stdout.write("%s: %s\n" % (sys.argv[0], e)) + args = fancyopts.fancyopts(sys.argv[1:], options, opts) + except fancyopts.getopt.GetoptError, e: + raise ParseError(e) + if opts['help']: showhelp() - return 1 - except util.Abort, e: - sys.stderr.write("abort: %s\n" % e) - return 255 - except KeyboardInterrupt: - return 255 - -if __name__ == '__main__': - import sys - import os - sys.exit(main(sys.argv)) + sys.exit(0) + if len(args) != 3: + raise ParseError(_('wrong number of arguments')) + sys.exit(simplemerge.simplemerge(*args, **opts)) +except ParseError, e: + sys.stdout.write("%s: %s\n" % (sys.argv[0], e)) + showhelp() + sys.exit(1) +except util.Abort, e: + sys.stderr.write("abort: %s\n" % e) + sys.exit(255) +except KeyboardInterrupt: + sys.exit(255)
--- a/contrib/win32/ReadMe.html	Fri Feb 08 11:50:37 2008 +0100
+++ b/contrib/win32/ReadMe.html	Fri Feb 08 11:55:17 2008 +0100
@@ -33,7 +33,7 @@
     href="http://hgbook.red-bean.com/">Distributed revision control with
     Mercurial</a>.</p>

-    <p>By default, Mercurial installs to <tt>C:\Mercurial</tt>.  The
+    <p>By default, Mercurial installs to <tt>C:\Program Files\Mercurial</tt>.  The
     Mercurial command is called <tt>hg.exe</tt>.</p>

     <h1>Testing Mercurial after you've installed it</h1>
--- a/contrib/win32/mercurial.iss	Fri Feb 08 11:50:37 2008 +0100
+++ b/contrib/win32/mercurial.iss	Fri Feb 08 11:55:17 2008 +0100
@@ -15,8 +15,8 @@
 AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}
 AppContact=mercurial@selenic.com
 OutputBaseFilename=Mercurial-snapshot
-DefaultDirName={sd}\Mercurial
-SourceDir=C:\hg\hg-release
+DefaultDirName={pf}\Mercurial
+SourceDir=..\..
 VersionInfoDescription=Mercurial distributed SCM
 VersionInfoCopyright=Copyright 2005-2007 Matt Mackall and others
 VersionInfoCompany=Matt Mackall and others
@@ -29,17 +29,17 @@

 [Files]
 Source: contrib\mercurial.el; DestDir: {app}/Contrib
+Source: contrib\vim\*.*; DestDir: {app}/Contrib/Vim
+Source: contrib\zsh_completion; DestDir: {app}/Contrib
 Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
 Source: contrib\win32\mercurial.ini; DestDir: {app}; DestName: Mercurial.ini; Flags: confirmoverwrite
 Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
 Source: dist\hg.exe; DestDir: {app}; AfterInstall: Touch('{app}\hg.exe.local')
 Source: dist\library.zip; DestDir: {app}
-Source: dist\patch.exe; DestDir: {app}
 Source: dist\mfc71.dll; DestDir: {app}
 Source: dist\msvcr71.dll; DestDir: {app}
 Source: dist\w9xpopen.exe; DestDir: {app}
 Source: dist\add_path.exe; DestDir: {app}
-Source: doc\*.txt; DestDir: {app}\Docs
 Source: doc\*.html; DestDir: {app}\Docs
 Source: templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
 Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
--- a/contrib/zsh_completion Fri Feb 08 11:50:37 2008 +0100 +++ b/contrib/zsh_completion Fri Feb 08 11:55:17 2008 +0100 @@ -13,6 +13,9 @@ # option) any later version. # +emulate -LR zsh +setopt extendedglob + local curcontext="$curcontext" state line typeset -A _hg_cmd_globals @@ -153,9 +156,9 @@ typeset -a tags local tag rev - _hg_cmd tags 2> /dev/null | while read tag rev + _hg_cmd tags 2> /dev/null | while read tag do - tags+=($tag) + tags+=(${tag/ # [0-9]#:*}) done (( $#tags )) && _describe -t tags 'tags' tags } @@ -674,13 +677,13 @@ # MQ _hg_qseries() { typeset -a patches - patches=($(_hg_cmd qseries 2>/dev/null)) + patches=(${(f)"$(_hg_cmd qseries 2>/dev/null)"}) (( $#patches )) && _describe -t hg-patches 'patches' patches } _hg_qapplied() { typeset -a patches - patches=($(_hg_cmd qapplied 2>/dev/null)) + patches=(${(f)"$(_hg_cmd qapplied 2>/dev/null)"}) if (( $#patches )) then patches+=(qbase qtip) @@ -690,7 +693,7 @@ _hg_qunapplied() { typeset -a patches - patches=($(_hg_cmd qunapplied 2>/dev/null)) + patches=(${(f)"$(_hg_cmd qunapplied 2>/dev/null)"}) (( $#patches )) && _describe -t hg-unapplied-patches 'unapplied patches' patches } @@ -730,6 +733,12 @@ '*:unapplied patch:_hg_qunapplied' } +_hg_cmd_qgoto() { + _arguments -s -w : $_hg_global_opts \ + '(--force -f)'{-f,--force}'[overwrite any local changes]' \ + ':patch:_hg_qseries' +} + _hg_cmd_qguard() { _arguments -s -w : $_hg_global_opts \ '(--list -l)'{-l,--list}'[list all patches and guards]' \
--- a/doc/hg.1.txt	Fri Feb 08 11:50:37 2008 +0100
+++ b/doc/hg.1.txt	Fri Feb 08 11:55:17 2008 +0100
@@ -91,11 +91,11 @@

 FILES
 -----
- .hgignore::
+ repo/.hgignore::
     This file contains regular expressions (one per line) that describe file
     names that should be ignored by hg. For details, see hgignore(5).

- .hgtags::
+ repo/.hgtags::
    This file contains changeset hash values and text tag names (one of each
    separated by spaces) that correspond to tagged versions of the repository
    contents.
--- a/doc/hgmerge.1.txt	Fri Feb 08 11:50:37 2008 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-HGMERGE(1)
-==========
-Matt Mackall <mpm@selenic.com>
-v0.1, 27 May 2005
-
-NAME
-----
-hgmerge - default wrapper to merge files in Mercurial SCM system
-
-SYNOPSIS
---------
-'hgmerge' local ancestor remote
-
-DESCRIPTION
------------
-The hgmerge(1) command provides a graphical interface to merge files in the
-Mercurial system. It is a simple wrapper around kdiff3, merge(1) and tkdiff(1),
-or simply diff(1) and patch(1) depending on what is present on the system.
-
-hgmerge(1) is used by the Mercurial SCM if the environment variable HGMERGE is
-not set.
-
-AUTHOR
-------
-Written by Vincent Danjean <Vincent.Danjean@free.fr>
-
-SEE ALSO
---------
-hg(1) - the command line interface to Mercurial SCM
-
-COPYING
--------
-Copyright \(C) 2005-2007 Matt Mackall.
-Free use of this software is granted under the terms of the GNU General
-Public License (GPL).
--- a/doc/hgrc.5.txt Fri Feb 08 11:50:37 2008 +0100 +++ b/doc/hgrc.5.txt Fri Feb 08 11:55:17 2008 +0100 @@ -17,7 +17,9 @@ Mercurial reads configuration data from several files, if they exist. The names of these files depend on the system on which Mercurial is -installed. +installed. Windows registry keys contain PATH-like strings, every +part must reference a Mercurial.ini file or be a directory where *.rc +files will be read. (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc:: (Unix) <install-root>/etc/mercurial/hgrc:: @@ -29,6 +31,8 @@ (Unix) /etc/mercurial/hgrc.d/*.rc:: (Unix) /etc/mercurial/hgrc:: +(Windows) HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial:: + or:: (Windows) C:\Mercurial\Mercurial.ini:: Per-system configuration files, for the system on which Mercurial is running. Options in these files apply to all Mercurial @@ -120,21 +124,26 @@ NOTE: the tempfile mechanism is recommended for Windows systems, where the standard shell I/O redirection operators often have - strange effects. In particular, if you are doing line ending - conversion on Windows using the popular dos2unix and unix2dos - programs, you *must* use the tempfile mechanism, as using pipes will - corrupt the contents of your files. + strange effects and may corrupt the contents of your files. - Tempfile example: + The most common usage is for LF <-> CRLF translation on Windows. + For this, use the "smart" convertors which check for binary files: + [extensions] + hgext.win32text = [encode] - # convert files to unix line ending conventions on checkin - **.txt = tempfile: dos2unix -n INFILE OUTFILE - + ** = cleverencode: [decode] - # convert files to windows line ending conventions when writing - # them to the working dir - **.txt = tempfile: unix2dos -n INFILE OUTFILE + ** = cleverdecode: + + or if you only want to translate certain files: + + [extensions] + hgext.win32text = + [encode] + **.txt = dumbencode: + [decode] + **.txt = dumbdecode: defaults:: Use the [defaults] section to define command defaults, i.e. the @@ -224,6 +233,78 @@ you to store longer filenames in some situations at the expense of compatibility. +merge-patterns:: + This section specifies merge tools to associate with particular file + patterns. Tools matched here will take precedence over the default + merge tool. Patterns are globs by default, rooted at the repository root. + + Example: + + [merge-patterns] + **.c = kdiff3 + **.jpg = myimgmerge + +merge-tools:: + This section configures external merge tools to use for file-level + merges. + + Example ~/.hgrc: + + [merge-tools] + # Override stock tool location + kdiff3.executable = ~/bin/kdiff3 + # Specify command line + kdiff3.args = $base $local $other -o $output + # Give higher priority + kdiff3.priority = 1 + + # Define new tool + myHtmlTool.args = -m $local $other $base $output + myHtmlTool.regkey = Software\FooSoftware\HtmlMerge + myHtmlTool.priority = 1 + + Supported arguments: + priority;; + The priority in which to evaluate this tool. + Default: 0. + executable;; + Either just the name of the executable or its pathname. + Default: the tool name. + args;; + The arguments to pass to the tool executable. You can refer to the files + being merged as well as the output file through these variables: $base, + $local, $other, $output. + Default: $local $base $other + premerge;; + Attempt to run internal non-interactive 3-way merge tool before + launching external tool. + Default: True + binary;; + This tool can merge binary files. Defaults to False, unless tool + was selected by file pattern match. 
+ symlink;; + This tool can merge symlinks. Defaults to False, even if tool was + selected by file pattern match. + checkconflicts;; + Check whether there are conflicts even though the tool reported + success. + Default: False + fixeol;; + Attempt to fix up EOL changes caused by the merge tool. + Default: False + gui:; + This tool requires a graphical interface to run. Default: False + regkey;; + Windows registry key which describes install location of this tool. + Mercurial will search for this key first under HKEY_CURRENT_USER and + then under HKEY_LOCAL_MACHINE. Default: None + regname;; + Name of value to read from specified registry key. Defaults to the + unnamed (default) value. + regappend;; + String to append to the value read from the registry, typically the + executable name of the tool. Default: None + hooks:: Commands or Python functions that get automatically executed by various actions such as starting or finishing a commit. Multiple @@ -277,7 +358,7 @@ commit to proceed. Non-zero status will cause the commit to fail. Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2. preoutgoing;; - Run before computing changes to send from the local repository to + Run before collecting changes to send from the local repository to another. Non-zero status will cause failure. This lets you prevent pull over http or ssh. Also prevents against local pull, push (outbound) or bundle commands, but not effective, since you @@ -394,6 +475,20 @@ Optional. Directory or URL to use when pushing if no destination is specified. +profile:: + Configuration of profiling options, for in-depth performance + analysis. Mostly useful to developers. + enable;; + Enable a particular profiling mode. Useful for profiling + server-side processes. "lsprof" enables modern profiling. + "hotshot" is deprecated, and produces less reliable results. + Default is no profiling. + output;; + The name of a file to write profiling data to. Each occurrence of + "%%p" will be replaced with the current process ID (the repeated + "%" protects against the config parser's string interpolator). + Default output is to stderr. + server:: Controls generic server settings. uncompressed;; @@ -444,7 +539,18 @@ Template string for commands that print changesets. merge;; The conflict resolution program to use during a manual merge. - Default is "hgmerge". + There are some internal tools available: + + internal:local;; + keep the local version + internal:other;; + use the other version + internal:merge;; + use the internal non-interactive merge tool + internal:fail;; + fail to merge + + See the merge-tools section for more information on configuring tools. patch;; command to use to apply patches. Look for 'gpatch' or 'patch' in PATH if unset. @@ -515,7 +621,7 @@ Example: "http://hgserver/repos/" contact;; Name or email address of the person in charge of the repository. - Default is "unknown". + Defaults to ui.username or $EMAIL or "unknown" if unset or empty. deny_push;; Whether to deny pushing to the repository. If empty or not set, push is not denied. If the special value "*", all remote users @@ -544,6 +650,8 @@ Maximum number of files to list per changeset. Default is 10. port;; Port to listen on. Default is 8000. + prefix;; + Prefix path to serve from. Default is '' (server root). push_ssl;; Whether to require that inbound pushes be transported over SSL to prevent password sniffing. Default is true.
--- a/doc/ja/hgmerge.1.ja.txt Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -HGMERGE(1) -========== -Matt Mackall <mpm@selenic.com> -v0.1, 27 May 2005 - -名前 --- -hgmerge - Mercurial ソースコード管理システムでファイルをマージする -のに使われるデフォルトのラッパー - -書式 --- -'hgmerge' local ancestor remote - -説明 --- -hgmerge(1) コマンドは Mercurial システムでファイルをマージするため -のグラフィカルなインターフェイスを提供します。これは kdiff3, -merge(1), tkdiff(1), または単純に diff(1) と patch(1) のラッパーで、 -どれがシステム上にあるかに依存します。 - -hgmerge(1) は Mercurial ソースコード管理システムで環境変数 -HGMERGE が設定されていない場合に使われます。 - -著者 --- -Vincent Danjean <Vincent.Danjean@free.fr> によって書かれました。 - -関連情報 --- -hg(1) - Mercurial システムへのコマンドラインインターフェイス - -著作権情報 ----- -Copyright (C) 2005-2007 Matt Mackall. -このソフトウェアの自由な使用は GNU 一般公有使用許諾 (GPL) のもとで -認められます。
--- a/hg	Fri Feb 08 11:50:37 2008 +0100
+++ b/hg	Fri Feb 08 11:55:17 2008 +0100
@@ -10,5 +10,11 @@
 # enable importing on demand to reduce startup time
 from mercurial import demandimport; demandimport.enable()

+import sys
+import mercurial.util
 import mercurial.dispatch
+
+for fp in (sys.stdin, sys.stdout, sys.stderr):
+    mercurial.util.set_binary(fp)
+
 mercurial.dispatch.run()
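The new loop in the hg launcher forces the standard streams into binary mode before
dispatch runs; on Windows, text-mode stdio would otherwise translate line endings in
diff and patch output. The helper is a no-op on POSIX. A rough sketch of the idea,
with names chosen only for illustration (the real implementation is
mercurial.util.set_binary):

    # Illustrative sketch of a set_binary-style helper; not the mercurial.util source.
    import os, sys

    def set_binary(fp):
        if sys.platform == 'win32':
            import msvcrt
            # switch the underlying C runtime descriptor to binary mode so
            # that '\n' is not expanded to '\r\n' on output
            msvcrt.setmode(fp.fileno(), os.O_BINARY)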
--- a/hgext/bugzilla.py	Fri Feb 08 11:50:37 2008 +0100
+++ b/hgext/bugzilla.py	Fri Feb 08 11:55:17 2008 +0100
@@ -282,7 +282,7 @@
                             root=self.repo.root,
                             webroot=webroot(self.repo.root))
         data = self.ui.popbuffer()
-        self.add_comment(bugid, data, templater.email(ctx.user()))
+        self.add_comment(bugid, data, util.email(ctx.user()))

 def hook(ui, repo, hooktype, node=None, **kwargs):
     '''add comment to bugzilla for each changeset that refers to a
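This hunk, like the churn.py change above, follows the email helper moving from
templater to util. The helper pulls the address out of a "Name <addr>" committer
string; a standalone sketch of the behaviour being relied on (not the actual
util.py source, and the addresses are only examples):

    # Sketch of the email-extraction behaviour used above.
    def email(author):
        # keep whatever sits between the last '<' and the following '>'
        return author.split('<')[-1].split('>')[0]

    # email('Jane Doe <jane@example.com>') -> 'jane@example.com'
    # email('jane@example.com')            -> 'jane@example.com'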
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/color.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,219 @@ +# color.py color output for the status and qseries commands +# +# Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com> +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 2 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +'''add color output to the status and qseries commands + +This extension modifies the status command to add color to its output to +reflect file status, and the qseries command to add color to reflect patch +status (applied, unapplied, missing). Other effects in addition to color, +like bold and underlined text, are also available. Effects are rendered +with the ECMA-48 SGR control function (aka ANSI escape codes). This module +also provides the render_text function, which can be used to add effects to +any text. + +To enable this extension, add this to your .hgrc file: +[extensions] +color = + +Default effects my be overriden from the .hgrc file: + +[color] +status.modified = blue bold underline red_background +status.added = green bold +status.removed = red bold blue_background +status.deleted = cyan bold underline +status.unknown = magenta bold underline +status.ignored = black bold + + 'none' turns off all effects +status.clean = none +status.copied = none + +qseries.applied = blue bold underline +qseries.unapplied = black bold +qseries.missing = red bold +''' + +import re, sys + +from mercurial import commands, cmdutil, ui +from mercurial.i18n import _ + +# start and stop parameters for effects +_effect_params = { 'none': (0, 0), + 'black': (30, 39), + 'red': (31, 39), + 'green': (32, 39), + 'yellow': (33, 39), + 'blue': (34, 39), + 'magenta': (35, 39), + 'cyan': (36, 39), + 'white': (37, 39), + 'bold': (1, 22), + 'italic': (3, 23), + 'underline': (4, 24), + 'inverse': (7, 27), + 'black_background': (40, 49), + 'red_background': (41, 49), + 'green_background': (42, 49), + 'yellow_background': (43, 49), + 'blue_background': (44, 49), + 'purple_background': (45, 49), + 'cyan_background': (46, 49), + 'white_background': (47, 49), } + +def render_effects(text, *effects): + 'Wrap text in commands to turn on each effect.' 
+ start = [] + stop = [] + for effect in effects: + start.append(str(_effect_params[effect][0])) + stop.append(str(_effect_params[effect][1])) + start = '\033[' + ';'.join(start) + 'm' + stop = '\033[' + ';'.join(stop) + 'm' + return start + text + stop + +def colorstatus(statusfunc, ui, repo, *pats, **opts): + '''run the status command with colored output''' + + delimiter = opts['print0'] and '\0' or '\n' + + # run status and capture it's output + ui.pushbuffer() + retval = statusfunc(ui, repo, *pats, **opts) + # filter out empty strings + lines = [ line for line in ui.popbuffer().split(delimiter) if line ] + + if opts['no_status']: + # if --no-status, run the command again without that option to get + # output with status abbreviations + opts['no_status'] = False + ui.pushbuffer() + statusfunc(ui, repo, *pats, **opts) + # filter out empty strings + lines_with_status = [ line for + line in ui.popbuffer().split(delimiter) if line ] + else: + lines_with_status = lines + + # apply color to output and display it + for i in xrange(0, len(lines)): + status = _status_abbreviations[lines_with_status[i][0]] + effects = _status_effects[status] + if effects: + lines[i] = render_effects(lines[i], *effects) + sys.stdout.write(lines[i] + delimiter) + return retval + +_status_abbreviations = { 'M': 'modified', + 'A': 'added', + 'R': 'removed', + '!': 'deleted', + '?': 'unknown', + 'I': 'ignored', + 'C': 'clean', + ' ': 'copied', } + +_status_effects = { 'modified': ('blue', 'bold'), + 'added': ('green', 'bold'), + 'removed': ('red', 'bold'), + 'deleted': ('cyan', 'bold', 'underline'), + 'unknown': ('magenta', 'bold', 'underline'), + 'ignored': ('black', 'bold'), + 'clean': ('none', ), + 'copied': ('none', ), } + +def colorqseries(qseriesfunc, ui, repo, *dummy, **opts): + '''run the qseries command with colored output''' + ui.pushbuffer() + retval = qseriesfunc(ui, repo, **opts) + patches = ui.popbuffer().splitlines() + for patch in patches: + if opts['missing']: + effects = _patch_effects['missing'] + # Determine if patch is applied. Search for beginning of output + # line in the applied patch list, in case --summary has been used + # and output line isn't just the patch name. + elif [ applied for applied in repo.mq.applied + if patch.startswith(applied.name) ]: + effects = _patch_effects['applied'] + else: + effects = _patch_effects['unapplied'] + sys.stdout.write(render_effects(patch, *effects) + '\n') + return retval + +_patch_effects = { 'applied': ('blue', 'bold', 'underline'), + 'missing': ('red', 'bold'), + 'unapplied': ('black', 'bold'), } + +def uisetup(ui): + '''Initialize the extension.''' + nocoloropt = ('', 'no-color', None, _("don't colorize output")) + _decoratecmd(ui, 'status', commands.table, colorstatus, nocoloropt) + _configcmdeffects(ui, 'status', _status_effects); + if ui.config('extensions', 'hgext.mq', default=None) is not None: + from hgext import mq + _decoratecmd(ui, 'qseries', mq.cmdtable, colorqseries, nocoloropt) + _configcmdeffects(ui, 'qseries', _patch_effects); + +def _decoratecmd(ui, cmd, table, delegate, *delegateoptions): + '''Replace the function that implements cmd in table with a decorator. + + The decorator that becomes the new implementation of cmd calls + delegate. The delegate's first argument is the replaced function, + followed by the normal Mercurial command arguments (ui, repo, ...). If + the delegate adds command options, supply them as delegateoptions. 
+ ''' + cmdkey, cmdentry = _cmdtableitem(ui, cmd, table) + decorator = lambda ui, repo, *args, **opts: \ + _colordecorator(delegate, cmdentry[0], + ui, repo, *args, **opts) + # make sure 'hg help cmd' still works + decorator.__doc__ = cmdentry[0].__doc__ + decoratorentry = (decorator,) + cmdentry[1:] + for option in delegateoptions: + decoratorentry[1].append(option) + table[cmdkey] = decoratorentry + +def _cmdtableitem(ui, cmd, table): + '''Return key, value from table for cmd, or None if not found.''' + aliases, entry = cmdutil.findcmd(ui, cmd, table) + for candidatekey, candidateentry in table.iteritems(): + if candidateentry is entry: + return candidatekey, entry + +def _colordecorator(colorfunc, nocolorfunc, ui, repo, *args, **opts): + '''Delegate to colorfunc or nocolorfunc, depending on conditions. + + Delegate to colorfunc unless --no-color option is set or output is not + to a tty. + ''' + if opts['no_color'] or not sys.stdout.isatty(): + return nocolorfunc(ui, repo, *args, **opts) + return colorfunc(nocolorfunc, ui, repo, *args, **opts) + +def _configcmdeffects(ui, cmdname, effectsmap): + '''Override default effects for cmdname with those from .hgrc file. + + Entries in the .hgrc file are in the [color] section, and look like + 'cmdname'.'status' (for instance, 'status.modified = blue bold inverse'). + ''' + for status in effectsmap: + effects = ui.config('color', cmdname + '.' + status) + if effects: + effectsmap[status] = re.split('\W+', effects)
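render_effects, defined in the new color extension above, wraps a string in ECMA-48
SGR start/stop sequences built from the _effect_params table. A small usage sketch,
assuming the extension is importable as hgext.color:

    # Usage sketch for render_effects from hgext/color.py above; the effect
    # names ('blue', 'bold', ...) are keys of _effect_params.
    from hgext.color import render_effects

    line = 'M mercurial/util.py'
    print render_effects(line, 'blue', 'bold')
    # emits '\033[34;1mM mercurial/util.py\033[39;22m' on an ANSI-capable terminal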
--- a/hgext/convert/__init__.py	Fri Feb 08 11:50:37 2008 +0100
+++ b/hgext/convert/__init__.py	Fri Feb 08 11:55:17 2008 +0100
@@ -19,9 +19,11 @@
     - Darcs
     - git
     - Subversion
+    - GNU Arch

     Accepted destination formats:
     - Mercurial
+    - Subversion (history on branches is not preserved)

     If no revision is given, all revisions will be converted. Otherwise,
     convert will only import up to the named revision (given in a format
@@ -56,7 +58,7 @@
       exclude path/to/file

       rename from/file to/file
-    
+
     The 'include' directive causes a file, or all files under a directory, to
     be included in the destination repository, and the exclusion of all other
     files and dirs not explicitely included.
@@ -64,6 +66,24 @@
     The 'rename' directive renames a file or directory. To rename from a
     subdirectory into the root of the repository, use '.' as the path to
     rename to.
+
+    Back end options:
+
+    --config convert.hg.clonebranches=False (boolean)
+        hg target: XXX not documented
+    --config convert.hg.saverev=True (boolean)
+        hg source: allow target to preserve source revision ID
+    --config convert.hg.tagsbranch=default (branch name)
+        hg target: XXX not documented
+    --config convert.hg.usebranchnames=True (boolean)
+        hg target: preserve branch names
+
+    --config convert.svn.branches=branches (directory name)
+        svn source: specify the directory containing branches
+    --config convert.svn.tags=tags (directory name)
+        svn source: specify the directory containing tags
+    --config convert.svn.trunk=trunk (directory name)
+        svn source: specify the name of the trunk branch

     """
     return convcmd.convert(ui, src, dest, revmapfile, **opts)
--- a/hgext/convert/common.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/common.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,6 +1,9 @@ # common code for the convert extension -import base64 +import base64, errno +import os import cPickle as pickle +from mercurial import util +from mercurial.i18n import _ def encodeargs(args): def encodearg(s): @@ -15,6 +18,11 @@ s = base64.decodestring(s) return pickle.loads(s) +def checktool(exe, name=None): + name = name or exe + if not util.find_exe(exe): + raise util.Abort('cannot find required "%s" tool' % name) + class NoRepo(Exception): pass SKIPREV = 'SKIP' @@ -33,7 +41,7 @@ class converter_source(object): """Conversion source interface""" - def __init__(self, ui, path, rev=None): + def __init__(self, ui, path=None, rev=None): """Initialize conversion source (or raise NoRepo("message") exception if path is not a valid repository)""" self.ui = ui @@ -48,11 +56,8 @@ def after(self): pass - def setrevmap(self, revmap, order): - """set the map of already-converted revisions - - order is a list with the keys from revmap in the order they - appear in the revision map file.""" + def setrevmap(self, revmap): + """set the map of already-converted revisions""" pass def getheads(self): @@ -100,17 +105,22 @@ def getchangedfiles(self, rev, i): """Return the files changed by rev compared to parent[i]. - + i is an index selecting one of the parents of rev. The return value should be the list of files that are different in rev and this parent. If rev has no parents, i is None. - + This function is only needed to support --filemap """ raise NotImplementedError() + def converted(self, rev, sinkrev): + '''Notify the source that a revision has been converted.''' + pass + + class converter_sink(object): """Conversion sink (target) interface""" @@ -183,3 +193,162 @@ filter empty revisions. 
""" pass + + def before(self): + pass + + def after(self): + pass + + +class commandline(object): + def __init__(self, ui, command): + self.ui = ui + self.command = command + + def prerun(self): + pass + + def postrun(self): + pass + + def _cmdline(self, cmd, *args, **kwargs): + cmdline = [self.command, cmd] + list(args) + for k, v in kwargs.iteritems(): + if len(k) == 1: + cmdline.append('-' + k) + else: + cmdline.append('--' + k.replace('_', '-')) + try: + if len(k) == 1: + cmdline.append('' + v) + else: + cmdline[-1] += '=' + v + except TypeError: + pass + cmdline = [util.shellquote(arg) for arg in cmdline] + cmdline += ['2>', util.nulldev, '<', util.nulldev] + cmdline = ' '.join(cmdline) + self.ui.debug(cmdline, '\n') + return cmdline + + def _run(self, cmd, *args, **kwargs): + cmdline = self._cmdline(cmd, *args, **kwargs) + self.prerun() + try: + return util.popen(cmdline) + finally: + self.postrun() + + def run(self, cmd, *args, **kwargs): + fp = self._run(cmd, *args, **kwargs) + output = fp.read() + self.ui.debug(output) + return output, fp.close() + + def runlines(self, cmd, *args, **kwargs): + fp = self._run(cmd, *args, **kwargs) + output = fp.readlines() + self.ui.debug(output) + return output, fp.close() + + def checkexit(self, status, output=''): + if status: + if output: + self.ui.warn(_('%s error:\n') % self.command) + self.ui.warn(output) + msg = util.explain_exit(status)[0] + raise util.Abort(_('%s %s') % (self.command, msg)) + + def run0(self, cmd, *args, **kwargs): + output, status = self.run(cmd, *args, **kwargs) + self.checkexit(status, output) + return output + + def runlines0(self, cmd, *args, **kwargs): + output, status = self.runlines(cmd, *args, **kwargs) + self.checkexit(status, output) + return output + + def getargmax(self): + if '_argmax' in self.__dict__: + return self._argmax + + # POSIX requires at least 4096 bytes for ARG_MAX + self._argmax = 4096 + try: + self._argmax = os.sysconf("SC_ARG_MAX") + except: + pass + + # Windows shells impose their own limits on command line length, + # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes + # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for + # details about cmd.exe limitations. + + # Since ARG_MAX is for command line _and_ environment, lower our limit + # (and make happy Windows shells while doing this). 
+ + self._argmax = self._argmax/2 - 1 + return self._argmax + + def limit_arglist(self, arglist, cmd, *args, **kwargs): + limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs)) + bytes = 0 + fl = [] + for fn in arglist: + b = len(fn) + 3 + if bytes + b < limit or len(fl) == 0: + fl.append(fn) + bytes += b + else: + yield fl + fl = [fn] + bytes = b + if fl: + yield fl + + def xargs(self, arglist, cmd, *args, **kwargs): + for l in self.limit_arglist(arglist, cmd, *args, **kwargs): + self.run0(cmd, *(list(args) + l), **kwargs) + +class mapfile(dict): + def __init__(self, ui, path): + super(mapfile, self).__init__() + self.ui = ui + self.path = path + self.fp = None + self.order = [] + self._read() + + def _read(self): + if self.path is None: + return + try: + fp = open(self.path, 'r') + except IOError, err: + if err.errno != errno.ENOENT: + raise + return + for line in fp: + key, value = line[:-1].split(' ', 1) + if key not in self: + self.order.append(key) + super(mapfile, self).__setitem__(key, value) + fp.close() + + def __setitem__(self, key, value): + if self.fp is None: + try: + self.fp = open(self.path, 'a') + except IOError, err: + raise util.Abort(_('could not open map file %r: %s') % + (self.path, err.strerror)) + self.fp.write('%s %s\n' % (key, value)) + self.fp.flush() + super(mapfile, self).__setitem__(key, value) + + def close(self): + if self.fp: + self.fp.close() + self.fp = None
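The new mapfile class at the end of common.py replaces the hand-rolled revision-map
handling in convcmd.py: it behaves like a dict, but every assignment is appended to
the backing file immediately, so an interrupted conversion can be resumed. A short
usage sketch (the path is illustrative only):

    # Usage sketch for the mapfile class defined above.
    from mercurial import ui as uimod
    from hgext.convert.common import mapfile

    u = uimod.ui()
    m = mapfile(u, '/tmp/shamap')        # loads any existing entries, if the file exists
    m['source-rev-1'] = 'dest-node-1'    # written through to the file at once
    print m.order                        # keys in the order they were first seen
    m.close()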
--- a/hgext/convert/convcmd.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/convcmd.py Fri Feb 08 11:55:17 2008 +0100 @@ -5,12 +5,13 @@ # This software may be used and distributed according to the terms # of the GNU General Public License, incorporated herein by reference. -from common import NoRepo, SKIPREV, converter_source, converter_sink +from common import NoRepo, SKIPREV, converter_source, converter_sink, mapfile from cvs import convert_cvs from darcs import darcs_source from git import convert_git from hg import mercurial_source, mercurial_sink -from subversion import svn_source, debugsvnlog +from subversion import debugsvnlog, svn_source, svn_sink +from gnuarch import gnuarch_source import filemap import os, shutil @@ -23,10 +24,12 @@ ('svn', svn_source), ('hg', mercurial_source), ('darcs', darcs_source), + ('gnuarch', gnuarch_source), ] sink_converters = [ ('hg', mercurial_sink), + ('svn', svn_sink), ] def convertsource(ui, path, type, rev): @@ -59,23 +62,10 @@ self.ui = ui self.opts = opts self.commitcache = {} - self.revmapfile = revmapfile - self.revmapfilefd = None self.authors = {} self.authorfile = None - self.maporder = [] - self.map = {} - try: - origrevmapfile = open(self.revmapfile, 'r') - for l in origrevmapfile: - sv, dv = l[:-1].split() - if sv not in self.map: - self.maporder.append(sv) - self.map[sv] = dv - origrevmapfile.close() - except IOError: - pass + self.map = mapfile(ui, revmapfile) # Read first the dst author map if any authorfile = self.dest.authorfile() @@ -86,6 +76,8 @@ self.readauthormap(opts.get('authors')) self.authorfile = self.dest.authorfile() + self.splicemap = mapfile(ui, ui.config('convert', 'splicemap')) + def walktree(self, heads): '''Return a mapping that identifies the uncommitted parents of every uncommitted changeset.''' @@ -157,22 +149,13 @@ if pl: depth[n] = max([depth[p] for p in pl]) + 1 - s = [(depth[n], self.commitcache[n].date, n) for n in s] + s = [(depth[n], util.parsedate(self.commitcache[n].date), n) + for n in s] s.sort() s = [e[2] for e in s] return s - def mapentry(self, src, dst): - if self.revmapfilefd is None: - try: - self.revmapfilefd = open(self.revmapfile, "a") - except IOError, (errno, strerror): - raise util.Abort("Could not open map file %s: %s, %s\n" % (self.revmapfile, errno, strerror)) - self.map[src] = dst - self.revmapfilefd.write("%s %s\n" % (src, dst)) - self.revmapfilefd.flush() - def writeauthormap(self): authorfile = self.authorfile if authorfile: @@ -219,7 +202,7 @@ dest = SKIPREV else: dest = self.map[changes] - self.mapentry(rev, dest) + self.map[rev] = dest return files, copies = changes pbranches = [] @@ -245,15 +228,25 @@ # Merely marks that a copy happened. 
self.dest.copyfile(copyf, f) - parents = [b[0] for b in pbranches] + try: + parents = [self.splicemap[rev]] + self.ui.debug('spliced in %s as parents of %s\n' % + (parents, rev)) + except KeyError: + parents = [b[0] for b in pbranches] newnode = self.dest.putcommit(filenames, parents, commit) - self.mapentry(rev, newnode) + self.source.converted(rev, newnode) + self.map[rev] = newnode def convert(self): + + def recode(s): + return s.decode('utf-8').encode(orig_encoding, 'replace') + try: self.source.before() self.dest.before() - self.source.setrevmap(self.map, self.maporder) + self.source.setrevmap(self.map) self.ui.status("scanning source...\n") heads = self.source.getheads() parents = self.walktree(heads) @@ -268,7 +261,11 @@ desc = self.commitcache[c].desc if "\n" in desc: desc = desc.splitlines()[0] - self.ui.status("%d %s\n" % (num, desc)) + # convert log message to local encoding without using + # tolocal() because util._encoding conver() use it as + # 'utf-8' + self.ui.status("%d %s\n" % (num, recode(desc))) + self.ui.note(_("source: %s\n" % recode(c))) self.copy(c) tags = self.source.gettags() @@ -283,7 +280,7 @@ # write another hash correspondence to override the previous # one so we don't end up with extra tag heads if nrev: - self.mapentry(c, nrev) + self.map[c] = nrev self.writeauthormap() finally: @@ -294,10 +291,13 @@ self.dest.after() finally: self.source.after() - if self.revmapfilefd: - self.revmapfilefd.close() + self.map.close() + +orig_encoding = 'ascii' def convert(ui, src, dest=None, revmapfile=None, **opts): + global orig_encoding + orig_encoding = util._encoding util._encoding = 'UTF-8' if not dest:
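convert() above now remembers the encoding that was active before it switches
util._encoding to UTF-8, and recode() uses it to turn the UTF-8 commit messages back
into something printable on the user's terminal. The round trip it performs, shown in
isolation with an example message:

    # The encoding round trip done by recode() above, in isolation.
    orig_encoding = 'ascii'              # whatever util._encoding was beforehand
    msg = 'caf\xc3\xa9 fix'              # UTF-8 bytes coming from the source repo
    print msg.decode('utf-8').encode(orig_encoding, 'replace')
    # -> 'caf? fix'  (undisplayable characters are replaced rather than raising)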
--- a/hgext/convert/cvs.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/cvs.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,9 +1,10 @@ # CVS conversion code inspired by hg-cvs-import and git-cvsimport import os, locale, re, socket +from cStringIO import StringIO from mercurial import util -from common import NoRepo, commit, converter_source +from common import NoRepo, commit, converter_source, checktool class convert_cvs(converter_source): def __init__(self, ui, path, rev=None): @@ -13,6 +14,9 @@ if not os.path.exists(cvs): raise NoRepo("%s does not look like a CVS checkout" % path) + for tool in ('cvsps', 'cvs'): + checktool(tool) + self.changeset = {} self.files = {} self.tags = {} @@ -196,7 +200,7 @@ if conntype != "pserver": if conntype == "rsh": - rsh = os.environ.get("CVS_RSH" or "rsh") + rsh = os.environ.get("CVS_RSH") or "ssh" if user: cmd = [rsh, '-l', user, host] + cmd else: @@ -227,6 +231,20 @@ return self.heads def _getfile(self, name, rev): + + def chunkedread(fp, count): + # file-objects returned by socked.makefile() do not handle + # large read() requests very well. + chunksize = 65536 + output = StringIO() + while count > 0: + data = fp.read(min(count, chunksize)) + if not data: + raise util.Abort("%d bytes missing from remote file" % count) + count -= len(data) + output.write(data) + return output.getvalue() + if rev.endswith("(DEAD)"): raise IOError @@ -245,14 +263,14 @@ self.readp.readline() # entries mode = self.readp.readline()[:-1] count = int(self.readp.readline()[:-1]) - data = self.readp.read(count) + data = chunkedread(self.readp, count) elif line.startswith(" "): data += line[1:] elif line.startswith("M "): pass elif line.startswith("Mbinary "): count = int(self.readp.readline()[:-1]) - data = self.readp.read(count) + data = chunkedread(self.readp, count) else: if line == "ok\n": return (data, "x" in mode and "x" or "")
--- a/hgext/convert/darcs.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/darcs.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,6 +1,6 @@ # darcs support for the convert extension -from common import NoRepo, commit, converter_source +from common import NoRepo, checktool, commandline, commit, converter_source from mercurial.i18n import _ from mercurial import util import os, shutil, tempfile @@ -17,15 +17,18 @@ except ImportError: ElementTree = None -class darcs_source(converter_source): +class darcs_source(converter_source, commandline): def __init__(self, ui, path, rev=None): - super(darcs_source, self).__init__(ui, path, rev=rev) + converter_source.__init__(self, ui, path, rev=rev) + commandline.__init__(self, ui, 'darcs') # check for _darcs, ElementTree, _darcs/inventory so that we can # easily skip test-convert-darcs if ElementTree is not around if not os.path.exists(os.path.join(path, '_darcs')): raise NoRepo("%s does not look like a darcs repo" % path) + checktool('darcs') + if ElementTree is None: raise util.Abort(_("Python ElementTree module is not available")) @@ -45,7 +48,8 @@ output, status = self.run('init', repodir=self.tmppath) self.checkexit(status) - tree = self.xml('changes', '--xml-output', '--summary') + tree = self.xml('changes', xml_output=True, summary=True, + repodir=self.path) tagname = None child = None for elt in tree.findall('patch'): @@ -65,31 +69,9 @@ self.ui.debug('cleaning up %s\n' % self.tmppath) shutil.rmtree(self.tmppath, ignore_errors=True) - def _run(self, cmd, *args, **kwargs): - cmdline = ['darcs', cmd, '--repodir', kwargs.get('repodir', self.path)] - cmdline += args - cmdline = [util.shellquote(arg) for arg in cmdline] - cmdline += ['<', util.nulldev] - cmdline = ' '.join(cmdline) - self.ui.debug(cmdline, '\n') - return util.popen(cmdline) - - def run(self, cmd, *args, **kwargs): - fp = self._run(cmd, *args, **kwargs) - output = fp.read() - return output, fp.close() - - def checkexit(self, status, output=''): - if status: - if output: - self.ui.warn(_('darcs error:\n')) - self.ui.warn(output) - msg = util.explain_exit(status)[0] - raise util.Abort(_('darcs %s') % msg) - - def xml(self, cmd, *opts): + def xml(self, cmd, **kwargs): etree = ElementTree() - fp = self._run(cmd, *opts) + fp = self._run(cmd, **kwargs) etree.parse(fp) self.checkexit(fp.close()) return etree.getroot() @@ -105,15 +87,15 @@ desc=desc.strip(), parents=self.parents[rev]) def pull(self, rev): - output, status = self.run('pull', self.path, '--all', - '--match', 'hash %s' % rev, - '--no-test', '--no-posthook', - '--external-merge', '/bin/false', + output, status = self.run('pull', self.path, all=True, + match='hash %s' % rev, + no_test=True, no_posthook=True, + external_merge='/bin/false', repodir=self.tmppath) if status: if output.find('We have conflicts in') == -1: self.checkexit(status, output) - output, status = self.run('revert', '--all', repodir=self.tmppath) + output, status = self.run('revert', all=True, repodir=self.tmppath) self.checkexit(status, output) def getchanges(self, rev):
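The darcs.py hunks above replace the local _run/run/checkexit helpers with the shared commandline class from hgext/convert/common.py, so options are now passed as keyword arguments (xml_output=True instead of '--xml-output', match='hash %s' instead of '--match', ...). The helper below is only a sketch of that keyword-to-flag convention as it appears from the call sites; the real translation is done inside the shared class and may differ in detail.

def kwargs_to_flags(**kwargs):
    # boolean keywords become bare switches, other values become "--flag value"
    args = []
    for name, value in sorted(kwargs.items()):
        flag = '--' + name.replace('_', '-')
        if value is True:
            args.append(flag)
        elif value not in (False, None):
            args.extend([flag, str(value)])
    return args

print(kwargs_to_flags(xml_output=True, summary=True, repodir='/tmp/repo'))
# ['--repodir', '/tmp/repo', '--summary', '--xml-output']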
--- a/hgext/convert/filemap.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/filemap.py Fri Feb 08 11:55:17 2008 +0100 @@ -7,9 +7,10 @@ import shlex from mercurial.i18n import _ from mercurial import util -from common import SKIPREV +from common import SKIPREV, converter_source def rpairs(name): + yield '.', name e = len(name) while e != -1: yield name[:e], name[e+1:] @@ -110,9 +111,9 @@ # touch files we're interested in, but also merges that merge two # or more interesting revisions. -class filemap_source(object): +class filemap_source(converter_source): def __init__(self, ui, baseconverter, filemap): - self.ui = ui + super(filemap_source, self).__init__(ui) self.base = baseconverter self.filemapper = filemapper(ui, filemap) self.commits = {} @@ -134,7 +135,7 @@ def after(self): self.base.after() - def setrevmap(self, revmap, order): + def setrevmap(self, revmap): # rebuild our state to make things restartable # # To avoid calling getcommit for every revision that has already @@ -149,7 +150,7 @@ seen = {SKIPREV: SKIPREV} dummyset = util.set() converted = [] - for rev in order: + for rev in revmap.order: mapped = revmap[rev] wanted = mapped not in seen if wanted: @@ -163,7 +164,7 @@ arg = None converted.append((rev, wanted, arg)) self.convertedorder = converted - return self.base.setrevmap(revmap, order) + return self.base.setrevmap(revmap) def rebuild(self): if self._rebuilt: @@ -350,9 +351,3 @@ def gettags(self): return self.base.gettags() - - def before(self): - pass - - def after(self): - pass
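The filemap.py hunk above adds a leading ('.', name) pair to rpairs(), apparently so a filemap rule written against '.' can match any path. The self-contained copy below shows what the generator now yields for a sample path; the loop body comes from the hunk's context lines, and the closing rfind() step is filled in as an assumption so the snippet runs on its own.

def rpairs(name):
    # prefix/suffix pairs from most to least specific, plus a leading
    # '.' entry acting as a catch-all match
    yield '.', name
    e = len(name)
    while e != -1:
        yield name[:e], name[e+1:]
        e = name.rfind('/', 0, e)      # assumed: walk back one path component at a time

print(list(rpairs('tools/convert/cvs.py')))
# [('.', 'tools/convert/cvs.py'),
#  ('tools/convert/cvs.py', ''),
#  ('tools/convert', 'cvs.py'),
#  ('tools', 'convert/cvs.py')]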
--- a/hgext/convert/git.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/git.py Fri Feb 08 11:55:17 2008 +0100 @@ -3,7 +3,7 @@ import os from mercurial import util -from common import NoRepo, commit, converter_source +from common import NoRepo, commit, converter_source, checktool class convert_git(converter_source): # Windows does not support GIT_DIR= construct while other systems @@ -31,6 +31,9 @@ path += "/.git" if not os.path.exists(path + "/objects"): raise NoRepo("%s does not look like a Git repo" % path) + + checktool('git-rev-parse', 'git') + self.path = path def getheads(self):
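Both the cvs and git sources now call checktool() so a missing external program aborts the conversion up front instead of failing halfway through. The function below is a rough standalone sketch of that kind of guard (the real helper lives in hgext/convert/common.py and is not reproduced here).

import os

def checktool_sketch(exe):
    # scan PATH for an executable file and fail early with a clear message
    for d in os.environ.get('PATH', '').split(os.pathsep):
        path = os.path.join(d, exe)
        if os.path.isfile(path) and os.access(path, os.X_OK):
            return path
    raise RuntimeError('cannot find required tool "%s"' % exe)

# e.g. checktool_sketch('cvsps'); checktool_sketch('cvs')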
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/convert/gnuarch.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,267 @@ +# GNU Arch support for the convert extension + +from common import NoRepo, checktool, commandline, commit, converter_source +from mercurial.i18n import _ +from mercurial import util +import os, shutil, tempfile, stat + +class gnuarch_source(converter_source, commandline): + + class gnuarch_rev: + def __init__(self, rev): + self.rev = rev + self.summary = '' + self.date = None + self.author = '' + self.add_files = [] + self.mod_files = [] + self.del_files = [] + self.ren_files = {} + self.ren_dirs = {} + + def __init__(self, ui, path, rev=None): + super(gnuarch_source, self).__init__(ui, path, rev=rev) + + if not os.path.exists(os.path.join(path, '{arch}')): + raise NoRepo(_("couldn't open GNU Arch repo %s" % path)) + + # Could use checktool, but we want to check for baz or tla. + self.execmd = None + if util.find_exe('tla'): + self.execmd = 'tla' + else: + if util.find_exe('baz'): + self.execmd = 'baz' + else: + raise util.Abort(_('cannot find a GNU Arch tool')) + + commandline.__init__(self, ui, self.execmd) + + self.path = os.path.realpath(path) + self.tmppath = None + + self.treeversion = None + self.lastrev = None + self.changes = {} + self.parents = {} + self.tags = {} + self.modecache = {} + + def before(self): + if self.execmd == 'tla': + output = self.run0('tree-version', self.path) + else: + output = self.run0('tree-version', '-d', self.path) + self.treeversion = output.strip() + + self.ui.status(_('analyzing tree version %s...\n' % self.treeversion)) + + # Get name of temporary directory + version = self.treeversion.split('/') + self.tmppath = os.path.join(tempfile.gettempdir(), + 'hg-%s' % version[1]) + + # Generate parents dictionary + child = [] + output, status = self.runlines('revisions', self.treeversion) + self.checkexit(status, 'archive registered?') + for l in output: + rev = l.strip() + self.changes[rev] = self.gnuarch_rev(rev) + + # Read author, date and summary + catlog = self.runlines0('cat-log', '-d', self.path, rev) + self._parsecatlog(catlog, rev) + + self.parents[rev] = child + child = [rev] + if rev == self.rev: + break + self.parents[None] = child + + def after(self): + self.ui.debug(_('cleaning up %s\n' % self.tmppath)) + shutil.rmtree(self.tmppath, ignore_errors=True) + + def getheads(self): + return self.parents[None] + + def getfile(self, name, rev): + if rev != self.lastrev: + raise util.Abort(_('internal calling inconsistency')) + + # Raise IOError if necessary (i.e. deleted files). 
+ if not os.path.exists(os.path.join(self.tmppath, name)): + raise IOError + + data, mode = self._getfile(name, rev) + self.modecache[(name, rev)] = mode + + return data + + def getmode(self, name, rev): + return self.modecache[(name, rev)] + + def getchanges(self, rev): + self.modecache = {} + self._update(rev) + changes = [] + copies = {} + + for f in self.changes[rev].add_files: + changes.append((f, rev)) + + for f in self.changes[rev].mod_files: + changes.append((f, rev)) + + for f in self.changes[rev].del_files: + changes.append((f, rev)) + + for src in self.changes[rev].ren_files: + to = self.changes[rev].ren_files[src] + changes.append((src, rev)) + changes.append((to, rev)) + copies[src] = to + + for src in self.changes[rev].ren_dirs: + to = self.changes[rev].ren_dirs[src] + chgs, cps = self._rendirchanges(src, to); + changes += [(f, rev) for f in chgs] + for c in cps: + copies[c] = cps[c] + + changes.sort() + self.lastrev = rev + + return changes, copies + + def getcommit(self, rev): + changes = self.changes[rev] + return commit(author = changes.author, date = changes.date, + desc = changes.summary, parents = self.parents[rev]) + + def gettags(self): + return self.tags + + def _execute(self, cmd, *args, **kwargs): + cmdline = [self.execmd, cmd] + cmdline += args + cmdline = [util.shellquote(arg) for arg in cmdline] + cmdline += ['>', util.nulldev, '2>', util.nulldev] + cmdline = util.quotecommand(' '.join(cmdline)) + self.ui.debug(cmdline, '\n') + return os.system(cmdline) + + def _update(self, rev): + if rev == 'base-0': + # Initialise 'base-0' revision + self.ui.debug(_('obtaining revision %s...\n' % rev)) + revision = '%s--%s' % (self.treeversion, rev) + output = self._execute('get', revision, self.tmppath) + self.ui.debug(_('analysing revision %s...\n' % rev)) + files = self._readcontents(self.tmppath) + self.changes[rev].add_files += files + else: + self.ui.debug(_('applying revision %s...\n' % rev)) + revision = '%s--%s' % (self.treeversion, rev) + output = self._execute('replay', '-d', self.tmppath, revision) + + old_rev = self.parents[rev][0] + self.ui.debug(_('computing changeset between %s and %s...\n' \ + % (old_rev, rev))) + rev_a = '%s--%s' % (self.treeversion, old_rev) + rev_b = '%s--%s' % (self.treeversion, rev) + delta = self.runlines0('delta', '-n', rev_a, rev_b) + self._parsedelta(delta, rev) + + def _getfile(self, name, rev): + mode = os.lstat(os.path.join(self.tmppath, name)).st_mode + if stat.S_ISLNK(mode): + data = os.readlink(os.path.join(self.tmppath, name)) + mode = mode and 'l' or '' + else: + data = open(os.path.join(self.tmppath, name), 'rb').read() + mode = (mode & 0111) and 'x' or '' + return data, mode + + def _exclude(self, name): + exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ] + for exc in exclude: + if name.find(exc) != -1: + return True + return False + + def _readcontents(self, path): + files = [] + contents = os.listdir(path) + while len(contents) > 0: + c = contents.pop() + p = os.path.join(path, c) + # os.walk could be used, but here we avoid internal GNU + # Arch files and directories, thus saving a lot time. 
+ if not self._exclude(p): + if os.path.isdir(p): + contents += [os.path.join(c, f) for f in os.listdir(p)] + else: + files.append(c) + return files + + def _rendirchanges(self, src, dest): + changes = [] + copies = {} + files = self._readcontents(os.path.join(self.tmppath, dest)) + for f in files: + s = os.path.join(src, f) + d = os.path.join(dest, f) + changes.append(s) + changes.append(d) + copies[s] = d + return changes, copies + + def _parsecatlog(self, data, rev): + summary = [] + for l in data: + l = l.strip() + if summary: + summary.append(l) + elif l.startswith('Summary:'): + summary.append(l[len('Summary: '):]) + elif l.startswith('Standard-date:'): + date = l[len('Standard-date: '):] + strdate = util.strdate(date, '%Y-%m-%d %H:%M:%S') + self.changes[rev].date = util.datestr(strdate) + elif l.startswith('Creator:'): + self.changes[rev].author = l[len('Creator: '):] + self.changes[rev].summary = '\n'.join(summary) + + def _parsedelta(self, data, rev): + for l in data: + l = l.strip() + if l.startswith('A') and not l.startswith('A/'): + file = l[1:].strip() + if not self._exclude(file): + self.changes[rev].add_files.append(file) + elif l.startswith('/>'): + dirs = l[2:].strip().split(' ') + if len(dirs) == 1: + dirs = l[2:].strip().split('\t') + if not self._exclude(dirs[0]) and not self._exclude(dirs[1]): + self.changes[rev].ren_dirs[dirs[0]] = dirs[1] + elif l.startswith('M'): + file = l[1:].strip() + if not self._exclude(file): + self.changes[rev].mod_files.append(file) + elif l.startswith('->'): + file = l[2:].strip() + if not self._exclude(file): + self.changes[rev].mod_files.append(file) + elif l.startswith('D') and not l.startswith('D/'): + file = l[1:].strip() + if not self._exclude(file): + self.changes[rev].del_files.append(file) + elif l.startswith('=>'): + files = l[2:].strip().split(' ') + if len(files) == 1: + files = l[2:].strip().split('\t') + if not self._exclude(files[0]) and not self._exclude(files[1]): + self.changes[rev].ren_files[files[0]] = files[1]
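The new GNU Arch source reads author, date and summary out of "cat-log" output in _parsecatlog() above. The snippet below re-runs that parsing logic on a made-up log so the behaviour is easy to see; the field names (Summary, Standard-date, Creator) come from the code in this patch, while the sample text itself is invented for illustration.

sample = """Revision: project--devel--1.0--patch-3
Creator: Jane Doe <jane@example.com>
Standard-date: 2008-02-08 11:55:17
Summary: fix chunked reads
over slow links
"""

author = date = None
summary = []
for line in sample.splitlines():
    line = line.strip()
    if summary:                          # everything after Summary: belongs to the summary
        summary.append(line)
    elif line.startswith('Summary:'):
        summary.append(line[len('Summary: '):])
    elif line.startswith('Standard-date:'):
        date = line[len('Standard-date: '):]
    elif line.startswith('Creator:'):
        author = line[len('Creator: '):]

print(author)               # Jane Doe <jane@example.com>
print(date)                 # 2008-02-08 11:55:17
print('\n'.join(summary))   # fix chunked reads / over slow links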
--- a/hgext/convert/hg.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/hg.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,10 +1,16 @@ # hg backend for convert extension -# Note for hg->hg conversion: Old versions of Mercurial didn't trim -# the whitespace from the ends of commit messages, but new versions -# do. Changesets created by those older versions, then converted, may -# thus have different hashes for changesets that are otherwise -# identical. +# Notes for hg->hg conversion: +# +# * Old versions of Mercurial didn't trim the whitespace from the ends +# of commit messages, but new versions do. Changesets created by +# those older versions, then converted, may thus have different +# hashes for changesets that are otherwise identical. +# +# * By default, the source revision is stored in the converted +# revision. This will cause the converted revision to have a +# different identity than the source. To avoid this, use the +# following option: "--config convert.hg.saverev=false" import os, time @@ -26,8 +32,6 @@ self.repo = hg.repository(self.ui, path) if not self.repo.local(): raise NoRepo(_('%s is not a local Mercurial repo') % path) - ui.status(_('destination %s is a Mercurial repository\n') % - path) except hg.RepoError, err: ui.print_exc() raise NoRepo(err.args[0]) @@ -46,11 +50,13 @@ self.filemapmode = False def before(self): + self.ui.debug(_('run hg sink pre-conversion action\n')) self.wlock = self.repo.wlock() self.lock = self.repo.lock() self.repo.dirstate.clear() def after(self): + self.ui.debug(_('run hg sink post-conversion action\n')) self.repo.dirstate.invalidate() self.lock = None self.wlock = None @@ -191,7 +197,7 @@ except hg.RepoError, inst: tagparent = nullid self.repo.rawcommit([".hgtags"], "update tags", "convert-repo", - date, tagparent, nullid) + date, tagparent, nullid, extra=extra) return hex(self.repo.changelog.tip()) def setfilemapmode(self, active): @@ -200,6 +206,7 @@ class mercurial_source(converter_source): def __init__(self, ui, path, rev=None): converter_source.__init__(self, ui, path, rev) + self.saverev = ui.configbool('convert', 'hg.saverev', True) try: self.repo = hg.repository(self.ui, path) # try to provoke an exception if this isn't really a hg @@ -212,6 +219,7 @@ self.lastrev = None self.lastctx = None self._changescache = None + self.convertfp = None def changectx(self, rev): if self.lastrev != rev: @@ -257,8 +265,12 @@ def getcommit(self, rev): ctx = self.changectx(rev) parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid] + if self.saverev: + crev = rev + else: + crev = None return commit(author=ctx.user(), date=util.datestr(ctx.date()), - desc=ctx.description(), parents=parents, + desc=ctx.description(), rev=crev, parents=parents, branch=ctx.branch(), extra=ctx.extra()) def gettags(self): @@ -275,3 +287,15 @@ return changes[0] + changes[1] + changes[2] + def converted(self, rev, destrev): + if self.convertfp is None: + self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'), + 'a') + self.convertfp.write('%s %s\n' % (destrev, rev)) + self.convertfp.flush() + + def before(self): + self.ui.debug(_('run hg source pre-conversion action\n')) + + def after(self): + self.ui.debug(_('run hg source post-conversion action\n'))
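The new converted() hook above appends one "<written rev> <other rev>" pair per line to .hg/shamap in the source repository. A minimal reader for that format is sketched below, which can be handy for scripting around a finished conversion; the file name and the order of the pair are exactly what converted() writes, everything else is illustrative.

def readshamap(path):
    mapping = {}
    for line in open(path):
        left, right = line.split()
        mapping[left] = right
    return mapping

# e.g. readshamap('.hg/shamap') after converting out of a Mercurial source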
--- a/hgext/convert/subversion.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/convert/subversion.py Fri Feb 08 11:55:17 2008 +0100 @@ -17,9 +17,13 @@ import locale import os +import re import sys import cPickle as pickle -from mercurial import util +import tempfile + +from mercurial import strutil, util +from mercurial.i18n import _ # Subversion stuff. Works best with very recent Python SVN bindings # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing @@ -28,6 +32,7 @@ from cStringIO import StringIO from common import NoRepo, commit, converter_source, encodeargs, decodeargs +from common import commandline, converter_sink, mapfile try: from svn.core import SubversionException, Pool @@ -46,7 +51,10 @@ except SubversionException: pass if os.path.isdir(path): - return 'file://%s' % os.path.normpath(os.path.abspath(path)) + path = os.path.normpath(os.path.abspath(path)) + if os.name == 'nt': + path = '/' + util.normpath(path) + return 'file://%s' % path return path def optrev(number): @@ -81,6 +89,9 @@ receiver) except SubversionException, (inst, num): pickle.dump(num, fp, protocol) + except IOError: + # Caller may interrupt the iteration + pickle.dump(None, fp, protocol) else: pickle.dump(None, fp, protocol) fp.close() @@ -94,7 +105,53 @@ args = decodeargs(sys.stdin.read()) get_log_child(sys.stdout, *args) +class logstream: + """Interruptible revision log iterator.""" + def __init__(self, stdout): + self._stdout = stdout + + def __iter__(self): + while True: + entry = pickle.load(self._stdout) + try: + orig_paths, revnum, author, date, message = entry + except: + if entry is None: + break + raise SubversionException("child raised exception", entry) + yield entry + + def close(self): + if self._stdout: + self._stdout.close() + self._stdout = None + +def get_log(url, paths, start, end, limit=0, discover_changed_paths=True, + strict_node_history=False): + args = [url, paths, start, end, limit, discover_changed_paths, + strict_node_history] + arg = encodeargs(args) + hgexe = util.hgexecutable() + cmd = '%s debugsvnlog' % util.shellquote(hgexe) + stdin, stdout = os.popen2(cmd, 'b') + stdin.write(arg) + stdin.close() + return logstream(stdout) + # SVN conversion code stolen from bzr-svn and tailor +# +# Subversion looks like a versioned filesystem, branches structures +# are defined by conventions and not enforced by the tool. First, +# we define the potential branches (modules) as "trunk" and "branches" +# children directories. Revisions are then identified by their +# module and revision number (and a repository identifier). +# +# The revision graph is really a tree (or a forest). By default, a +# revision parent is the previous revision in the same module. If the +# module directory is copied/moved from another module then the +# revision is the module root and its parent the source revision in +# the parent module. A revision has at most one parent. 
+# class svn_source(converter_source): def __init__(self, ui, url, rev=None): super(svn_source, self).__init__(ui, url, rev=rev) @@ -125,7 +182,7 @@ self.ctx = self.transport.client self.base = svn.ra.get_repos_root(self.ra) self.module = self.url[len(self.base):] - self.modulemap = {} # revision, module + self.rootmodule = self.module self.commits = {} self.paths = {} self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding) @@ -144,14 +201,23 @@ except IOError, e: pass - self.last_changed = self.latest(self.module, latest) - - self.head = self.revid(self.last_changed) + self.head = self.latest(self.module, latest) + if not self.head: + raise util.Abort(_('no revision found in module %s') % + self.module.encode(self.encoding)) + self.last_changed = self.revnum(self.head) + self._changescache = None - def setrevmap(self, revmap, order): + if os.path.exists(os.path.join(url, '.svn/entries')): + self.wc = url + else: + self.wc = None + self.convertfp = None + + def setrevmap(self, revmap): lastrevs = {} - for revid in revmap.keys(): + for revid in revmap.iterkeys(): uuid, module, revnum = self.revsplit(revid) lastrevnum = lastrevs.setdefault(module, revnum) if revnum > lastrevnum: @@ -167,46 +233,54 @@ return False def getheads(self): - # detect standard /branches, /tags, /trunk layout + + def getcfgpath(name, rev): + cfgpath = self.ui.config('convert', 'svn.' + name) + path = (cfgpath or name).strip('/') + if not self.exists(path, rev): + if cfgpath: + raise util.Abort(_('expected %s to be at %r, but not found') + % (name, path)) + return None + self.ui.note(_('found %s at %r\n') % (name, path)) + return path + rev = optrev(self.last_changed) - rpath = self.url.strip('/') - cfgtrunk = self.ui.config('convert', 'svn.trunk') - cfgbranches = self.ui.config('convert', 'svn.branches') - cfgtags = self.ui.config('convert', 'svn.tags') - trunk = (cfgtrunk or 'trunk').strip('/') - branches = (cfgbranches or 'branches').strip('/') - tags = (cfgtags or 'tags').strip('/') - if self.exists(trunk, rev) and self.exists(branches, rev) and self.exists(tags, rev): - self.ui.note('found trunk at %r, branches at %r and tags at %r\n' % - (trunk, branches, tags)) - oldmodule = self.module + oldmodule = '' + trunk = getcfgpath('trunk', rev) + tags = getcfgpath('tags', rev) + branches = getcfgpath('branches', rev) + + # If the project has a trunk or branches, we will extract heads + # from them. We keep the project root otherwise. 
+ if trunk: + oldmodule = self.module or '' self.module += '/' + trunk - lt = self.latest(self.module, self.last_changed) - self.head = self.revid(lt) - self.heads = [self.head] + self.head = self.latest(self.module, self.last_changed) + if not self.head: + raise util.Abort(_('no revision found in module %s') % + self.module.encode(self.encoding)) + + # First head in the list is the module's head + self.heads = [self.head] + self.tags = '%s/%s' % (oldmodule , (tags or 'tags')) + + # Check if branches bring a few more heads to the list + if branches: + rpath = self.url.strip('/') branchnames = svn.client.ls(rpath + '/' + branches, rev, False, self.ctx) for branch in branchnames.keys(): - if oldmodule: - module = oldmodule + '/' + branches + '/' + branch - else: - module = '/' + branches + '/' + branch - brevnum = self.latest(module, self.last_changed) - brev = self.revid(brevnum, module) - self.ui.note('found branch %s at %d\n' % (branch, brevnum)) - self.heads.append(brev) + module = '%s/%s/%s' % (oldmodule, branches, branch) + brevid = self.latest(module, self.last_changed) + if not brevid: + self.ui.note(_('ignoring empty branch %s\n') % + branch.encode(self.encoding)) + continue + self.ui.note('found branch %s at %d\n' % + (branch, self.revnum(brevid))) + self.heads.append(brevid) - if oldmodule: - self.tags = '%s/%s' % (oldmodule, tags) - else: - self.tags = '/%s' % tags - - elif cfgtrunk or cfgbranches or cfgtags: - raise util.Abort('trunk/branch/tags layout expected, but not found') - else: - self.ui.note('working with one branch\n') - self.heads = [self.head] - self.tags = tags return self.heads def getfile(self, file, rev): @@ -223,7 +297,17 @@ self._changescache = None self.modecache = {} (paths, parents) = self.paths[rev] - files, copies = self.expandpaths(rev, paths, parents) + if parents: + files, copies = self.expandpaths(rev, paths, parents) + else: + # Perform a full checkout on roots + uuid, module, revnum = self.revsplit(rev) + entries = svn.client.ls(self.base + module, optrev(revnum), + True, self.ctx) + files = [n for n,e in entries.iteritems() + if e.kind == svn.core.svn_node_file] + copies = {} + files.sort() files = zip(files, [rev] * len(files)) @@ -241,45 +325,26 @@ uuid, module, revnum = self.revsplit(rev) self.module = module self.reparent(module) + # We assume that: + # - requests for revisions after "stop" come from the + # revision graph backward traversal. Cache all of them + # down to stop, they will be used eventually. + # - requests for revisions before "stop" come to get + # isolated branches parents. Just fetch what is needed. 
stop = self.lastrevs.get(module, 0) - self._fetch_revisions(from_revnum=revnum, to_revnum=stop) + if revnum < stop: + stop = revnum + 1 + self._fetch_revisions(revnum, stop) commit = self.commits[rev] # caller caches the result, so free it here to release memory del self.commits[rev] return commit - def get_log(self, paths, start, end, limit=0, discover_changed_paths=True, - strict_node_history=False): - - def parent(fp): - while True: - entry = pickle.load(fp) - try: - orig_paths, revnum, author, date, message = entry - except: - if entry is None: - break - raise SubversionException("child raised exception", entry) - yield entry - - args = [self.url, paths, start, end, limit, discover_changed_paths, - strict_node_history] - arg = encodeargs(args) - hgexe = util.hgexecutable() - cmd = '%s debugsvnlog' % util.shellquote(hgexe) - stdin, stdout = os.popen2(cmd, 'b') - - stdin.write(arg) - stdin.close() - - for p in parent(stdout): - yield p - def gettags(self): tags = {} start = self.revnum(self.head) try: - for entry in self.get_log([self.tags], 0, start): + for entry in get_log(self.url, [self.tags], 0, start): orig_paths, revnum, author, date, message = entry for path in orig_paths: if not path.startswith(self.tags+'/'): @@ -293,6 +358,15 @@ self.ui.note('no tags found at revision %d\n' % start) return tags + def converted(self, rev, destrev): + if not self.wc: + return + if self.convertfp is None: + self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'), + 'a') + self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev))) + self.convertfp.flush() + # -- helper functions -- def revid(self, revnum, module=None): @@ -315,7 +389,11 @@ return uuid, mod, revnum def latest(self, path, stop=0): - 'find the latest revision affecting path, up to stop' + """Find the latest revid affecting path, up to stop. It may return + a revision in a different module, since a branch may be moved without + a change being reported. Return None if computed module does not + belong to rootmodule subtree. + """ if not stop: stop = svn.ra.get_latest_revnum(self.ra) try: @@ -327,7 +405,31 @@ if not dirent: raise util.Abort('%s not found up to revision %d' % (path, stop)) - return dirent.created_rev + # stat() gives us the previous revision on this line of development, but + # it might be in *another module*. Fetch the log and detect renames down + # to the latest revision. + stream = get_log(self.url, [path], stop, dirent.created_rev) + try: + for entry in stream: + paths, revnum, author, date, message = entry + if revnum <= dirent.created_rev: + break + + for p in paths: + if not path.startswith(p) or not paths[p].copyfrom_path: + continue + newpath = paths[p].copyfrom_path + path[len(p):] + self.ui.debug("branch renamed from %s to %s at %d\n" % + (path, newpath, revnum)) + path = newpath + break + finally: + stream.close() + + if not path.startswith(self.rootmodule): + self.ui.debug(_('ignoring foreign branch %r\n') % path) + return None + return self.revid(dirent.created_rev, path) def get_blacklist(self): """Avoid certain revision numbers. @@ -375,13 +477,11 @@ entries = [] copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions. 
copies = {} - revnum = self.revnum(rev) - if revnum in self.modulemap: - new_module = self.modulemap[revnum] - if new_module != self.module: - self.module = new_module - self.reparent(self.module) + new_module, revnum = self.revsplit(rev)[1:] + if new_module != self.module: + self.module = new_module + self.reparent(self.module) for path, ent in paths: entrypath = get_entry_from_path(path, module=self.module) @@ -392,7 +492,9 @@ if ent.copyfrom_path: copyfrom_path = get_entry_from_path(ent.copyfrom_path) if copyfrom_path: - self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev)) + self.ui.debug("Copied to %s from %s@%s\n" % + (entrypath, copyfrom_path, + ent.copyfrom_rev)) # It's probably important for hg that the source # exists in the revision's parent, not just the # ent.copyfrom_rev @@ -405,12 +507,9 @@ # if a branch is created but entries are removed in the same # changeset, get the right fromrev - if parents: - uuid, old_module, fromrev = self.revsplit(parents[0]) - else: - fromrev = revnum - 1 - # might always need to be revnum - 1 in these 3 lines? - old_module = self.modulemap.get(fromrev, self.module) + # parents cannot be empty here, you cannot remove things from + # a root revision. + uuid, old_module, fromrev = self.revsplit(parents[0]) basepath = old_module + "/" + get_entry_from_path(path, module=self.module) entrypath = old_module + "/" + get_entry_from_path(path, module=self.module) @@ -486,6 +585,9 @@ # If the directory just had a prop change, # then we shouldn't need to look for its children. + if ent.action == 'M': + continue + # Also this could create duplicate entries. Not sure # whether this will matter. Maybe should make entries a set. # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev @@ -546,42 +648,46 @@ return (util.unique(entries), copies) - def _fetch_revisions(self, from_revnum = 0, to_revnum = 347): + def _fetch_revisions(self, from_revnum, to_revnum): + if from_revnum < to_revnum: + from_revnum, to_revnum = to_revnum, from_revnum + self.child_cset = None def parselogentry(orig_paths, revnum, author, date, message): + """Return the parsed commit object or None, and True if + the revision is a branch root. 
+ """ self.ui.debug("parsing revision %d (%d changes)\n" % (revnum, len(orig_paths))) - if revnum in self.modulemap: - new_module = self.modulemap[revnum] - if new_module != self.module: - self.module = new_module - self.reparent(self.module) - + branched = False rev = self.revid(revnum) # branch log might return entries for a parent we already have - if (rev in self.commits or - (revnum < self.lastrevs.get(self.module, 0))): - return + + if (rev in self.commits or revnum < to_revnum): + return None, branched parents = [] - # check whether this revision is the start of a branch - if self.module in orig_paths: - ent = orig_paths[self.module] + # check whether this revision is the start of a branch or part + # of a branch renaming + orig_paths = orig_paths.items() + orig_paths.sort() + root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)] + if root_paths: + path, ent = root_paths[-1] if ent.copyfrom_path: + branched = True + newpath = ent.copyfrom_path + self.module[len(path):] # ent.copyfrom_rev may not be the actual last revision - prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev) - self.modulemap[prev] = ent.copyfrom_path - parents = [self.revid(prev, ent.copyfrom_path)] - self.ui.note('found parent of branch %s at %d: %s\n' % \ - (self.module, prev, ent.copyfrom_path)) + previd = self.latest(newpath, ent.copyfrom_rev) + if previd is not None: + parents = [previd] + prevmodule, prevnum = self.revsplit(previd)[1:] + self.ui.note('found parent of branch %s at %d: %s\n' % + (self.module, prevnum, prevmodule)) else: self.ui.debug("No copyfrom path, don't know what to do.\n") - self.modulemap[revnum] = self.module # track backwards in time - - orig_paths = orig_paths.items() - orig_paths.sort() paths = [] # filter out unrelated paths for path, ent in orig_paths: @@ -590,8 +696,6 @@ continue paths.append((path, ent)) - self.paths[rev] = (paths, parents) - # Example SVN datetime. Includes microseconds. # ISO-8601 conformant # '2007-01-04T17:35:00.902377Z' @@ -614,23 +718,52 @@ rev=rev.encode('utf-8')) self.commits[rev] = cset + # The parents list is *shared* among self.paths and the + # commit object. Both will be updated below. 
+ self.paths[rev] = (paths, cset.parents) if self.child_cset and not self.child_cset.parents: - self.child_cset.parents = [rev] + self.child_cset.parents[:] = [rev] self.child_cset = cset + return cset, branched self.ui.note('fetching revision log for "%s" from %d to %d\n' % (self.module, from_revnum, to_revnum)) try: - for entry in self.get_log([self.module], from_revnum, to_revnum): - orig_paths, revnum, author, date, message = entry - if self.is_blacklisted(revnum): - self.ui.note('skipping blacklisted revision %d\n' % revnum) - continue - if orig_paths is None: - self.ui.debug('revision %d has no entries\n' % revnum) - continue - parselogentry(orig_paths, revnum, author, date, message) + firstcset = None + branched = False + stream = get_log(self.url, [self.module], from_revnum, to_revnum) + try: + for entry in stream: + paths, revnum, author, date, message = entry + if self.is_blacklisted(revnum): + self.ui.note('skipping blacklisted revision %d\n' + % revnum) + continue + if paths is None: + self.ui.debug('revision %d has no entries\n' % revnum) + continue + cset, branched = parselogentry(paths, revnum, author, + date, message) + if cset: + firstcset = cset + if branched: + break + finally: + stream.close() + + if not branched and firstcset and not firstcset.parents: + # The first revision of the sequence (the last fetched one) + # has invalid parents if not a branch root. Find the parent + # revision now, if any. + try: + firstrevnum = self.revnum(firstcset.rev) + if firstrevnum > 1: + latest = self.latest(self.module, firstrevnum - 1) + if latest: + firstcset.parents.append(latest) + except util.Abort: + pass except SubversionException, (inst, num): if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION: raise NoSuchRevision(branch=self, @@ -642,9 +775,9 @@ # TODO: ra.get_file transmits the whole file instead of diffs. 
mode = '' try: - revnum = self.revnum(rev) - if self.module != self.modulemap[revnum]: - self.module = self.modulemap[revnum] + new_module, revnum = self.revsplit(rev)[1:] + if self.module != new_module: + self.module = new_module self.reparent(self.module) info = svn.ra.get_file(self.ra, file, revnum, io) if isinstance(info, list): @@ -669,3 +802,240 @@ pool = Pool() rpath = '/'.join([self.base, path]).strip('/') return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()] + +pre_revprop_change = '''#!/bin/sh + +REPOS="$1" +REV="$2" +USER="$3" +PROPNAME="$4" +ACTION="$5" + +if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi + +echo "Changing prohibited revision property" >&2 +exit 1 +''' + +class svn_sink(converter_sink, commandline): + commit_re = re.compile(r'Committed revision (\d+).', re.M) + + def prerun(self): + if self.wc: + os.chdir(self.wc) + + def postrun(self): + if self.wc: + os.chdir(self.cwd) + + def join(self, name): + return os.path.join(self.wc, '.svn', name) + + def revmapfile(self): + return self.join('hg-shamap') + + def authorfile(self): + return self.join('hg-authormap') + + def __init__(self, ui, path): + converter_sink.__init__(self, ui, path) + commandline.__init__(self, ui, 'svn') + self.delete = [] + self.setexec = [] + self.delexec = [] + self.copies = [] + self.wc = None + self.cwd = os.getcwd() + + path = os.path.realpath(path) + + created = False + if os.path.isfile(os.path.join(path, '.svn', 'entries')): + self.wc = path + self.run0('update') + else: + wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc') + + if os.path.isdir(os.path.dirname(path)): + if not os.path.exists(os.path.join(path, 'db', 'fs-type')): + ui.status(_('initializing svn repo %r\n') % + os.path.basename(path)) + commandline(ui, 'svnadmin').run0('create', path) + created = path + path = util.normpath(path) + if not path.startswith('/'): + path = '/' + path + path = 'file://' + path + + ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath)) + self.run0('checkout', path, wcpath) + + self.wc = wcpath + self.opener = util.opener(self.wc) + self.wopener = util.opener(self.wc) + self.childmap = mapfile(ui, self.join('hg-childmap')) + self.is_exec = util.checkexec(self.wc) and util.is_exec or None + + if created: + hook = os.path.join(created, 'hooks', 'pre-revprop-change') + fp = open(hook, 'w') + fp.write(pre_revprop_change) + fp.close() + util.set_flags(hook, "x") + + xport = transport.SvnRaTransport(url=geturl(path)) + self.uuid = svn.ra.get_uuid(xport.ra) + + def wjoin(self, *names): + return os.path.join(self.wc, *names) + + def putfile(self, filename, flags, data): + if 'l' in flags: + self.wopener.symlink(data, filename) + else: + try: + if os.path.islink(self.wjoin(filename)): + os.unlink(filename) + except OSError: + pass + self.wopener(filename, 'w').write(data) + + if self.is_exec: + was_exec = self.is_exec(self.wjoin(filename)) + else: + # On filesystems not supporting execute-bit, there is no way + # to know if it is set but asking subversion. Setting it + # systematically is just as expensive and much simpler. 
+ was_exec = 'x' not in flags + + util.set_flags(self.wjoin(filename), flags) + if was_exec: + if 'x' not in flags: + self.delexec.append(filename) + else: + if 'x' in flags: + self.setexec.append(filename) + + def delfile(self, name): + self.delete.append(name) + + def copyfile(self, source, dest): + self.copies.append([source, dest]) + + def _copyfile(self, source, dest): + # SVN's copy command pukes if the destination file exists, but + # our copyfile method expects to record a copy that has + # already occurred. Cross the semantic gap. + wdest = self.wjoin(dest) + exists = os.path.exists(wdest) + if exists: + fd, tempname = tempfile.mkstemp( + prefix='hg-copy-', dir=os.path.dirname(wdest)) + os.close(fd) + os.unlink(tempname) + os.rename(wdest, tempname) + try: + self.run0('copy', source, dest) + finally: + if exists: + try: + os.unlink(wdest) + except OSError: + pass + os.rename(tempname, wdest) + + def dirs_of(self, files): + dirs = set() + for f in files: + if os.path.isdir(self.wjoin(f)): + dirs.add(f) + for i in strutil.rfindall(f, '/'): + dirs.add(f[:i]) + return dirs + + def add_dirs(self, files): + add_dirs = [d for d in self.dirs_of(files) + if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] + if add_dirs: + add_dirs.sort() + self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) + return add_dirs + + def add_files(self, files): + if files: + self.xargs(files, 'add', quiet=True) + return files + + def tidy_dirs(self, names): + dirs = list(self.dirs_of(names)) + dirs.sort(reverse=True) + deleted = [] + for d in dirs: + wd = self.wjoin(d) + if os.listdir(wd) == '.svn': + self.run0('delete', d) + deleted.append(d) + return deleted + + def addchild(self, parent, child): + self.childmap[parent] = child + + def revid(self, rev): + return u"svn:%s@%s" % (self.uuid, rev) + + def putcommit(self, files, parents, commit): + for parent in parents: + try: + return self.revid(self.childmap[parent]) + except KeyError: + pass + entries = set(self.delete) + files = util.frozenset(files) + entries.update(self.add_dirs(files.difference(entries))) + if self.copies: + for s, d in self.copies: + self._copyfile(s, d) + self.copies = [] + if self.delete: + self.xargs(self.delete, 'delete') + self.delete = [] + entries.update(self.add_files(files.difference(entries))) + entries.update(self.tidy_dirs(entries)) + if self.delexec: + self.xargs(self.delexec, 'propdel', 'svn:executable') + self.delexec = [] + if self.setexec: + self.xargs(self.setexec, 'propset', 'svn:executable', '*') + self.setexec = [] + + fd, messagefile = tempfile.mkstemp(prefix='hg-convert-') + fp = os.fdopen(fd, 'w') + fp.write(commit.desc) + fp.close() + try: + output = self.run0('commit', + username=util.shortuser(commit.author), + file=messagefile, + encoding='utf-8') + try: + rev = self.commit_re.search(output).group(1) + except AttributeError: + self.ui.warn(_('unexpected svn output:\n')) + self.ui.warn(output) + raise util.Abort(_('unable to cope with svn output')) + if commit.rev: + self.run('propset', 'hg:convert-rev', commit.rev, + revprop=True, revision=rev) + if commit.branch and commit.branch != 'default': + self.run('propset', 'hg:convert-branch', commit.branch, + revprop=True, revision=rev) + for parent in parents: + self.addchild(parent, rev) + return self.revid(rev) + finally: + os.unlink(messagefile) + + def puttags(self, tags): + self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
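The get_log()/logstream pair introduced in subversion.py above streams pickled log entries from a child "hg debugsvnlog" process and uses a final non-tuple value (None for a clean end, an error code otherwise) as the sentinel, so the parent can stop reading early without breaking the child. Below is a self-contained demo of the same framing, with an in-memory buffer standing in for the child's stdout and invented sample entries; it mirrors the module's cPickle/cStringIO imports rather than adding new dependencies.

import cPickle as pickle
from cStringIO import StringIO

buf = StringIO()
for entry in [({'trunk/a': None}, 1, 'alice', '2008-02-08', 'first'),
              ({'trunk/b': None}, 2, 'bob', '2008-02-08', 'second')]:
    pickle.dump(entry, buf)
pickle.dump(None, buf)              # sentinel: iteration finished cleanly
buf.seek(0)

while True:
    entry = pickle.load(buf)
    try:
        paths, revnum, author, date, message = entry
    except (TypeError, ValueError):
        if entry is None:
            break                   # clean end of stream
        raise RuntimeError('child raised exception: %r' % entry)
    print('%d %s %s' % (revnum, author, message))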
--- a/hgext/fetch.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/fetch.py Fri Feb 08 11:55:17 2008 +0100 @@ -43,7 +43,8 @@ if not err: mod, add, rem = repo.status()[:3] message = (cmdutil.logmessage(opts) or - (_('Automated merge with %s') % other.url())) + (_('Automated merge with %s') % + util.removeauth(other.url()))) n = repo.commit(mod + add + rem, message, opts['user'], opts['date'], force_editor=opts.get('force_editor')) @@ -54,7 +55,8 @@ cmdutil.setremoteconfig(ui, opts) other = hg.repository(ui, ui.expandpath(source)) - ui.status(_('pulling from %s\n') % ui.expandpath(source)) + ui.status(_('pulling from %s\n') % + util.hidepassword(ui.expandpath(source))) revs = None if opts['rev'] and not other.local(): raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
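The fetch.py hunks above run URLs through util.removeauth() and util.hidepassword() before they reach commit messages and status output, so embedded credentials are not leaked. The function below is only a rough standalone equivalent of stripping the user:password@ part, not the actual util implementation.

from urlparse import urlsplit, urlunsplit

def strip_auth(url):
    parts = urlsplit(url)
    netloc = parts.netloc.rsplit('@', 1)[-1]    # drop any user:password@ prefix
    return urlunsplit((parts.scheme, netloc) + parts[2:])

print(strip_auth('http://user:secret@hg.example.com/repo'))
# http://hg.example.com/repo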
--- a/hgext/gpg.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/gpg.py Fri Feb 08 11:55:17 2008 +0100 @@ -249,7 +249,7 @@ message = opts['message'] if not message: message = "\n".join([_("Added signature for changeset %s") - % hgnode.hex(n) + % hgnode.short(n) for n in nodes]) try: repo.commit([".hgsigs"], message, opts['user'], opts['date'])
--- a/hgext/graphlog.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/graphlog.py Fri Feb 08 11:55:17 2008 +0100 @@ -5,11 +5,12 @@ # This software may be used and distributed according to the terms of # the GNU General Public License, incorporated herein by reference. +import os import sys from mercurial.cmdutil import revrange, show_changeset from mercurial.i18n import _ from mercurial.node import nullid, nullrev -from mercurial.util import Abort +from mercurial.util import Abort, canonpath def revision_grapher(repo, start_rev, stop_rev): """incremental revision grapher @@ -63,6 +64,62 @@ revs = next_revs curr_rev -= 1 +def filelog_grapher(repo, path, start_rev, stop_rev): + """incremental file log grapher + + This generator function walks through the revision history of a + single file from revision start_rev to revision stop_rev (which must + be less than or equal to start_rev) and for each revision emits + tuples with the following elements: + + - Current revision. + - Current node. + - Column of the current node in the set of ongoing edges. + - Edges; a list of (col, next_col) indicating the edges between + the current node and its parents. + - Number of columns (ongoing edges) in the current revision. + - The difference between the number of columns (ongoing edges) + in the next revision and the number of columns (ongoing edges) + in the current revision. That is: -1 means one column removed; + 0 means no columns added or removed; 1 means one column added. + """ + + assert start_rev >= stop_rev + curr_rev = start_rev + revs = [] + filerev = repo.file(path).count() - 1 + while filerev >= 0: + fctx = repo.filectx(path, fileid=filerev) + + # Compute revs and next_revs. + if filerev not in revs: + revs.append(filerev) + rev_index = revs.index(filerev) + next_revs = revs[:] + + # Add parents to next_revs. + parents = [f.filerev() for f in fctx.parents() if f.path() == path] + parents_to_add = [] + for parent in parents: + if parent not in next_revs: + parents_to_add.append(parent) + parents_to_add.sort() + next_revs[rev_index:rev_index + 1] = parents_to_add + + edges = [] + for parent in parents: + edges.append((rev_index, next_revs.index(parent))) + + changerev = fctx.linkrev() + if changerev <= start_rev: + node = repo.changelog.node(changerev) + n_columns_diff = len(next_revs) - len(revs) + yield (changerev, node, rev_index, edges, len(revs), n_columns_diff) + if changerev <= stop_rev: + break + revs = next_revs + filerev -= 1 + def get_rev_parents(repo, rev): return [x for x in repo.changelog.parentrevs(rev) if x != nullrev] @@ -141,7 +198,7 @@ else: return (repo.changelog.count() - 1, 0) -def graphlog(ui, repo, **opts): +def graphlog(ui, repo, path=None, **opts): """show revision history alongside an ASCII revision graph Print a revision history alongside a revision graph drawn with @@ -157,7 +214,11 @@ if start_rev == nullrev: return cs_printer = show_changeset(ui, repo, opts) - grapher = revision_grapher(repo, start_rev, stop_rev) + if path: + cpath = canonpath(repo.root, os.getcwd(), path) + grapher = filelog_grapher(repo, cpath, start_rev, stop_rev) + else: + grapher = revision_grapher(repo, start_rev, stop_rev) repo_parents = repo.dirstate.parents() prev_n_columns_diff = 0 prev_node_index = 0 @@ -261,5 +322,5 @@ ('r', 'rev', [], _('show the specified revision or range')), ('', 'style', '', _('display using template map file')), ('', 'template', '', _('display with template'))], - _('hg glog [OPTION]...')), + _('hg glog [OPTION]... [FILE]')), }
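The new filelog_grapher() above yields (rev, node, column, edges, number-of-columns, column-delta) tuples, just like the existing revision grapher, which is what lets "hg glog FILE" reuse the same drawing code. The sketch below shows one crude way such tuples could be consumed; it only marks which column each revision occupies and ignores the edge list, and the repository, path and revision range in the commented call are placeholders.

def dump_graph(grapher):
    for rev, node, col, edges, ncols, diff in grapher:
        cells = ['|'] * ncols
        cells[col] = 'o'
        print('%s  rev %d' % (' '.join(cells), rev))

# e.g. dump_graph(filelog_grapher(repo, 'mercurial/util.py', start_rev, 0))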
--- a/hgext/hbisect.py Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,313 +0,0 @@ -# bisect extension for mercurial -# -# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org> -# Inspired by git bisect, extension skeleton taken from mq.py. -# -# This software may be used and distributed according to the terms -# of the GNU General Public License, incorporated herein by reference. - -from mercurial.i18n import _ -from mercurial import hg, util, commands, cmdutil -import os, sys, sets - -versionstr = "0.0.3" - -def lookup_rev(ui, repo, rev=None): - """returns rev or the checked-out revision if rev is None""" - if not rev is None: - return repo.lookup(rev) - parents = [p for p in repo.dirstate.parents() if p != hg.nullid] - if len(parents) != 1: - raise util.Abort(_("unexpected number of parents, " - "please commit or revert")) - return parents.pop() - -def check_clean(ui, repo): - modified, added, removed, deleted, unknown = repo.status()[:5] - if modified or added or removed: - ui.warn("Repository is not clean, please commit or revert\n") - sys.exit(1) - -class bisect(object): - """dichotomic search in the DAG of changesets""" - def __init__(self, ui, repo): - self.repo = repo - self.path = repo.join("bisect") - self.opener = util.opener(self.path) - self.ui = ui - self.goodrevs = [] - self.badrev = None - self.good_path = "good" - self.bad_path = "bad" - self.is_reset = False - - if os.path.exists(os.path.join(self.path, self.good_path)): - self.goodrevs = self.opener(self.good_path).read().splitlines() - self.goodrevs = [hg.bin(x) for x in self.goodrevs] - if os.path.exists(os.path.join(self.path, self.bad_path)): - r = self.opener(self.bad_path).read().splitlines() - if r: - self.badrev = hg.bin(r.pop(0)) - - def write(self): - if self.is_reset: - return - if not os.path.isdir(self.path): - os.mkdir(self.path) - f = self.opener(self.good_path, "w") - f.write("\n".join([hg.hex(r) for r in self.goodrevs])) - if len(self.goodrevs) > 0: - f.write("\n") - f = self.opener(self.bad_path, "w") - if self.badrev: - f.write(hg.hex(self.badrev) + "\n") - - def init(self): - """start a new bisection""" - if os.path.isdir(self.path): - raise util.Abort(_("bisect directory already exists\n")) - os.mkdir(self.path) - check_clean(self.ui, self.repo) - return 0 - - def reset(self): - """finish a bisection""" - if os.path.isdir(self.path): - sl = [os.path.join(self.path, p) - for p in [self.bad_path, self.good_path]] - for s in sl: - if os.path.exists(s): - os.unlink(s) - os.rmdir(self.path) - # Not sure about this - #self.ui.write("Going back to tip\n") - #self.repo.update(self.repo.changelog.tip()) - self.is_reset = True - return 0 - - def num_ancestors(self, head=None, stop=None): - """ - returns a dict with the mapping: - node -> number of ancestors (self included) - for all nodes who are ancestor of head and - not in stop. - """ - if head is None: - head = self.badrev - return self.__ancestors_and_nb_ancestors(head, stop)[1] - - def ancestors(self, head=None, stop=None): - """ - returns the set of the ancestors of head (self included) - who are not in stop. - """ - if head is None: - head = self.badrev - return self.__ancestors_and_nb_ancestors(head, stop)[0] - - def __ancestors_and_nb_ancestors(self, head, stop=None): - """ - if stop is None then ancestors of goodrevs are used as - lower limit. 
- - returns (anc, n_child) where anc is the set of the ancestors of head - and n_child is a dictionary with the following mapping: - node -> number of ancestors (self included) - """ - cl = self.repo.changelog - if not stop: - stop = sets.Set([]) - for i in xrange(len(self.goodrevs)-1, -1, -1): - g = self.goodrevs[i] - if g in stop: - continue - stop.update(cl.reachable(g)) - def num_children(a): - """ - returns a dictionnary with the following mapping - node -> [number of children, empty set] - """ - d = {a: [0, sets.Set([])]} - for i in xrange(cl.rev(a)+1): - n = cl.node(i) - if not d.has_key(n): - d[n] = [0, sets.Set([])] - parents = [p for p in cl.parents(n) if p != hg.nullid] - for p in parents: - d[p][0] += 1 - return d - - if head in stop: - raise util.Abort(_("Inconsistent state, %s:%s is good and bad") - % (cl.rev(head), hg.short(head))) - n_child = num_children(head) - for i in xrange(cl.rev(head)+1): - n = cl.node(i) - parents = [p for p in cl.parents(n) if p != hg.nullid] - for p in parents: - n_child[p][0] -= 1 - if not n in stop: - n_child[n][1].union_update(n_child[p][1]) - if n_child[p][0] == 0: - n_child[p] = len(n_child[p][1]) - if not n in stop: - n_child[n][1].add(n) - if n_child[n][0] == 0: - if n == head: - anc = n_child[n][1] - n_child[n] = len(n_child[n][1]) - return anc, n_child - - def next(self): - if not self.badrev: - raise util.Abort(_("You should give at least one bad revision")) - if not self.goodrevs: - self.ui.warn(_("No good revision given\n")) - self.ui.warn(_("Marking the first revision as good\n")) - ancestors, num_ancestors = self.__ancestors_and_nb_ancestors( - self.badrev) - tot = len(ancestors) - if tot == 1: - if ancestors.pop() != self.badrev: - raise util.Abort(_("Could not find the first bad revision")) - self.ui.write(_("The first bad revision is:\n")) - displayer = cmdutil.show_changeset(self.ui, self.repo, {}) - displayer.show(changenode=self.badrev) - return None - best_rev = None - best_len = -1 - for n in ancestors: - l = num_ancestors[n] - l = min(l, tot - l) - if l > best_len: - best_len = l - best_rev = n - assert best_rev is not None - nb_tests = 0 - q, r = divmod(tot, 2) - while q: - nb_tests += 1 - q, r = divmod(q, 2) - msg = _("Testing changeset %s:%s (%s changesets remaining, " - "~%s tests)\n") % (self.repo.changelog.rev(best_rev), - hg.short(best_rev), tot, nb_tests) - self.ui.write(msg) - return best_rev - - def autonext(self): - """find and update to the next revision to test""" - check_clean(self.ui, self.repo) - rev = self.next() - if rev is not None: - return hg.clean(self.repo, rev) - - def good(self, rev): - self.goodrevs.append(rev) - - def autogood(self, rev=None): - """mark revision as good and update to the next revision to test""" - check_clean(self.ui, self.repo) - rev = lookup_rev(self.ui, self.repo, rev) - self.good(rev) - if self.badrev: - return self.autonext() - - def bad(self, rev): - self.badrev = rev - - def autobad(self, rev=None): - """mark revision as bad and update to the next revision to test""" - check_clean(self.ui, self.repo) - rev = lookup_rev(self.ui, self.repo, rev) - self.bad(rev) - if self.goodrevs: - self.autonext() - -# should we put it in the class ? 
-def test(ui, repo, rev): - """test the bisection code""" - b = bisect(ui, repo) - rev = repo.lookup(rev) - ui.write("testing with rev %s\n" % hg.hex(rev)) - anc = b.ancestors() - while len(anc) > 1: - if not rev in anc: - ui.warn("failure while bisecting\n") - sys.exit(1) - ui.write("it worked :)\n") - new_rev = b.next() - ui.write("choosing if good or bad\n") - if rev in b.ancestors(head=new_rev): - b.bad(new_rev) - ui.write("it is bad\n") - else: - b.good(new_rev) - ui.write("it is good\n") - anc = b.ancestors() - #repo.update(new_rev, force=True) - for v in anc: - if v != rev: - ui.warn("fail to found cset! :(\n") - return 1 - ui.write("Found bad cset: %s\n" % hg.hex(b.badrev)) - ui.write("Everything is ok :)\n") - return 0 - -def bisect_run(ui, repo, cmd=None, *args): - """Dichotomic search in the DAG of changesets - -This extension helps to find changesets which cause problems. -To use, mark the earliest changeset you know introduces the problem -as bad, then mark the latest changeset which is free from the problem -as good. Bisect will update your working directory to a revision for -testing. Once you have performed tests, mark the working directory -as bad or good and bisect will either update to another candidate -changeset or announce that it has found the bad revision. - -Note: bisect expects bad revisions to be descendants of good revisions. -If you are looking for the point at which a problem was fixed, then make -the problem-free state "bad" and the problematic state "good." - -For subcommands see "hg bisect help\" - """ - def help_(cmd=None, *args): - """show help for a given bisect subcommand or all subcommands""" - cmdtable = bisectcmdtable - if cmd: - doc = cmdtable[cmd][0].__doc__ - synopsis = cmdtable[cmd][2] - ui.write(synopsis + "\n") - ui.write("\n" + doc + "\n") - return - ui.write(_("list of subcommands for the bisect extension\n\n")) - cmds = cmdtable.keys() - cmds.sort() - m = max([len(c) for c in cmds]) - for cmd in cmds: - doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip() - ui.write(" %-*s %s\n" % (m, cmd, doc)) - - b = bisect(ui, repo) - bisectcmdtable = { - "init": (b.init, 0, _("hg bisect init")), - "bad": (b.autobad, 1, _("hg bisect bad [<rev>]")), - "good": (b.autogood, 1, _("hg bisect good [<rev>]")), - "next": (b.autonext, 0, _("hg bisect next")), - "reset": (b.reset, 0, _("hg bisect reset")), - "help": (help_, 1, _("hg bisect help [<subcommand>]")), - } - - if not bisectcmdtable.has_key(cmd): - ui.warn(_("bisect: Unknown sub-command\n")) - return help_() - if len(args) > bisectcmdtable[cmd][1]: - ui.warn(_("bisect: Too many arguments\n")) - return help_() - ret = bisectcmdtable[cmd][0](*args) - b.write() - return ret - -cmdtable = { - "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")), - #"bisect-test": (test, [], "hg bisect-test rev"), -}
--- a/hgext/hgk.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/hgk.py Fri Feb 08 11:55:17 2008 +0100 @@ -13,7 +13,7 @@ # querying of information, and an extension to mercurial named hgk.py, # which provides hooks for hgk to get information. hgk can be found in # the contrib directory, and hgk.py can be found in the hgext -# directory. +# directory. # # To load the hgext.py extension, add it to your .hgrc file (you have # to use your global $HOME/.hgrc file, not one in a repository). You @@ -45,7 +45,7 @@ # Revisions context menu will now display additional entries to fire # vdiff on hovered and selected revisions. -import sys, os +import os from mercurial import hg, fancyopts, commands, ui, util, patch, revlog def difftree(ui, repo, node1=None, node2=None, *files, **opts): @@ -61,17 +61,14 @@ for f in modified: # TODO get file permissions - print ":100664 100664 %s %s M\t%s\t%s" % (hg.short(mmap[f]), - hg.short(mmap2[f]), - f, f) + ui.write(":100664 100664 %s %s M\t%s\t%s\n" % + (hg.short(mmap[f]), hg.short(mmap2[f]), f, f)) for f in added: - print ":000000 100664 %s %s N\t%s\t%s" % (empty, - hg.short(mmap2[f]), - f, f) + ui.write(":000000 100664 %s %s N\t%s\t%s\n" % + (empty, hg.short(mmap2[f]), f, f)) for f in removed: - print ":100664 000000 %s %s D\t%s\t%s" % (hg.short(mmap[f]), - empty, - f, f) + ui.write(":100664 000000 %s %s D\t%s\t%s\n" % + (hg.short(mmap[f]), empty, f, f)) ## while True: @@ -93,7 +90,7 @@ node1 = repo.changelog.parents(node1)[0] if opts['patch']: if opts['pretty']: - catcommit(repo, node2, "") + catcommit(ui, repo, node2, "") patch.diff(repo, node1, node2, files=files, opts=patch.diffopts(ui, {'git': True})) @@ -102,14 +99,14 @@ if not opts['stdin']: break -def catcommit(repo, n, prefix, ctx=None): +def catcommit(ui, repo, n, prefix, ctx=None): nlprefix = '\n' + prefix; if ctx is None: ctx = repo.changectx(n) (p1, p2) = ctx.parents() - print "tree %s" % (hg.short(ctx.changeset()[0])) # use ctx.node() instead ?? - if p1: print "parent %s" % (hg.short(p1.node())) - if p2: print "parent %s" % (hg.short(p2.node())) + ui.write("tree %s\n" % hg.short(ctx.changeset()[0])) # use ctx.node() instead ?? 
+ if p1: ui.write("parent %s\n" % hg.short(p1.node())) + if p2: ui.write("parent %s\n" % hg.short(p2.node())) date = ctx.date() description = ctx.description().replace("\0", "") lines = description.splitlines() @@ -118,23 +115,24 @@ else: committer = ctx.user() - print "author %s %s %s" % (ctx.user(), int(date[0]), date[1]) - print "committer %s %s %s" % (committer, int(date[0]), date[1]) - print "revision %d" % ctx.rev() - print "" + ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])) + ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1])) + ui.write("revision %d\n" % ctx.rev()) + ui.write("branch %s\n\n" % ctx.branch()) + if prefix != "": - print "%s%s" % (prefix, description.replace('\n', nlprefix).strip()) + ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip())) else: - print description + ui.write(description + "\n") if prefix: - sys.stdout.write('\0') + ui.write('\0') def base(ui, repo, node1, node2): """Output common ancestor information""" node1 = repo.lookup(node1) node2 = repo.lookup(node2) n = repo.changelog.ancestor(node1, node2) - print hg.short(n) + ui.write(hg.short(n) + "\n") def catfile(ui, repo, type=None, r=None, **opts): """cat a specific revision""" @@ -157,10 +155,10 @@ while r: if type != "commit": - sys.stderr.write("aborting hg cat-file only understands commits\n") - sys.exit(1); + ui.warn("aborting hg cat-file only understands commits\n") + return 1; n = repo.lookup(r) - catcommit(repo, n, prefix) + catcommit(ui, repo, n, prefix) if opts['stdin']: try: (type, r) = raw_input().split(' '); @@ -174,7 +172,7 @@ # telling you which commits are reachable from the supplied ones via # a bitmask based on arg position. # you can specify a commit to stop at by starting the sha1 with ^ -def revtree(args, repo, full="tree", maxnr=0, parents=False): +def revtree(ui, args, repo, full="tree", maxnr=0, parents=False): def chlogwalk(): count = repo.changelog.count() i = count @@ -259,24 +257,24 @@ if pp[1] != hg.nullid: parentstr += " " + hg.short(pp[1]) if not full: - print hg.short(n) + parentstr + ui.write("%s%s\n" % (hg.short(n), parentstr)) elif full == "commit": - print hg.short(n) + parentstr - catcommit(repo, n, ' ', ctx) + ui.write("%s%s\n" % (hg.short(n), parentstr)) + catcommit(ui, repo, n, ' ', ctx) else: (p1, p2) = repo.changelog.parents(n) (h, h1, h2) = map(hg.short, (n, p1, p2)) (i1, i2) = map(repo.changelog.rev, (p1, p2)) date = ctx.date()[0] - print "%s %s:%s" % (date, h, mask), + ui.write("%s %s:%s" % (date, h, mask)) mask = is_reachable(want_sha1, reachable, p1) if i1 != hg.nullrev and mask > 0: - print "%s:%s " % (h1, mask), + ui.write("%s:%s " % (h1, mask)), mask = is_reachable(want_sha1, reachable, p2) if i2 != hg.nullrev and mask > 0: - print "%s:%s " % (h2, mask), - print "" + ui.write("%s:%s " % (h2, mask)) + ui.write("\n") if maxnr and count >= maxnr: break count += 1 @@ -304,15 +302,15 @@ else: full = None copy = [x for x in revs] - revtree(copy, repo, full, opts['max_count'], opts['parents']) + revtree(ui, copy, repo, full, opts['max_count'], opts['parents']) def config(ui, repo, **opts): """print extension options""" def writeopt(name, value): - ui.write('k=%s\nv=%s\n' % (name, value)) + ui.write('k=%s\nv=%s\n' % (name, value)) writeopt('vdiff', ui.config('hgk', 'vdiff', '')) - + def view(ui, repo, *etc, **opts): "start interactive history viewer"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/highlight.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,139 @@ +""" +This is Mercurial extension for syntax highlighting in the file +revision view of hgweb. + +It depends on the pygments syntax highlighting library: +http://pygments.org/ + +To enable the extension add this to hgrc: + +[extensions] +hgext.highlight = + +There is a single configuration option: + +[web] +pygments_style = <style> + +The default is 'colorful'. If this is changed the corresponding CSS +file should be re-generated by running + +# pygmentize -f html -S <newstyle> + + +-- Adam Hupp <adam@hupp.org> + + +""" + +from mercurial import demandimport +demandimport.ignore.extend(['pkgutil', + 'pkg_resources', + '__main__',]) + +import mimetypes + +from mercurial.hgweb import hgweb_mod +from mercurial.hgweb.hgweb_mod import hgweb +from mercurial import util +from mercurial.hgweb.common import paritygen +from mercurial.node import hex + +from pygments import highlight +from pygments.util import ClassNotFound +from pygments.lexers import guess_lexer_for_filename, TextLexer +from pygments.formatters import HtmlFormatter + +SYNTAX_CSS = ('\n<link rel="stylesheet" href="#staticurl#highlight.css" ' + 'type="text/css" />') + +class StripedHtmlFormatter(HtmlFormatter): + def __init__(self, stripecount, *args, **kwargs): + super(StripedHtmlFormatter, self).__init__(*args, **kwargs) + self.stripecount = stripecount + + def wrap(self, source, outfile): + yield 0, "<div class='highlight'>" + yield 0, "<pre>" + parity = paritygen(self.stripecount) + + for n, i in source: + if n == 1: + i = "<div class='parity%s'>%s</div>" % (parity.next(), i) + yield n, i + + yield 0, "</pre>" + yield 0, "</div>" + + +def pygments_format(filename, text, forcetext, stripecount, style): + if not forcetext: + try: + lexer = guess_lexer_for_filename(filename, text, + encoding=util._encoding) + except ClassNotFound: + lexer = TextLexer(encoding=util._encoding) + else: + lexer = TextLexer(encoding=util._encoding) + + formatter = StripedHtmlFormatter(stripecount, style=style, + linenos='inline', encoding=util._encoding) + + return highlight(text, lexer, formatter) + + +def filerevision_pygments(self, tmpl, fctx): + """Reimplement hgweb.filerevision to use syntax highlighting""" + f = fctx.path() + text = fctx.data() + fl = fctx.filelog() + n = fctx.filenode() + + if util.binary(text): + mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' + text = "(binary:%s)" % mt + # don't parse (binary:...) 
as anything + forcetext = True + else: + # encode to hgweb.encoding for lexers and formatter + util._encoding = self.encoding + text = util.tolocal(text) + forcetext = False + + def lines(text): + for line in text.splitlines(True): + yield {"line": line} + + style = self.config("web", "pygments_style", "colorful") + + text_formatted = lines(pygments_format(f, text, forcetext, + self.stripecount, style)) + + # override per-line template + tmpl.cache['fileline'] = '#line#' + + # append a <link ...> to the syntax highlighting css + old_header = ''.join(tmpl('header')) + if SYNTAX_CSS not in old_header: + new_header = old_header + SYNTAX_CSS + tmpl.cache['header'] = new_header + + return tmpl("filerevision", + file=f, + path=hgweb_mod._up(f), # fixme: make public + text=text_formatted, + rev=fctx.rev(), + node=hex(fctx.node()), + author=fctx.user(), + date=fctx.date(), + desc=fctx.description(), + parent=self.siblings(fctx.parents()), + child=self.siblings(fctx.children()), + rename=self.renamelink(fl, n), + permissions=fctx.manifest().flags(f)) + + +# monkeypatch in the new version +# should be safer than overriding the method in a derived class +# and then patching the class +hgweb.filerevision = filerevision_pygments
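The highlight extension's formatting path is: guess a pygments lexer from the filename, fall back to TextLexer for unrecognized content, and render through an HtmlFormatter subclass that adds parity classes per line. A rough standalone sketch of that path using the pygments API directly (file name and content are placeholders, and the striping subclass is omitted):

    from pygments import highlight
    from pygments.util import ClassNotFound
    from pygments.lexers import guess_lexer_for_filename, TextLexer
    from pygments.formatters import HtmlFormatter

    def render(filename, text, style='colorful'):
        # pick a lexer from the filename, falling back to plain text
        try:
            lexer = guess_lexer_for_filename(filename, text)
        except ClassNotFound:
            lexer = TextLexer()
        formatter = HtmlFormatter(style=style, linenos='inline')
        return highlight(text, lexer, formatter)

    html = render('example.py', 'print "hello"\n')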
--- a/hgext/imerge.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/imerge.py Fri Feb 08 11:55:17 2008 +0100 @@ -7,7 +7,8 @@ from mercurial.i18n import _ from mercurial.node import * -from mercurial import commands, cmdutil, dispatch, fancyopts, hg, merge, util +from mercurial import commands, cmdutil, dispatch, fancyopts +from mercurial import hg, filemerge, util import os, tarfile class InvalidStateFileException(Exception): pass @@ -126,7 +127,7 @@ self.wctx._parents.pop() try: # TODO: we should probably revert the file if merge fails - return merge.filemerge(self.repo, fn, fd, fo, self.wctx, p2) + return filemerge.filemerge(self.repo, fn, fd, fo, self.wctx, p2) finally: self.wctx._parents.append(p2) if realmerge: @@ -135,13 +136,13 @@ del os.environ['HGMERGE'] def start(self, rev=None): - _filemerge = merge.filemerge - def filemerge(repo, fw, fd, fo, wctx, mctx): + _filemerge = filemerge.filemerge + def filemerge_(repo, fw, fd, fo, wctx, mctx): self.conflicts[fw] = (fd, fo) - merge.filemerge = filemerge + filemerge.filemerge = filemerge_ commands.merge(self.ui, self.repo, rev=rev) - merge.filemerge = _filemerge + filemerge.filemerge = _filemerge self.wctx = self.repo.workingctx() self.save()
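imerge works by temporarily replacing the filemerge function with a recorder, so that running the regular merge command collects the conflicting files instead of invoking a merge tool; the change above only moves that hook from mercurial.merge to the new mercurial.filemerge module. A loose sketch of the pattern as start() uses it (the try/finally restore is added here for safety and is not in the original):

    from mercurial import commands, filemerge

    conflicts = {}

    def start_merge(ui, repo, rev=None):
        saved = filemerge.filemerge
        def recorder(repo, fw, fd, fo, wctx, mctx):
            # remember the conflict instead of launching a merge tool now
            conflicts[fw] = (fd, fo)
        filemerge.filemerge = recorder           # monkeypatch, as imerge does
        try:
            commands.merge(ui, repo, rev=rev)
        finally:
            filemerge.filemerge = saved          # restore the real function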
--- a/hgext/interhg.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/interhg.py Fri Feb 08 11:55:17 2008 +0100 @@ -27,9 +27,9 @@ import re from mercurial.hgweb import hgweb_mod -from mercurial import templater +from mercurial import templatefilters -orig_escape = templater.common_filters["escape"] +orig_escape = templatefilters.filters["escape"] interhg_table = [] @@ -39,7 +39,7 @@ escstr = regexp.sub(format, escstr) return escstr -templater.common_filters["escape"] = interhg_escape +templatefilters.filters["escape"] = interhg_escape orig_refresh = hgweb_mod.hgweb.refresh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/keyword.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,520 @@ +# keyword.py - $Keyword$ expansion for Mercurial +# +# Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net> +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. +# +# $Id$ +# +# Keyword expansion hack against the grain of a DSCM +# +# There are many good reasons why this is not needed in a distributed +# SCM, still it may be useful in very small projects based on single +# files (like LaTeX packages), that are mostly addressed to an audience +# not running a version control system. +# +# For in-depth discussion refer to +# <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>. +# +# Keyword expansion is based on Mercurial's changeset template mappings. +# +# Binary files are not touched. +# +# Setup in hgrc: +# +# [extensions] +# # enable extension +# hgext.keyword = +# +# Files to act upon/ignore are specified in the [keyword] section. +# Customized keyword template mappings in the [keywordmaps] section. +# +# Run "hg help keyword" and "hg kwdemo" to get info on configuration. + +'''keyword expansion in local repositories + +This extension expands RCS/CVS-like or self-customized $Keywords$ +in tracked text files selected by your configuration. + +Keywords are only expanded in local repositories and not stored in +the change history. The mechanism can be regarded as a convenience +for the current user or for archive distribution. + +Configuration is done in the [keyword] and [keywordmaps] sections +of hgrc files. + +Example: + + [keyword] + # expand keywords in every python file except those matching "x*" + **.py = + x* = ignore + +Note: the more specific you are in your filename patterns + the less you lose speed in huge repos. + +For [keywordmaps] template mapping and expansion demonstration and +control run "hg kwdemo". + +An additional date template filter {date|utcdate} is provided. + +The default template mappings (view with "hg kwdemo -d") can be replaced +with customized keywords and templates. +Again, run "hg kwdemo" to control the results of your config changes. + +Before changing/disabling active keywords, run "hg kwshrink" to avoid +the risk of inadvertedly storing expanded keywords in the change history. + +To force expansion after enabling it, or a configuration change, run +"hg kwexpand". + +Also, when committing with the record extension or using mq's qrecord, be aware +that keywords cannot be updated. Again, run "hg kwexpand" on the files in +question to update keyword expansions after all changes have been checked in. + +Expansions spanning more than one line and incremental expansions, +like CVS' $Log$, are not supported. A keyword template map +"Log = {desc}" expands to the first line of the changeset description. 
+''' + +from mercurial import commands, cmdutil, context, dispatch, filelog, revlog +from mercurial import patch, localrepo, templater, templatefilters, util +from mercurial.node import * +from mercurial.i18n import _ +import re, shutil, sys, tempfile, time + +commands.optionalrepo += ' kwdemo' + +# hg commands that do not act on keywords +nokwcommands = ('add addremove bundle copy export grep identify incoming init' + ' log outgoing push remove rename rollback tip convert') + +# hg commands that trigger expansion only when writing to working dir, +# not when reading filelog, and unexpand when reading from working dir +restricted = 'diff1 record qfold qimport qnew qpush qrefresh qrecord' + +def utcdate(date): + '''Returns hgdate in cvs-like UTC format.''' + return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0])) + + +_kwtemplater = None + +class kwtemplater(object): + ''' + Sets up keyword templates, corresponding keyword regex, and + provides keyword substitution functions. + ''' + templates = { + 'Revision': '{node|short}', + 'Author': '{author|user}', + 'Date': '{date|utcdate}', + 'RCSFile': '{file|basename},v', + 'Source': '{root}/{file},v', + 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}', + 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}', + } + + def __init__(self, ui, repo, inc, exc, restricted): + self.ui = ui + self.repo = repo + self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1] + self.restricted = restricted + self.commitnode = None + self.path = '' + + kwmaps = self.ui.configitems('keywordmaps') + if kwmaps: # override default templates + kwmaps = [(k, templater.parsestring(v, quoted=False)) + for (k, v) in kwmaps] + self.templates = dict(kwmaps) + escaped = map(re.escape, self.templates.keys()) + kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped) + self.re_kw = re.compile(kwpat) + + templatefilters.filters['utcdate'] = utcdate + self.ct = cmdutil.changeset_templater(self.ui, self.repo, + False, '', False) + + def substitute(self, node, data, subfunc): + '''Obtains file's changenode if commit node not given, + and calls given substitution function.''' + if self.commitnode: + fnode = self.commitnode + else: + c = context.filectx(self.repo, self.path, fileid=node) + fnode = c.node() + + def kwsub(mobj): + '''Substitutes keyword using corresponding template.''' + kw = mobj.group(1) + self.ct.use_template(self.templates[kw]) + self.ui.pushbuffer() + self.ct.show(changenode=fnode, root=self.repo.root, file=self.path) + ekw = templatefilters.firstline(self.ui.popbuffer()) + return '$%s: %s $' % (kw, ekw) + + return subfunc(kwsub, data) + + def expand(self, node, data): + '''Returns data with keywords expanded.''' + if self.restricted or util.binary(data): + return data + return self.substitute(node, data, self.re_kw.sub) + + def process(self, node, data, expand): + '''Returns a tuple: data, count. + Count is number of keywords/keyword substitutions, + telling caller whether to act on file containing data.''' + if util.binary(data): + return data, None + if expand: + return self.substitute(node, data, self.re_kw.subn) + return data, self.re_kw.search(data) + + def shrink(self, text): + '''Returns text with all keyword substitutions removed.''' + if util.binary(text): + return text + return self.re_kw.sub(r'$\1$', text) + +class kwfilelog(filelog.filelog): + ''' + Subclass of filelog to hook into its read, add, cmp methods. + Keywords are "stored" unexpanded, and processed on reading. 
+ ''' + def __init__(self, opener, path): + super(kwfilelog, self).__init__(opener, path) + _kwtemplater.path = path + + def kwctread(self, node, expand): + '''Reads expanding and counting keywords, called from _overwrite.''' + data = super(kwfilelog, self).read(node) + return _kwtemplater.process(node, data, expand) + + def read(self, node): + '''Expands keywords when reading filelog.''' + data = super(kwfilelog, self).read(node) + return _kwtemplater.expand(node, data) + + def add(self, text, meta, tr, link, p1=None, p2=None): + '''Removes keyword substitutions when adding to filelog.''' + text = _kwtemplater.shrink(text) + return super(kwfilelog, self).add(text, meta, tr, link, p1=p1, p2=p2) + + def cmp(self, node, text): + '''Removes keyword substitutions for comparison.''' + text = _kwtemplater.shrink(text) + if self.renamed(node): + t2 = super(kwfilelog, self).read(node) + return t2 != text + return revlog.revlog.cmp(self, node, text) + + +# store original patch.patchfile.__init__ +_patchfile_init = patch.patchfile.__init__ + +def _kwpatchfile_init(self, ui, fname, missing=False): + '''Monkeypatch/wrap patch.patchfile.__init__ to avoid + rejects or conflicts due to expanded keywords in working dir.''' + _patchfile_init(self, ui, fname, missing=missing) + + if _kwtemplater.matcher(self.fname): + # shrink keywords read from working dir + kwshrunk = _kwtemplater.shrink(''.join(self.lines)) + self.lines = kwshrunk.splitlines(True) + + +def _iskwfile(f, link): + return not link(f) and _kwtemplater.matcher(f) + +def _status(ui, repo, *pats, **opts): + '''Bails out if [keyword] configuration is not active. + Returns status of working directory.''' + if _kwtemplater: + files, match, anypats = cmdutil.matchpats(repo, pats, opts) + return repo.status(files=files, match=match, list_clean=True) + if ui.configitems('keyword'): + raise util.Abort(_('[keyword] patterns cannot match')) + raise util.Abort(_('no [keyword] patterns configured')) + +def _overwrite(ui, repo, node=None, expand=True, files=None): + '''Overwrites selected files expanding/shrinking keywords.''' + ctx = repo.changectx(node) + mf = ctx.manifest() + if node is not None: # commit + _kwtemplater.commitnode = node + files = [f for f in ctx.files() if f in mf] + notify = ui.debug + else: # kwexpand/kwshrink + notify = ui.note + candidates = [f for f in files if _iskwfile(f, mf.linkf)] + if candidates: + candidates.sort() + action = expand and 'expanding' or 'shrinking' + for f in candidates: + fp = repo.file(f, kwmatch=True) + data, kwfound = fp.kwctread(mf[f], expand) + if kwfound: + notify(_('overwriting %s %s keywords\n') % (f, action)) + repo.wwrite(f, data, mf.flags(f)) + repo.dirstate.normal(f) + +def _kwfwrite(ui, repo, expand, *pats, **opts): + '''Selects files and passes them to _overwrite.''' + status = _status(ui, repo, *pats, **opts) + modified, added, removed, deleted, unknown, ignored, clean = status + if modified or added or removed or deleted: + raise util.Abort(_('outstanding uncommitted changes in given files')) + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + _overwrite(ui, repo, expand=expand, files=clean) + finally: + del wlock, lock + + +def demo(ui, repo, *args, **opts): + '''print [keywordmaps] configuration and an expansion example + + Show current, custom, or default keyword template maps + and their expansion. + + Extend current configuration by specifying maps as arguments + and optionally by reading from an additional hgrc file. 
+ + Override current keyword template maps with "default" option. + ''' + def demostatus(stat): + ui.status(_('\n\t%s\n') % stat) + + def demoitems(section, items): + ui.write('[%s]\n' % section) + for k, v in items: + ui.write('%s = %s\n' % (k, v)) + + msg = 'hg keyword config and expansion example' + kwstatus = 'current' + fn = 'demo.txt' + branchname = 'demobranch' + tmpdir = tempfile.mkdtemp('', 'kwdemo.') + ui.note(_('creating temporary repo at %s\n') % tmpdir) + repo = localrepo.localrepository(ui, path=tmpdir, create=True) + ui.setconfig('keyword', fn, '') + if args or opts.get('rcfile'): + kwstatus = 'custom' + if opts.get('rcfile'): + ui.readconfig(opts.get('rcfile')) + if opts.get('default'): + kwstatus = 'default' + kwmaps = kwtemplater.templates + if ui.configitems('keywordmaps'): + # override maps from optional rcfile + for k, v in kwmaps.iteritems(): + ui.setconfig('keywordmaps', k, v) + elif args: + # simulate hgrc parsing + rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args] + fp = repo.opener('hgrc', 'w') + fp.writelines(rcmaps) + fp.close() + ui.readconfig(repo.join('hgrc')) + if not opts.get('default'): + kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates + reposetup(ui, repo) + for k, v in ui.configitems('extensions'): + if k.endswith('keyword'): + extension = '%s = %s' % (k, v) + break + demostatus('config using %s keyword template maps' % kwstatus) + ui.write('[extensions]\n%s\n' % extension) + demoitems('keyword', ui.configitems('keyword')) + demoitems('keywordmaps', kwmaps.iteritems()) + keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n' + repo.wopener(fn, 'w').write(keywords) + repo.add([fn]) + path = repo.wjoin(fn) + ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path)) + ui.note(keywords) + ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname)) + # silence branch command if not verbose + quiet = ui.quiet + ui.quiet = not ui.verbose + commands.branch(ui, repo, branchname) + ui.quiet = quiet + for name, cmd in ui.configitems('hooks'): + if name.split('.', 1)[0].find('commit') > -1: + repo.ui.setconfig('hooks', name, '') + ui.note(_('unhooked all commit hooks\n')) + ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg)) + repo.commit(text=msg) + format = ui.verbose and ' in %s' % path or '' + demostatus('%s keywords expanded%s' % (kwstatus, format)) + ui.write(repo.wread(fn)) + ui.debug(_('\nremoving temporary repo %s\n') % tmpdir) + shutil.rmtree(tmpdir, ignore_errors=True) + +def expand(ui, repo, *pats, **opts): + '''expand keywords in working directory + + Run after (re)enabling keyword expansion. + + kwexpand refuses to run if given files contain local changes. + ''' + # 3rd argument sets expansion to True + _kwfwrite(ui, repo, True, *pats, **opts) + +def files(ui, repo, *pats, **opts): + '''print files currently configured for keyword expansion + + Crosscheck which files in working directory are potential targets for + keyword expansion. + That is, files matched by [keyword] config patterns but not symlinks. 
+ ''' + status = _status(ui, repo, *pats, **opts) + modified, added, removed, deleted, unknown, ignored, clean = status + files = modified + added + clean + if opts.get('untracked'): + files += unknown + files.sort() + kwfiles = [f for f in files if _iskwfile(f, repo._link)] + cwd = pats and repo.getcwd() or '' + kwfstats = not opts.get('ignore') and (('K', kwfiles),) or () + if opts.get('all') or opts.get('ignore'): + kwfstats += (('I', [f for f in files if f not in kwfiles]),) + for char, filenames in kwfstats: + format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n' + for f in filenames: + ui.write(format % repo.pathto(f, cwd)) + +def shrink(ui, repo, *pats, **opts): + '''revert expanded keywords in working directory + + Run before changing/disabling active keywords + or if you experience problems with "hg import" or "hg merge". + + kwshrink refuses to run if given files contain local changes. + ''' + # 3rd argument sets expansion to False + _kwfwrite(ui, repo, False, *pats, **opts) + + +def reposetup(ui, repo): + '''Sets up repo as kwrepo for keyword substitution. + Overrides file method to return kwfilelog instead of filelog + if file matches user configuration. + Wraps commit to overwrite configured files with updated + keyword substitutions. + This is done for local repos only, and only if there are + files configured at all for keyword substitution.''' + + if not repo.local(): + return + + hgcmd, func, args, opts, cmdopts = dispatch._parse(ui, sys.argv[1:]) + if hgcmd in nokwcommands.split(): + return + + if hgcmd == 'diff': + # only expand if comparing against working dir + node1, node2 = cmdutil.revpair(repo, cmdopts.get('rev')) + if node2 is not None: + return + # shrink if rev is not current node + if node1 is not None and node1 != repo.changectx().node(): + hgcmd = 'diff1' + + inc, exc = [], ['.hgtags'] + for pat, opt in ui.configitems('keyword'): + if opt != 'ignore': + inc.append(pat) + else: + exc.append(pat) + if not inc: + return + + global _kwtemplater + _restricted = hgcmd in restricted.split() + _kwtemplater = kwtemplater(ui, repo, inc, exc, _restricted) + + class kwrepo(repo.__class__): + def file(self, f, kwmatch=False): + if f[0] == '/': + f = f[1:] + if kwmatch or _kwtemplater.matcher(f): + return kwfilelog(self.sopener, f) + return filelog.filelog(self.sopener, f) + + def wread(self, filename): + data = super(kwrepo, self).wread(filename) + if _restricted and _kwtemplater.matcher(filename): + return _kwtemplater.shrink(data) + return data + + def commit(self, files=None, text='', user=None, date=None, + match=util.always, force=False, force_editor=False, + p1=None, p2=None, extra={}, empty_ok=False): + wlock = lock = None + _p1 = _p2 = None + try: + wlock = self.wlock() + lock = self.lock() + # store and postpone commit hooks + commithooks = {} + for name, cmd in ui.configitems('hooks'): + if name.split('.', 1)[0] == 'commit': + commithooks[name] = cmd + ui.setconfig('hooks', name, None) + if commithooks: + # store parents for commit hook environment + if p1 is None: + _p1, _p2 = repo.dirstate.parents() + else: + _p1, _p2 = p1, p2 or nullid + _p1 = hex(_p1) + if _p2 == nullid: + _p2 = '' + else: + _p2 = hex(_p2) + + node = super(kwrepo, + self).commit(files=files, text=text, user=user, + date=date, match=match, force=force, + force_editor=force_editor, + p1=p1, p2=p2, extra=extra, + empty_ok=empty_ok) + + # restore commit hooks + for name, cmd in commithooks.iteritems(): + ui.setconfig('hooks', name, cmd) + if node is not None: + _overwrite(ui, 
self, node=node) + repo.hook('commit', node=node, parent1=_p1, parent2=_p2) + return node + finally: + del wlock, lock + + repo.__class__ = kwrepo + patch.patchfile.__init__ = _kwpatchfile_init + + +cmdtable = { + 'kwdemo': + (demo, + [('d', 'default', None, _('show default keyword template maps')), + ('f', 'rcfile', [], _('read maps from rcfile'))], + _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')), + 'kwexpand': (expand, commands.walkopts, + _('hg kwexpand [OPTION]... [FILE]...')), + 'kwfiles': + (files, + [('a', 'all', None, _('show keyword status flags of all files')), + ('i', 'ignore', None, _('show files excluded from expansion')), + ('u', 'untracked', None, _('additionally show untracked files')), + ] + commands.walkopts, + _('hg kwfiles [OPTION]... [FILE]...')), + 'kwshrink': (shrink, commands.walkopts, + _('hg kwshrink [OPTION]... [FILE]...')), +}
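At the core of the keyword extension is a single regular expression built from the configured keyword names; expansion rewrites $Keyword$ (or an already expanded $Keyword: ... $) through the changeset templater, and shrinking substitutes it back to the bare form. A self-contained sketch of that regex handling, with a fixed string standing in for the real template rendering:

    import re

    templates = {'Id': '{file|basename},v {node|short}',
                 'Author': '{author|user}'}

    escaped = map(re.escape, templates.keys())
    # matches "$Keyword$" as well as an already expanded "$Keyword: ... $"
    re_kw = re.compile(r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped))

    def expand(data):
        def kwsub(mobj):
            kw = mobj.group(1)
            # the real extension renders templates[kw] through the changeset
            # templater; a hard-coded value stands in for that here
            return '$%s: %s $' % (kw, 'rendered ' + templates[kw])
        return re_kw.sub(kwsub, data)

    def shrink(data):
        return re_kw.sub(r'$\1$', data)

    print expand('# $Id$')           # $Id: rendered {file|basename},v {node|short} $
    print shrink(expand('# $Id$'))   # back to: # $Id$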
--- a/hgext/mq.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/mq.py Fri Feb 08 11:55:17 2008 +0100 @@ -34,7 +34,7 @@ from mercurial import repair import os, sys, re, errno -commands.norepo += " qclone qversion" +commands.norepo += " qclone" # Patch names looks like unix-file names. # They must be joinable with queue directory and result in the patch path. @@ -224,7 +224,7 @@ def write_list(items, path): fp = self.opener(path, 'w') for i in items: - print >> fp, i + fp.write("%s\n" % i) fp.close() if self.applied_dirty: write_list(map(str, self.applied), self.status_path) if self.series_dirty: write_list(self.full_series, self.series_path) @@ -610,6 +610,8 @@ def new(self, repo, patch, *pats, **opts): msg = opts.get('msg') force = opts.get('force') + user = opts.get('user') + date = opts.get('date') self.check_reserved_name(patch) if os.path.exists(self.join(patch)): raise util.Abort(_('patch "%s" already exists') % patch) @@ -625,7 +627,7 @@ try: insert = self.full_series_end() commitmsg = msg and msg or ("[mq]: %s" % patch) - n = repo.commit(commitfiles, commitmsg, match=match, force=True) + n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True) if n == None: raise util.Abort(_("repo commit failed")) self.full_series[insert:insert] = [patch] @@ -634,6 +636,15 @@ self.series_dirty = 1 self.applied_dirty = 1 p = self.opener(patch, "w") + if date: + p.write("# HG changeset patch\n") + if user: + p.write("# User " + user + "\n") + p.write("# Date " + date + "\n") + p.write("\n") + elif user: + p.write("From: " + user + "\n") + p.write("\n") if msg: msg = msg + "\n" p.write(msg) @@ -961,6 +972,34 @@ while message[mi] != comments[ci]: ci += 1 del comments[ci] + + def setheaderfield(comments, prefixes, new): + # Update all references to a field in the patch header. + # If none found, add it email style. 
+ res = False + for prefix in prefixes: + for i in xrange(len(comments)): + if comments[i].startswith(prefix): + comments[i] = prefix + new + res = True + break + return res + + newuser = opts.get('user') + if newuser: + if not setheaderfield(comments, ['From: ', '# User '], newuser): + try: + patchheaderat = comments.index('# HG changeset patch') + comments.insert(patchheaderat + 1,'# User ' + newuser) + except ValueError: + comments = ['From: ' + newuser, ''] + comments + user = newuser + + newdate = opts.get('date') + if newdate: + if setheaderfield(comments, ['# Date '], newdate): + date = newdate + if msg: comments.append(msg) @@ -1086,11 +1125,14 @@ else: message = msg + if not user: + user = changes[1] + self.applied.pop() self.applied_dirty = 1 self.strip(repo, top, update=False, backup='strip') - n = repo.commit(filelist, message, changes[1], match=matchfn, + n = repo.commit(filelist, message, user, date, match=matchfn, force=1) self.applied.append(statusentry(revlog.hex(n), patchfn)) self.removeundo(repo) @@ -1242,7 +1284,7 @@ self.ui.warn("saved queue repository parents: %s %s\n" % (hg.short(qpp[0]), hg.short(qpp[1]))) if qupdate: - print "queue directory updating" + self.ui.status(_("queue directory updating\n")) r = self.qrepo() if not r: self.ui.warn("Unable to load queue repository\n") @@ -1625,6 +1667,13 @@ return q.qseries(repo, start=l-2, length=1, status='A', summary=opts.get('summary')) +def setupheaderopts(ui, opts): + def do(opt,val): + if not opts[opt] and opts['current' + opt]: + opts[opt] = val + do('user', ui.username()) + do('date', "%d %d" % util.makedate()) + def new(ui, repo, patch, *args, **opts): """create a new patch @@ -1643,6 +1692,7 @@ if opts['edit']: message = ui.edit(message, ui.username()) opts['msg'] = message + setupheaderopts(ui, opts) q.new(repo, patch, *args, **opts) q.save_dirty() return 0 @@ -1668,6 +1718,7 @@ patch = q.applied[-1].name (message, comment, user, date, hasdiff) = q.readheaders(patch) message = ui.edit('\n'.join(message), user or ui.username()) + setupheaderopts(ui, opts) ret = q.refresh(repo, pats, msg=message, **opts) q.save_dirty() return ret @@ -2170,6 +2221,12 @@ seriesopts = [('s', 'summary', None, _('print first line of patch header'))] +headeropts = [ + ('U', 'currentuser', None, _('add "From: <current user>" to patch')), + ('u', 'user', '', _('add "From: <given user>" to patch')), + ('D', 'currentdate', None, _('add "Date: <current date>" to patch')), + ('d', 'date', '', _('add "Date: <given date>" to patch'))] + cmdtable = { "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')), "qclone": @@ -2188,8 +2245,9 @@ "^qdiff": (diff, [('g', 'git', None, _('use git extended diff format')), + ('U', 'unified', 3, _('number of lines of context to show')), ] + commands.walkopts, - _('hg qdiff [-I] [-X] [-g] [FILE]...')), + _('hg qdiff [-I] [-X] [-U NUM] [-g] [FILE]...')), "qdelete|qremove|qrm": (delete, [('k', 'keep', None, _('keep patch file')), @@ -2228,7 +2286,7 @@ [('e', 'edit', None, _('edit commit message')), ('f', 'force', None, _('import uncommitted changes into patch')), ('g', 'git', None, _('use git extended diff format')), - ] + commands.walkopts + commands.commitopts, + ] + commands.walkopts + commands.commitopts + headeropts, _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')), "qnext": (next, [] + seriesopts, _('hg qnext [-s]')), "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')), @@ -2251,7 +2309,7 @@ [('e', 'edit', None, _('edit commit message')), ('g', 'git', None, _('use git extended diff 
format')), ('s', 'short', None, _('refresh only files already in the patch')), - ] + commands.walkopts + commands.commitopts, + ] + commands.walkopts + commands.commitopts + headeropts, _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')), 'qrename|qmv': (rename, [], _('hg qrename PATCH1 [PATCH2]')),
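With the new header options, qnew records the author and date in the patch file itself: when a date is present it writes an "# HG changeset patch" block, otherwise a lone email-style "From:" line; setupheaderopts merely fills user/date from ui.username() and util.makedate() when -U/-D are given. A small sketch of the header qnew now produces (the user and date values are illustrative):

    def patchheader(user, date):
        # mirrors what qnew writes at the top of a new patch
        if date:
            lines = ["# HG changeset patch"]
            if user:
                lines.append("# User " + user)
            lines.append("# Date " + date)
            lines.append("")
        elif user:
            lines = ["From: " + user, ""]
        else:
            lines = []
        return "".join([l + "\n" for l in lines])

    print patchheader("Jane Doe <jane@example.com>", "1202468117 -3600")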
--- a/hgext/notify.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/notify.py Fri Feb 08 11:55:17 2008 +0100 @@ -135,7 +135,7 @@ def fixmail(self, addr): '''try to clean up email addresses.''' - addr = templater.email(addr.strip()) + addr = util.email(addr.strip()) if self.domain: a = addr.find('@localhost') if a != -1: @@ -231,7 +231,7 @@ else: self.ui.status(_('notify: sending %d subscribers %d changes\n') % (len(self.subs), count)) - mail.sendmail(self.ui, templater.email(msg['From']), + mail.sendmail(self.ui, util.email(msg['From']), self.subs, msgtext) def diff(self, node, ref):
--- a/hgext/patchbomb.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/patchbomb.py Fri Feb 08 11:55:17 2008 +0100 @@ -115,19 +115,19 @@ ''' def prompt(prompt, default = None, rest = ': ', empty_ok = False): - try: - # readline gives raw_input editing capabilities, but is not - # present on windows - import readline - except ImportError: pass - - if default: prompt += ' [%s]' % default + if not ui.interactive: + return default + if default: + prompt += ' [%s]' % default prompt += rest while True: - r = raw_input(prompt) - if r: return r - if default is not None: return default - if empty_ok: return r + r = ui.prompt(prompt, default=default) + if r: + return r + if default is not None: + return default + if empty_ok: + return r ui.warn(_('Please enter a valid value.\n')) def confirm(s, denial): @@ -153,27 +153,33 @@ body = '' for line in patch: if line.startswith('#'): - if line.startswith('# Node ID'): node = line.split()[-1] + if line.startswith('# Node ID'): + node = line.split()[-1] continue - if (line.startswith('diff -r') - or line.startswith('diff --git')): + if line.startswith('diff -r') or line.startswith('diff --git'): break desc.append(line) - if not node: raise ValueError + if not node: + raise ValueError - #body = ('\n'.join(desc[1:]).strip() or - # 'Patch subject is complete summary.') - #body += '\n\n\n' + if opts['attach']: + body = ('\n'.join(desc[1:]).strip() or + 'Patch subject is complete summary.') + body += '\n\n\n' - if opts['plain']: - while patch and patch[0].startswith('# '): patch.pop(0) - if patch: patch.pop(0) - while patch and not patch[0].strip(): patch.pop(0) - if opts['diffstat']: + if opts.get('plain'): + while patch and patch[0].startswith('# '): + patch.pop(0) + if patch: + patch.pop(0) + while patch and not patch[0].strip(): + patch.pop(0) + if opts.get('diffstat'): body += cdiffstat('\n'.join(desc), patch) + '\n\n' - if opts['attach']: + if opts.get('attach') or opts.get('inline'): msg = email.MIMEMultipart.MIMEMultipart() - if body: msg.attach(email.MIMEText.MIMEText(body, 'plain')) + if body: + msg.attach(email.MIMEText.MIMEText(body, 'plain')) p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch') binnode = bin(node) # if node is mq patch, it will have patch file name as tag @@ -183,10 +189,13 @@ patchname = patchname[0] elif total > 1: patchname = cmdutil.make_filename(repo, '%b-%n.patch', - binnode, idx, total) + binnode, idx, total) else: patchname = cmdutil.make_filename(repo, '%b.patch', binnode) - p['Content-Disposition'] = 'inline; filename=' + patchname + disposition = 'inline' + if opts['attach']: + disposition = 'attachment' + p['Content-Disposition'] = disposition + '; filename=' + patchname msg.attach(p) else: body += '\n'.join(patch) @@ -194,7 +203,7 @@ subj = desc[0].strip().rstrip('. 
') if total == 1: - subj = '[PATCH] ' + (opts['subject'] or subj) + subj = '[PATCH] ' + (opts.get('subject') or subj) else: tlen = len(str(total)) subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj) @@ -228,7 +237,7 @@ pass os.rmdir(tmpdir) - if not (opts['test'] or opts['mbox']): + if not (opts.get('test') or opts.get('mbox')): # really sending mail.validateconfig(ui) @@ -238,7 +247,8 @@ cmdutil.setremoteconfig(ui, opts) if opts.get('outgoing') and opts.get('bundle'): - raise util.Abort(_("--outgoing mode always on with --bundle; do not re-specify --outgoing")) + raise util.Abort(_("--outgoing mode always on with --bundle;" + " do not re-specify --outgoing")) if opts.get('outgoing') or opts.get('bundle'): if len(revs) > 1: @@ -258,7 +268,7 @@ # start if opts.get('date'): - start_time = util.parsedate(opts['date']) + start_time = util.parsedate(opts.get('date')) else: start_time = util.makedate() @@ -266,8 +276,8 @@ return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn()) def getdescription(body, sender): - if opts['desc']: - body = open(opts['desc']).read() + if opts.get('desc'): + body = open(opts.get('desc')).read() else: ui.write(_('\nWrite the introductory message for the ' 'patch series.\n\n')) @@ -298,7 +308,8 @@ jumbo = [] msgs = [] - ui.write(_('This patch series consists of %d patches.\n\n') % len(patches)) + ui.write(_('This patch series consists of %d patches.\n\n') + % len(patches)) for p, i in zip(patches, xrange(len(patches))): jumbo.extend(p) @@ -308,16 +319,16 @@ tlen = len(str(len(patches))) subj = '[PATCH %0*d of %d] %s' % ( - tlen, 0, - len(patches), - opts['subject'] or - prompt('Subject:', rest = ' [PATCH %0*d of %d] ' % (tlen, 0, - len(patches)))) + tlen, 0, len(patches), + opts.get('subject') or + prompt('Subject:', + rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches)))) body = '' - if opts['diffstat']: + if opts.get('diffstat'): d = cdiffstat(_('Final summary:\n'), jumbo) - if d: body = '\n' + d + if d: + body = '\n' + d body = getdescription(body, sender) msg = email.MIMEText.MIMEText(body) @@ -327,7 +338,7 @@ return msgs def getbundlemsgs(bundle): - subj = (opts['subject'] + subj = (opts.get('subject') or prompt('Subject:', default='A bundle for your repository')) body = getdescription('', sender) @@ -343,7 +354,7 @@ msg['Subject'] = subj return [msg] - sender = (opts['from'] or ui.config('email', 'from') or + sender = (opts.get('from') or ui.config('email', 'from') or ui.config('patchbomb', 'from') or prompt('From', ui.username())) @@ -353,15 +364,15 @@ msgs = getexportmsgs() def getaddrs(opt, prpt, default = None): - addrs = opts[opt] or (ui.config('email', opt) or - ui.config('patchbomb', opt) or - prompt(prpt, default = default)).split(',') + addrs = opts.get(opt) or (ui.config('email', opt) or + ui.config('patchbomb', opt) or + prompt(prpt, default = default)).split(',') return [a.strip() for a in addrs if a.strip()] to = getaddrs('to', 'To') cc = getaddrs('cc', 'Cc', '') - bcc = opts['bcc'] or (ui.config('email', 'bcc') or + bcc = opts.get('bcc') or (ui.config('email', 'bcc') or ui.config('patchbomb', 'bcc') or '').split(',') bcc = [a.strip() for a in bcc if a.strip()] @@ -370,6 +381,7 @@ parent = None sender_addr = email.Utils.parseaddr(sender)[1] + sendmail = None for m in msgs: try: m['Message-Id'] = genmsgid(m['X-Mercurial-Node']) @@ -380,14 +392,16 @@ else: parent = m['Message-Id'] m['Date'] = util.datestr(date=start_time, - format="%a, %d %b %Y %H:%M:%S", timezone=True) + format="%a, %d %b %Y %H:%M:%S", timezone=True) start_time = 
(start_time[0] + 1, start_time[1]) m['From'] = sender m['To'] = ', '.join(to) - if cc: m['Cc'] = ', '.join(cc) - if bcc: m['Bcc'] = ', '.join(bcc) - if opts['test']: + if cc: + m['Cc'] = ', '.join(cc) + if bcc: + m['Bcc'] = ', '.join(bcc) + if opts.get('test'): ui.status('Displaying ', m['Subject'], ' ...\n') ui.flush() if 'PAGER' in os.environ: @@ -402,25 +416,28 @@ raise if fp is not ui: fp.close() - elif opts['mbox']: + elif opts.get('mbox'): ui.status('Writing ', m['Subject'], ' ...\n') - fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+') + fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+') date = util.datestr(date=start_time, - format='%a %b %d %H:%M:%S %Y', timezone=False) + format='%a %b %d %H:%M:%S %Y', timezone=False) fp.write('From %s %s\n' % (sender_addr, date)) fp.write(m.as_string(0)) fp.write('\n\n') fp.close() else: + if not sendmail: + sendmail = mail.connect(ui) ui.status('Sending ', m['Subject'], ' ...\n') # Exim does not remove the Bcc field del m['Bcc'] - mail.sendmail(ui, sender, to + bcc + cc, m.as_string(0)) + sendmail(sender, to + bcc + cc, m.as_string(0)) cmdtable = { "email": (patchbomb, - [('a', 'attach', None, _('send patches as inline attachments')), + [('a', 'attach', None, _('send patches as attachments')), + ('i', 'inline', None, _('send patches as inline attachments')), ('', 'bcc', [], _('email addresses of blind copy recipients')), ('c', 'cc', [], _('email addresses of copy recipients')), ('d', 'diffstat', None, _('add diffstat output to messages')),
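Both --attach and the new -i/--inline option build a multipart message with the patch as an x-patch part; the only difference is the Content-Disposition of that part (attachment versus inline). A trimmed sketch with the stdlib email classes the extension uses (body, patch text and file name are placeholders):

    import email.MIMEMultipart, email.MIMEText

    def makepatchmsg(body, patchlines, patchname, attach=False):
        msg = email.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(email.MIMEText.MIMEText(body, 'plain'))
        p = email.MIMEText.MIMEText('\n'.join(patchlines), 'x-patch')
        disposition = attach and 'attachment' or 'inline'
        p['Content-Disposition'] = disposition + '; filename=' + patchname
        msg.attach(p)
        return msg

    m = makepatchmsg('Patch subject is complete summary.',
                     ['# HG changeset patch', 'diff -r 000 -r 111 a'],
                     'example.patch')
    print m.as_string(0)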
--- a/hgext/record.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/record.py Fri Feb 08 11:55:17 2008 +0100 @@ -5,19 +5,27 @@ # This software may be used and distributed according to the terms of # the GNU General Public License, incorporated herein by reference. -'''interactive change selection during commit''' +'''interactive change selection during commit or qrefresh''' from mercurial.i18n import _ -from mercurial import cmdutil, commands, cmdutil, hg, mdiff, patch, revlog +from mercurial import cmdutil, commands, cmdutil, extensions, hg, mdiff, patch, revlog from mercurial import util import copy, cStringIO, errno, operator, os, re, shutil, tempfile lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)') def scanpatch(fp): + """like patch.iterhunks, but yield different events + + - ('file', [header_lines + fromfile + tofile]) + - ('context', [context_lines]) + - ('hunk', [hunk_lines]) + - ('range', (-start,len, +start,len, diffp)) + """ lr = patch.linereader(fp) def scanwhile(first, p): + """scan lr while predicate holds""" lines = [first] while True: line = lr.readline() @@ -58,6 +66,10 @@ raise patch.PatchError('unknown patch content: %r' % line) class header(object): + """patch header + + XXX shoudn't we move this to mercurial/patch.py ? + """ diff_re = re.compile('diff --git a/(.*) b/(.*)$') allhunks_re = re.compile('(?:index|new file|deleted file) ') pretty_re = re.compile('(?:new file|deleted file) ') @@ -115,11 +127,16 @@ return True def countchanges(hunk): + """hunk -> (n+,n-)""" add = len([h for h in hunk if h[0] == '+']) rem = len([h for h in hunk if h[0] == '-']) return add, rem class hunk(object): + """patch hunk + + XXX shouldn't we merge this with patch.hunk ? + """ maxcontext = 3 def __init__(self, header, fromline, toline, proc, before, hunk, after): @@ -154,7 +171,9 @@ return '<hunk %r@%d>' % (self.filename(), self.fromline) def parsepatch(fp): + """patch -> [] of hunks """ class parser(object): + """patch parsing state machine""" def __init__(self): self.fromline = 0 self.toline = 0 @@ -227,10 +246,14 @@ return p.finished() def filterpatch(ui, chunks): + """Interactively filter patch chunks into applied-only chunks""" chunks = list(chunks) chunks.reverse() seen = {} def consumefile(): + """fetch next portion from chunks until a 'header' is seen + NB: header == new-file mark + """ consumed = [] while chunks: if isinstance(chunks[-1], header): @@ -238,17 +261,27 @@ else: consumed.append(chunks.pop()) return consumed - resp_all = [None] - resp_file = [None] - applied = {} + + resp_all = [None] # this two are changed from inside prompt, + resp_file = [None] # so can't be usual variables + applied = {} # 'filename' -> [] of chunks def prompt(query): + """prompt query, and process base inputs + + - y/n for the rest of file + - y/n for the rest + - ? (help) + - q (quit) + + else, input is returned to the caller. + """ if resp_all[0] is not None: return resp_all[0] if resp_file[0] is not None: return resp_file[0] while True: - r = (ui.prompt(query + _(' [Ynsfdaq?] '), '[Ynsfdaq?]?$', - matchflags=re.I) or 'y').lower() + r = (ui.prompt(query + _(' [Ynsfdaq?] 
'), '(?i)[Ynsfdaq?]?$') + or 'y').lower() if r == '?': c = record.__doc__.find('y - record this change') for l in record.__doc__[c:].splitlines(): @@ -268,6 +301,7 @@ while chunks: chunk = chunks.pop() if isinstance(chunk, header): + # new-file mark resp_file = [None] fixoffset = 0 hdr = ''.join(chunk.header) @@ -286,6 +320,7 @@ else: consumefile() else: + # new hunk if resp_file[0] is None and resp_all[0] is None: chunk.pretty(ui) r = prompt(_('record this change to %r?') % @@ -323,10 +358,48 @@ ? - display help''' + def record_committer(ui, repo, pats, opts): + commands.commit(ui, repo, *pats, **opts) + + dorecord(ui, repo, record_committer, *pats, **opts) + + +def qrecord(ui, repo, patch, *pats, **opts): + '''interactively record a new patch + + see 'hg help qnew' & 'hg help record' for more information and usage + ''' + + try: + mq = extensions.find('mq') + except KeyError: + raise util.Abort(_("'mq' extension not loaded")) + + def qrecord_committer(ui, repo, pats, opts): + mq.new(ui, repo, patch, *pats, **opts) + + opts = opts.copy() + opts['force'] = True # always 'qnew -f' + dorecord(ui, repo, qrecord_committer, *pats, **opts) + + +def dorecord(ui, repo, committer, *pats, **opts): if not ui.interactive: raise util.Abort(_('running non-interactively, use commit instead')) def recordfunc(ui, repo, files, message, match, opts): + """This is generic record driver. + + It's job is to interactively filter local changes, and accordingly + prepare working dir into a state, where the job can be delegated to + non-interactive commit command such as 'commit' or 'qrefresh'. + + After the actual job is done by non-interactive command, working dir + state is restored to original. + + In the end we'll record intresting changes, and everything else will be + left in place, so the user can continue his work. + """ if files: changes = None else: @@ -339,6 +412,7 @@ match=match, changes=changes, opts=diffopts, fp=fp) fp.seek(0) + # 1. filter patch, so we have intending-to apply subset of it chunks = filterpatch(ui, parsepatch(fp)) del fp @@ -357,6 +431,7 @@ changes = repo.status(files=newfiles, match=match)[:5] modified = dict.fromkeys(changes[0]) + # 2. backup changed files, so we can restore them in the end backups = {} backupdir = repo.join('record-backups') try: @@ -365,6 +440,7 @@ if err.errno != errno.EEXIST: raise try: + # backup continues for f in newfiles: if f not in modified: continue @@ -382,19 +458,32 @@ dopatch = fp.tell() fp.seek(0) + # 3a. apply filtered patch to clean repo (clean) if backups: hg.revert(repo, repo.dirstate.parents()[0], backups.has_key) + # 3b. (apply) if dopatch: ui.debug('applying patch\n') ui.debug(fp.getvalue()) patch.internalpatch(fp, ui, 1, repo.root) del fp - repo.commit(newfiles, message, opts['user'], opts['date'], match, - force_editor=opts.get('force_editor')) + # 4. We prepared working directory according to filtered patch. + # Now is the time to delegate the job to commit/qrefresh or the like! + + # it is important to first chdir to repo root -- we'll call a + # highlevel command with list of pathnames relative to repo root + cwd = os.getcwd() + os.chdir(repo.root) + try: + committer(ui, repo, newfiles, opts) + finally: + os.chdir(cwd) + return 0 finally: + # 5. 
finally restore backed-up files try: for realname, tmpname in backups.iteritems(): ui.debug('restoring %r to %r\n' % (tmpname, realname)) @@ -408,8 +497,29 @@ cmdtable = { "record": (record, - [('A', 'addremove', None, - _('mark new/missing files as added/removed before committing')), - ] + commands.walkopts + commands.commitopts + commands.commitopts2, + + # add commit options + commands.table['^commit|ci'][1], + _('hg record [OPTION]... [FILE]...')), } + + +def extsetup(): + try: + mq = extensions.find('mq') + except KeyError: + return + + qcmdtable = { + "qrecord": + (qrecord, + + # add qnew options, except '--force' + [opt for opt in mq.cmdtable['qnew'][1] if opt[1] != 'force'], + + _('hg qrecord [OPTION]... PATCH [FILE]...')), + } + + cmdtable.update(qcmdtable) +
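filterpatch walks headers and hunks and asks one question per hunk; besides y/n it understands s/f (skip or record the remaining hunks of the file), d/a (done, or record everything else) and q/?, and the answer is validated by the case-insensitive pattern now passed to ui.prompt. A standalone sketch of just that prompt loop, using raw_input in place of ui.prompt:

    import re

    def prompt(query):
        # same pattern the extension passes to ui.prompt: one character out
        # of Ynsfdaq? (case-insensitive), or empty meaning 'y'
        ok = re.compile('(?i)[Ynsfdaq?]?$')
        while True:
            r = raw_input(query + ' [Ynsfdaq?] ') or 'y'
            if ok.match(r):
                return r.lower()
            print 'unrecognized response'

    # r = prompt('record this change to %r?' % 'somefile.py')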
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/win32mbcs.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,158 @@ +# win32mbcs.py -- MBCS filename support for Mercurial on Windows +# +# Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com> +# +# Version: 0.1 +# Author: Shun-ichi Goto <shunichi.goto@gmail.com> +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. +# +"""Allow to use shift_jis/big5 filenames on Windows. + +There is a well known issue "0x5c problem" on Windows. It is a +trouble on handling path name as raw encoded byte sequence of +problematic encodings like shift_jis or big5. The primary intent +of this extension is to allow using such a encoding on Mercurial +without strange file operation error. + +By enabling this extension, hook mechanism is activated and some +functions are altered. Usually, this encoding is your local encoding +on your system by default. So you can get benefit simply by enabling +this extension. + +The encoding for filename is same one for terminal by default. You +can change the encoding by setting HGENCODING environment variable. + +This extension is usefull for: + * Japanese Windows user using shift_jis encoding. + * Chinese Windows user using big5 encoding. + * Users who want to use a repository created with such a encoding. + +Note: Unix people does not need to use this extension. + +""" + +import os +from mercurial.i18n import _ +from mercurial import util + +__all__ = ['install', 'uninstall', 'reposetup'] + + +# codec and alias names of sjis and big5 to be faked. +_problematic_encodings = util.frozenset([ + 'big5', 'big5-tw', 'csbig5', + 'big5hkscs', 'big5-hkscs', 'hkscs', + 'cp932', '932', 'ms932', 'mskanji', 'ms-kanji', + 'shift_jis', 'csshiftjis', 'shiftjis', 'sjis', 's_jis', + 'shift_jis_2004', 'shiftjis2004', 'sjis_2004', 'sjis2004', + 'shift_jisx0213', 'shiftjisx0213', 'sjisx0213', 's_jisx0213', + ]) + +# attribute name to store original function +_ORIGINAL = '_original' + +_ui = None + +def decode_with_check(arg): + if isinstance(arg, tuple): + return tuple(map(decode_with_check, arg)) + elif isinstance(arg, list): + return map(decode_with_check, arg) + elif isinstance(arg, str): + uarg = arg.decode(util._encoding) + if arg == uarg.encode(util._encoding): + return uarg + else: + raise UnicodeError("Not local encoding") + else: + return arg + +def encode_with_check(arg): + if isinstance(arg, tuple): + return tuple(map(encode_with_check, arg)) + elif isinstance(arg, list): + return map(encode_with_check, arg) + elif isinstance(arg, unicode): + ret = arg.encode(util._encoding) + return ret + else: + return arg + +def wrap(func): + + def wrapped(*args): + # check argument is unicode, then call original + for arg in args: + if isinstance(arg, unicode): + return func(*args) + # make decoded argument list into uargs + try: + args = decode_with_check(args) + except UnicodeError, exc: + # If not encoded with _local_fs_encoding, report it then + # continue with calling original function. + _ui.warn(_("WARNING: [win32mbcs] filename conversion fail for" + + " %s: '%s'\n") % (util._encoding, args)) + return func(*args) + # call as unicode operation, then return with encoding + return encode_with_check(func(*args)) + + # fake is only for relevant environment. 
+ if hasattr(func, _ORIGINAL) or \ + util._encoding.lower() not in _problematic_encodings: + return func + else: + f = wrapped + f.__name__ = func.__name__ + setattr(f, _ORIGINAL, func) # hold original to restore + return f + +def unwrap(func): + return getattr(func, _ORIGINAL, func) + +def install(): + # wrap some python functions and mercurial functions + # to handle raw bytes on Windows. + # NOTE: dirname and basename is safe because they use result + # of os.path.split() + global _ui + if not _ui: + from mercurial import ui + _ui = ui.ui() + os.path.join = wrap(os.path.join) + os.path.split = wrap(os.path.split) + os.path.splitext = wrap(os.path.splitext) + os.path.splitunc = wrap(os.path.splitunc) + os.path.normpath = wrap(os.path.normpath) + os.path.normcase = wrap(os.path.normcase) + os.makedirs = wrap(os.makedirs) + util.endswithsep = wrap(util.endswithsep) + util.splitpath = wrap(util.splitpath) + +def uninstall(): + # restore original functions. + os.path.join = unwrap(os.path.join) + os.path.split = unwrap(os.path.split) + os.path.splitext = unwrap(os.path.splitext) + os.path.splitunc = unwrap(os.path.splitunc) + os.path.normpath = unwrap(os.path.normpath) + os.path.normcase = unwrap(os.path.normcase) + os.makedirs = unwrap(os.makedirs) + util.endswithsep = unwrap(util.endswithsep) + util.splitpath = unwrap(util.splitpath) + + +def reposetup(ui, repo): + # TODO: decide use of config section for this extension + global _ui + _ui = ui + if not os.path.supports_unicode_filenames: + ui.warn(_("[win32mbcs] cannot activate on this platform.\n")) + return + # install features of this extension + install() + ui.debug(_("[win32mbcs] activeted with encoding: %s\n") % util._encoding) + +# win32mbcs.py ends here
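The "0x5c problem" the docstring refers to: in shift_jis and big5, the second byte of many double-byte characters is 0x5c, the ASCII backslash, so byte-oriented path handling on Windows can mistake part of a character for a directory separator. wrap() sidesteps this by decoding arguments to unicode, calling the real function, and encoding the result back. A small illustration under stated assumptions (U+8868 is one well-known character whose shift_jis encoding ends in 0x5c; ntpath is used so the demonstration runs on any platform):

    import ntpath

    raw = 'dir\\' + u'\u8868'.encode('shift_jis')    # ends with the byte 0x5c
    print ntpath.split(raw)                 # splits inside the character: wrong
    print ntpath.split(u'dir\\' + u'\u8868')          # unicode splits correctly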
--- a/hgext/win32text.py Fri Feb 08 11:50:37 2008 +0100 +++ b/hgext/win32text.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,22 +1,46 @@ +# win32text.py - LF <-> CRLF translation utilities for Windows users +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. +# +# To perform automatic newline conversion, use: +# +# [extensions] +# hgext.win32text = +# [encode] +# ** = cleverencode: +# [decode] +# ** = cleverdecode: +# +# If not doing conversion, to make sure you do not commit CRLF by accident: +# +# [hooks] +# pretxncommit.crlf = python:hgext.win32text.forbidcrlf +# +# To do the same check on a server to prevent CRLF from being pushed or pulled: +# +# [hooks] +# pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf + from mercurial import util, ui from mercurial.i18n import gettext as _ +from mercurial.node import * import re # regexp for single LF without CR preceding. re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE) -def dumbdecode(s, cmd): +def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs): # warn if already has CRLF in repository. # it might cause unexpected eol conversion. # see issue 302: # http://www.selenic.com/mercurial/bts/issue302 - if '\r\n' in s: - u = ui.ui() - u.warn(_('WARNING: file in repository already has CRLF line ending \n' - ' which does not need eol conversion by win32text plugin.\n' - ' Please reconsider encode/decode setting in' - ' mercurial.ini or .hg/hgrc\n' - ' before next commit.\n')) + if '\r\n' in s and ui and filename and repo: + ui.warn(_('WARNING: %s already has CRLF line endings\n' + 'and does not need EOL conversion by the win32text plugin.\n' + 'Before your next commit, please reconsider your ' + 'encode/decode settings in \nMercurial.ini or %s.\n') % + (filename, repo.join('hgrc'))) # replace single LF to CRLF return re_single_lf.sub('\\1\r\n', s) @@ -27,9 +51,9 @@ if '\0' in s: return False return True -def cleverdecode(s, cmd): +def cleverdecode(s, cmd, **kwargs): if clevertest(s, cmd): - return dumbdecode(s, cmd) + return dumbdecode(s, cmd, **kwargs) return s def cleverencode(s, cmd): @@ -37,9 +61,47 @@ return dumbencode(s, cmd) return s -util.filtertable.update({ +_filters = { 'dumbdecode:': dumbdecode, 'dumbencode:': dumbencode, 'cleverdecode:': cleverdecode, 'cleverencode:': cleverencode, - }) + } + +def forbidcrlf(ui, repo, hooktype, node, **kwargs): + halt = False + for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()): + c = repo.changectx(rev) + for f in c.files(): + if f not in c: + continue + data = c[f].data() + if '\0' not in data and '\r\n' in data: + if not halt: + ui.warn(_('Attempt to commit or push text file(s) ' + 'using CRLF line endings\n')) + ui.warn(_('in %s: %s\n') % (short(c.node()), f)) + halt = True + if halt and hooktype == 'pretxnchangegroup': + ui.warn(_('\nTo prevent this mistake in your local repository,\n' + 'add to Mercurial.ini or .hg/hgrc:\n' + '\n' + '[hooks]\n' + 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n' + '\n' + 'and also consider adding:\n' + '\n' + '[extensions]\n' + 'hgext.win32text =\n' + '[encode]\n' + '** = cleverencode:\n' + '[decode]\n' + '** = cleverdecode:\n')) + return halt + +def reposetup(ui, repo): + if not repo.local(): + return + for name, fn in _filters.iteritems(): + repo.adddatafilter(name, fn) +
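The decode filters above convert every LF that is not already preceded by a CR into CRLF, so files that already carry CRLF in the repository are left alone (apart from the warning). The whole conversion is the single re_single_lf substitution; a quick check of its behavior:

    import re

    # same pattern as win32text.re_single_lf: an LF not preceded by CR
    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

    def dumbdecode(s):
        return re_single_lf.sub('\\1\r\n', s)

    print repr(dumbdecode('one\ntwo\r\nthree\n'))
    # -> 'one\r\ntwo\r\nthree\r\n' : the existing CRLF is not doubled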
--- a/hgmerge Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,209 +0,0 @@ -#!/bin/sh -# -# hgmerge - default merge helper for Mercurial -# -# This tries to find a way to do three-way merge on the current system. -# The result ought to end up in $1. Script is run in root directory of -# repository. -# -# Environment variables set by Mercurial: -# HG_FILE name of file within repo -# HG_MY_NODE revision being merged -# HG_OTHER_NODE revision being merged - -set -e # bail out quickly on failure - -LOCAL="$1" -BASE="$2" -OTHER="$3" - -if [ -z "$EDITOR" ]; then - EDITOR="vi" -fi - -# find decent versions of our utilities, insisting on the GNU versions where we -# need to -MERGE="merge" -DIFF3="gdiff3" -DIFF="gdiff" -PATCH="gpatch" - -type "$MERGE" >/dev/null 2>&1 || MERGE= -type "$DIFF3" >/dev/null 2>&1 || DIFF3="diff3" -$DIFF3 --version >/dev/null 2>&1 || DIFF3= -type "$DIFF" >/dev/null 2>&1 || DIFF="diff" -type "$DIFF" >/dev/null 2>&1 || DIFF= -type "$PATCH" >/dev/null 2>&1 || PATCH="patch" -type "$PATCH" >/dev/null 2>&1 || PATCH= - -# find optional visual utilities -FILEMERGE="/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge" -KDIFF3="kdiff3" -TKDIFF="tkdiff" -MELD="meld" - -type "$FILEMERGE" >/dev/null 2>&1 || FILEMERGE= -type "$KDIFF3" >/dev/null 2>&1 || KDIFF3= -type "$TKDIFF" >/dev/null 2>&1 || TKDIFF= -type "$MELD" >/dev/null 2>&1 || MELD= - -# Hack for Solaris -TEST="/usr/bin/test" -type "$TEST" >/dev/null 2>&1 || TEST="/bin/test" -type "$TEST" >/dev/null 2>&1 || TEST="test" - -# random part of names -RAND="$RANDOM$RANDOM" - -# temporary directory for diff+patch merge -HGTMP="${TMPDIR-/tmp}/hgmerge.$RAND" - -# backup file -BACKUP="$LOCAL.orig.$RAND" - -# file used to test for file change -CHGTEST="$LOCAL.chg.$RAND" - -# put all your required cleanup here -cleanup() { - rm -f "$BACKUP" "$CHGTEST" - rm -rf "$HGTMP" -} - -# functions concerning program exit -success() { - cleanup - exit 0 -} - -failure() { - echo "merge failed" 1>&2 - mv "$BACKUP" "$LOCAL" - cleanup - exit 1 -} - -# Ask if the merge was successful -ask_if_merged() { - while true; do - echo "$LOCAL seems unchanged." - echo "Was the merge successful? [y/n]" - read answer - case "$answer" in - y*|Y*) success;; - n*|N*) failure;; - esac - done -} - -# Check if conflict markers are present and ask if the merge was successful -conflicts_or_success() { - while egrep '^(<<<<<<< .*|=======|>>>>>>> .*)$' "$LOCAL" >/dev/null; do - echo "$LOCAL contains conflict markers." - echo "Keep this version? [y/n]" - read answer - case "$answer" in - y*|Y*) success;; - n*|N*) failure;; - esac - done - success -} - -# Clean up when interrupted -trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM - -# Back up our file (and try hard to keep the mtime unchanged) -mv "$LOCAL" "$BACKUP" -cp "$BACKUP" "$LOCAL" - -# Attempt to do a non-interactive merge -if [ -n "$MERGE" -o -n "$DIFF3" ]; then - if [ -n "$MERGE" ]; then - $MERGE "$LOCAL" "$BASE" "$OTHER" 2> /dev/null && success - elif [ -n "$DIFF3" ]; then - $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" && success - fi - if [ $? -gt 1 ]; then - echo "automatic merge failed! Exiting." 1>&2 - failure - fi -fi - -# on MacOS X try FileMerge.app, shipped with Apple's developer tools -if [ -n "$FILEMERGE" ]; then - cp "$BACKUP" "$LOCAL" - cp "$BACKUP" "$CHGTEST" - # filemerge prefers the right by default - $FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL" - [ $? 
-ne 0 ] && echo "FileMerge failed to launch" && failure - $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged -fi - -if [ -n "$DISPLAY" ]; then - # try using kdiff3, which is fairly nice - if [ -n "$KDIFF3" ]; then - $KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure - conflicts_or_success - fi - - # try using tkdiff, which is a bit less sophisticated - if [ -n "$TKDIFF" ]; then - $TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure - conflicts_or_success - fi - - if [ -n "$MELD" ]; then - cp "$BACKUP" "$CHGTEST" - # protect our feet - meld allows us to save to the left file - cp "$BACKUP" "$LOCAL.tmp.$RAND" - # Meld doesn't have automatic merging, so to reduce intervention - # use the file with conflicts - $MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure - # Also it doesn't return good error code - $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged - fi -fi - -# Attempt to do a merge with $EDITOR -if [ -n "$MERGE" -o -n "$DIFF3" ]; then - echo "conflicts detected in $LOCAL" - cp "$BACKUP" "$CHGTEST" - case "$EDITOR" in - "emacs") - $EDITOR "$LOCAL" --eval '(condition-case nil (smerge-mode 1) (error nil))' || failure - ;; - *) - $EDITOR "$LOCAL" || failure - ;; - esac - # Some editors do not return meaningful error codes - # Do not take any chances - $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged -fi - -# attempt to manually merge with diff and patch -if [ -n "$DIFF" -a -n "$PATCH" ]; then - - (umask 077 && mkdir "$HGTMP") || { - echo "Could not create temporary directory $HGTMP" 1>&2 - failure - } - - $DIFF -u "$BASE" "$OTHER" > "$HGTMP/diff" || : - if $PATCH "$LOCAL" < "$HGTMP/diff"; then - success - else - # If rejects are empty after using the editor, merge was ok - $EDITOR "$LOCAL" "$LOCAL.rej" || failure - $TEST -s "$LOCAL.rej" || success - fi - failure -fi - -echo -echo "hgmerge: unable to find any merge utility!" -echo "supported programs:" -echo "merge, FileMerge, tkdiff, kdiff3, meld, diff+patch" -echo -failure
--- a/hgweb.cgi Fri Feb 08 11:50:37 2008 +0100 +++ b/hgweb.cgi Fri Feb 08 11:55:17 2008 +0100 @@ -22,10 +22,9 @@ #os.environ["HGENCODING"] = "UTF-8" from mercurial.hgweb.hgweb_mod import hgweb -from mercurial.hgweb.request import wsgiapplication +from mercurial import dispatch, ui import mercurial.hgweb.wsgicgi as wsgicgi -def make_web_app(): - return hgweb("/path/to/repo", "repository name") - -wsgicgi.launch(wsgiapplication(make_web_app)) +u = ui.ui(report_untrusted=False, interactive=False) +dispatch.profiled(u, lambda: wsgicgi.launch(hgweb("/path/to/repo", + "repository name", u)))
--- a/hgwebdir.cgi Fri Feb 08 11:50:37 2008 +0100 +++ b/hgwebdir.cgi Fri Feb 08 11:55:17 2008 +0100 @@ -22,7 +22,7 @@ #os.environ["HGENCODING"] = "UTF-8" from mercurial.hgweb.hgwebdir_mod import hgwebdir -from mercurial.hgweb.request import wsgiapplication +from mercurial import dispatch, ui import mercurial.hgweb.wsgicgi as wsgicgi # The config file looks like this. You can have paths to individual @@ -44,7 +44,5 @@ # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples # or use a dictionary with entries like 'virtual/path': '/real/path' -def make_web_app(): - return hgwebdir("hgweb.config") - -wsgicgi.launch(wsgiapplication(make_web_app)) +u = ui.ui(report_untrusted=False, interactive=False) +dispatch.profiled(u, lambda: wsgicgi.launch(hgwebdir('hgweb.config', u)))
--- a/mercurial/archival.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/archival.py Fri Feb 08 11:55:17 2008 +0100 @@ -15,7 +15,7 @@ safe for consumers.''' if prefix: - prefix = prefix.replace('\\', '/') + prefix = util.normpath(prefix) else: if not isinstance(dest, str): raise ValueError('dest must be string if no prefix')
--- a/mercurial/bdiff.c Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/bdiff.c Fri Feb 08 11:55:17 2008 +0100 @@ -245,7 +245,7 @@ /* allocate and fill arrays */ t = equatelines(a, an, b, bn); - pos = (struct pos *)calloc((bn>0)?bn:1, sizeof(struct pos)); + pos = (struct pos *)calloc(bn ? bn : 1, sizeof(struct pos)); /* we can't have more matches than lines in the shorter file */ l.head = l.base = (struct hunk *)malloc(sizeof(struct hunk) * ((an<bn ? an:bn) + 1));
--- a/mercurial/bundlerepo.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/bundlerepo.py Fri Feb 08 11:55:17 2008 +0100 @@ -48,7 +48,7 @@ continue for p in (p1, p2): if not p in self.nodemap: - raise revlog.LookupError(_("unknown parent %s") % short(p1)) + raise revlog.LookupError(hex(p1), _("unknown parent %s") % short(p1)) if linkmapper is None: link = n else: @@ -256,14 +256,25 @@ def instance(ui, path, create): if create: raise util.Abort(_('cannot create new bundle repository')) + parentpath = ui.config("bundle", "mainreporoot", "") + if parentpath: + # Try to make the full path relative so we get a nice, short URL. + # In particular, we don't want temp dir names in test outputs. + cwd = os.getcwd() + if parentpath == cwd: + parentpath = '' + else: + cwd = os.path.join(cwd,'') + if parentpath.startswith(cwd): + parentpath = parentpath[len(cwd):] path = util.drop_scheme('file', path) if path.startswith('bundle:'): path = util.drop_scheme('bundle', path) s = path.split("+", 1) if len(s) == 1: - repopath, bundlename = "", s[0] + repopath, bundlename = parentpath, s[0] else: repopath, bundlename = s else: - repopath, bundlename = '', path + repopath, bundlename = parentpath, path return bundlerepository(ui, repopath, bundlename)
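The new parentpath handling only strips the current directory prefix from the configured main repo root so that bundle repository paths stay short. A toy, repo-free rendering of that logic (POSIX separators assumed):

    import os

    def shorten(parentpath, cwd):
        # same directory: empty string means "the main repo is right here"
        if parentpath == cwd:
            return ''
        # otherwise drop the cwd prefix (note the trailing separator)
        cwd = os.path.join(cwd, '')
        if parentpath.startswith(cwd):
            return parentpath[len(cwd):]
        return parentpath

    assert shorten('/home/me/repo', '/home/me/repo') == ''
    assert shorten('/home/me/repo', '/home/me') == 'repo'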
--- a/mercurial/byterange.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/byterange.py Fri Feb 08 11:55:17 2008 +0100 @@ -233,7 +233,7 @@ size = (lb - fb) fo = RangeableFileObject(fo, (fb, lb)) headers = mimetools.Message(StringIO( - 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % + 'Content-Type: %s\nContent-Length: %d\nLast-Modified: %s\n' % (mtype or 'text/plain', size, modified))) return urllib.addinfourl(fo, headers, 'file:'+file)
--- a/mercurial/changegroup.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/changegroup.py Fri Feb 08 11:55:17 2008 +0100 @@ -80,9 +80,13 @@ # in case of sshrepo because we don't know the end of the stream # an empty chunkiter is the end of the changegroup + # a changegroup has at least 2 chunkiters (changelog and manifest). + # after that, an empty chunkiter is the end of the changegroup empty = False - while not empty: + count = 0 + while not empty or count <= 2: empty = True + count += 1 for chunk in chunkiter(cg): empty = False fh.write(z.compress(chunkheader(len(chunk))))
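A toy rendering of the loop above, showing why the counter is needed: an empty group only terminates the stream once the two mandatory groups (changelog and manifest) have been passed, so an empty changelog or manifest group no longer ends the copy early.

    def copy_stream(groups):
        # groups: list of chunk lists, in stream order (toy stand-in for chunkiter)
        out = []
        empty = False
        count = 0
        while not empty or count <= 2:
            empty = True
            for chunk in groups[count]:
                empty = False
                out.append(chunk)
            count += 1
        return out

    # changelog group, manifest group, one file group, then the terminator
    assert copy_stream([['c1'], ['m1'], ['f1'], []]) == ['c1', 'm1', 'f1']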
--- a/mercurial/changelog.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/changelog.py Fri Feb 08 11:55:17 2008 +0100 @@ -16,16 +16,13 @@ >>> s 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n' >>> res = _string_escape(s) - >>> s == _string_unescape(res) + >>> s == res.decode('string_escape') True """ # subset of the string_escape codec text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r') return text.replace('\0', '\\0') -def _string_unescape(text): - return text.decode('string_escape') - class appender: '''the changelog index must be update last on disk, so we use this class to delay writes to it''' @@ -123,10 +120,9 @@ def decode_extra(self, text): extra = {} for l in text.split('\0'): - if not l: - continue - k, v = _string_unescape(l).split(':', 1) - extra[k] = v + if l: + k, v = l.decode('string_escape').split(':', 1) + extra[k] = v return extra def encode_extra(self, d): @@ -136,7 +132,7 @@ items = [_string_escape('%s:%s' % (k, d[k])) for k in keys] return "\0".join(items) - def extract(self, text): + def read(self, node): """ format used: nodeid\n : manifest node in ascii @@ -149,6 +145,7 @@ changelog v0 doesn't use extra """ + text = self.revision(node) if not text: return (nullid, "", (0, 0), [], "", {'branch': 'default'}) last = text.index("\n\n") @@ -175,9 +172,6 @@ files = l[3:] return (manifest, user, (time, timezone), files, desc, extra) - def read(self, node): - return self.extract(self.revision(node)) - def add(self, manifest, list, desc, transaction, p1=None, p2=None, user=None, date=None, extra={}):
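For reference, a small standalone sketch (Python 2, mirroring the methods above) of how changeset extra data round-trips now that unescaping goes through the built-in string_escape codec: each key:value pair is escaped and the pairs are joined with NUL bytes.

    def _string_escape(text):
        text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
        return text.replace('\0', '\\0')

    def encode_extra(d):
        # sorted keys keep the encoding deterministic, as encode_extra does
        return "\0".join([_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)])

    def decode_extra(text):
        extra = {}
        for l in text.split('\0'):
            if l:
                k, v = l.decode('string_escape').split(':', 1)
                extra[k] = v
        return extra

    e = {'branch': 'default', 'note': 'line one\nline two'}
    assert decode_extra(encode_extra(e)) == e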
--- a/mercurial/cmdutil.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/cmdutil.py Fri Feb 08 11:55:17 2008 +0100 @@ -8,7 +8,7 @@ from node import * from i18n import _ import os, sys, bisect, stat -import mdiff, bdiff, util, templater, patch +import mdiff, bdiff, util, templater, templatefilters, patch, errno revrangesep = ':' @@ -50,7 +50,7 @@ """Return (aliases, command table entry) for command string.""" choice = findpossible(ui, cmd, table) - if choice.has_key(cmd): + if cmd in choice: return choice[cmd] if len(choice) > 1: @@ -64,6 +64,8 @@ raise UnknownCommand(cmd) def bail_if_changed(repo): + if repo.dirstate.parents()[1] != nullid: + raise util.Abort(_('outstanding uncommitted merge')) modified, added, removed, deleted = repo.status()[:4] if modified or added or removed or deleted: raise util.Abort(_("outstanding uncommitted changes")) @@ -286,6 +288,206 @@ if not dry_run: repo.copy(old, new) +def copy(ui, repo, pats, opts, rename=False): + # called with the repo lock held + # + # hgsep => pathname that uses "/" to separate directories + # ossep => pathname that uses os.sep to separate directories + cwd = repo.getcwd() + targets = {} + after = opts.get("after") + dryrun = opts.get("dry_run") + + def walkpat(pat): + srcs = [] + for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True): + state = repo.dirstate[abs] + if state in '?r': + if exact and state == '?': + ui.warn(_('%s: not copying - file is not managed\n') % rel) + if exact and state == 'r': + ui.warn(_('%s: not copying - file has been marked for' + ' remove\n') % rel) + continue + # abs: hgsep + # rel: ossep + srcs.append((abs, rel, exact)) + return srcs + + # abssrc: hgsep + # relsrc: ossep + # otarget: ossep + def copyfile(abssrc, relsrc, otarget, exact): + abstarget = util.canonpath(repo.root, cwd, otarget) + reltarget = repo.pathto(abstarget, cwd) + target = repo.wjoin(abstarget) + src = repo.wjoin(abssrc) + state = repo.dirstate[abstarget] + + # check for collisions + prevsrc = targets.get(abstarget) + if prevsrc is not None: + ui.warn(_('%s: not overwriting - %s collides with %s\n') % + (reltarget, repo.pathto(abssrc, cwd), + repo.pathto(prevsrc, cwd))) + return + + # check for overwrites + exists = os.path.exists(target) + if (not after and exists or after and state in 'mn'): + if not opts['force']: + ui.warn(_('%s: not overwriting - file exists\n') % + reltarget) + return + + if after: + if not exists: + return + elif not dryrun: + try: + if exists: + os.unlink(target) + targetdir = os.path.dirname(target) or '.' + if not os.path.isdir(targetdir): + os.makedirs(targetdir) + util.copyfile(src, target) + except IOError, inst: + if inst.errno == errno.ENOENT: + ui.warn(_('%s: deleted in working copy\n') % relsrc) + else: + ui.warn(_('%s: cannot copy - %s\n') % + (relsrc, inst.strerror)) + return True # report a failure + + if ui.verbose or not exact: + action = rename and "moving" or "copying" + ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget)) + + targets[abstarget] = abssrc + + # fix up dirstate + origsrc = repo.dirstate.copied(abssrc) or abssrc + if abstarget == origsrc: # copying back a copy? 
+ if state not in 'mn' and not dryrun: + repo.dirstate.normallookup(abstarget) + else: + if repo.dirstate[origsrc] == 'a': + if not ui.quiet: + ui.warn(_("%s has not been committed yet, so no copy " + "data will be stored for %s.\n") + % (repo.pathto(origsrc, cwd), reltarget)) + if abstarget not in repo.dirstate and not dryrun: + repo.add([abstarget]) + elif not dryrun: + repo.copy(origsrc, abstarget) + + if rename and not dryrun: + repo.remove([abssrc], True) + + # pat: ossep + # dest ossep + # srcs: list of (hgsep, hgsep, ossep, bool) + # return: function that takes hgsep and returns ossep + def targetpathfn(pat, dest, srcs): + if os.path.isdir(pat): + abspfx = util.canonpath(repo.root, cwd, pat) + abspfx = util.localpath(abspfx) + if destdirexists: + striplen = len(os.path.split(abspfx)[0]) + else: + striplen = len(abspfx) + if striplen: + striplen += len(os.sep) + res = lambda p: os.path.join(dest, util.localpath(p)[striplen:]) + elif destdirexists: + res = lambda p: os.path.join(dest, + os.path.basename(util.localpath(p))) + else: + res = lambda p: dest + return res + + # pat: ossep + # dest ossep + # srcs: list of (hgsep, hgsep, ossep, bool) + # return: function that takes hgsep and returns ossep + def targetpathafterfn(pat, dest, srcs): + if util.patkind(pat, None)[0]: + # a mercurial pattern + res = lambda p: os.path.join(dest, + os.path.basename(util.localpath(p))) + else: + abspfx = util.canonpath(repo.root, cwd, pat) + if len(abspfx) < len(srcs[0][0]): + # A directory. Either the target path contains the last + # component of the source path or it does not. + def evalpath(striplen): + score = 0 + for s in srcs: + t = os.path.join(dest, util.localpath(s[0])[striplen:]) + if os.path.exists(t): + score += 1 + return score + + abspfx = util.localpath(abspfx) + striplen = len(abspfx) + if striplen: + striplen += len(os.sep) + if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])): + score = evalpath(striplen) + striplen1 = len(os.path.split(abspfx)[0]) + if striplen1: + striplen1 += len(os.sep) + if evalpath(striplen1) > score: + striplen = striplen1 + res = lambda p: os.path.join(dest, + util.localpath(p)[striplen:]) + else: + # a file + if destdirexists: + res = lambda p: os.path.join(dest, + os.path.basename(util.localpath(p))) + else: + res = lambda p: dest + return res + + + pats = util.expand_glob(pats) + if not pats: + raise util.Abort(_('no source or destination specified')) + if len(pats) == 1: + raise util.Abort(_('no destination specified')) + dest = pats.pop() + destdirexists = os.path.isdir(dest) + if not destdirexists: + if len(pats) > 1 or util.patkind(pats[0], None)[0]: + raise util.Abort(_('with multiple sources, destination must be an ' + 'existing directory')) + if util.endswithsep(dest): + raise util.Abort(_('destination %s is not a directory') % dest) + + tfn = targetpathfn + if after: + tfn = targetpathafterfn + copylist = [] + for pat in pats: + srcs = walkpat(pat) + if not srcs: + continue + copylist.append((tfn(pat, dest, srcs), srcs)) + if not copylist: + raise util.Abort(_('no files to copy')) + + errors = 0 + for targetpath, srcs in copylist: + for abssrc, relsrc, exact in srcs: + if copyfile(abssrc, relsrc, targetpath(abssrc), exact): + errors += 1 + + if errors: + ui.warn(_('(consider using --after)\n')) + + return errors + def service(opts, parentfn=None, initfn=None, runfn=None): '''Run a command as a service.''' @@ -293,6 +495,15 @@ rfd, wfd = os.pipe() args = sys.argv[:] args.append('--daemon-pipefds=%d,%d' % (rfd, wfd)) + # Don't pass --cwd 
to the child process, because we've already + # changed directory. + for i in xrange(1,len(args)): + if args[i].startswith('--cwd='): + del args[i] + break + elif args[i].startswith('--cwd'): + del args[i:i+2] + break pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args) os.close(wfd) @@ -462,7 +673,7 @@ def __init__(self, ui, repo, patch, mapfile, buffered): changeset_printer.__init__(self, ui, repo, patch, buffered) - filters = templater.common_filters.copy() + filters = templatefilters.filters.copy() filters['formatnode'] = (ui.debugflag and (lambda x: x) or (lambda x: x[:12])) self.t = templater.templater(mapfile, filters, @@ -572,25 +783,25 @@ c = [{'name': x[0], 'source': x[1]} for x in copies] return showlist('file_copy', c, plural='file_copies', **args) - if self.ui.debugflag: - files = self.repo.status(log.parents(changenode)[0], changenode)[:3] - def showfiles(**args): - return showlist('file', files[0], **args) - def showadds(**args): - return showlist('file_add', files[1], **args) - def showdels(**args): - return showlist('file_del', files[2], **args) - def showmanifest(**args): - args = args.copy() - args.update(dict(rev=self.repo.manifest.rev(changes[0]), - node=hex(changes[0]))) - return self.t('manifest', **args) - else: - def showfiles(**args): - return showlist('file', changes[3], **args) - showadds = '' - showdels = '' - showmanifest = '' + files = [] + def getfiles(): + if not files: + files[:] = self.repo.status( + log.parents(changenode)[0], changenode)[:3] + return files + def showfiles(**args): + return showlist('file', changes[3], **args) + def showmods(**args): + return showlist('file_mod', getfiles()[0], **args) + def showadds(**args): + return showlist('file_add', getfiles()[1], **args) + def showdels(**args): + return showlist('file_del', getfiles()[2], **args) + def showmanifest(**args): + args = args.copy() + args.update(dict(rev=self.repo.manifest.rev(changes[0]), + node=hex(changes[0]))) + return self.t('manifest', **args) defprops = { 'author': changes[1], @@ -599,6 +810,7 @@ 'desc': changes[4].strip(), 'file_adds': showadds, 'file_dels': showdels, + 'file_mods': showmods, 'files': showfiles, 'file_copies': showcopies, 'manifest': showmanifest, @@ -690,7 +902,7 @@ def finddate(ui, repo, date): """Find the tipmost changeset that matches the given date spec""" - df = util.matchdate(date + " to " + date) + df = util.matchdate(date) get = util.cachefunc(lambda r: repo.changectx(r).changeset()) changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None}) results = {} @@ -905,8 +1117,11 @@ '''commit the specified files or all outstanding changes''' message = logmessage(opts) - if opts['addremove']: + # extract addremove carefully -- this function can be called from a command + # that doesn't support addremove + if opts.get('addremove'): addremove(repo, pats, opts) + fns, match, anypats = matchpats(repo, pats, opts) if pats: status = repo.status(files=fns, match=match)
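The interesting part of the consolidated copy/rename code is targetpathfn's path arithmetic. A toy, repo-free illustration (POSIX separators assumed) of the mapping it computes when the source pattern is a directory:

    import os

    def mapped(pat, dest, destdirexists, src):
        # mirrors targetpathfn for a directory source, without the repo plumbing
        if destdirexists:
            striplen = len(os.path.split(pat)[0])
        else:
            striplen = len(pat)
        if striplen:
            striplen += len(os.sep)
        return os.path.join(dest, src[striplen:])

    # copying into an existing directory keeps the source directory name ...
    assert mapped('dir1', 'existing', True, 'dir1/f') == 'existing/dir1/f'
    # ... while copying to a new name re-roots the files under the destination
    assert mapped('dir1', 'newdir', False, 'dir1/f') == 'newdir/f'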
--- a/mercurial/commands.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/commands.py Fri Feb 08 11:55:17 2008 +0100 @@ -11,7 +11,7 @@ import hg, util, revlog, bundlerepo, extensions import difflib, patch, time, help, mdiff, tempfile import errno, version, socket -import archival, changegroup, cmdutil, hgweb.server, sshserver +import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect # Commands start here, listed alphabetically @@ -26,17 +26,23 @@ If no names are given, add all files in the repository. """ + rejected = None + exacts = {} names = [] - for src, abs, rel, exact in cmdutil.walk(repo, pats, opts): + for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, + badmatch=util.always): if exact: if ui.verbose: ui.status(_('adding %s\n') % rel) names.append(abs) + exacts[abs] = 1 elif abs not in repo.dirstate: ui.status(_('adding %s\n') % rel) names.append(abs) if not opts.get('dry_run'): - repo.add(names) + rejected = repo.add(names) + rejected = [p for p in rejected if p in exacts] + return rejected and 1 or 0 def addremove(ui, repo, *pats, **opts): """add all new files, delete all missing files @@ -190,10 +196,13 @@ raise util.Abort(_("please specify a revision to backout")) cmdutil.bail_if_changed(repo) + node = repo.lookup(rev) + op1, op2 = repo.dirstate.parents() - if op2 != nullid: - raise util.Abort(_('outstanding uncommitted merge')) - node = repo.lookup(rev) + a = repo.changelog.ancestor(op1, node) + if a != node: + raise util.Abort(_('cannot back out change on a different branch')) + p1, p2 = repo.changelog.parents(node) if p1 == nullid: raise util.Abort(_('cannot back out a change with no parents')) @@ -210,6 +219,7 @@ if opts['parent']: raise util.Abort(_('cannot use --parent on non-merge changeset')) parent = p1 + hg.clean(repo, node, show_stats=False) revert_opts = opts.copy() revert_opts['date'] = None @@ -236,6 +246,90 @@ ui.status(_('(use "backout --merge" ' 'if you want to auto-merge)\n')) +def bisect(ui, repo, rev=None, extra=None, + reset=None, good=None, bad=None, skip=None, noupdate=None): + """subdivision search of changesets + + This command helps to find changesets which introduce problems. + To use, mark the earliest changeset you know exhibits the problem + as bad, then mark the latest changeset which is free from the + problem as good. Bisect will update your working directory to a + revision for testing. Once you have performed tests, mark the + working directory as bad or good and bisect will either update to + another candidate changeset or announce that it has found the bad + revision. 
+ """ + # backward compatibility + if rev in "good bad reset init".split(): + ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n")) + cmd, rev, extra = rev, extra, None + if cmd == "good": + good = True + elif cmd == "bad": + bad = True + else: + reset = True + elif extra or good + bad + skip + reset > 1: + raise util.Abort("Incompatible arguments") + + if reset: + p = repo.join("bisect.state") + if os.path.exists(p): + os.unlink(p) + return + + # load state + state = {'good': [], 'bad': [], 'skip': []} + if os.path.exists(repo.join("bisect.state")): + for l in repo.opener("bisect.state"): + kind, node = l[:-1].split() + node = repo.lookup(node) + if kind not in state: + raise util.Abort(_("unknown bisect kind %s") % kind) + state[kind].append(node) + + # update state + node = repo.lookup(rev or '.') + if good: + state['good'].append(node) + elif bad: + state['bad'].append(node) + elif skip: + state['skip'].append(node) + + # save state + f = repo.opener("bisect.state", "w", atomictemp=True) + wlock = repo.wlock() + try: + for kind in state: + for node in state[kind]: + f.write("%s %s\n" % (kind, hg.hex(node))) + f.rename() + finally: + del wlock + + if not state['good'] or not state['bad']: + return + + # actually bisect + node, changesets, good = hbisect.bisect(repo.changelog, state) + if changesets == 0: + ui.write(_("The first %s revision is:\n") % (good and "good" or "bad")) + displayer = cmdutil.show_changeset(ui, repo, {}) + displayer.show(changenode=node) + elif node is not None: + # compute the approximate number of remaining tests + tests, size = 0, 2 + while size <= changesets: + tests, size = tests + 1, size * 2 + rev = repo.changelog.rev(node) + ui.write(_("Testing changeset %s:%s " + "(%s changesets remaining, ~%s tests)\n") + % (rev, hg.short(node), changesets, tests)) + if not noupdate: + cmdutil.bail_if_changed(repo) + return hg.clean(repo, node) + def branch(ui, repo, label=None, **opts): """set or show the current branch name @@ -245,6 +339,8 @@ Unless --force is specified, branch will not let you set a branch name that shadows an existing branch. + + Use the command 'hg update' to switch to an existing branch. """ if label: @@ -264,6 +360,8 @@ inactive. If active is specified, only show active branches. A branch is considered active if it contains unmerged heads. + + Use the command 'hg update' to switch to an existing branch. """ b = repo.branchtags() heads = dict.fromkeys(repo.heads(), 1) @@ -425,212 +523,14 @@ If a list of files is omitted, all changes reported by "hg status" will be committed. - If no commit message is specified, the editor configured in your hgrc - or in the EDITOR environment variable is started to enter a message. + If no commit message is specified, the configured editor is started to + enter a message. 
""" def commitfunc(ui, repo, files, message, match, opts): return repo.commit(files, message, opts['user'], opts['date'], match, force_editor=opts.get('force_editor')) cmdutil.commit(ui, repo, commitfunc, pats, opts) -def docopy(ui, repo, pats, opts): - # called with the repo lock held - # - # hgsep => pathname that uses "/" to separate directories - # ossep => pathname that uses os.sep to separate directories - cwd = repo.getcwd() - errors = 0 - copied = [] - targets = {} - - # abs: hgsep - # rel: ossep - # return: hgsep - def okaytocopy(abs, rel, exact): - reasons = {'?': _('is not managed'), - 'r': _('has been marked for remove')} - state = repo.dirstate[abs] - reason = reasons.get(state) - if reason: - if exact: - ui.warn(_('%s: not copying - file %s\n') % (rel, reason)) - else: - if state == 'a': - origsrc = repo.dirstate.copied(abs) - if origsrc is not None: - return origsrc - return abs - - # origsrc: hgsep - # abssrc: hgsep - # relsrc: ossep - # otarget: ossep - def copy(origsrc, abssrc, relsrc, otarget, exact): - abstarget = util.canonpath(repo.root, cwd, otarget) - reltarget = repo.pathto(abstarget, cwd) - prevsrc = targets.get(abstarget) - src = repo.wjoin(abssrc) - target = repo.wjoin(abstarget) - if prevsrc is not None: - ui.warn(_('%s: not overwriting - %s collides with %s\n') % - (reltarget, repo.pathto(abssrc, cwd), - repo.pathto(prevsrc, cwd))) - return - if (not opts['after'] and os.path.exists(target) or - opts['after'] and repo.dirstate[abstarget] in 'mn'): - if not opts['force']: - ui.warn(_('%s: not overwriting - file exists\n') % - reltarget) - return - if not opts['after'] and not opts.get('dry_run'): - os.unlink(target) - if opts['after']: - if not os.path.exists(target): - return - else: - targetdir = os.path.dirname(target) or '.' 
- if not os.path.isdir(targetdir) and not opts.get('dry_run'): - os.makedirs(targetdir) - try: - restore = repo.dirstate[abstarget] == 'r' - if restore and not opts.get('dry_run'): - repo.undelete([abstarget]) - try: - if not opts.get('dry_run'): - util.copyfile(src, target) - restore = False - finally: - if restore: - repo.remove([abstarget]) - except IOError, inst: - if inst.errno == errno.ENOENT: - ui.warn(_('%s: deleted in working copy\n') % relsrc) - else: - ui.warn(_('%s: cannot copy - %s\n') % - (relsrc, inst.strerror)) - errors += 1 - return - if ui.verbose or not exact: - ui.status(_('copying %s to %s\n') % (relsrc, reltarget)) - targets[abstarget] = abssrc - if abstarget != origsrc: - if repo.dirstate[origsrc] == 'a': - if not ui.quiet: - ui.warn(_("%s has not been committed yet, so no copy " - "data will be stored for %s.\n") - % (repo.pathto(origsrc, cwd), reltarget)) - if abstarget not in repo.dirstate and not opts.get('dry_run'): - repo.add([abstarget]) - elif not opts.get('dry_run'): - repo.copy(origsrc, abstarget) - copied.append((abssrc, relsrc, exact)) - - # pat: ossep - # dest ossep - # srcs: list of (hgsep, hgsep, ossep, bool) - # return: function that takes hgsep and returns ossep - def targetpathfn(pat, dest, srcs): - if os.path.isdir(pat): - abspfx = util.canonpath(repo.root, cwd, pat) - abspfx = util.localpath(abspfx) - if destdirexists: - striplen = len(os.path.split(abspfx)[0]) - else: - striplen = len(abspfx) - if striplen: - striplen += len(os.sep) - res = lambda p: os.path.join(dest, util.localpath(p)[striplen:]) - elif destdirexists: - res = lambda p: os.path.join(dest, - os.path.basename(util.localpath(p))) - else: - res = lambda p: dest - return res - - # pat: ossep - # dest ossep - # srcs: list of (hgsep, hgsep, ossep, bool) - # return: function that takes hgsep and returns ossep - def targetpathafterfn(pat, dest, srcs): - if util.patkind(pat, None)[0]: - # a mercurial pattern - res = lambda p: os.path.join(dest, - os.path.basename(util.localpath(p))) - else: - abspfx = util.canonpath(repo.root, cwd, pat) - if len(abspfx) < len(srcs[0][0]): - # A directory. Either the target path contains the last - # component of the source path or it does not. 
- def evalpath(striplen): - score = 0 - for s in srcs: - t = os.path.join(dest, util.localpath(s[0])[striplen:]) - if os.path.exists(t): - score += 1 - return score - - abspfx = util.localpath(abspfx) - striplen = len(abspfx) - if striplen: - striplen += len(os.sep) - if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])): - score = evalpath(striplen) - striplen1 = len(os.path.split(abspfx)[0]) - if striplen1: - striplen1 += len(os.sep) - if evalpath(striplen1) > score: - striplen = striplen1 - res = lambda p: os.path.join(dest, - util.localpath(p)[striplen:]) - else: - # a file - if destdirexists: - res = lambda p: os.path.join(dest, - os.path.basename(util.localpath(p))) - else: - res = lambda p: dest - return res - - - pats = util.expand_glob(pats) - if not pats: - raise util.Abort(_('no source or destination specified')) - if len(pats) == 1: - raise util.Abort(_('no destination specified')) - dest = pats.pop() - destdirexists = os.path.isdir(dest) - if not destdirexists: - if len(pats) > 1 or util.patkind(pats[0], None)[0]: - raise util.Abort(_('with multiple sources, destination must be an ' - 'existing directory')) - if dest.endswith(os.sep) or os.altsep and dest.endswith(os.altsep): - raise util.Abort(_('destination %s is not a directory') % dest) - if opts['after']: - tfn = targetpathafterfn - else: - tfn = targetpathfn - copylist = [] - for pat in pats: - srcs = [] - for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts, - globbed=True): - origsrc = okaytocopy(abssrc, relsrc, exact) - if origsrc: - srcs.append((origsrc, abssrc, relsrc, exact)) - if not srcs: - continue - copylist.append((tfn(pat, dest, srcs), srcs)) - if not copylist: - raise util.Abort(_('no files to copy')) - - for targetpath, srcs in copylist: - for origsrc, abssrc, relsrc, exact in srcs: - copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact) - - if errors: - ui.warn(_('(consider using --after)\n')) - return errors, copied - def copy(ui, repo, *pats, **opts): """mark files as copied for the next commit @@ -647,10 +547,9 @@ """ wlock = repo.wlock(False) try: - errs, copied = docopy(ui, repo, pats, opts) + return cmdutil.copy(ui, repo, pats, opts) finally: del wlock - return errs def debugancestor(ui, index, rev1, rev2): """find the ancestor revision of two revisions in a given index""" @@ -679,6 +578,14 @@ clist.sort() ui.write("%s\n" % "\n".join(clist)) +def debugfsinfo(ui, path = "."): + file('.debugfsinfo', 'w').write('') + ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) + ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) + ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo') + and 'yes' or 'no')) + os.unlink('.debugfsinfo') + def debugrebuildstate(ui, repo, rev=""): """rebuild the dirstate as it would look like for the given revision""" if rev == "": @@ -767,11 +674,11 @@ for file_, ent in k: if ent[3] == -1: # Pad or slice to locale representation - locale_len = len(time.strftime("%x %X", time.localtime(0))) + locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0))) timestr = 'unset' timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr)) else: - timestr = time.strftime("%x %X", time.localtime(ent[3])) + timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3])) if ent[1] & 020000: mode = 'lnk' else: @@ -899,52 +806,16 @@ ui.write(_(" (Current patch tool may be incompatible with patch," " or misconfigured. 
Please check your .hgrc file)\n")) else: - ui.write(_(" Internal patcher failure, please report this error" + ui.write(_(" Internal patcher failure, please report this error" " to http://www.selenic.com/mercurial/bts\n")) problems += patchproblems os.unlink(fa) os.unlink(fd) - # merge helper - ui.status(_("Checking merge helper...\n")) - cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge") - or "hgmerge") - cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0]) - if not cmdpath: - if cmd == 'hgmerge': - ui.write(_(" No merge helper set and can't find default" - " hgmerge script in PATH\n")) - ui.write(_(" (specify a merge helper in your .hgrc file)\n")) - else: - ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd) - ui.write(_(" (specify a merge helper in your .hgrc file)\n")) - problems += 1 - else: - # actually attempt a patch here - fa = writetemp("1\n2\n3\n4\n") - fl = writetemp("1\n2\n3\ninsert\n4\n") - fr = writetemp("begin\n1\n2\n3\n4\n") - r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr)) - if r: - ui.write(_(" Got unexpected merge error %d!\n") % r) - problems += 1 - m = file(fl).read() - if m != "begin\n1\n2\n3\ninsert\n4\n": - ui.write(_(" Got unexpected merge results!\n")) - ui.write(_(" (your merge helper may have the" - " wrong argument order)\n")) - ui.write(_(" Result: %r\n") % m) - problems += 1 - os.unlink(fa) - os.unlink(fl) - os.unlink(fr) - # editor ui.status(_("Checking commit editor...\n")) - editor = (os.environ.get("HGEDITOR") or - ui.config("ui", "editor") or - os.environ.get("EDITOR", "vi")) + editor = ui.geteditor() cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0]) if not cmdpath: if editor == 'vi': @@ -981,7 +852,8 @@ ctx = repo.changectx(opts.get('rev', 'tip')) for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts, ctx.node()): - m = ctx.filectx(abs).renamed() + fctx = ctx.filectx(abs) + m = fctx.filelog().renamed(fctx.filenode()) if m: ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1]))) else: @@ -1314,7 +1186,11 @@ ui.write('\n') aliases, i = cmdutil.findcmd(ui, name, table) # synopsis - ui.write("%s\n\n" % i[2]) + ui.write("%s\n" % i[2]) + + # aliases + if not ui.quiet and len(aliases) > 1: + ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) # description doc = i[0].__doc__ @@ -1322,13 +1198,9 @@ doc = _("(No help text available)") if ui.quiet: doc = doc.splitlines(0)[0] - ui.write("%s\n" % doc.rstrip()) + ui.write("\n%s\n" % doc.rstrip()) if not ui.quiet: - # aliases - if len(aliases) > 1: - ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) - # options if i[1]: option_lists.append((_("options:\n"), i[1])) @@ -1626,15 +1498,17 @@ files=files) finally: files = patch.updatedir(ui, repo, files) - n = repo.commit(files, message, user, date) - if opts.get('exact'): - if hex(n) != nodeid: - repo.rollback() - raise util.Abort(_('patch is damaged' + - ' or loses information')) - # Force a dirstate write so that the next transaction backups - # an up-do-date file. - repo.dirstate.write() + if not opts.get('no_commit'): + n = repo.commit(files, message, opts.get('user') or user, + opts.get('date') or date) + if opts.get('exact'): + if hex(n) != nodeid: + repo.rollback() + raise util.Abort(_('patch is damaged' + ' or loses information')) + # Force a dirstate write so that the next transaction + # backups an up-do-date file. 
+ repo.dirstate.write() finally: os.unlink(tmpname) finally: @@ -1804,8 +1678,7 @@ endrev = repo.changelog.count() rcache = {} ncache = {} - dcache = [] - def getrenamed(fn, rev, man): + def getrenamed(fn, rev): '''looks up all renames for a file (up to endrev) the first time the file is given. It indexes on the changerev and only parses the manifest if linkrev != changerev. @@ -1825,13 +1698,14 @@ break if rev in rcache[fn]: return rcache[fn][rev] - mr = repo.manifest.rev(man) - if repo.manifest.parentrevs(mr) != (mr - 1, nullrev): - return ncache[fn].get(repo.manifest.find(man, fn)[0]) - if not dcache or dcache[0] != man: - dcache[:] = [man, repo.manifest.readdelta(man)] - if fn in dcache[1]: - return ncache[fn].get(dcache[1][fn]) + + # If linkrev != rev (i.e. rev not found in rcache) fallback to + # filectx logic. + + try: + return repo.changectx(rev).filectx(fn).renamed() + except revlog.LookupError: + pass return None df = False @@ -1868,9 +1742,8 @@ copies = [] if opts.get('copies') and rev: - mf = get(rev)[0] for fn in get(rev)[3]: - rename = getrenamed(fn, rev, mf) + rename = getrenamed(fn, rev) if rename: copies.append((fn, rename[0])) displayer.show(rev, changenode, copies=copies) @@ -1889,7 +1762,7 @@ The manifest is the list of files being version controlled. If no revision is given then the first parent of the working directory is used. - With -v flag, print file permissions, symlink and executable bits. With + With -v flag, print file permissions, symlink and executable bits. With --debug flag, print file revision hashes. """ @@ -1941,7 +1814,7 @@ if len(heads) == 1: msg = _('there is nothing to merge') if parent != repo.lookup(repo.workingctx().branch()): - msg = _('%s - use "hg update" instead' % msg) + msg = _('%s - use "hg update" instead') % msg raise util.Abort(msg) if parent not in heads: @@ -2226,6 +2099,7 @@ forget.append(abs) continue reason = _('has been marked for add (use -f to force removal)') + exact = 1 # force the message elif abs not in repo.dirstate: reason = _('is not managed') elif opts['after'] and not exact and abs not in deleted: @@ -2258,20 +2132,15 @@ """ wlock = repo.wlock(False) try: - errs, copied = docopy(ui, repo, pats, opts) - names = [] - for abs, rel, exact in copied: - if ui.verbose or not exact: - ui.status(_('removing %s\n') % rel) - names.append(abs) - if not opts.get('dry_run'): - repo.remove(names, True) - return errs + return cmdutil.copy(ui, repo, pats, opts, rename=True) finally: del wlock def revert(ui, repo, *pats, **opts): - """revert files or dirs to their states as of some revision + """restore individual files or dirs to an earlier state + + (use update -r to check out earlier revisions, revert does not + change the working dir parents) With no revision specified, revert the named files or directories to the contents they had in the parent of the working directory. @@ -2280,12 +2149,9 @@ working directory has two parents, you must explicitly specify the revision to revert to. - Modified files are saved with a .orig suffix before reverting. - To disable these backups, use --no-backup. - Using the -r option, revert the given files or directories to their contents as of a specific revision. This can be helpful to "roll - back" some or all of a change that should not have been committed. + back" some or all of an earlier change. Revert modifies the working directory. It does not commit any changes, or change the parent of the working directory. 
If you @@ -2299,6 +2165,9 @@ If names are given, all files matching the names are reverted. If no arguments are given, no files are reverted. + + Modified files are saved with a .orig suffix before reverting. + To disable these backups, use --no-backup. """ if opts["date"]: @@ -2449,10 +2318,12 @@ del wlock def rollback(ui, repo): - """roll back the last transaction in this repository - - Roll back the last transaction in this repository, restoring the - project to its state prior to the transaction. + """roll back the last transaction + + This command should be used with care. There is only one level of + rollback, and there is no way to undo a rollback. It will also + restore the dirstate at the time of the last transaction, losing + any dirstate changes since that time. Transactions are used to encapsulate the effects of all commands that create new changesets or propagate existing changesets into a @@ -2465,11 +2336,6 @@ push (with this repository as destination) unbundle - This command should be used with care. There is only one level of - rollback, and there is no way to undo a rollback. It will also - restore the dirstate at the time of the last transaction, which - may lose subsequent dirstate changes. - This command is not intended for use on public repositories. Once changes are visible for pull by other users, rolling a transaction back locally is ineffective (someone else may already have pulled @@ -2503,7 +2369,7 @@ s.serve_forever() parentui = ui.parentui or ui - optlist = ("name templates style address port ipv6" + optlist = ("name templates style address port prefix ipv6" " accesslog errorlog webdir_conf certificate") for o in optlist.split(): if opts[o]: @@ -2525,11 +2391,17 @@ if not ui.verbose: return + if self.httpd.prefix: + prefix = self.httpd.prefix.strip('/') + '/' + else: + prefix = '' + if self.httpd.port != 80: - ui.status(_('listening at http://%s:%d/\n') % - (self.httpd.addr, self.httpd.port)) + ui.status(_('listening at http://%s:%d/%s\n') % + (self.httpd.addr, self.httpd.port, prefix)) else: - ui.status(_('listening at http://%s/\n') % self.httpd.addr) + ui.status(_('listening at http://%s/%s\n') % + (self.httpd.addr, prefix)) def run(self): self.httpd.serve_forever() @@ -2542,8 +2414,10 @@ """show changed files in the working directory Show status of files in the repository. If names are given, only - files that match are shown. Files that are clean or ignored, are - not listed unless -c (clean), -i (ignored) or -A is given. + files that match are shown. Files that are clean or ignored or + source of a copy/move operation, are not listed unless -c (clean), + -i (ignored), -C (copies) or -A is given. Unless options described + with "show only ..." are given, the options -mardu are used. NOTE: status may appear to disagree with diff if permissions have changed or a merge has occurred. The standard diff format does not @@ -2560,7 +2434,7 @@ C = clean ! = deleted, but still tracked ? 
= not tracked - I = ignored (not shown by default) + I = ignored = the previous added file was copied from here """ @@ -2632,8 +2506,15 @@ rev_ = opts['rev'] message = opts['message'] if opts['remove']: - if not name in repo.tags(): + tagtype = repo.tagtype(name) + + if not tagtype: raise util.Abort(_('tag %s does not exist') % name) + if opts['local'] and tagtype == 'global': + raise util.Abort(_('%s tag is global') % name) + if not opts['local'] and tagtype == 'local': + raise util.Abort(_('%s tag is local') % name) + rev_ = nullid if not message: message = _('Removed tag %s') % name @@ -2655,23 +2536,33 @@ List the repository tags. - This lists both regular and local tags. + This lists both regular and local tags. When the -v/--verbose switch + is used, a third column "local" is printed for local tags. """ l = repo.tagslist() l.reverse() hexfunc = ui.debugflag and hex or short + tagtype = "" + for t, n in l: + if ui.quiet: + ui.write("%s\n" % t) + continue + try: hn = hexfunc(n) - r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n)) + r = "%5d:%s" % (repo.changelog.rev(n), hn) except revlog.LookupError: r = " ?:%s" % hn - if ui.quiet: - ui.write("%s\n" % t) else: spaces = " " * (30 - util.locallen(t)) - ui.write("%s%s %s\n" % (t, spaces, r)) + if ui.verbose: + if repo.tagtype(t) == 'local': + tagtype = " local" + else: + tagtype = "" + ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype)) def tip(ui, repo, **opts): """show the tip revision @@ -2746,7 +2637,7 @@ ui.write(_("Mercurial Distributed SCM (version %s)\n") % version.get_version()) ui.status(_( - "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n" + "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n" "This is free software; see the source for copying conditions. " "There is NO\nwarranty; " "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" @@ -2835,11 +2726,19 @@ ('r', 'rev', '', _('revision to backout')), ] + walkopts + commitopts + commitopts2, _('hg backout [OPTION]... [-r] REV')), + "bisect": + (bisect, + [('r', 'reset', False, _('reset bisect state')), + ('g', 'good', False, _('mark changeset good')), + ('b', 'bad', False, _('mark changeset bad')), + ('s', 'skip', False, _('skip testing changeset')), + ('U', 'noupdate', False, _('do not update to target'))], + _("hg bisect [-gbsr] [REV]")), "branch": (branch, [('f', 'force', None, _('set branch name even if it shadows an existing branch'))], - _('hg branch [NAME]')), + _('hg branch [-f] [NAME]')), "branches": (branches, [('a', 'active', False, @@ -2884,31 +2783,41 @@ _('forcibly copy over an existing managed file')), ] + walkopts + dryrunopts, _('hg copy [OPTION]... [SOURCE]... 
DEST')), - "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')), + "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')), + "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')), "debugcomplete": (debugcomplete, [('o', 'options', None, _('show the command options'))], - _('debugcomplete [-o] CMD')), - "debuginstall": (debuginstall, [], _('debuginstall')), + _('hg debugcomplete [-o] CMD')), + "debugdate": + (debugdate, + [('e', 'extended', None, _('try extended date formats'))], + _('hg debugdate [-e] DATE [RANGE]')), + "debugdata": (debugdata, [], _('hg debugdata FILE REV')), + "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')), + "debugindex": (debugindex, [], _('hg debugindex FILE')), + "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')), + "debuginstall": (debuginstall, [], _('hg debuginstall')), + "debugrawcommit|rawcommit": + (rawcommit, + [('p', 'parent', [], _('parent')), + ('F', 'files', '', _('file list')) + ] + commitopts + commitopts2, + _('hg debugrawcommit [OPTION]... [FILE]...')), "debugrebuildstate": (debugrebuildstate, [('r', 'rev', '', _('revision to rebuild to'))], - _('debugrebuildstate [-r REV] [REV]')), - "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')), - "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')), - "debugstate": (debugstate, [], _('debugstate')), - "debugdate": - (debugdate, - [('e', 'extended', None, _('try extended date formats'))], - _('debugdate [-e] DATE [RANGE]')), - "debugdata": (debugdata, [], _('debugdata FILE REV')), - "debugindex": (debugindex, [], _('debugindex FILE')), - "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')), + _('hg debugrebuildstate [-r REV] [REV]')), "debugrename": (debugrename, [('r', 'rev', '', _('revision to debug'))], - _('debugrename [-r REV] FILE')), - "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')), + _('hg debugrename [-r REV] FILE')), + "debugsetparents": + (debugsetparents, + [], + _('hg debugsetparents REV1 [REV2]')), + "debugstate": (debugstate, [], _('hg debugstate')), + "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')), "^diff": (diff, [('r', 'rev', [], _('revision')), @@ -2923,6 +2832,8 @@ _('ignore changes in the amount of white space')), ('B', 'ignore-blank-lines', None, _('ignore changes whose lines are all blank')), + ('U', 'unified', 3, + _('number of lines of context to show')) ] + walkopts, _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')), "^export": @@ -2970,12 +2881,15 @@ ('b', 'base', '', _('base path')), ('f', 'force', None, _('skip check for outstanding uncommitted changes')), + ('', 'no-commit', None, _("don't commit, just update the working directory")), ('', 'exact', None, _('apply patch to the nodes from which it was generated')), ('', 'import-branch', None, - _('Use any branch information in patch (implied by --exact)'))] + commitopts, - _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')), - "incoming|in": (incoming, + _('Use any branch information in patch (implied by --exact)'))] + + commitopts + commitopts2, + _('hg import [OPTION]... PATCH...')), + "incoming|in": + (incoming, [('M', 'no-merges', None, _('do not show merges')), ('f', 'force', None, _('run even when remote repository is unrelated')), @@ -3021,15 +2935,18 @@ ('', 'template', '', _('display with template')), ] + walkopts, _('hg log [OPTION]... 
[FILE]')), - "manifest": (manifest, [('r', 'rev', '', _('revision to display'))], - _('hg manifest [-r REV]')), + "manifest": + (manifest, + [('r', 'rev', '', _('revision to display'))], + _('hg manifest [-r REV]')), "^merge": (merge, [('f', 'force', None, _('force a merge with outstanding changes')), ('r', 'rev', '', _('revision to merge')), ], _('hg merge [-f] [[-r] REV]')), - "outgoing|out": (outgoing, + "outgoing|out": + (outgoing, [('M', 'no-merges', None, _('do not show merges')), ('f', 'force', None, _('run even when remote repository is unrelated')), @@ -3063,16 +2980,10 @@ ('r', 'rev', [], _('a specific revision you would like to push')), ] + remoteopts, _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')), - "debugrawcommit|rawcommit": - (rawcommit, - [('p', 'parent', [], _('parent')), - ('F', 'files', '', _('file list')) - ] + commitopts + commitopts2, - _('hg debugrawcommit [OPTION]... [FILE]...')), "recover": (recover, [], _('hg recover')), "^remove|rm": (remove, - [('A', 'after', None, _('record remove that has already occurred')), + [('A', 'after', None, _('record remove without deleting')), ('f', 'force', None, _('remove file even if modified')), ] + walkopts, _('hg remove [OPTION]... FILE...')), @@ -3083,7 +2994,7 @@ _('forcibly copy over an existing managed file')), ] + walkopts + dryrunopts, _('hg rename [OPTION]... SOURCE... DEST')), - "^revert": + "revert": (revert, [('a', 'all', None, _('revert all changes when no arguments given')), ('d', 'date', '', _('tipmost revision matching date')), @@ -3093,10 +3004,6 @@ _('hg revert [OPTION]... [-r REV] [NAME]...')), "rollback": (rollback, [], _('hg rollback')), "root": (root, [], _('hg root')), - "showconfig|debugconfig": - (showconfig, - [('u', 'untrusted', None, _('show untrusted configuration options'))], - _('showconfig [-u] [NAME]...')), "^serve": (serve, [('A', 'accesslog', '', _('name of access log file to write to')), @@ -3105,6 +3012,7 @@ ('E', 'errorlog', '', _('name of error log file to write to')), ('p', 'port', 0, _('port to use (default: 8000)')), ('a', 'address', '', _('address to use')), + ('', 'prefix', '', _('prefix path to serve from (default: server root)')), ('n', 'name', '', _('name to show in web pages (default: working dir)')), ('', 'webdir-conf', '', _('name of the webdir config file' @@ -3116,6 +3024,10 @@ ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')), ('', 'certificate', '', _('SSL certificate file'))], _('hg serve [OPTION]...')), + "showconfig|debugconfig": + (showconfig, + [('u', 'untrusted', None, _('show untrusted configuration options'))], + _('hg showconfig [-u] [NAME]...')), "^status|st": (status, [('A', 'all', None, _('show status of all files')), @@ -3166,5 +3078,5 @@ } norepo = ("clone init version help debugancestor debugcomplete debugdata" - " debugindex debugindexdot debugdate debuginstall") + " debugindex debugindexdot debugdate debuginstall debugfsinfo") optionalrepo = ("identify paths serve showconfig")
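The new bisect command keeps its marks in .hg/bisect.state, one "<kind> <hex node>" line per mark with kind in good/bad/skip. A small parsing sketch matching the load loop above; the node hashes below are invented for illustration only.

    def parse_bisect_state(lines):
        state = {'good': [], 'bad': [], 'skip': []}
        for l in lines:
            kind, node = l[:-1].split()
            if kind not in state:
                raise ValueError('unknown bisect kind %s' % kind)
            state[kind].append(node)
        return state

    sample = [
        'bad 5c2d3e1f0a9b8c7d6e5f4a3b2c1d0e9f8a7b6c5d\n',   # hypothetical hash
        'good 0a1b2c3d4e5f60718293a4b5c6d7e8f901234567\n',  # hypothetical hash
    ]
    state = parse_bisect_state(sample)
    assert len(state['bad']) == 1 and len(state['good']) == 1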
--- a/mercurial/context.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/context.py Fri Feb 08 11:55:17 2008 +0100 @@ -100,13 +100,13 @@ try: return self._manifest[path], self._manifest.flags(path) except KeyError: - raise revlog.LookupError(_("'%s' not found in manifest") % path) + raise revlog.LookupError(path, _("'%s' not found in manifest") % path) if '_manifestdelta' in self.__dict__ or path in self.files(): if path in self._manifestdelta: return self._manifestdelta[path], self._manifestdelta.flags(path) node, flag = self._repo.manifest.find(self._changeset[0], path) if not node: - raise revlog.LookupError(_("'%s' not found in manifest") % path) + raise revlog.LookupError(path, _("'%s' not found in manifest") % path) return node, flag @@ -159,12 +159,11 @@ if filelog: self._filelog = filelog - if fileid is None: - if changectx is None: - self._changeid = changeid - else: - self._changectx = changectx - else: + if changeid is not None: + self._changeid = changeid + if changectx is not None: + self._changectx = changectx + if fileid is not None: self._fileid = fileid def __getattr__(self, name): @@ -175,7 +174,10 @@ self._filelog = self._repo.file(self._path) return self._filelog elif name == '_changeid': - self._changeid = self._filelog.linkrev(self._filenode) + if '_changectx' in self.__dict__: + self._changeid = self._changectx.rev() + else: + self._changeid = self._filelog.linkrev(self._filenode) return self._changeid elif name == '_filenode': if '_fileid' in self.__dict__: @@ -229,8 +231,11 @@ def rev(self): if '_changectx' in self.__dict__: return self._changectx.rev() + if '_changeid' in self.__dict__: + return self._changectx.rev() return self._filelog.linkrev(self._filenode) + def linkrev(self): return self._filelog.linkrev(self._filenode) def node(self): return self._changectx.node() def user(self): return self._changectx.user() def date(self): return self._changectx.date() @@ -241,18 +246,42 @@ def changectx(self): return self._changectx def data(self): return self._filelog.read(self._filenode) - def renamed(self): return self._filelog.renamed(self._filenode) def path(self): return self._path def size(self): return self._filelog.size(self._filerev) def cmp(self, text): return self._filelog.cmp(self._filenode, text) + def renamed(self): + """check if file was actually renamed in this changeset revision + + If rename logged in file revision, we report copy for changeset only + if file revisions linkrev points back to the changeset in question + or both changeset parents contain different file revisions. 
+ """ + + renamed = self._filelog.renamed(self._filenode) + if not renamed: + return renamed + + if self.rev() == self.linkrev(): + return renamed + + name = self.path() + fnode = self._filenode + for p in self._changectx.parents(): + try: + if fnode == p.filenode(name): + return None + except revlog.LookupError: + pass + return renamed + def parents(self): p = self._path fl = self._filelog pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)] - r = self.renamed() + r = self._filelog.renamed(self._filenode) if r: pl[0] = (r[0], r[1], None) @@ -318,7 +347,7 @@ return [getctx(p, n) for p, n in pl if n != nullrev] # use linkrev to find the first changeset where self appeared - if self.rev() != self._filelog.linkrev(self._filenode): + if self.rev() != self.linkrev(): base = self.filectx(self.filerev()) else: base = self @@ -483,7 +512,7 @@ return self._manifest.flags(path) except KeyError: return '' - + pnode = self._parents[0].changeset()[0] orig = self._repo.dirstate.copies().get(path, path) node, flag = self._repo.manifest.find(pnode, orig)
--- a/mercurial/demandimport.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/demandimport.py Fri Feb 08 11:55:17 2008 +0100 @@ -67,7 +67,7 @@ return "<proxied module '%s'>" % self._data[0] return "<unloaded module '%s'>" % self._data[0] def __call__(self, *args, **kwargs): - raise TypeError("'unloaded module' object is not callable") + raise TypeError("%s object is not callable" % repr(self)) def __getattribute__(self, attr): if attr in ('_data', '_extend', '_load', '_module'): return object.__getattribute__(self, attr) @@ -77,7 +77,7 @@ self._load() setattr(self._module, attr, val) -def _demandimport(name, globals=None, locals=None, fromlist=None): +def _demandimport(name, globals=None, locals=None, fromlist=None, level=None): if not locals or name in ignore or fromlist == ('*',): # these cases we can't really delay return _origimport(name, globals, locals, fromlist) @@ -95,6 +95,9 @@ return locals[base] return _demandmod(name, globals, locals) else: + if level is not None: + # from . import b,c,d or from .a import b,c,d + return _origimport(name, globals, locals, fromlist, level) # from a import b,c,d mod = _origimport(name, globals, locals) # recurse down the module chain
--- a/mercurial/dirstate.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/dirstate.py Fri Feb 08 11:55:17 2008 +0100 @@ -74,7 +74,7 @@ if cwd == self._root: return '' # self._root ends with a path separator if self._root is '/' or 'C:\' rootsep = self._root - if not rootsep.endswith(os.sep): + if not util.endswithsep(rootsep): rootsep += os.sep if cwd.startswith(rootsep): return cwd[len(rootsep):] @@ -87,7 +87,7 @@ cwd = self.getcwd() path = util.pathto(self._root, cwd, f) if self._slash: - return path.replace(os.sep, '/') + return util.normpath(path) return path def __getitem__(self, key): @@ -235,7 +235,7 @@ self._changepath(f, 'n', True) s = os.lstat(self._join(f)) self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0) - if self._copymap.has_key(f): + if f in self._copymap: del self._copymap[f] def normallookup(self, f): @@ -410,7 +410,7 @@ # self._root may end with a path separator when self._root == '/' common_prefix_len = len(self._root) - if not self._root.endswith(os.sep): + if not util.endswithsep(self._root): common_prefix_len += 1 normpath = util.normpath
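util.endswithsep and util.normpath are helpers this changeset factors out; their behaviour can be read off the inline code they replace here and in cmdutil.copy above. A sketch of what they are assumed to do:

    import os

    def normpath(path):
        # what dirstate.pathto did inline before: report paths with '/' separators
        return path.replace(os.sep, '/')

    def endswithsep(path):
        # what the old commands.docopy checked inline: trailing os.sep or os.altsep
        return path.endswith(os.sep) or bool(os.altsep and path.endswith(os.altsep))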
--- a/mercurial/dispatch.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/dispatch.py Fri Feb 08 11:55:17 2008 +0100 @@ -125,7 +125,7 @@ ui.warn("\n%r\n" % util.ellipsis(inst[1])) except ImportError, inst: m = str(inst).split()[-1] - ui.warn(_("abort: could not import module %s!\n" % m)) + ui.warn(_("abort: could not import module %s!\n") % m) if m in "mpatch bdiff".split(): ui.warn(_("(did you forget to compile extensions?)\n")) elif m in "zlib".split(): @@ -133,6 +133,8 @@ except util.Abort, inst: ui.warn(_("abort: %s\n") % inst) + except MemoryError: + ui.warn(_("abort: out of memory\n")) except SystemExit, inst: # Commands shouldn't sys.exit directly, but give a return code. # Just in case catch this and and pass exit code to caller. @@ -273,6 +275,15 @@ for name, module in extensions.extensions(): if name in _loaded: continue + + # setup extensions + # TODO this should be generalized to scheme, where extensions can + # redepend on other extensions. then we should toposort them, and + # do initialization in correct order + extsetup = getattr(module, 'extsetup', None) + if extsetup: + extsetup() + cmdtable = getattr(module, 'cmdtable', {}) overrides = [cmd for cmd in cmdtable if cmd in commands.table] if overrides: @@ -329,6 +340,7 @@ try: repo = hg.repository(ui, path=path) ui = repo.ui + ui.setconfig("bundle", "mainreporoot", repo.root) if not repo.local(): raise util.Abort(_("repository '%s' is not local") % path) except hg.RepoError: @@ -342,12 +354,12 @@ d = lambda: func(ui, *args, **cmdoptions) # run pre-hook, and abort if it fails - ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs)) + ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs)) if ret: return ret ret = _runcommand(ui, options, cmd, d) # run post-hook, passing command result - hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs), + hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs), result = ret) return ret @@ -361,8 +373,18 @@ if len(tb) != 2: # no raise raise ParseError(cmd, _("invalid arguments")) + return profiled(ui, checkargs, options) - if options['profile']: +def profiled(ui, func, options={}): + def profile_fp(): + outfile = ui.config('profile', 'output', untrusted=True) + if outfile: + pid = str(os.getpid()) + return open(outfile.replace('%p', pid), 'w') + else: + return sys.stderr + + if options.get('profile') or ui.config('profile', 'enable') == 'hotshot': import hotshot, hotshot.stats prof = hotshot.Profile("hg.prof") try: @@ -378,10 +400,11 @@ finally: prof.close() stats = hotshot.stats.load("hg.prof") + stats.stream = profile_fp() stats.strip_dirs() stats.sort_stats('time', 'calls') stats.print_stats(40) - elif options['lsprof']: + elif options.get('lsprof') or ui.config('profile', 'enable') == 'lsprof': try: from mercurial import lsprof except ImportError: @@ -391,11 +414,11 @@ p = lsprof.Profiler() p.enable(subcalls=True) try: - return checkargs() + return func() finally: p.disable() stats = lsprof.Stats(p.getstats()) stats.sort() - stats.pprint(top=10, file=sys.stderr, climit=5) + stats.pprint(top=10, file=profile_fp(), climit=5) else: - return checkargs() + return func()
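The reworked profiling code also consults a [profile] config section, so profiling can be switched on without command-line flags. A hypothetical hgrc sketch using only the keys the code above queries (profile.enable, profile.output; %p expands to the process id):

    [profile]
    # same effect as passing --lsprof (or set to "hotshot" for --profile)
    enable = lsprof
    # write the report here instead of stderr; %p expands to the hg process id
    output = /tmp/hg-%p.prof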
--- a/mercurial/fancyopts.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/fancyopts.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,35 +1,73 @@ import getopt def fancyopts(args, options, state): - long = [] - short = '' - map = {} - dt = {} + """ + read args, parse options, and store options in state + + each option is a tuple of: + + short option or '' + long option + default value + description + + option types include: + + boolean or none - option sets variable in state to true + string - parameter string is stored in state + list - parameter string is added to a list + integer - parameter strings is stored as int + function - call function with parameter - for s, l, d, c in options: - pl = l.replace('-', '_') - map['-'+s] = map['--'+l] = pl - if isinstance(d, list): - state[pl] = d[:] + non-option args are returned + """ + namelist = [] + shortlist = '' + argmap = {} + defmap = {} + + for short, name, default, comment in options: + # convert opts to getopt format + oname = name + name = name.replace('-', '_') + + argmap['-' + short] = argmap['--' + oname] = name + defmap[name] = default + + # copy defaults to state + if isinstance(default, list): + state[name] = default[:] + elif callable(default): + state[name] = None else: - state[pl] = d - dt[pl] = type(d) - if (d is not None and d is not True and d is not False and - not callable(d)): - if s: s += ':' - if l: l += '=' - if s: short = short + s - if l: long.append(l) + state[name] = default - opts, args = getopt.getopt(args, short, long) + # does it take a parameter? + if not (default is None or default is True or default is False): + if short: short += ':' + if oname: oname += '=' + if short: + shortlist += short + if name: + namelist.append(oname) + + # parse arguments + opts, args = getopt.getopt(args, shortlist, namelist) - for opt, arg in opts: - if dt[map[opt]] is type(fancyopts): state[map[opt]](state, map[opt], arg) - elif dt[map[opt]] is type(1): state[map[opt]] = int(arg) - elif dt[map[opt]] is type(''): state[map[opt]] = arg - elif dt[map[opt]] is type([]): state[map[opt]].append(arg) - elif dt[map[opt]] is type(None): state[map[opt]] = True - elif dt[map[opt]] is type(False): state[map[opt]] = True + # transfer result to state + for opt, val in opts: + name = argmap[opt] + t = type(defmap[name]) + if t is type(fancyopts): + state[name] = defmap[name](val) + elif t is type(1): + state[name] = int(val) + elif t is type(''): + state[name] = val + elif t is type([]): + state[name].append(val) + elif t is type(None) or t is type(False): + state[name] = True + # return unparsed args return args -
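A small usage sketch of the rewritten parser: each option is (short, long, default, help), the default's type drives the parsing, parsed values land in the state dict, and leftover arguments are returned. The option table below is illustrative.

    from mercurial.fancyopts import fancyopts

    options = [
        ('v', 'verbose', None, 'enable additional output'),
        ('r', 'rev', [], 'revision'),
        ('U', 'unified', 3, 'number of lines of context to show'),
        ('', 'style', '', 'display using template map file'),
    ]
    state = {}
    args = fancyopts(['-v', '-r', 'tip', '--unified', '5', 'README'],
                     options, state)
    assert state == {'verbose': True, 'rev': ['tip'], 'unified': 5, 'style': ''}
    assert args == ['README']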
--- a/mercurial/filelog.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/filelog.py Fri Feb 08 11:55:17 2008 +0100 @@ -58,7 +58,7 @@ if self.parents(node)[0] != nullid: return False m = self._readmeta(node) - if m and m.has_key("copy"): + if m and "copy" in m: return (m["copy"], bin(m["copyrev"])) return False
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/filemerge.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,206 @@ +# filemerge.py - file-level merge handling for Mercurial +# +# Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. + +from node import * +from i18n import _ +import util, os, tempfile, context, simplemerge, re + +def _toolstr(ui, tool, part, default=""): + return ui.config("merge-tools", tool + "." + part, default) + +def _toolbool(ui, tool, part, default=False): + return ui.configbool("merge-tools", tool + "." + part, default) + +def _findtool(ui, tool): + k = _toolstr(ui, tool, "regkey") + if k: + p = util.lookup_reg(k, _toolstr(ui, tool, "regname")) + if p: + p = util.find_exe(p + _toolstr(ui, tool, "regappend")) + if p: + return p + return util.find_exe(_toolstr(ui, tool, "executable", tool)) + +def _picktool(repo, ui, path, binary, symlink): + def check(tool, pat, symlink, binary): + tmsg = tool + if pat: + tmsg += " specified for " + pat + if pat and not _findtool(ui, tool): # skip search if not matching + ui.warn(_("couldn't find merge tool %s\n") % tmsg) + elif symlink and not _toolbool(ui, tool, "symlink"): + ui.warn(_("tool %s can't handle symlinks\n") % tmsg) + elif binary and not _toolbool(ui, tool, "binary"): + ui.warn(_("tool %s can't handle binary\n") % tmsg) + elif not util.gui() and _toolbool(ui, tool, "gui"): + ui.warn(_("tool %s requires a GUI\n") % tmsg) + else: + return True + return False + + # HGMERGE takes precedence + hgmerge = os.environ.get("HGMERGE") + if hgmerge: + return (hgmerge, hgmerge) + + # then patterns + for pat, tool in ui.configitems("merge-patterns"): + mf = util.matcher(repo.root, "", [pat], [], [])[1] + if mf(path) and check(tool, pat, symlink, False): + toolpath = _findtool(ui, tool) + return (tool, '"' + toolpath + '"') + + # then merge tools + tools = {} + for k,v in ui.configitems("merge-tools"): + t = k.split('.')[0] + if t not in tools: + tools[t] = int(_toolstr(ui, t, "priority", "0")) + tools = [(-p,t) for t,p in tools.items()] + tools.sort() + if ui.config("ui", "merge"): + tools.insert(0, (None, ui.config("ui", "merge"))) # highest priority + tools.append((None, "hgmerge")) # the old default, if found + for p,t in tools: + toolpath = _findtool(ui, t) + if toolpath and check(t, None, symlink, binary): + return (t, '"' + toolpath + '"') + # internal merge as last resort + return (not (symlink or binary) and "internal:merge" or None, None) + +def _eoltype(data): + "Guess the EOL type of a file" + if '\0' in data: # binary + return None + if '\r\n' in data: # Windows + return '\r\n' + if '\r' in data: # Old Mac + return '\r' + if '\n' in data: # UNIX + return '\n' + return None # unknown + +def _matcheol(file, origfile): + "Convert EOL markers in a file to match origfile" + tostyle = _eoltype(open(origfile, "rb").read()) + if tostyle: + data = open(file, "rb").read() + style = _eoltype(data) + if style: + newdata = data.replace(style, tostyle) + if newdata != data: + open(file, "wb").write(newdata) + +def filemerge(repo, fw, fd, fo, wctx, mctx): + """perform a 3-way merge in the working directory + + fw = original filename in the working directory + fd = destination filename in the working directory + fo = filename in other parent + wctx, mctx = working and merge changecontexts + """ + + def temp(prefix, ctx): + pre = "%s~%s." 
% (os.path.basename(ctx.path()), prefix) + (fd, name) = tempfile.mkstemp(prefix=pre) + data = repo.wwritedata(ctx.path(), ctx.data()) + f = os.fdopen(fd, "wb") + f.write(data) + f.close() + return name + + def isbin(ctx): + try: + return util.binary(ctx.data()) + except IOError: + return False + + fco = mctx.filectx(fo) + if not fco.cmp(wctx.filectx(fd).data()): # files identical? + return None + + ui = repo.ui + fcm = wctx.filectx(fw) + fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev) + binary = isbin(fcm) or isbin(fco) or isbin(fca) + symlink = fcm.islink() or fco.islink() + tool, toolpath = _picktool(repo, ui, fw, binary, symlink) + ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") % + (tool, fw, binary, symlink)) + + if not tool: + tool = "internal:local" + if ui.prompt(_(" no tool found to merge %s\n" + "keep (l)ocal or take (o)ther?") % fw, + _("[lo]"), _("l")) != _("l"): + tool = "internal:other" + if tool == "internal:local": + return 0 + if tool == "internal:other": + repo.wwrite(fd, fco.data(), fco.fileflags()) + return 0 + if tool == "internal:fail": + return 1 + + # do the actual merge + a = repo.wjoin(fd) + b = temp("base", fca) + c = temp("other", fco) + out = "" + back = a + ".orig" + util.copyfile(a, back) + + if fw != fo: + repo.ui.status(_("merging %s and %s\n") % (fw, fo)) + else: + repo.ui.status(_("merging %s\n") % fw) + repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca)) + + # do we attempt to simplemerge first? + if _toolbool(ui, tool, "premerge", not (binary or symlink)): + r = simplemerge.simplemerge(a, b, c, quiet=True) + if not r: + ui.debug(_(" premerge successful\n")) + os.unlink(back) + os.unlink(b) + os.unlink(c) + return 0 + util.copyfile(back, a) # restore from backup and try again + + env = dict(HG_FILE=fd, + HG_MY_NODE=str(wctx.parents()[0]), + HG_OTHER_NODE=str(mctx), + HG_MY_ISLINK=fcm.islink(), + HG_OTHER_ISLINK=fco.islink(), + HG_BASE_ISLINK=fca.islink()) + + if tool == "internal:merge": + r = simplemerge.simplemerge(a, b, c, label=['local', 'other']) + else: + args = _toolstr(ui, tool, "args", '$local $base $other') + if "$output" in args: + out, a = a, back # read input from backup, write to original + replace = dict(local=a, base=b, other=c, output=out) + args = re.sub("\$(local|base|other|output)", + lambda x: '"%s"' % replace[x.group()[1:]], args) + r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env) + + if not r and _toolbool(ui, tool, "checkconflicts"): + if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()): + r = 1 + + if _toolbool(ui, tool, "fixeol"): + _matcheol(repo.wjoin(fd), back) + + if r: + repo.ui.warn(_("merging %s failed!\n") % fd) + else: + os.unlink(back) + + os.unlink(b) + os.unlink(c) + return r
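
When filemerge() runs an external tool it expands the merge-tools.<tool>.args template, substituting $local, $base, $other and $output before shelling out. A stand-alone sketch of that expansion, using hypothetical file names:

    import re

    # hypothetical paths standing in for the working file, the
    # temporary base/other copies and the output file
    replace = dict(local='file.c', base='/tmp/file.c~base.ab12',
                   other='/tmp/file.c~other.cd34', output='file.c')
    args = '$local $base $other'      # the default template
    cmdline = re.sub(r"\$(local|base|other|output)",
                     lambda x: '"%s"' % replace[x.group()[1:]], args)
    # cmdline == '"file.c" "/tmp/file.c~base.ab12" "/tmp/file.c~other.cd34"'
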
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/hbisect.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,105 @@ +# changelog bisection for mercurial +# +# Copyright 2007 Matt Mackall +# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org> +# Inspired by git bisect, extension skeleton taken from mq.py. +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. + +from i18n import _ +import hg, util + +def bisect(changelog, state): + clparents = changelog.parentrevs + skip = dict.fromkeys([changelog.rev(n) for n in state['skip']]) + + def buildancestors(bad, good): + # only the earliest bad revision matters + badrev = min([changelog.rev(n) for n in bad]) + goodrevs = [changelog.rev(n) for n in good] + # build ancestors array + ancestors = [[]] * (changelog.count() + 1) # an extra for [-1] + + # clear good revs from array + for node in goodrevs: + ancestors[node] = None + for rev in xrange(changelog.count(), -1, -1): + if ancestors[rev] is None: + for prev in clparents(rev): + ancestors[prev] = None + + if ancestors[badrev] is None: + return badrev, None + return badrev, ancestors + + good = 0 + badrev, ancestors = buildancestors(state['bad'], state['good']) + if not ancestors: # looking for bad to good transition? + good = 1 + badrev, ancestors = buildancestors(state['good'], state['bad']) + bad = changelog.node(badrev) + if not ancestors: # now we're confused + raise util.Abort(_("Inconsistent state, %s:%s is good and bad") + % (badrev, hg.short(bad))) + + # build children dict + children = {} + visit = [badrev] + candidates = [] + while visit: + rev = visit.pop(0) + if ancestors[rev] == []: + candidates.append(rev) + for prev in clparents(rev): + if prev != -1: + if prev in children: + children[prev].append(rev) + else: + children[prev] = [rev] + visit.append(prev) + + candidates.sort() + # have we narrowed it down to one entry? + tot = len(candidates) + if tot == 1: + return (bad, 0, good) + perfect = tot / 2 + + # find the best node to test + best_rev = None + best_len = -1 + poison = {} + for rev in candidates: + if rev in poison: + for c in children.get(rev, []): + poison[c] = True # poison children + continue + + a = ancestors[rev] or [rev] + ancestors[rev] = None + + x = len(a) # number of ancestors + y = tot - x # number of non-ancestors + value = min(x, y) # how good is this test? + if value > best_len and rev not in skip: + best_len = value + best_rev = rev + if value == perfect: # found a perfect candidate? quit early + break + + if y < perfect: # all downhill from here? + for c in children.get(rev, []): + poison[c] = True # poison children + continue + + for c in children.get(rev, []): + if ancestors[c]: + ancestors[c] = dict.fromkeys(ancestors[c] + a).keys() + else: + ancestors[c] = a + [c] + + assert best_rev is not None + best_node = changelog.node(best_rev) + + return (best_node, tot, good)
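
A sketch of how a caller might drive the new bisection helper; state maps 'good', 'bad' and 'skip' to lists of changelog nodes, and repo, good_node and bad_node are hypothetical:

    from mercurial import hbisect

    state = {'good': [good_node], 'bad': [bad_node], 'skip': []}
    node, remaining, goodseek = hbisect.bisect(repo.changelog, state)
    if remaining == 0:
        # narrowed down: node is the changeset where the state flips
        repo.ui.write("bisection result: rev %d\n"
                      % repo.changelog.rev(node))
    else:
        # node is the next revision worth testing
        repo.ui.write("testing rev %d, %d candidates remain\n"
                      % (repo.changelog.rev(node), remaining))
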
--- a/mercurial/help.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/help.py Fri Feb 08 11:55:17 2008 +0100 @@ -43,8 +43,7 @@ 'hg' (with com/exe/bat/cmd extension on Windows) is searched. HGEDITOR:: - This is the name of the editor to use when committing. Defaults to the - value of EDITOR. + This is the name of the editor to use when committing. See EDITOR. (deprecated, use .hgrc) @@ -67,9 +66,6 @@ will be executed with three arguments: local file, remote file, ancestor file. - The default program is "hgmerge", which is a shell script provided - by Mercurial with some sensible defaults. - (deprecated, use .hgrc) HGRCPATH:: @@ -94,9 +90,16 @@ If neither HGUSER nor EMAIL is set, LOGNAME will be used (with '@hostname' appended) as the author value for a commit. +VISUAL:: + This is the name of the editor to use when committing. See EDITOR. + EDITOR:: - This is the name of the editor used in the hgmerge script. It will be - used for commit messages if HGEDITOR isn't set. Defaults to 'vi'. + Sometimes Mercurial needs to open a text file in an editor + for a user to modify, for example when writing commit messages. + The editor it uses is determined by looking at the environment + variables HGEDITOR, VISUAL and EDITOR, in that order. The first + non-empty one is chosen. If all of them are empty, the editor + defaults to 'vi'. PYTHONPATH:: This is used by Python to find imported modules and may need to be set
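
The lookup order described above can be pictured with a short sketch; this only mirrors the documented behaviour (HGEDITOR, then VISUAL, then EDITOR, falling back to 'vi') rather than quoting the actual ui code:

    import os

    editor = (os.environ.get("HGEDITOR") or
              os.environ.get("VISUAL") or
              os.environ.get("EDITOR") or
              "vi")
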
--- a/mercurial/hg.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hg.py Fri Feb 08 11:55:17 2008 +0100 @@ -164,17 +164,23 @@ if copy: def force_copy(src, dst): - try: - util.copyfiles(src, dst) - except OSError, inst: - if inst.errno != errno.ENOENT: - raise + if not os.path.exists(src): + # Tolerate empty source repository and optional files + return + util.copyfiles(src, dst) src_store = os.path.realpath(src_repo.spath) if not os.path.exists(dest): os.mkdir(dest) - dest_path = os.path.realpath(os.path.join(dest, ".hg")) - os.mkdir(dest_path) + try: + dest_path = os.path.realpath(os.path.join(dest, ".hg")) + os.mkdir(dest_path) + except OSError, inst: + if inst.errno == errno.EEXIST: + dir_cleanup.close() + raise util.Abort(_("destination '%s' already exists") + % dest) + raise if src_repo.spath != src_repo.path: # XXX racy dummy_changelog = os.path.join(dest_path, "00changelog.i") @@ -203,7 +209,14 @@ dest_repo = repository(ui, dest) else: - dest_repo = repository(ui, dest, create=True) + try: + dest_repo = repository(ui, dest, create=True) + except OSError, inst: + if inst.errno == errno.EEXIST: + dir_cleanup.close() + raise util.Abort(_("destination '%s' already exists") + % dest) + raise revs = None if rev: @@ -266,13 +279,13 @@ # len(pl)==1, otherwise _merge.update() would have raised util.Abort: repo.ui.status(_(" hg update %s\n hg update %s\n") % (pl[0].rev(), repo.changectx(node).rev())) - return stats[3] + return stats[3] > 0 def clean(repo, node, show_stats=True): """forcibly switch the working directory to node, clobbering changes""" stats = _merge.update(repo, node, False, True, None) if show_stats: _showstats(repo, stats) - return stats[3] + return stats[3] > 0 def merge(repo, node, force=None, remind=True): """branch merge with node, resolving changes""" @@ -287,11 +300,11 @@ % (pl[0].rev(), pl[1].rev())) elif remind: repo.ui.status(_("(branch merge, don't forget to commit)\n")) - return stats[3] + return stats[3] > 0 def revert(repo, node, choose): """revert changes to revision in node without updating dirstate""" - return _merge.update(repo, node, False, True, choose)[3] + return _merge.update(repo, node, False, True, choose)[3] > 0 def verify(repo): """verify the consistency of a repository"""
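
update(), clean(), merge() and revert() in mercurial.hg now report a plain boolean, True when unresolved files remain, instead of the raw count. A sketch of a caller; repo and node are hypothetical, and the update(repo, node) signature is assumed from the surrounding module:

    from mercurial import hg

    if hg.update(repo, node):
        # formerly the number of unresolved files, now just a flag
        repo.ui.warn("there are unresolved merges\n")
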
--- a/mercurial/hgweb/common.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hgweb/common.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,7 +6,29 @@ # This software may be used and distributed according to the terms # of the GNU General Public License, incorporated herein by reference. -import os, mimetypes +import errno, mimetypes, os + +HTTP_OK = 200 +HTTP_BAD_REQUEST = 400 +HTTP_NOT_FOUND = 404 +HTTP_SERVER_ERROR = 500 + +class ErrorResponse(Exception): + def __init__(self, code, message=None): + Exception.__init__(self) + self.code = code + if message: + self.message = message + else: + self.message = _statusmessage(code) + +def _statusmessage(code): + from BaseHTTPServer import BaseHTTPRequestHandler + responses = BaseHTTPRequestHandler.responses + return responses.get(code, ('Error', 'Unknown error'))[0] + +def statusmessage(code): + return '%d %s' % (code, _statusmessage(code)) def get_mtime(repo_path): store_path = os.path.join(repo_path, ".hg") @@ -19,11 +41,11 @@ return os.stat(store_path).st_mtime def staticfile(directory, fname, req): - """return a file inside directory with guessed content-type header + """return a file inside directory with guessed Content-Type header fname always uses '/' as directory separator and isn't allowed to contain unusual path components. - Content-type is guessed using the mimetypes module. + Content-Type is guessed using the mimetypes module. Return an empty string if fname is illegal or file not found. """ @@ -37,12 +59,15 @@ try: os.stat(path) ct = mimetypes.guess_type(path)[0] or "text/plain" - req.header([('Content-type', ct), - ('Content-length', str(os.path.getsize(path)))]) + req.respond(HTTP_OK, ct, length = os.path.getsize(path)) return file(path, 'rb').read() - except (TypeError, OSError): - # illegal fname or unreadable file - return "" + except TypeError: + raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal file name') + except OSError, err: + if err.errno == errno.ENOENT: + raise ErrorResponse(HTTP_NOT_FOUND) + else: + raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror) def style_map(templatepath, style): """Return path to mapfile for a given style. @@ -76,3 +101,12 @@ parity = 1 - parity count = 0 +def get_contact(config): + """Return repo contact information or empty string. + + web.contact is the primary source, but if that is not set, try + ui.username or $EMAIL as a fallback to display something useful. + """ + return (config("web", "contact") or + config("ui", "username") or + os.environ.get("EMAIL") or "")
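
The new ErrorResponse exception carries an HTTP status code plus an optional message, and statusmessage() turns a code into a status line. A small sketch, with a hypothetical path in the message:

    from mercurial.hgweb.common import ErrorResponse, statusmessage, HTTP_NOT_FOUND

    try:
        raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: doc/')
    except ErrorResponse, inst:
        print statusmessage(inst.code)   # '404 Not Found'
        print inst.message               # 'Path not found: doc/'
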
--- a/mercurial/hgweb/hgweb_mod.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hgweb/hgweb_mod.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,13 +6,29 @@ # This software may be used and distributed according to the terms # of the GNU General Public License, incorporated herein by reference. -import os, mimetypes, re, zlib, mimetools, cStringIO, sys -import tempfile, urllib, bz2 +import os, mimetypes, re from mercurial.node import * -from mercurial.i18n import gettext as _ -from mercurial import mdiff, ui, hg, util, archival, streamclone, patch -from mercurial import revlog, templater -from common import get_mtime, staticfile, style_map, paritygen +from mercurial import mdiff, ui, hg, util, archival, patch, hook +from mercurial import revlog, templater, templatefilters +from common import ErrorResponse, get_mtime, style_map, paritygen, get_contact +from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR +from request import wsgirequest +import webcommands, protocol + +shortcuts = { + 'cl': [('cmd', ['changelog']), ('rev', None)], + 'sl': [('cmd', ['shortlog']), ('rev', None)], + 'cs': [('cmd', ['changeset']), ('node', None)], + 'f': [('cmd', ['file']), ('filenode', None)], + 'fl': [('cmd', ['filelog']), ('filenode', None)], + 'fd': [('cmd', ['filediff']), ('node', None)], + 'fa': [('cmd', ['annotate']), ('filenode', None)], + 'mf': [('cmd', ['manifest']), ('manifest', None)], + 'ca': [('cmd', ['archive']), ('node', None)], + 'tags': [('cmd', ['tags'])], + 'tip': [('cmd', ['changeset']), ('node', ['tip'])], + 'static': [('cmd', ['static']), ('file', None)] +} def _up(p): if p[0] != "/": @@ -63,13 +79,15 @@ return nav class hgweb(object): - def __init__(self, repo, name=None): + def __init__(self, repo, name=None, parentui=None): if isinstance(repo, str): - parentui = ui.ui(report_untrusted=False, interactive=False) + parentui = (parentui or + ui.ui(report_untrusted=False, interactive=False)) self.repo = hg.repository(parentui, repo) else: self.repo = repo + hook.redirect(True) self.mtime = -1 self.reponame = name self.archives = 'zip', 'gz', 'bz2' @@ -106,17 +124,201 @@ self.allowpull = self.configbool("web", "allowpull", True) self.encoding = self.config("web", "encoding", util._encoding) + def run(self): + if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): + raise RuntimeError("This function is only intended to be called while running as a CGI script.") + import mercurial.hgweb.wsgicgi as wsgicgi + wsgicgi.launch(self) + + def __call__(self, env, respond): + req = wsgirequest(env, respond) + self.run_wsgi(req) + return req + + def run_wsgi(self, req): + + self.refresh() + + # expand form shortcuts + + for k in shortcuts.iterkeys(): + if k in req.form: + for name, value in shortcuts[k]: + if value is None: + value = req.form[k] + req.form[name] = value + del req.form[k] + + # work with CGI variables to create coherent structure + # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME + + req.url = req.env['SCRIPT_NAME'] + if not req.url.endswith('/'): + req.url += '/' + if 'REPO_NAME' in req.env: + req.url += req.env['REPO_NAME'] + '/' + + if req.env.get('PATH_INFO'): + parts = req.env.get('PATH_INFO').strip('/').split('/') + repo_parts = req.env.get('REPO_NAME', '').split('/') + if parts[:len(repo_parts)] == repo_parts: + parts = parts[len(repo_parts):] + query = '/'.join(parts) + else: + query = req.env['QUERY_STRING'].split('&', 1)[0] + query = query.split(';', 1)[0] + + # translate user-visible url structure to internal structure + + 
args = query.split('/', 2) + if 'cmd' not in req.form and args and args[0]: + + cmd = args.pop(0) + style = cmd.rfind('-') + if style != -1: + req.form['style'] = [cmd[:style]] + cmd = cmd[style+1:] + + # avoid accepting e.g. style parameter as command + if hasattr(webcommands, cmd) or hasattr(protocol, cmd): + req.form['cmd'] = [cmd] + + if args and args[0]: + node = args.pop(0) + req.form['node'] = [node] + if args: + req.form['file'] = args + + if cmd == 'static': + req.form['file'] = req.form['node'] + elif cmd == 'archive': + fn = req.form['node'][0] + for type_, spec in self.archive_specs.iteritems(): + ext = spec[2] + if fn.endswith(ext): + req.form['node'] = [fn[:-len(ext)]] + req.form['type'] = [type_] + + # actually process the request + + try: + + cmd = req.form.get('cmd', [''])[0] + if cmd in protocol.__all__: + method = getattr(protocol, cmd) + method(self, req) + else: + tmpl = self.templater(req) + ctype = tmpl('mimetype', encoding=self.encoding) + ctype = templater.stringify(ctype) + + if cmd == '': + req.form['cmd'] = [tmpl.cache['default']] + cmd = req.form['cmd'][0] + + if cmd not in webcommands.__all__: + msg = 'No such method: %s' % cmd + raise ErrorResponse(HTTP_BAD_REQUEST, msg) + elif cmd == 'file' and 'raw' in req.form.get('style', []): + self.ctype = ctype + content = webcommands.rawfile(self, req, tmpl) + else: + content = getattr(webcommands, cmd)(self, req, tmpl) + req.respond(HTTP_OK, ctype) + + req.write(content) + del tmpl + + except revlog.LookupError, err: + req.respond(HTTP_NOT_FOUND, ctype) + req.write(tmpl('error', error='revision not found: %s' % err.name)) + except (hg.RepoError, revlog.RevlogError), inst: + req.respond(HTTP_SERVER_ERROR, ctype) + req.write(tmpl('error', error=str(inst))) + except ErrorResponse, inst: + req.respond(inst.code, ctype) + req.write(tmpl('error', error=inst.message)) + + def templater(self, req): + + # determine scheme, port and server name + # this is needed to create absolute urls + + proto = req.env.get('wsgi.url_scheme') + if proto == 'https': + proto = 'https' + default_port = "443" + else: + proto = 'http' + default_port = "80" + + port = req.env["SERVER_PORT"] + port = port != default_port and (":" + port) or "" + urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port) + staticurl = self.config("web", "staticurl") or req.url + 'static/' + if not staticurl.endswith('/'): + staticurl += '/' + + # some functions for the templater + + def header(**map): + yield tmpl('header', encoding=self.encoding, **map) + + def footer(**map): + yield tmpl("footer", **map) + + def motd(**map): + yield self.config("web", "motd", "") + + def sessionvars(**map): + fields = [] + if 'style' in req.form: + style = req.form['style'][0] + if style != self.config('web', 'style', ''): + fields.append(('style', style)) + + separator = req.url[-1] == '?' and ';' or '?' 
+ for name, value in fields: + yield dict(name=name, value=value, separator=separator) + separator = ';' + + # figure out which style to use + + style = self.config("web", "style", "") + if 'style' in req.form: + style = req.form['style'][0] + mapfile = style_map(self.templatepath, style) + + if not self.reponame: + self.reponame = (self.config("web", "name") + or req.env.get('REPO_NAME') + or req.url.strip('/') or self.repo.root) + + # create the templater + + tmpl = templater.templater(mapfile, templatefilters.filters, + defaults={"url": req.url, + "staticurl": staticurl, + "urlbase": urlbase, + "repo": self.reponame, + "header": header, + "footer": footer, + "motd": motd, + "sessionvars": sessionvars + }) + return tmpl + def archivelist(self, nodeid): allowed = self.configlist("web", "allow_archive") for i, spec in self.archive_specs.iteritems(): if i in allowed or self.configbool("web", "allow" + i): yield {"type" : i, "extension" : spec[2], "node" : nodeid} - def listfilediffs(self, files, changeset): + def listfilediffs(self, tmpl, files, changeset): for f in files[:self.maxfiles]: - yield self.t("filedifflink", node=hex(changeset), file=f) + yield tmpl("filedifflink", node=hex(changeset), file=f) if len(files) > self.maxfiles: - yield self.t("fileellipses") + yield tmpl("fileellipses") def siblings(self, siblings=[], hiderev=None, **args): siblings = [s for s in siblings if s.node() != nullid] @@ -148,11 +350,11 @@ branches.append({"name": branch}) return branches - def showtag(self, t1, node=nullid, **args): + def showtag(self, tmpl, t1, node=nullid, **args): for t in self.repo.nodetags(node): - yield self.t(t1, tag=t, **args) + yield tmpl(t1, tag=t, **args) - def diff(self, node1, node2, files): + def diff(self, tmpl, node1, node2, files): def filterfiles(filters, files): l = [x for x in files if x in filters] @@ -164,22 +366,22 @@ parity = paritygen(self.stripecount) def diffblock(diff, f, fn): - yield self.t("diffblock", - lines=prettyprintlines(diff), - parity=parity.next(), - file=f, - filenode=hex(fn or nullid)) + yield tmpl("diffblock", + lines=prettyprintlines(diff), + parity=parity.next(), + file=f, + filenode=hex(fn or nullid)) def prettyprintlines(diff): for l in diff.splitlines(1): if l.startswith('+'): - yield self.t("difflineplus", line=l) + yield tmpl("difflineplus", line=l) elif l.startswith('-'): - yield self.t("difflineminus", line=l) + yield tmpl("difflineminus", line=l) elif l.startswith('@'): - yield self.t("difflineat", line=l) + yield tmpl("difflineat", line=l) else: - yield self.t("diffline", line=l) + yield tmpl("diffline", line=l) r = self.repo c1 = r.changectx(node1) @@ -209,7 +411,7 @@ yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f, opts=diffopts), f, tn) - def changelog(self, ctx, shortlog=False): + def changelog(self, tmpl, ctx, shortlog=False): def changelist(limit=0,**map): cl = self.repo.changelog l = [] # build a list in forward order for efficiency @@ -224,7 +426,7 @@ "changelogtag": self.showtag("changelogtag",n), "desc": ctx.description(), "date": ctx.date(), - "files": self.listfilediffs(ctx.files(), n), + "files": self.listfilediffs(tmpl, ctx.files(), n), "rev": i, "node": hex(n), "tags": self.nodetagsdict(n), @@ -247,15 +449,15 @@ changenav = revnavgen(pos, maxchanges, count, self.repo.changectx) - yield self.t(shortlog and 'shortlog' or 'changelog', - changenav=changenav, - node=hex(cl.tip()), - rev=pos, changesets=count, - entries=lambda **x: changelist(limit=0,**x), - latestentry=lambda **x: changelist(limit=1,**x), - 
archives=self.archivelist("tip")) + return tmpl(shortlog and 'shortlog' or 'changelog', + changenav=changenav, + node=hex(cl.tip()), + rev=pos, changesets=count, + entries=lambda **x: changelist(limit=0,**x), + latestentry=lambda **x: changelist(limit=1,**x), + archives=self.archivelist("tip")) - def search(self, query): + def search(self, tmpl, query): def changelist(**map): cl = self.repo.changelog @@ -286,19 +488,19 @@ count += 1 n = ctx.node() - yield self.t('searchentry', - parity=parity.next(), - author=ctx.user(), - parent=self.siblings(ctx.parents()), - child=self.siblings(ctx.children()), - changelogtag=self.showtag("changelogtag",n), - desc=ctx.description(), - date=ctx.date(), - files=self.listfilediffs(ctx.files(), n), - rev=ctx.rev(), - node=hex(n), - tags=self.nodetagsdict(n), - branches=self.nodebranchdict(ctx)) + yield tmpl('searchentry', + parity=parity.next(), + author=ctx.user(), + parent=self.siblings(ctx.parents()), + child=self.siblings(ctx.children()), + changelogtag=self.showtag("changelogtag",n), + desc=ctx.description(), + date=ctx.date(), + files=self.listfilediffs(tmpl, ctx.files(), n), + rev=ctx.rev(), + node=hex(n), + tags=self.nodetagsdict(n), + branches=self.nodebranchdict(ctx)) if count >= self.maxchanges: break @@ -306,13 +508,13 @@ cl = self.repo.changelog parity = paritygen(self.stripecount) - yield self.t('search', - query=query, - node=hex(cl.tip()), - entries=changelist, - archives=self.archivelist("tip")) + return tmpl('search', + query=query, + node=hex(cl.tip()), + entries=changelist, + archives=self.archivelist("tip")) - def changeset(self, ctx): + def changeset(self, tmpl, ctx): n = ctx.node() parents = ctx.parents() p1 = parents[0].node() @@ -320,29 +522,29 @@ files = [] parity = paritygen(self.stripecount) for f in ctx.files(): - files.append(self.t("filenodelink", - node=hex(n), file=f, - parity=parity.next())) + files.append(tmpl("filenodelink", + node=hex(n), file=f, + parity=parity.next())) def diff(**map): - yield self.diff(p1, n, None) + yield self.diff(tmpl, p1, n, None) - yield self.t('changeset', - diff=diff, - rev=ctx.rev(), - node=hex(n), - parent=self.siblings(parents), - child=self.siblings(ctx.children()), - changesettag=self.showtag("changesettag",n), - author=ctx.user(), - desc=ctx.description(), - date=ctx.date(), - files=files, - archives=self.archivelist(hex(n)), - tags=self.nodetagsdict(n), - branches=self.nodebranchdict(ctx)) + return tmpl('changeset', + diff=diff, + rev=ctx.rev(), + node=hex(n), + parent=self.siblings(parents), + child=self.siblings(ctx.children()), + changesettag=self.showtag("changesettag",n), + author=ctx.user(), + desc=ctx.description(), + date=ctx.date(), + files=files, + archives=self.archivelist(hex(n)), + tags=self.nodetagsdict(n), + branches=self.nodebranchdict(ctx)) - def filelog(self, fctx): + def filelog(self, tmpl, fctx): f = fctx.path() fl = fctx.filelog() count = fl.count() @@ -379,23 +581,20 @@ nodefunc = lambda x: fctx.filectx(fileid=x) nav = revnavgen(pos, pagelen, count, nodefunc) - yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav, - entries=lambda **x: entries(limit=0, **x), - latestentry=lambda **x: entries(limit=1, **x)) + return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav, + entries=lambda **x: entries(limit=0, **x), + latestentry=lambda **x: entries(limit=1, **x)) - def filerevision(self, fctx): + def filerevision(self, tmpl, fctx): f = fctx.path() text = fctx.data() fl = fctx.filelog() n = fctx.filenode() parity = paritygen(self.stripecount) - mt = 
mimetypes.guess_type(f)[0] - rawtext = text if util.binary(text): - mt = mt or 'application/octet-stream' - text = "(binary:%s)" % mt - mt = mt or 'text/plain' + mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' + text = '(binary:%s)' % mt def lines(): for l, t in enumerate(text.splitlines(1)): @@ -403,23 +602,21 @@ "linenumber": "% 6d" % (l + 1), "parity": parity.next()} - yield self.t("filerevision", - file=f, - path=_up(f), - text=lines(), - raw=rawtext, - mimetype=mt, - rev=fctx.rev(), - node=hex(fctx.node()), - author=fctx.user(), - date=fctx.date(), - desc=fctx.description(), - parent=self.siblings(fctx.parents()), - child=self.siblings(fctx.children()), - rename=self.renamelink(fl, n), - permissions=fctx.manifest().flags(f)) + return tmpl("filerevision", + file=f, + path=_up(f), + text=lines(), + rev=fctx.rev(), + node=hex(fctx.node()), + author=fctx.user(), + date=fctx.date(), + desc=fctx.description(), + parent=self.siblings(fctx.parents()), + child=self.siblings(fctx.children()), + rename=self.renamelink(fl, n), + permissions=fctx.manifest().flags(f)) - def fileannotate(self, fctx): + def fileannotate(self, tmpl, fctx): f = fctx.path() n = fctx.filenode() fl = fctx.filelog() @@ -441,21 +638,21 @@ "file": f.path(), "line": l} - yield self.t("fileannotate", - file=f, - annotate=annotate, - path=_up(f), - rev=fctx.rev(), - node=hex(fctx.node()), - author=fctx.user(), - date=fctx.date(), - desc=fctx.description(), - rename=self.renamelink(fl, n), - parent=self.siblings(fctx.parents()), - child=self.siblings(fctx.children()), - permissions=fctx.manifest().flags(f)) + return tmpl("fileannotate", + file=f, + annotate=annotate, + path=_up(f), + rev=fctx.rev(), + node=hex(fctx.node()), + author=fctx.user(), + date=fctx.date(), + desc=fctx.description(), + rename=self.renamelink(fl, n), + parent=self.siblings(fctx.parents()), + child=self.siblings(fctx.children()), + permissions=fctx.manifest().flags(f)) - def manifest(self, ctx, path): + def manifest(self, tmpl, ctx, path): mf = ctx.manifest() node = ctx.node() @@ -478,6 +675,9 @@ short = os.path.basename(remain) files[short] = (f, n) + if not files: + raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path) + def filelist(**map): fl = files.keys() fl.sort() @@ -506,19 +706,19 @@ "path": "%s%s" % (abspath, f), "basename": f[:-1]} - yield self.t("manifest", - rev=ctx.rev(), - node=hex(node), - path=abspath, - up=_up(abspath), - upparity=parity.next(), - fentries=filelist, - dentries=dirlist, - archives=self.archivelist(hex(node)), - tags=self.nodetagsdict(node), - branches=self.nodebranchdict(ctx)) + return tmpl("manifest", + rev=ctx.rev(), + node=hex(node), + path=abspath, + up=_up(abspath), + upparity=parity.next(), + fentries=filelist, + dentries=dirlist, + archives=self.archivelist(hex(node)), + tags=self.nodetagsdict(node), + branches=self.nodebranchdict(ctx)) - def tags(self): + def tags(self, tmpl): i = self.repo.tagslist() i.reverse() parity = paritygen(self.stripecount) @@ -536,13 +736,13 @@ "date": self.repo.changectx(n).date(), "node": hex(n)} - yield self.t("tags", - node=hex(self.repo.changelog.tip()), - entries=lambda **x: entries(False,0, **x), - entriesnotip=lambda **x: entries(True,0, **x), - latestentry=lambda **x: entries(True,1, **x)) + return tmpl("tags", + node=hex(self.repo.changelog.tip()), + entries=lambda **x: entries(False,0, **x), + entriesnotip=lambda **x: entries(True,0, **x), + latestentry=lambda **x: entries(True,1, **x)) - def summary(self): + def summary(self, tmpl): i = 
self.repo.tagslist() i.reverse() @@ -557,11 +757,11 @@ if count > 10: # limit to 10 tags break; - yield self.t("tagentry", - parity=parity.next(), - tag=k, - node=hex(n), - date=self.repo.changectx(n).date()) + yield tmpl("tagentry", + parity=parity.next(), + tag=k, + node=hex(n), + date=self.repo.changectx(n).date()) def branches(**map): @@ -587,8 +787,8 @@ n = ctx.node() hn = hex(n) - l.insert(0, self.t( - 'shortlogentry', + l.insert(0, tmpl( + 'shortlogentry', parity=parity.next(), author=ctx.user(), desc=ctx.description(), @@ -605,34 +805,32 @@ start = max(0, count - self.maxchanges) end = min(count, start + self.maxchanges) - yield self.t("summary", - desc=self.config("web", "description", "unknown"), - owner=(self.config("ui", "username") or # preferred - self.config("web", "contact") or # deprecated - self.config("web", "author", "unknown")), # also - lastchange=cl.read(cl.tip())[2], - tags=tagentries, - branches=branches, - shortlog=changelist, - node=hex(cl.tip()), - archives=self.archivelist("tip")) + return tmpl("summary", + desc=self.config("web", "description", "unknown"), + owner=get_contact(self.config) or "unknown", + lastchange=cl.read(cl.tip())[2], + tags=tagentries, + branches=branches, + shortlog=changelist, + node=hex(cl.tip()), + archives=self.archivelist("tip")) - def filediff(self, fctx): + def filediff(self, tmpl, fctx): n = fctx.node() path = fctx.path() parents = fctx.parents() p1 = parents and parents[0].node() or nullid def diff(**map): - yield self.diff(p1, n, [path]) + yield self.diff(tmpl, p1, n, [path]) - yield self.t("filediff", - file=path, - node=hex(n), - rev=fctx.rev(), - parent=self.siblings(parents), - child=self.siblings(fctx.children()), - diff=diff) + return tmpl("filediff", + file=path, + node=hex(n), + rev=fctx.rev(), + parent=self.siblings(parents), + child=self.siblings(fctx.children()), + diff=diff) archive_specs = { 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None), @@ -640,7 +838,7 @@ 'zip': ('application/zip', 'zip', '.zip', None), } - def archive(self, req, key, type_): + def archive(self, tmpl, req, key, type_): reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame)) cnode = self.repo.lookup(key) arch_version = key @@ -648,13 +846,16 @@ arch_version = short(cnode) name = "%s-%s" % (reponame, arch_version) mimetype, artype, extension, encoding = self.archive_specs[type_] - headers = [('Content-type', mimetype), - ('Content-disposition', 'attachment; filename=%s%s' % - (name, extension))] + headers = [ + ('Content-Type', mimetype), + ('Content-Disposition', 'attachment; filename=%s%s' % + (name, extension)) + ] if encoding: - headers.append(('Content-encoding', encoding)) + headers.append(('Content-Encoding', encoding)) req.header(headers) - archival.archive(self.repo, req.out, cnode, artype, prefix=name) + req.respond(HTTP_OK) + archival.archive(self.repo, req, cnode, artype, prefix=name) # add tags to things # tags -> list of changesets corresponding to tags @@ -664,202 +865,10 @@ path = path.lstrip('/') return util.canonpath(self.repo.root, '', path) - def run(self): - if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): - raise RuntimeError("This function is only intended to be called while running as a CGI script.") - import mercurial.hgweb.wsgicgi as wsgicgi - from request import wsgiapplication - def make_web_app(): - return self - wsgicgi.launch(wsgiapplication(make_web_app)) - - def run_wsgi(self, req): - def header(**map): - header_file = cStringIO.StringIO( - ''.join(self.t("header", 
encoding=self.encoding, **map))) - msg = mimetools.Message(header_file, 0) - req.header(msg.items()) - yield header_file.read() - - def rawfileheader(**map): - req.header([('Content-type', map['mimetype']), - ('Content-disposition', 'filename=%s' % map['file']), - ('Content-length', str(len(map['raw'])))]) - yield '' - - def footer(**map): - yield self.t("footer", **map) - - def motd(**map): - yield self.config("web", "motd", "") - - def expand_form(form): - shortcuts = { - 'cl': [('cmd', ['changelog']), ('rev', None)], - 'sl': [('cmd', ['shortlog']), ('rev', None)], - 'cs': [('cmd', ['changeset']), ('node', None)], - 'f': [('cmd', ['file']), ('filenode', None)], - 'fl': [('cmd', ['filelog']), ('filenode', None)], - 'fd': [('cmd', ['filediff']), ('node', None)], - 'fa': [('cmd', ['annotate']), ('filenode', None)], - 'mf': [('cmd', ['manifest']), ('manifest', None)], - 'ca': [('cmd', ['archive']), ('node', None)], - 'tags': [('cmd', ['tags'])], - 'tip': [('cmd', ['changeset']), ('node', ['tip'])], - 'static': [('cmd', ['static']), ('file', None)] - } - - for k in shortcuts.iterkeys(): - if form.has_key(k): - for name, value in shortcuts[k]: - if value is None: - value = form[k] - form[name] = value - del form[k] - - def rewrite_request(req): - '''translate new web interface to traditional format''' - - def spliturl(req): - def firstitem(query): - return query.split('&', 1)[0].split(';', 1)[0] - - def normurl(url): - inner = '/'.join([x for x in url.split('/') if x]) - tl = len(url) > 1 and url.endswith('/') and '/' or '' - - return '%s%s%s' % (url.startswith('/') and '/' or '', - inner, tl) - - root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0])) - pi = normurl(req.env.get('PATH_INFO', '')) - if pi: - # strip leading / - pi = pi[1:] - if pi: - root = root[:root.rfind(pi)] - if req.env.has_key('REPO_NAME'): - rn = req.env['REPO_NAME'] + '/' - root += rn - query = pi[len(rn):] - else: - query = pi - else: - root += '?' - query = firstitem(req.env['QUERY_STRING']) - - return (root, query) - - req.url, query = spliturl(req) - - if req.form.has_key('cmd'): - # old style - return - - args = query.split('/', 2) - if not args or not args[0]: - return - - cmd = args.pop(0) - style = cmd.rfind('-') - if style != -1: - req.form['style'] = [cmd[:style]] - cmd = cmd[style+1:] - # avoid accepting e.g. style parameter as command - if hasattr(self, 'do_' + cmd): - req.form['cmd'] = [cmd] - - if args and args[0]: - node = args.pop(0) - req.form['node'] = [node] - if args: - req.form['file'] = args - - if cmd == 'static': - req.form['file'] = req.form['node'] - elif cmd == 'archive': - fn = req.form['node'][0] - for type_, spec in self.archive_specs.iteritems(): - ext = spec[2] - if fn.endswith(ext): - req.form['node'] = [fn[:-len(ext)]] - req.form['type'] = [type_] - - def sessionvars(**map): - fields = [] - if req.form.has_key('style'): - style = req.form['style'][0] - if style != self.config('web', 'style', ''): - fields.append(('style', style)) - - separator = req.url[-1] == '?' and ';' or '?' 
- for name, value in fields: - yield dict(name=name, value=value, separator=separator) - separator = ';' - - self.refresh() - - expand_form(req.form) - rewrite_request(req) - - style = self.config("web", "style", "") - if req.form.has_key('style'): - style = req.form['style'][0] - mapfile = style_map(self.templatepath, style) - - proto = req.env.get('wsgi.url_scheme') - if proto == 'https': - proto = 'https' - default_port = "443" - else: - proto = 'http' - default_port = "80" - - port = req.env["SERVER_PORT"] - port = port != default_port and (":" + port) or "" - urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port) - staticurl = self.config("web", "staticurl") or req.url + 'static/' - if not staticurl.endswith('/'): - staticurl += '/' - - if not self.reponame: - self.reponame = (self.config("web", "name") - or req.env.get('REPO_NAME') - or req.url.strip('/') or self.repo.root) - - self.t = templater.templater(mapfile, templater.common_filters, - defaults={"url": req.url, - "staticurl": staticurl, - "urlbase": urlbase, - "repo": self.reponame, - "header": header, - "footer": footer, - "motd": motd, - "rawfileheader": rawfileheader, - "sessionvars": sessionvars - }) - - try: - if not req.form.has_key('cmd'): - req.form['cmd'] = [self.t.cache['default']] - - cmd = req.form['cmd'][0] - - method = getattr(self, 'do_' + cmd, None) - if method: - try: - method(req) - except (hg.RepoError, revlog.RevlogError), inst: - req.write(self.t("error", error=str(inst))) - else: - req.write(self.t("error", error='No such method: ' + cmd)) - finally: - self.t = None - def changectx(self, req): - if req.form.has_key('node'): + if 'node' in req.form: changeid = req.form['node'][0] - elif req.form.has_key('manifest'): + elif 'manifest' in req.form: changeid = req.form['manifest'][0] else: changeid = self.repo.changelog.count() - 1 @@ -875,7 +884,7 @@ def filectx(self, req): path = self.cleanpath(req.form['file'][0]) - if req.form.has_key('node'): + if 'node' in req.form: changeid = req.form['node'][0] else: changeid = req.form['filenode'][0] @@ -887,181 +896,6 @@ return fctx - def do_log(self, req): - if req.form.has_key('file') and req.form['file'][0]: - self.do_filelog(req) - else: - self.do_changelog(req) - - def do_rev(self, req): - self.do_changeset(req) - - def do_file(self, req): - path = self.cleanpath(req.form.get('file', [''])[0]) - if path: - try: - req.write(self.filerevision(self.filectx(req))) - return - except revlog.LookupError: - pass - - req.write(self.manifest(self.changectx(req), path)) - - def do_diff(self, req): - self.do_filediff(req) - - def do_changelog(self, req, shortlog = False): - if req.form.has_key('node'): - ctx = self.changectx(req) - else: - if req.form.has_key('rev'): - hi = req.form['rev'][0] - else: - hi = self.repo.changelog.count() - 1 - try: - ctx = self.repo.changectx(hi) - except hg.RepoError: - req.write(self.search(hi)) # XXX redirect to 404 page? 
- return - - req.write(self.changelog(ctx, shortlog = shortlog)) - - def do_shortlog(self, req): - self.do_changelog(req, shortlog = True) - - def do_changeset(self, req): - req.write(self.changeset(self.changectx(req))) - - def do_manifest(self, req): - req.write(self.manifest(self.changectx(req), - self.cleanpath(req.form['path'][0]))) - - def do_tags(self, req): - req.write(self.tags()) - - def do_summary(self, req): - req.write(self.summary()) - - def do_filediff(self, req): - req.write(self.filediff(self.filectx(req))) - - def do_annotate(self, req): - req.write(self.fileannotate(self.filectx(req))) - - def do_filelog(self, req): - req.write(self.filelog(self.filectx(req))) - - def do_lookup(self, req): - try: - r = hex(self.repo.lookup(req.form['key'][0])) - success = 1 - except Exception,inst: - r = str(inst) - success = 0 - resp = "%s %s\n" % (success, r) - req.httphdr("application/mercurial-0.1", length=len(resp)) - req.write(resp) - - def do_heads(self, req): - resp = " ".join(map(hex, self.repo.heads())) + "\n" - req.httphdr("application/mercurial-0.1", length=len(resp)) - req.write(resp) - - def do_branches(self, req): - nodes = [] - if req.form.has_key('nodes'): - nodes = map(bin, req.form['nodes'][0].split(" ")) - resp = cStringIO.StringIO() - for b in self.repo.branches(nodes): - resp.write(" ".join(map(hex, b)) + "\n") - resp = resp.getvalue() - req.httphdr("application/mercurial-0.1", length=len(resp)) - req.write(resp) - - def do_between(self, req): - if req.form.has_key('pairs'): - pairs = [map(bin, p.split("-")) - for p in req.form['pairs'][0].split(" ")] - resp = cStringIO.StringIO() - for b in self.repo.between(pairs): - resp.write(" ".join(map(hex, b)) + "\n") - resp = resp.getvalue() - req.httphdr("application/mercurial-0.1", length=len(resp)) - req.write(resp) - - def do_changegroup(self, req): - req.httphdr("application/mercurial-0.1") - nodes = [] - if not self.allowpull: - return - - if req.form.has_key('roots'): - nodes = map(bin, req.form['roots'][0].split(" ")) - - z = zlib.compressobj() - f = self.repo.changegroup(nodes, 'serve') - while 1: - chunk = f.read(4096) - if not chunk: - break - req.write(z.compress(chunk)) - - req.write(z.flush()) - - def do_changegroupsubset(self, req): - req.httphdr("application/mercurial-0.1") - bases = [] - heads = [] - if not self.allowpull: - return - - if req.form.has_key('bases'): - bases = [bin(x) for x in req.form['bases'][0].split(' ')] - if req.form.has_key('heads'): - heads = [bin(x) for x in req.form['heads'][0].split(' ')] - - z = zlib.compressobj() - f = self.repo.changegroupsubset(bases, heads, 'serve') - while 1: - chunk = f.read(4096) - if not chunk: - break - req.write(z.compress(chunk)) - - req.write(z.flush()) - - def do_archive(self, req): - type_ = req.form['type'][0] - allowed = self.configlist("web", "allow_archive") - if (type_ in self.archives and (type_ in allowed or - self.configbool("web", "allow" + type_, False))): - self.archive(req, req.form['node'][0], type_) - return - - req.write(self.t("error")) - - def do_static(self, req): - fname = req.form['file'][0] - # a repo owner may set web.static in .hg/hgrc to get any file - # readable by the user running the CGI script - static = self.config("web", "static", - os.path.join(self.templatepath, "static"), - untrusted=False) - req.write(staticfile(static, fname, req) - or self.t("error", error="%r not found" % fname)) - - def do_capabilities(self, req): - caps = ['lookup', 'changegroupsubset'] - if self.configbool('server', 'uncompressed'): - 
caps.append('stream=%d' % self.repo.changelog.version) - # XXX: make configurable and/or share code with do_unbundle: - unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN'] - if unbundleversions: - caps.append('unbundle=%s' % ','.join(unbundleversions)) - resp = ' '.join(caps) - req.httphdr("application/mercurial-0.1", length=len(resp)) - req.write(resp) - def check_perm(self, req, op, default): '''check permission for operation based on user auth. return true if op allowed, else false. @@ -1075,134 +909,3 @@ allow = self.configlist('web', 'allow_' + op) return (allow and (allow == ['*'] or user in allow)) or default - - def do_unbundle(self, req): - def bail(response, headers={}): - length = int(req.env['CONTENT_LENGTH']) - for s in util.filechunkiter(req, limit=length): - # drain incoming bundle, else client will not see - # response when run outside cgi script - pass - req.httphdr("application/mercurial-0.1", headers=headers) - req.write('0\n') - req.write(response) - - # require ssl by default, auth info cannot be sniffed and - # replayed - ssl_req = self.configbool('web', 'push_ssl', True) - if ssl_req: - if req.env.get('wsgi.url_scheme') != 'https': - bail(_('ssl required\n')) - return - proto = 'https' - else: - proto = 'http' - - # do not allow push unless explicitly allowed - if not self.check_perm(req, 'push', False): - bail(_('push not authorized\n'), - headers={'status': '401 Unauthorized'}) - return - - their_heads = req.form['heads'][0].split(' ') - - def check_heads(): - heads = map(hex, self.repo.heads()) - return their_heads == [hex('force')] or their_heads == heads - - # fail early if possible - if not check_heads(): - bail(_('unsynced changes\n')) - return - - req.httphdr("application/mercurial-0.1") - - # do not lock repo until all changegroup data is - # streamed. save to temporary file. 
- - fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-') - fp = os.fdopen(fd, 'wb+') - try: - length = int(req.env['CONTENT_LENGTH']) - for s in util.filechunkiter(req, limit=length): - fp.write(s) - - try: - lock = self.repo.lock() - try: - if not check_heads(): - req.write('0\n') - req.write(_('unsynced changes\n')) - return - - fp.seek(0) - header = fp.read(6) - if not header.startswith("HG"): - # old client with uncompressed bundle - def generator(f): - yield header - for chunk in f: - yield chunk - elif not header.startswith("HG10"): - req.write("0\n") - req.write(_("unknown bundle version\n")) - return - elif header == "HG10GZ": - def generator(f): - zd = zlib.decompressobj() - for chunk in f: - yield zd.decompress(chunk) - elif header == "HG10BZ": - def generator(f): - zd = bz2.BZ2Decompressor() - zd.decompress("BZ") - for chunk in f: - yield zd.decompress(chunk) - elif header == "HG10UN": - def generator(f): - for chunk in f: - yield chunk - else: - req.write("0\n") - req.write(_("unknown bundle compression type\n")) - return - gen = generator(util.filechunkiter(fp, 4096)) - - # send addchangegroup output to client - - old_stdout = sys.stdout - sys.stdout = cStringIO.StringIO() - - try: - url = 'remote:%s:%s' % (proto, - req.env.get('REMOTE_HOST', '')) - try: - ret = self.repo.addchangegroup( - util.chunkbuffer(gen), 'serve', url) - except util.Abort, inst: - sys.stdout.write("abort: %s\n" % inst) - ret = 0 - finally: - val = sys.stdout.getvalue() - sys.stdout = old_stdout - req.write('%d\n' % ret) - req.write(val) - finally: - del lock - except (OSError, IOError), inst: - req.write('0\n') - filename = getattr(inst, 'filename', '') - # Don't send our filesystem layout to the client - if filename.startswith(self.repo.root): - filename = filename[len(self.repo.root)+1:] - else: - filename = '' - error = getattr(inst, 'strerror', 'Unknown error') - req.write('%s: %s\n' % (error, filename)) - finally: - fp.close() - os.unlink(tempname) - - def do_stream_out(self, req): - req.httphdr("application/mercurial-0.1") - streamclone.stream_out(self.repo, req, untrusted=True)
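
With hgweb now being a WSGI application in its own right (it is callable with the environment and a start_response-style respond callable), publishing a single repository over CGI remains a two-liner; the repository path and name are hypothetical:

    #!/usr/bin/env python
    from mercurial.hgweb.hgweb_mod import hgweb

    # run() refuses to start unless invoked as a CGI script and then
    # hands the application to wsgicgi.launch()
    hgweb("/path/to/repo", name="myrepo").run()
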
--- a/mercurial/hgweb/hgwebdir_mod.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hgweb/hgwebdir_mod.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,11 +6,13 @@ # This software may be used and distributed according to the terms # of the GNU General Public License, incorporated herein by reference. -import os, mimetools, cStringIO +import os from mercurial.i18n import gettext as _ -from mercurial import ui, hg, util, templater -from common import get_mtime, staticfile, style_map, paritygen +from mercurial import ui, hg, util, templater, templatefilters +from common import ErrorResponse, get_mtime, staticfile, style_map, paritygen,\ + get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR from hgweb_mod import hgweb +from request import wsgirequest # This is a stopgap class hgwebdir(object): @@ -19,7 +21,8 @@ return [(util.pconvert(name).strip('/'), path) for name, path in items] - self.parentui = parentui + self.parentui = parentui or ui.ui(report_untrusted=False, + interactive = False) self.motd = None self.style = None self.stripecount = None @@ -60,60 +63,79 @@ if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): raise RuntimeError("This function is only intended to be called while running as a CGI script.") import mercurial.hgweb.wsgicgi as wsgicgi - from request import wsgiapplication - def make_web_app(): - return self - wsgicgi.launch(wsgiapplication(make_web_app)) + wsgicgi.launch(self) + + def __call__(self, env, respond): + req = wsgirequest(env, respond) + self.run_wsgi(req) + return req def run_wsgi(self, req): - def header(**map): - header_file = cStringIO.StringIO( - ''.join(tmpl("header", encoding=util._encoding, **map))) - msg = mimetools.Message(header_file, 0) - req.header(msg.items()) - yield header_file.read() + + try: + try: - def footer(**map): - yield tmpl("footer", **map) + virtual = req.env.get("PATH_INFO", "").strip('/') + tmpl = self.templater(req) + ctype = tmpl('mimetype', encoding=util._encoding) + ctype = templater.stringify(ctype) - def motd(**map): - if self.motd is not None: - yield self.motd - else: - yield config('web', 'motd', '') + # a static file + if virtual.startswith('static/') or 'static' in req.form: + static = os.path.join(templater.templatepath(), 'static') + if virtual.startswith('static/'): + fname = virtual[7:] + else: + fname = req.form['static'][0] + req.write(staticfile(static, fname, req)) + return - parentui = self.parentui or ui.ui(report_untrusted=False, - interactive=False) - - def config(section, name, default=None, untrusted=True): - return parentui.config(section, name, default, untrusted) + # top-level index + elif not virtual: + req.respond(HTTP_OK, ctype) + req.write(self.makeindex(req, tmpl)) + return - url = req.env['REQUEST_URI'].split('?')[0] - if not url.endswith('/'): - url += '/' - pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/' - base = url[:len(url) - len(pathinfo)] - if not base.endswith('/'): - base += '/' - - staticurl = config('web', 'staticurl') or base + 'static/' - if not staticurl.endswith('/'): - staticurl += '/' + # nested indexes and hgwebs + + repos = dict(self.repos) + while virtual: + real = repos.get(virtual) + if real: + req.env['REPO_NAME'] = virtual + try: + repo = hg.repository(self.parentui, real) + hgweb(repo).run_wsgi(req) + return + except IOError, inst: + msg = inst.strerror + raise ErrorResponse(HTTP_SERVER_ERROR, msg) + except hg.RepoError, inst: + raise ErrorResponse(HTTP_SERVER_ERROR, str(inst)) - style = self.style - if style is None: - style = config('web', 'style', '') 
- if req.form.has_key('style'): - style = req.form['style'][0] - if self.stripecount is None: - self.stripecount = int(config('web', 'stripes', 1)) - mapfile = style_map(templater.templatepath(), style) - tmpl = templater.templater(mapfile, templater.common_filters, - defaults={"header": header, - "footer": footer, - "motd": motd, - "url": url, - "staticurl": staticurl}) + # browse subdirectories + subdir = virtual + '/' + if [r for r in repos if r.startswith(subdir)]: + req.respond(HTTP_OK, ctype) + req.write(self.makeindex(req, tmpl, subdir)) + return + + up = virtual.rfind('/') + if up < 0: + break + virtual = virtual[:up] + + # prefixes not found + req.respond(HTTP_NOT_FOUND, ctype) + req.write(tmpl("notfound", repo=virtual)) + + except ErrorResponse, err: + req.respond(err.code, ctype) + req.write(tmpl('error', error=err.message or '')) + finally: + tmpl = None + + def makeindex(self, req, tmpl, subdir=""): def archivelist(ui, nodeid, url): allowed = ui.configlist("web", "allow_archive", untrusted=True) @@ -126,7 +148,7 @@ def entries(sortcolumn="", descending=False, subdir="", **map): def sessionvars(**map): fields = [] - if req.form.has_key('style'): + if 'style' in req.form: style = req.form['style'][0] if style != get('web', 'style', ''): fields.append(('style', style)) @@ -143,7 +165,7 @@ continue name = name[len(subdir):] - u = ui.ui(parentui=parentui) + u = ui.ui(parentui=self.parentui) try: u.readconfig(os.path.join(path, '.hg', 'hgrc')) except Exception, e: @@ -155,8 +177,10 @@ if u.configbool("web", "hidden", untrusted=True): continue - url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name]) - .replace("//", "/")) + '/' + parts = [req.env['PATH_INFO'].strip('/'), name] + if req.env['SCRIPT_NAME']: + parts.insert(0, req.env['SCRIPT_NAME']) + url = ('/'.join(parts).replace("//", "/")) + '/' # update time with local timezone try: @@ -164,9 +188,7 @@ except OSError: continue - contact = (get("ui", "username") or # preferred - get("web", "contact") or # deprecated - get("web", "author", "")) # also + contact = get_contact(get) description = get("web", "description", "") name = get("web", "name", name) row = dict(contact=contact or "unknown", @@ -195,66 +217,62 @@ row['parity'] = parity.next() yield row - def makeindex(req, subdir=""): - sortable = ["name", "description", "contact", "lastchange"] - sortcolumn, descending = self.repos_sorted - if req.form.has_key('sort'): - sortcolumn = req.form['sort'][0] - descending = sortcolumn.startswith('-') - if descending: - sortcolumn = sortcolumn[1:] - if sortcolumn not in sortable: - sortcolumn = "" + sortable = ["name", "description", "contact", "lastchange"] + sortcolumn, descending = self.repos_sorted + if 'sort' in req.form: + sortcolumn = req.form['sort'][0] + descending = sortcolumn.startswith('-') + if descending: + sortcolumn = sortcolumn[1:] + if sortcolumn not in sortable: + sortcolumn = "" - sort = [("sort_%s" % column, - "%s%s" % ((not descending and column == sortcolumn) - and "-" or "", column)) - for column in sortable] - req.write(tmpl("index", entries=entries, subdir=subdir, - sortcolumn=sortcolumn, descending=descending, - **dict(sort))) + sort = [("sort_%s" % column, + "%s%s" % ((not descending and column == sortcolumn) + and "-" or "", column)) + for column in sortable] + + return tmpl("index", entries=entries, subdir=subdir, + sortcolumn=sortcolumn, descending=descending, + **dict(sort)) + + def templater(self, req): + + def header(**map): + yield tmpl('header', encoding=util._encoding, **map) + + def 
footer(**map): + yield tmpl("footer", **map) - try: - virtual = req.env.get("PATH_INFO", "").strip('/') - if virtual.startswith('static/'): - static = os.path.join(templater.templatepath(), 'static') - fname = virtual[7:] - req.write(staticfile(static, fname, req) or - tmpl('error', error='%r not found' % fname)) - elif virtual: - repos = dict(self.repos) - while virtual: - real = repos.get(virtual) - if real: - req.env['REPO_NAME'] = virtual - try: - repo = hg.repository(parentui, real) - hgweb(repo).run_wsgi(req) - except IOError, inst: - req.write(tmpl("error", error=inst.strerror)) - except hg.RepoError, inst: - req.write(tmpl("error", error=str(inst))) - return + def motd(**map): + if self.motd is not None: + yield self.motd + else: + yield config('web', 'motd', '') + + def config(section, name, default=None, untrusted=True): + return self.parentui.config(section, name, default, untrusted) + + url = req.env.get('SCRIPT_NAME', '') + if not url.endswith('/'): + url += '/' - # browse subdirectories - subdir = virtual + '/' - if [r for r in repos if r.startswith(subdir)]: - makeindex(req, subdir) - return - - up = virtual.rfind('/') - if up < 0: - break - virtual = virtual[:up] + staticurl = config('web', 'staticurl') or url + 'static/' + if not staticurl.endswith('/'): + staticurl += '/' - req.write(tmpl("notfound", repo=virtual)) - else: - if req.form.has_key('static'): - static = os.path.join(templater.templatepath(), "static") - fname = req.form['static'][0] - req.write(staticfile(static, fname, req) - or tmpl("error", error="%r not found" % fname)) - else: - makeindex(req) - finally: - tmpl = None + style = self.style + if style is None: + style = config('web', 'style', '') + if 'style' in req.form: + style = req.form['style'][0] + if self.stripecount is None: + self.stripecount = int(config('web', 'stripes', 1)) + mapfile = style_map(templater.templatepath(), style) + tmpl = templater.templater(mapfile, templatefilters.filters, + defaults={"header": header, + "footer": footer, + "motd": motd, + "url": url, + "staticurl": staticurl}) + return tmpl
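The rewritten run_wsgi above resolves a request by walking the virtual path from its most specific prefix to its least specific one, dispatching to a repository, a nested index, or a not-found page. A minimal standalone sketch of that walk, with an illustrative repos dict and return values that are not part of the real code:

    def resolve(virtual, repos):
        """Return ('repo', name), ('index', subdir) or ('notfound', virtual)."""
        while virtual:
            if virtual in repos:
                return 'repo', virtual            # dispatch to hgweb(repo)
            subdir = virtual + '/'
            if [r for r in repos if r.startswith(subdir)]:
                return 'index', subdir            # render a nested index page
            up = virtual.rfind('/')
            if up < 0:
                break
            virtual = virtual[:up]                # drop the last path component
        return 'notfound', virtual

    repos = {'team/hg': '/srv/hg/hg', 'team/crew': '/srv/hg/crew'}
    assert resolve('team/hg/rev/tip', repos) == ('repo', 'team/hg')
    assert resolve('team', repos) == ('index', 'team/')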
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/hgweb/protocol.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,250 @@ +# +# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. + +import cStringIO, zlib, bz2, tempfile, errno, os, sys +from mercurial import util, streamclone +from mercurial.i18n import gettext as _ +from mercurial.node import * +from common import HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR + +# __all__ is populated with the allowed commands. Be sure to add to it if +# you're adding a new command, or the new command won't work. + +__all__ = [ + 'lookup', 'heads', 'branches', 'between', 'changegroup', + 'changegroupsubset', 'capabilities', 'unbundle', 'stream_out', +] + +HGTYPE = 'application/mercurial-0.1' + +def lookup(web, req): + try: + r = hex(web.repo.lookup(req.form['key'][0])) + success = 1 + except Exception,inst: + r = str(inst) + success = 0 + resp = "%s %s\n" % (success, r) + req.respond(HTTP_OK, HGTYPE, length=len(resp)) + req.write(resp) + +def heads(web, req): + resp = " ".join(map(hex, web.repo.heads())) + "\n" + req.respond(HTTP_OK, HGTYPE, length=len(resp)) + req.write(resp) + +def branches(web, req): + nodes = [] + if 'nodes' in req.form: + nodes = map(bin, req.form['nodes'][0].split(" ")) + resp = cStringIO.StringIO() + for b in web.repo.branches(nodes): + resp.write(" ".join(map(hex, b)) + "\n") + resp = resp.getvalue() + req.respond(HTTP_OK, HGTYPE, length=len(resp)) + req.write(resp) + +def between(web, req): + if 'pairs' in req.form: + pairs = [map(bin, p.split("-")) + for p in req.form['pairs'][0].split(" ")] + resp = cStringIO.StringIO() + for b in web.repo.between(pairs): + resp.write(" ".join(map(hex, b)) + "\n") + resp = resp.getvalue() + req.respond(HTTP_OK, HGTYPE, length=len(resp)) + req.write(resp) + +def changegroup(web, req): + req.respond(HTTP_OK, HGTYPE) + nodes = [] + if not web.allowpull: + return + + if 'roots' in req.form: + nodes = map(bin, req.form['roots'][0].split(" ")) + + z = zlib.compressobj() + f = web.repo.changegroup(nodes, 'serve') + while 1: + chunk = f.read(4096) + if not chunk: + break + req.write(z.compress(chunk)) + + req.write(z.flush()) + +def changegroupsubset(web, req): + req.respond(HTTP_OK, HGTYPE) + bases = [] + heads = [] + if not web.allowpull: + return + + if 'bases' in req.form: + bases = [bin(x) for x in req.form['bases'][0].split(' ')] + if 'heads' in req.form: + heads = [bin(x) for x in req.form['heads'][0].split(' ')] + + z = zlib.compressobj() + f = web.repo.changegroupsubset(bases, heads, 'serve') + while 1: + chunk = f.read(4096) + if not chunk: + break + req.write(z.compress(chunk)) + + req.write(z.flush()) + +def capabilities(web, req): + caps = ['lookup', 'changegroupsubset'] + if web.configbool('server', 'uncompressed'): + caps.append('stream=%d' % web.repo.changelog.version) + # XXX: make configurable and/or share code with do_unbundle: + unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN'] + if unbundleversions: + caps.append('unbundle=%s' % ','.join(unbundleversions)) + resp = ' '.join(caps) + req.respond(HTTP_OK, HGTYPE, length=len(resp)) + req.write(resp) + +def unbundle(web, req): + def bail(response, headers={}): + length = int(req.env['CONTENT_LENGTH']) + for s in util.filechunkiter(req, limit=length): + # drain incoming bundle, else client will not see + # response when run outside 
cgi script + pass + req.header(headers.items()) + req.respond(HTTP_OK, HGTYPE) + req.write('0\n') + req.write(response) + + # require ssl by default, auth info cannot be sniffed and + # replayed + ssl_req = web.configbool('web', 'push_ssl', True) + if ssl_req: + if req.env.get('wsgi.url_scheme') != 'https': + bail(_('ssl required\n')) + return + proto = 'https' + else: + proto = 'http' + + # do not allow push unless explicitly allowed + if not web.check_perm(req, 'push', False): + bail(_('push not authorized\n'), + headers={'status': '401 Unauthorized'}) + return + + their_heads = req.form['heads'][0].split(' ') + + def check_heads(): + heads = map(hex, web.repo.heads()) + return their_heads == [hex('force')] or their_heads == heads + + # fail early if possible + if not check_heads(): + bail(_('unsynced changes\n')) + return + + req.respond(HTTP_OK, HGTYPE) + + # do not lock repo until all changegroup data is + # streamed. save to temporary file. + + fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-') + fp = os.fdopen(fd, 'wb+') + try: + length = int(req.env['CONTENT_LENGTH']) + for s in util.filechunkiter(req, limit=length): + fp.write(s) + + try: + lock = web.repo.lock() + try: + if not check_heads(): + req.write('0\n') + req.write(_('unsynced changes\n')) + return + + fp.seek(0) + header = fp.read(6) + if not header.startswith("HG"): + # old client with uncompressed bundle + def generator(f): + yield header + for chunk in f: + yield chunk + elif not header.startswith("HG10"): + req.write("0\n") + req.write(_("unknown bundle version\n")) + return + elif header == "HG10GZ": + def generator(f): + zd = zlib.decompressobj() + for chunk in f: + yield zd.decompress(chunk) + elif header == "HG10BZ": + def generator(f): + zd = bz2.BZ2Decompressor() + zd.decompress("BZ") + for chunk in f: + yield zd.decompress(chunk) + elif header == "HG10UN": + def generator(f): + for chunk in f: + yield chunk + else: + req.write("0\n") + req.write(_("unknown bundle compression type\n")) + return + gen = generator(util.filechunkiter(fp, 4096)) + + # send addchangegroup output to client + + old_stdout = sys.stdout + sys.stdout = cStringIO.StringIO() + + try: + url = 'remote:%s:%s' % (proto, + req.env.get('REMOTE_HOST', '')) + try: + ret = web.repo.addchangegroup( + util.chunkbuffer(gen), 'serve', url) + except util.Abort, inst: + sys.stdout.write("abort: %s\n" % inst) + ret = 0 + finally: + val = sys.stdout.getvalue() + sys.stdout = old_stdout + req.write('%d\n' % ret) + req.write(val) + finally: + del lock + except (OSError, IOError), inst: + req.write('0\n') + filename = getattr(inst, 'filename', '') + # Don't send our filesystem layout to the client + if filename.startswith(web.repo.root): + filename = filename[len(web.repo.root)+1:] + else: + filename = '' + error = getattr(inst, 'strerror', 'Unknown error') + if inst.errno == errno.ENOENT: + code = HTTP_NOT_FOUND + else: + code = HTTP_SERVER_ERROR + req.respond(code) + req.write('%s: %s\n' % (error, filename)) + finally: + fp.close() + os.unlink(tempname) + +def stream_out(web, req): + req.respond(HTTP_OK, HGTYPE) + streamclone.stream_out(web.repo, req, untrusted=True)
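unbundle() above sniffs the first six bytes of the uploaded data to pick a decompressor: no 'HG' prefix means an old uncompressed stream, otherwise HG10GZ, HG10BZ or HG10UN. A rough standalone sketch of that dispatch; the chunk source is arbitrary, and for HG10BZ the two 'BZ' bytes consumed while reading the header are fed back to the decompressor, as in the hunk above:

    import bz2, zlib

    def decompressed(header, chunks):
        if not header.startswith("HG"):
            yield header                   # old client: the sniffed bytes are payload
            for chunk in chunks:
                yield chunk
        elif header == "HG10GZ":
            zd = zlib.decompressobj()
            for chunk in chunks:
                yield zd.decompress(chunk)
        elif header == "HG10BZ":
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")            # re-feed the magic eaten by the header
            for chunk in chunks:
                yield zd.decompress(chunk)
        elif header == "HG10UN":
            for chunk in chunks:
                yield chunk
        else:
            raise ValueError("unknown bundle header: %r" % header)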
--- a/mercurial/hgweb/request.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hgweb/request.py Fri Feb 08 11:55:17 2008 +0100 @@ -8,33 +8,24 @@ import socket, cgi, errno from mercurial.i18n import gettext as _ - -class wsgiapplication(object): - def __init__(self, destmaker): - self.destmaker = destmaker +from common import ErrorResponse, statusmessage - def __call__(self, wsgienv, start_response): - return _wsgirequest(self.destmaker(), wsgienv, start_response) - -class _wsgirequest(object): - def __init__(self, destination, wsgienv, start_response): +class wsgirequest(object): + def __init__(self, wsgienv, start_response): version = wsgienv['wsgi.version'] if (version < (1, 0)) or (version >= (2, 0)): raise RuntimeError("Unknown and unsupported WSGI version %d.%d" % version) self.inp = wsgienv['wsgi.input'] - self.server_write = None self.err = wsgienv['wsgi.errors'] self.threaded = wsgienv['wsgi.multithread'] self.multiprocess = wsgienv['wsgi.multiprocess'] self.run_once = wsgienv['wsgi.run_once'] self.env = wsgienv self.form = cgi.parse(self.inp, self.env, keep_blank_values=1) - self.start_response = start_response + self._start_response = start_response + self.server_write = None self.headers = [] - destination.run_wsgi(self) - - out = property(lambda self: self) def __iter__(self): return iter([]) @@ -42,25 +33,39 @@ def read(self, count=-1): return self.inp.read(count) - def write(self, *things): - for thing in things: - if hasattr(thing, "__iter__"): - for part in thing: - self.write(part) - else: - thing = str(thing) - if self.server_write is None: - if not self.headers: - raise RuntimeError("request.write called before headers sent (%s)." % thing) - self.server_write = self.start_response('200 Script output follows', - self.headers) - self.start_response = None - self.headers = None - try: - self.server_write(thing) - except socket.error, inst: - if inst[0] != errno.ECONNRESET: - raise + def respond(self, status, type=None, filename=None, length=0): + if self._start_response is not None: + + self.httphdr(type, filename, length) + if not self.headers: + raise RuntimeError("request.write called before headers sent") + + for k, v in self.headers: + if not isinstance(v, str): + raise TypeError('header value must be string: %r' % v) + + if isinstance(status, ErrorResponse): + status = statusmessage(status.code) + elif status == 200: + status = '200 Script output follows' + elif isinstance(status, int): + status = statusmessage(status) + + self.server_write = self._start_response(status, self.headers) + self._start_response = None + self.headers = [] + + def write(self, thing): + if hasattr(thing, "__iter__"): + for part in thing: + self.write(part) + else: + thing = str(thing) + try: + self.server_write(thing) + except socket.error, inst: + if inst[0] != errno.ECONNRESET: + raise def writelines(self, lines): for line in lines: @@ -72,15 +77,24 @@ def close(self): return None - def header(self, headers=[('Content-type','text/html')]): + def header(self, headers=[('Content-Type','text/html')]): self.headers.extend(headers) - def httphdr(self, type, filename=None, length=0, headers={}): + def httphdr(self, type=None, filename=None, length=0, headers={}): headers = headers.items() - headers.append(('Content-type', type)) + if type is not None: + headers.append(('Content-Type', type)) if filename: - headers.append(('Content-disposition', 'attachment; filename=%s' % + headers.append(('Content-Disposition', 'inline; filename=%s' % filename)) if length: - headers.append(('Content-length', 
str(length))) + headers.append(('Content-Length', str(length))) self.header(headers) + +def wsgiapplication(app_maker): + '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir() + can and should now be used as a WSGI application.''' + application = app_maker() + def run_wsgi(env, respond): + return application(env, respond) + return run_wsgi
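With wsgirequest in place, hgweb and hgwebdir instances are WSGI applications in their own right, and wsgiapplication() above exists only to keep old CGI front-end scripts working. A usage sketch; the config path is a placeholder:

    from mercurial.hgweb.hgwebdir_mod import hgwebdir
    from mercurial.hgweb.request import wsgiapplication
    import mercurial.hgweb.wsgicgi as wsgicgi

    # new style: the hgwebdir instance is itself the WSGI application
    application = hgwebdir('/path/to/hgweb.config')
    wsgicgi.launch(application)

    # old style, still supported through the compatibility wrapper:
    # def make_web_app():
    #     return hgwebdir('/path/to/hgweb.config')
    # wsgicgi.launch(wsgiapplication(make_web_app))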
--- a/mercurial/hgweb/server.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hgweb/server.py Fri Feb 08 11:55:17 2008 +0100 @@ -10,7 +10,6 @@ from mercurial import ui, hg, util, templater from hgweb_mod import hgweb from hgwebdir_mod import hgwebdir -from request import wsgiapplication from mercurial.i18n import gettext as _ def _splitURI(uri): @@ -77,7 +76,7 @@ self.do_POST() def do_hgweb(self): - path_info, query = _splitURI(self.path) + path, query = _splitURI(self.path) env = {} env['GATEWAY_INTERFACE'] = 'CGI/1.1' @@ -85,7 +84,8 @@ env['SERVER_NAME'] = self.server.server_name env['SERVER_PORT'] = str(self.server.server_port) env['REQUEST_URI'] = self.path - env['PATH_INFO'] = path_info + env['SCRIPT_NAME'] = self.server.prefix + env['PATH_INFO'] = path[len(self.server.prefix):] env['REMOTE_HOST'] = self.client_address[0] env['REMOTE_ADDR'] = self.client_address[0] if query: @@ -121,10 +121,7 @@ self.saved_headers = [] self.sent_headers = False self.length = None - req = self.server.reqmaker(env, self._start_response) - for data in req: - if data: - self._write(data) + self.server.application(env, self._start_response) def send_headers(self): if not self.saved_status: @@ -200,7 +197,7 @@ def openlog(opt, default): if opt and opt != '-': - return open(opt, 'w') + return open(opt, 'a') return default if repo is None: @@ -209,6 +206,9 @@ myui = repo.ui address = myui.config("web", "address", "") port = int(myui.config("web", "port", 8000)) + prefix = myui.config("web", "prefix", "") + if prefix: + prefix = "/" + prefix.strip("/") use_ipv6 = myui.configbool("web", "ipv6") webdir_conf = myui.config("web", "webdir_conf") ssl_cert = myui.config("web", "certificate") @@ -250,13 +250,14 @@ raise hg.RepoError(_("There is no Mercurial repository here" " (.hg not found)")) return hgwebobj - self.reqmaker = wsgiapplication(make_handler) + self.application = make_handler() addr = address if addr in ('', '::'): addr = socket.gethostname() self.addr, self.port = addr, port + self.prefix = prefix if ssl_cert: try:
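The new web.prefix option is handled above by splitting the request path into SCRIPT_NAME (the prefix) and PATH_INFO (the rest), so generated links stay below the prefix when hg serve runs behind a reverse proxy. A small sketch of the split, with example values:

    def split_path(path, prefix):
        if prefix:
            prefix = "/" + prefix.strip("/")
        else:
            prefix = ""
        return prefix, path[len(prefix):]

    assert split_path("/hg/repo/rev/tip", "hg") == ("/hg", "/repo/rev/tip")
    assert split_path("/repo/rev/tip", "") == ("", "/repo/rev/tip")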
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/hgweb/webcommands.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,120 @@ +# +# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. + +import os, mimetypes +from mercurial import revlog, util, hg +from common import staticfile, ErrorResponse, HTTP_OK, HTTP_NOT_FOUND + +# __all__ is populated with the allowed commands. Be sure to add to it if +# you're adding a new command, or the new command won't work. + +__all__ = [ + 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev', + 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog', + 'archive', 'static', +] + +def log(web, req, tmpl): + if 'file' in req.form and req.form['file'][0]: + return filelog(web, req, tmpl) + else: + return changelog(web, req, tmpl) + +def rawfile(web, req, tmpl): + path = web.cleanpath(req.form.get('file', [''])[0]) + if not path: + content = web.manifest(tmpl, web.changectx(req), path) + req.respond(HTTP_OK, web.ctype) + return content + + try: + fctx = web.filectx(req) + except revlog.LookupError: + content = web.manifest(tmpl, web.changectx(req), path) + req.respond(HTTP_OK, web.ctype) + return content + + path = fctx.path() + text = fctx.data() + mt = mimetypes.guess_type(path)[0] + if mt is None or util.binary(text): + mt = mt or 'application/octet-stream' + + req.respond(HTTP_OK, mt, path, len(text)) + return [text] + +def file(web, req, tmpl): + path = web.cleanpath(req.form.get('file', [''])[0]) + if path: + try: + return web.filerevision(tmpl, web.filectx(req)) + except revlog.LookupError: + pass + + return web.manifest(tmpl, web.changectx(req), path) + +def changelog(web, req, tmpl, shortlog = False): + if 'node' in req.form: + ctx = web.changectx(req) + else: + if 'rev' in req.form: + hi = req.form['rev'][0] + else: + hi = web.repo.changelog.count() - 1 + try: + ctx = web.repo.changectx(hi) + except hg.RepoError: + return web.search(tmpl, hi) # XXX redirect to 404 page? 
+ + return web.changelog(tmpl, ctx, shortlog = shortlog) + +def shortlog(web, req, tmpl): + return changelog(web, req, tmpl, shortlog = True) + +def changeset(web, req, tmpl): + return web.changeset(tmpl, web.changectx(req)) + +rev = changeset + +def manifest(web, req, tmpl): + return web.manifest(tmpl, web.changectx(req), + web.cleanpath(req.form['path'][0])) + +def tags(web, req, tmpl): + return web.tags(tmpl) + +def summary(web, req, tmpl): + return web.summary(tmpl) + +def filediff(web, req, tmpl): + return web.filediff(tmpl, web.filectx(req)) + +diff = filediff + +def annotate(web, req, tmpl): + return web.fileannotate(tmpl, web.filectx(req)) + +def filelog(web, req, tmpl): + return web.filelog(tmpl, web.filectx(req)) + +def archive(web, req, tmpl): + type_ = req.form['type'][0] + allowed = web.configlist("web", "allow_archive") + if (type_ in web.archives and (type_ in allowed or + web.configbool("web", "allow" + type_, False))): + web.archive(tmpl, req, req.form['node'][0], type_) + return [] + raise ErrorResponse(HTTP_NOT_FOUND, 'Unsupported archive type: %s' % type_) + +def static(web, req, tmpl): + fname = req.form['file'][0] + # a repo owner may set web.static in .hg/hgrc to get any file + # readable by the user running the CGI script + static = web.config("web", "static", + os.path.join(web.templatepath, "static"), + untrusted=False) + return [staticfile(static, fname, req)]
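The functions above all share the (web, req, tmpl) signature, and __all__ doubles as the whitelist of commands hgweb may dispatch to. A simplified stand-in for that dispatch, not the real hgweb_mod code:

    from mercurial.hgweb import webcommands

    def dispatch(web, req, tmpl, cmd):
        if cmd not in webcommands.__all__:
            raise KeyError("no such web command: %s" % cmd)
        return getattr(webcommands, cmd)(web, req, tmpl)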
--- a/mercurial/hgweb/wsgicgi.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hgweb/wsgicgi.py Fri Feb 08 11:55:17 2008 +0100 @@ -16,6 +16,7 @@ util.set_binary(sys.stdout) environ = dict(os.environ.items()) + environ.setdefault('PATH_INFO', '') environ['wsgi.input'] = sys.stdin environ['wsgi.errors'] = sys.stderr environ['wsgi.version'] = (1, 0) @@ -61,13 +62,4 @@ headers_set[:] = [status, response_headers] return write - result = application(environ, start_response) - try: - for data in result: - if data: # don't send headers until body appears - write(data) - if not headers_sent: - write('') # send headers now if body was empty - finally: - if hasattr(result,'close'): - result.close() + application(environ, start_response)
--- a/mercurial/hook.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/hook.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,7 +6,7 @@ # of the GNU General Public License, incorporated herein by reference. from i18n import _ -import util +import util, os, sys def _pythonhook(ui, repo, name, hname, funcname, args, throw): '''call python hook. hook is callable object, looked up as @@ -71,7 +71,11 @@ def _exthook(ui, repo, name, cmd, args, throw): ui.note(_("running hook %s: %s\n") % (name, cmd)) env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()]) - r = util.system(cmd, environ=env, cwd=repo.root) + if repo: + cwd = repo.root + else: + cwd = os.getcwd() + r = util.system(cmd, environ=env, cwd=cwd) if r: desc, r = util.explain_exit(r) if throw: @@ -79,8 +83,18 @@ ui.warn(_('warning: %s hook %s\n') % (name, desc)) return r +_redirect = False +def redirect(state): + _redirect = state + def hook(ui, repo, name, throw=False, **args): r = False + + if _redirect: + # temporarily redirect stdout to stderr + oldstdout = os.dup(sys.stdout.fileno()) + os.dup2(sys.stderr.fileno(), sys.stdout.fileno()) + hooks = [(hname, cmd) for hname, cmd in ui.configitems("hooks") if hname.split(".", 1)[0] == name and cmd] hooks.sort() @@ -94,3 +108,6 @@ r = _exthook(ui, repo, hname, cmd, args, throw) or r return r + if _redirect: + os.dup2(oldstdout, sys.stdout.fileno()) + os.close(oldstdout)
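The redirection added above saves file descriptor 1, points it at stderr while hooks run, and restores it afterwards so hook output cannot corrupt whatever protocol is being written to stdout. A self-contained sketch of the same idea as a helper; note that assigning a module-level flag such as _redirect from inside a function would also need a global statement, which the sketch sidesteps by keeping everything local:

    import os, sys

    def run_with_stdout_redirected(func, *args, **kwargs):
        """Run func while fd 1 is temporarily pointed at stderr."""
        oldstdout = os.dup(sys.stdout.fileno())      # save fd 1
        os.dup2(sys.stderr.fileno(), sys.stdout.fileno())
        try:
            return func(*args, **kwargs)
        finally:
            os.dup2(oldstdout, sys.stdout.fileno())  # restore fd 1
            os.close(oldstdout)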
--- a/mercurial/httprepo.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/httprepo.py Fri Feb 08 11:55:17 2008 +0100 @@ -248,7 +248,7 @@ # will take precedence if found, so drop them for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]: try: - if os.environ.has_key(env): + if env in os.environ: del os.environ[env] except OSError: pass @@ -339,7 +339,7 @@ version = proto.split('-', 1)[1] version_info = tuple([int(n) for n in version.split('.')]) except ValueError: - raise repo.RepoError(_("'%s' sent a broken Content-type " + raise repo.RepoError(_("'%s' sent a broken Content-Type " "header (%s)") % (self._url, proto)) if version_info > (0, 1): raise repo.RepoError(_("'%s' uses newer protocol %s") % @@ -424,7 +424,7 @@ try: rfp = self.do_cmd( 'unbundle', data=fp, - headers={'content-type': 'application/octet-stream'}, + headers={'Content-Type': 'application/octet-stream'}, heads=' '.join(map(hex, heads))) try: ret = int(rfp.readline())
--- a/mercurial/ignore.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/ignore.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,18 +6,21 @@ # of the GNU General Public License, incorporated herein by reference. from i18n import _ -import util +import util, re + +_commentre = None def _parselines(fp): for line in fp: - if not line.endswith('\n'): - line += '\n' - escape = False - for i in xrange(len(line)): - if escape: escape = False - elif line[i] == '\\': escape = True - elif line[i] == '#': break - line = line[:i].rstrip() + if "#" in line: + global _commentre + if not _commentre: + _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*') + # remove comments prefixed by an even number of escapes + line = _commentre.sub(r'\1', line) + # fixup properly escaped comments that survived the above + line = line.replace("\\#", "#") + line = line.rstrip() if line: yield line
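The regular expression introduced above treats '#' as a comment start only when it is preceded by an even number of backslashes, and an escaped '\#' survives as a literal '#'. A standalone sketch of that behaviour with a few examples:

    import re

    _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')

    def stripcomment(line):
        if "#" in line:
            line = _commentre.sub(r'\1', line)   # drop unescaped comments
            line = line.replace("\\#", "#")      # unescape the rest
        return line.rstrip()

    assert stripcomment("*.pyc # compiled files") == "*.pyc"
    assert stripcomment("foo\\#bar") == "foo#bar"
    assert stripcomment("foo\\\\# comment") == "foo\\\\"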
--- a/mercurial/keepalive.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/keepalive.py Fri Feb 08 11:55:17 2008 +0100 @@ -129,7 +129,7 @@ def add(self, host, connection, ready): self._lock.acquire() try: - if not self._hostmap.has_key(host): self._hostmap[host] = [] + if not host in self._hostmap: self._hostmap[host] = [] self._hostmap[host].append(connection) self._connmap[connection] = host self._readymap[connection] = ready @@ -159,7 +159,7 @@ conn = None self._lock.acquire() try: - if self._hostmap.has_key(host): + if host in self._hostmap: for c in self._hostmap[host]: if self._readymap[c]: self._readymap[c] = 0
--- a/mercurial/localrepo.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/localrepo.py Fri Feb 08 11:55:17 2008 +0100 @@ -10,7 +10,7 @@ import repo, changegroup import changelog, dirstate, filelog, manifest, context, weakref import re, lock, transaction, tempfile, stat, errno, ui -import os, revlog, time, util, extensions, hook +import os, revlog, time, util, extensions, hook, inspect class localrepository(repo.repository): capabilities = util.set(('lookup', 'changegroupsubset')) @@ -79,9 +79,11 @@ pass self.tagscache = None + self._tagstypecache = None self.branchcache = None self.nodetagscache = None self.filterpats = {} + self._datafilters = {} self._transref = self._lockref = self._wlockref = None def __getattr__(self, name): @@ -199,8 +201,9 @@ return self.tagscache globaltags = {} + tagtypes = {} - def readtags(lines, fn): + def readtags(lines, fn, tagtype): filetags = {} count = 0 @@ -235,7 +238,9 @@ for k, nh in filetags.items(): if k not in globaltags: globaltags[k] = nh + tagtypes[k] = tagtype continue + # we prefer the global tag if: # it supercedes us OR # mutual supercedes and it has a higher rank @@ -247,31 +252,47 @@ an = bn ah.extend([n for n in bh if n not in ah]) globaltags[k] = an, ah + tagtypes[k] = tagtype # read the tags file from each head, ending with the tip f = None for rev, node, fnode in self._hgtagsnodes(): f = (f and f.filectx(fnode) or self.filectx('.hgtags', fileid=fnode)) - readtags(f.data().splitlines(), f) + readtags(f.data().splitlines(), f, "global") try: data = util.fromlocal(self.opener("localtags").read()) # localtags are stored in the local character set # while the internal tag table is stored in UTF-8 - readtags(data.splitlines(), "localtags") + readtags(data.splitlines(), "localtags", "local") except IOError: pass self.tagscache = {} + self._tagstypecache = {} for k,nh in globaltags.items(): n = nh[0] if n != nullid: self.tagscache[k] = n + self._tagstypecache[k] = tagtypes[k] self.tagscache['tip'] = self.changelog.tip() return self.tagscache + def tagtype(self, tagname): + ''' + return the type of the given tag. 
result can be: + + 'local' : a local tag + 'global' : a global tag + None : tag does not exist + ''' + + self.tags() + + return self._tagstypecache.get(tagname) + def _hgtagsnodes(self): heads = self.heads() heads.reverse() @@ -466,17 +487,31 @@ l = [] for pat, cmd in self.ui.configitems(filter): mf = util.matcher(self.root, "", [pat], [], [])[1] - l.append((mf, cmd)) + fn = None + for name, filterfn in self._datafilters.iteritems(): + if cmd.startswith(name): + fn = filterfn + break + if not fn: + fn = lambda s, c, **kwargs: util.filter(s, c) + # Wrap old filters not supporting keyword arguments + if not inspect.getargspec(fn)[2]: + oldfn = fn + fn = lambda s, c, **kwargs: oldfn(s, c) + l.append((mf, fn, cmd)) self.filterpats[filter] = l - for mf, cmd in self.filterpats[filter]: + for mf, fn, cmd in self.filterpats[filter]: if mf(filename): self.ui.debug(_("filtering %s through %s\n") % (filename, cmd)) - data = util.filter(data, cmd) + data = fn(data, cmd, ui=self.ui, repo=self, filename=filename) break return data + def adddatafilter(self, name, filter): + self._datafilters[name] = filter + def wread(self, filename): if self._link(filename): data = os.readlink(self.wjoin(filename)) @@ -486,16 +521,12 @@ def wwrite(self, filename, data, flags): data = self._filter("decode", filename, data) - if "l" in flags: - self.wopener.symlink(data, filename) - else: - try: - if self._link(filename): - os.unlink(self.wjoin(filename)) - except OSError: - pass - self.wopener(filename, 'w').write(data) - util.set_exec(self.wjoin(filename), "x" in flags) + try: + os.unlink(self.wjoin(filename)) + except OSError: + pass + self.wopener(filename, 'w').write(data) + util.set_flags(self.wjoin(filename), flags) def wwritedata(self, filename, data): return self._filter("decode", filename, data) @@ -504,15 +535,21 @@ if self._transref and self._transref(): return self._transref().nest() + # abort here if the journal already exists + if os.path.exists(self.sjoin("journal")): + raise repo.RepoError(_("journal already exists - run hg recover")) + # save dirstate for rollback try: ds = self.opener("dirstate").read() except IOError: ds = "" self.opener("journal.dirstate", "w").write(ds) + self.opener("journal.branch", "w").write(self.dirstate.branch()) renames = [(self.sjoin("journal"), self.sjoin("undo")), - (self.join("journal.dirstate"), self.join("undo.dirstate"))] + (self.join("journal.dirstate"), self.join("undo.dirstate")), + (self.join("journal.branch"), self.join("undo.branch"))] tr = transaction.transaction(self.ui.warn, self.sopener, self.sjoin("journal"), aftertrans(renames)) @@ -542,6 +579,8 @@ self.ui.status(_("rolling back last transaction\n")) transaction.rollback(self.sopener, self.sjoin("undo")) util.rename(self.join("undo.dirstate"), self.join("dirstate")) + branch = self.opener("undo.branch").read() + self.dirstate.setbranch(branch) self.invalidate() self.dirstate.invalidate() else: @@ -554,6 +593,7 @@ if hasattr(self, a): self.__delattr__(a) self.tagscache = None + self._tagstypecache = None self.nodetagscache = None def _lock(self, lockname, wait, releasefn, acquirefn, desc): @@ -662,6 +702,7 @@ match=util.always, force=False, force_editor=False, p1=None, p2=None, extra={}, empty_ok=False): wlock = lock = tr = None + valid = 0 # don't save the dirstate if this isn't set if files: files = util.unique(files) try: @@ -750,6 +791,9 @@ if old_exec != new_exec or old_link != new_link: changed.append(f) m1.set(f, new_exec, new_link) + if use_dirstate: + self.dirstate.normal(f) + except (OSError, 
IOError): if use_dirstate: self.ui.warn(_("trouble committing %s!\n") % f) @@ -781,11 +825,14 @@ if text: edittext.append(text) edittext.append("") + edittext.append(_("HG: Enter commit message." + " Lines beginning with 'HG:' are removed.")) + edittext.append("HG: --") edittext.append("HG: user: %s" % user) if p2 != nullid: edittext.append("HG: branch merge") if branchname: - edittext.append("HG: branch %s" % util.tolocal(branchname)) + edittext.append("HG: branch '%s'" % util.tolocal(branchname)) edittext.extend(["HG: changed %s" % f for f in changed]) edittext.extend(["HG: removed %s" % f for f in removed]) if not changed and not remove: @@ -805,7 +852,7 @@ while lines and not lines[0]: del lines[0] if not lines: - return None + raise util.Abort(_("empty commit message")) text = '\n'.join(lines) n = self.changelog.add(mn, changed + removed, text, trp, p1, p2, @@ -820,14 +867,15 @@ if use_dirstate or update_dirstate: self.dirstate.setparents(n) if use_dirstate: - for f in new: - self.dirstate.normal(f) for f in removed: self.dirstate.forget(f) + valid = 1 # our dirstate updates are complete self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2) return n finally: + if not valid: # don't save our updated dirstate + self.dirstate.invalidate() del tr, lock, wlock def walk(self, node=None, files=[], match=util.always, badmatch=None): @@ -966,7 +1014,7 @@ mf2keys.sort() getnode = lambda fn: mf1.get(fn, nullid) for fn in mf2keys: - if mf1.has_key(fn): + if fn in mf1: if (mf1.flags(fn) != mf2.flags(fn) or (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, getnode)))): @@ -987,12 +1035,14 @@ def add(self, list): wlock = self.wlock() try: + rejected = [] for f in list: p = self.wjoin(f) try: st = os.lstat(p) except: self.ui.warn(_("%s does not exist!\n") % f) + rejected.append(f) continue if st.st_size > 10000000: self.ui.warn(_("%s: files over 10MB may cause memory and" @@ -1002,12 +1052,14 @@ if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)): self.ui.warn(_("%s not added: only files and symlinks " "supported currently\n") % f) + rejected.append(p) elif self.dirstate[f] in 'amn': self.ui.warn(_("%s already tracked!\n") % f) elif self.dirstate[f] == 'r': self.dirstate.normallookup(f) else: self.dirstate.add(f) + return rejected finally: del wlock @@ -1468,14 +1520,15 @@ return remote.unbundle(cg, remote_heads, 'push') return ret[1] - def changegroupinfo(self, nodes): - self.ui.note(_("%d changesets found\n") % len(nodes)) + def changegroupinfo(self, nodes, source): + if self.ui.verbose or source == 'bundle': + self.ui.status(_("%d changesets found\n") % len(nodes)) if self.ui.debugflag: self.ui.debug(_("List of changesets:\n")) for node in nodes: self.ui.debug("%s\n" % hex(node)) - def changegroupsubset(self, bases, heads, source): + def changegroupsubset(self, bases, heads, source, extranodes=None): """This function generates a changegroup consisting of all the nodes that are descendents of any of the bases, and ancestors of any of the heads. @@ -1485,7 +1538,15 @@ is non-trivial. Another wrinkle is doing the reverse, figuring out which changeset in - the changegroup a particular filenode or manifestnode belongs to.""" + the changegroup a particular filenode or manifestnode belongs to. + + The caller can specify some nodes that must be included in the + changegroup using the extranodes argument. 
It should be a dict + where the keys are the filenames (or 1 for the manifest), and the + values are lists of (node, linknode) tuples, where node is a wanted + node and linknode is the changelog node that should be transmitted as + the linkrev. + """ self.hook('preoutgoing', throw=True, source=source) @@ -1495,7 +1556,7 @@ # msng is short for missing - compute the list of changesets in this # changegroup. msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads) - self.changegroupinfo(msng_cl_lst) + self.changegroupinfo(msng_cl_lst, source) # Some bases may turn out to be superfluous, and some heads may be # too. nodesbetween will return the minimal set of bases and heads # necessary to re-create the changegroup. @@ -1678,6 +1739,15 @@ return msngset[fnode] return lookup_filenode_link + # Add the nodes that were explicitly requested. + def add_extra_nodes(name, nodes): + if not extranodes or name not in extranodes: + return + + for node, linknode in extranodes[name]: + if node not in nodes: + nodes[node] = linknode + # Now that we have all theses utility functions to help out and # logically divide up the task, generate the group. def gengroup(): @@ -1693,6 +1763,7 @@ # The list of manifests has been collected by the generator # calling our functions back. prune_manifests() + add_extra_nodes(1, msng_mnfst_set) msng_mnfst_lst = msng_mnfst_set.keys() # Sort the manifestnodes by revision number. msng_mnfst_lst.sort(cmp_by_rev_func(mnfst)) @@ -1708,14 +1779,23 @@ msng_mnfst_lst = None msng_mnfst_set.clear() + if extranodes: + for fname in extranodes: + if isinstance(fname, int): + continue + add_extra_nodes(fname, + msng_filenode_set.setdefault(fname, {})) + changedfiles[fname] = 1 changedfiles = changedfiles.keys() changedfiles.sort() # Go through all our files in order sorted by name. for fname in changedfiles: filerevlog = self.file(fname) + if filerevlog.count() == 0: + raise util.Abort(_("empty or missing revlog for %s") % fname) # Toss out the filenodes that the recipient isn't really # missing. - if msng_filenode_set.has_key(fname): + if fname in msng_filenode_set: prune_filenodes(fname, filerevlog) msng_filenode_lst = msng_filenode_set[fname].keys() else: @@ -1734,7 +1814,7 @@ lookup_filenode_link_func(fname)) for chnk in group: yield chnk - if msng_filenode_set.has_key(fname): + if fname in msng_filenode_set: # Don't need this anymore, toss it to free memory. del msng_filenode_set[fname] # Signal that no more groups are left. @@ -1757,7 +1837,7 @@ cl = self.changelog nodes = cl.nodesbetween(basenodes, None)[0] revset = dict.fromkeys([cl.rev(n) for n in nodes]) - self.changegroupinfo(nodes) + self.changegroupinfo(nodes, source) def identity(x): return x @@ -1797,6 +1877,8 @@ for fname in changedfiles: filerevlog = self.file(fname) + if filerevlog.count() == 0: + raise util.Abort(_("empty or missing revlog for %s") % fname) nodeiter = gennodelst(filerevlog) nodeiter = list(nodeiter) if nodeiter: @@ -1813,7 +1895,7 @@ return util.chunkbuffer(gengroup()) - def addchangegroup(self, source, srctype, url): + def addchangegroup(self, source, srctype, url, emptyok=False): """add changegroup to repo. return values: @@ -1849,7 +1931,7 @@ self.ui.status(_("adding changesets\n")) cor = cl.count() - 1 chunkiter = changegroup.chunkiter(source) - if cl.addgroup(chunkiter, csmap, trp, 1) is None: + if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok: raise util.Abort(_("received changelog group is empty")) cnr = cl.count() - 1 changesets = cnr - cor
--- a/mercurial/lsprof.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/lsprof.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,28 +1,19 @@ -# this is copied from the lsprof distro because somehow -# it is not installed by distutils -# -# small modifications made +#! /usr/bin/env python import sys -try: - from _lsprof import Profiler, profiler_entry, profiler_subentry -except ImportError, inst: - import packagescan - if packagescan.scan_in_progress: - raise packagescan.SkipPackage('_lsprof not available') - raise +from _lsprof import Profiler, profiler_entry, profiler_subentry __all__ = ['profile', 'Stats'] def profile(f, *args, **kwds): """XXX docstring""" p = Profiler() - p.enable(subcalls=True) + p.enable(subcalls=True, builtins=True) try: - ret = f(*args, **kwds) + f(*args, **kwds) finally: p.disable() - return ret, Stats(p.getstats()) + return Stats(p.getstats()) class Stats(object): @@ -49,14 +40,14 @@ d = self.data if top is not None: d = d[:top] - cols = "% 12s %11.4f %11.4f %s\n" - hcols = "% 12s %12s %12s %s\n" - cols2 = "+%12s %11.4f %11.4f + %s\n" - file.write(hcols % ("CallCount", "Total(s)", - "Inline(s)", "module:lineno(function)")) + cols = "% 12s %12s %11.4f %11.4f %s\n" + hcols = "% 12s %12s %12s %12s %s\n" + cols2 = "+%12s %12s %11.4f %11.4f + %s\n" + file.write(hcols % ("CallCount", "Recursive", "Total(ms)", + "Inline(ms)", "module:lineno(function)")) count = 0 for e in d: - file.write(cols % (e.callcount, e.totaltime, + file.write(cols % (e.callcount, e.reccallcount, e.totaltime, e.inlinetime, label(e.code))) count += 1 if limit is not None and count == limit: @@ -64,7 +55,7 @@ ccount = 0 if e.calls: for se in e.calls: - file.write(cols % ("+%s" % se.callcount, + file.write(cols % ("+%s" % se.callcount, se.reccallcount, se.totaltime, se.inlinetime, "+%s" % label(se.code))) count += 1 @@ -83,11 +74,11 @@ e = self.data[i] if not isinstance(e.code, str): self.data[i] = type(e)((label(e.code),) + e[1:]) - if e.calls: - for j in range(len(e.calls)): - se = e.calls[j] - if not isinstance(se.code, str): - e.calls[j] = type(se)((label(se.code),) + se[1:]) + if e.calls: + for j in range(len(e.calls)): + se = e.calls[j] + if not isinstance(se.code, str): + e.calls[j] = type(se)((label(se.code),) + se[1:]) _fn2mod = {} @@ -97,7 +88,7 @@ try: mname = _fn2mod[code.co_filename] except KeyError: - for k, v in sys.modules.iteritems(): + for k, v in sys.modules.items(): if v is None: continue if not hasattr(v, '__file__'):
--- a/mercurial/mail.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/mail.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,11 +6,10 @@ # of the GNU General Public License, incorporated herein by reference. from i18n import _ -import os, smtplib, templater, util, socket +import os, smtplib, util, socket def _smtp(ui): - '''send mail using smtp.''' - + '''build an smtp connection and return a function to send mail''' local_hostname = ui.config('smtp', 'local_hostname') s = smtplib.SMTP(local_hostname=local_hostname) mailhost = ui.config('smtp', 'host') @@ -30,50 +29,48 @@ s.ehlo() username = ui.config('smtp', 'username') password = ui.config('smtp', 'password') + if username and not password: + password = ui.getpass() if username and password: ui.note(_('(authenticating to mail server as %s)\n') % (username)) s.login(username, password) - return s -class _sendmail(object): - '''send mail using sendmail.''' + def send(sender, recipients, msg): + try: + return s.sendmail(sender, recipients, msg) + except smtplib.SMTPRecipientsRefused, inst: + recipients = [r[1] for r in inst.recipients.values()] + raise util.Abort('\n' + '\n'.join(recipients)) + except smtplib.SMTPException, inst: + raise util.Abort(inst) - def __init__(self, ui, program): - self.ui = ui - self.program = program + return send - def sendmail(self, sender, recipients, msg): - cmdline = '%s -f %s %s' % ( - self.program, templater.email(sender), - ' '.join(map(templater.email, recipients))) - self.ui.note(_('sending mail: %s\n') % cmdline) - fp = os.popen(cmdline, 'w') - fp.write(msg) - ret = fp.close() - if ret: - raise util.Abort('%s %s' % ( - os.path.basename(self.program.split(None, 1)[0]), - util.explain_exit(ret)[0])) +def _sendmail(ui, sender, recipients, msg): + '''send mail using sendmail.''' + program = ui.config('email', 'method') + cmdline = '%s -f %s %s' % (program, util.email(sender), + ' '.join(map(util.email, recipients))) + ui.note(_('sending mail: %s\n') % cmdline) + fp = os.popen(cmdline, 'w') + fp.write(msg) + ret = fp.close() + if ret: + raise util.Abort('%s %s' % ( + os.path.basename(program.split(None, 1)[0]), + util.explain_exit(ret)[0])) def connect(ui): - '''make a mail connection. object returned has one method, sendmail. + '''make a mail connection. return a function to send mail. call as sendmail(sender, list-of-recipients, msg).''' - - method = ui.config('email', 'method', 'smtp') - if method == 'smtp': + if ui.config('email', 'method', 'smtp') == 'smtp': return _smtp(ui) - - return _sendmail(ui, method) + return lambda s, r, m: _sendmail(ui, s, r, m) def sendmail(ui, sender, recipients, msg): - try: - return connect(ui).sendmail(sender, recipients, msg) - except smtplib.SMTPRecipientsRefused, inst: - recipients = [r[1] for r in inst.recipients.values()] - raise util.Abort('\n' + '\n'.join(recipients)) - except smtplib.SMTPException, inst: - raise util.Abort(inst) + send = connect(ui) + return send(sender, recipients, msg) def validateconfig(ui): '''determine if we have enough config data to try sending email.'''
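After this refactor connect() returns a bare send function rather than an object with a sendmail() method. A usage sketch, assuming an [smtp] section or an email.method sendmail program is configured in hgrc; the addresses and message are placeholders:

    from mercurial import mail, ui

    u = ui.ui()
    send = mail.connect(u)            # smtp connection or sendmail wrapper
    send('me@example.com', ['you@example.com'],
         'Subject: hello\n\nmessage body\n')

    # equivalently, the one-shot helper:
    mail.sendmail(u, 'me@example.com', ['you@example.com'],
                  'Subject: hello\n\nmessage body\n')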
--- a/mercurial/mdiff.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/mdiff.py Fri Feb 08 11:55:17 2008 +0100 @@ -30,7 +30,7 @@ defaults = { 'context': 3, 'text': False, - 'showfunc': True, + 'showfunc': False, 'git': False, 'nodates': False, 'ignorews': False,
--- a/mercurial/merge.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/merge.py Fri Feb 08 11:55:17 2008 +0100 @@ -7,62 +7,7 @@ from node import * from i18n import _ -import errno, util, os, tempfile, context, heapq - -def filemerge(repo, fw, fd, fo, wctx, mctx): - """perform a 3-way merge in the working directory - - fw = original filename in the working directory - fd = destination filename in the working directory - fo = filename in other parent - wctx, mctx = working and merge changecontexts - """ - - def temp(prefix, ctx): - pre = "%s~%s." % (os.path.basename(ctx.path()), prefix) - (fd, name) = tempfile.mkstemp(prefix=pre) - data = repo.wwritedata(ctx.path(), ctx.data()) - f = os.fdopen(fd, "wb") - f.write(data) - f.close() - return name - - fcm = wctx.filectx(fw) - fcmdata = wctx.filectx(fd).data() - fco = mctx.filectx(fo) - - if not fco.cmp(fcmdata): # files identical? - return None - - fca = fcm.ancestor(fco) - if not fca: - fca = repo.filectx(fw, fileid=nullrev) - a = repo.wjoin(fd) - b = temp("base", fca) - c = temp("other", fco) - - if fw != fo: - repo.ui.status(_("merging %s and %s\n") % (fw, fo)) - else: - repo.ui.status(_("merging %s\n") % fw) - - repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca)) - - cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge") - or "hgmerge") - r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root, - environ={'HG_FILE': fd, - 'HG_MY_NODE': str(wctx.parents()[0]), - 'HG_OTHER_NODE': str(mctx), - 'HG_MY_ISLINK': fcm.islink(), - 'HG_OTHER_ISLINK': fco.islink(), - 'HG_BASE_ISLINK': fca.islink(),}) - if r: - repo.ui.warn(_("merging %s failed!\n") % fd) - - os.unlink(b) - os.unlink(c) - return r +import errno, util, os, heapq, filemerge def checkunknown(wctx, mctx): "check for collisions between unknown files and files in mctx" @@ -210,21 +155,23 @@ for f in u2: checkcopies(ctx(f, m2[f]), m1, ma) - d2 = {} + diverge2 = {} for of, fl in diverge.items(): - for f in fl: - fo = list(fl) - fo.remove(f) - d2[f] = (of, fo) + if len(fl) == 1: + del diverge[of] # not actually divergent + else: + diverge2.update(dict.fromkeys(fl)) # reverse map for below if fullcopy: repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n")) for f in fullcopy: note = "" if f in copy: note += "*" - if f in diverge: note += "!" + if f in diverge2: note += "!" 
repo.ui.debug(_(" %s -> %s %s\n") % (f, fullcopy[f], note)) + del diverge2 + if not fullcopy or not repo.ui.configbool("merge", "followdirs", True): return copy, diverge @@ -356,13 +303,23 @@ if not f2: f2 = f fa = f - a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2) - if ((a^b) | (a^c)) ^ a: - return 'x' - a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2) - if ((a^b) | (a^c)) ^ a: - return 'l' - return '' + a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2) + if m == n: # flags agree + return m # unchanged + if m and n: # flags are set but don't agree + if not a: # both differ from parent + r = repo.ui.prompt( + _(" conflicting flags for %s\n" + "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n") + return r != "n" and r or '' + if m == a: + return n # changed from m to n + return m # changed from n to m + if m and m != a: # changed from a to m + return m + if n and n != a: # changed from a to n + return n + return '' # flag was cleared def act(msg, m, f, *args): repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m)) @@ -386,27 +343,31 @@ if partial and not partial(f): continue if f in m2: + if overwrite or backwards: + rflags = m2.flags(f) + else: + rflags = fmerge(f) # are files different? if n != m2[f]: a = ma.get(f, nullid) + # are we clobbering? + if overwrite: + act("clobbering", "g", f, rflags) + # or are we going back in time and clean? + elif backwards and not n[20:]: + act("reverting", "g", f, rflags) # are both different from the ancestor? - if not overwrite and n != a and m2[f] != a: - act("versions differ", "m", f, f, f, fmerge(f), False) - # are we clobbering? + elif n != a and m2[f] != a: + act("versions differ", "m", f, f, f, rflags, False) # is remote's version newer? - # or are we going back in time and clean? - elif overwrite or m2[f] != a or (backwards and not n[20:]): - act("remote is newer", "g", f, m2.flags(f)) + elif m2[f] != a: + act("remote is newer", "g", f, rflags) # local is newer, not overwrite, check mode bits - elif fmerge(f) != m1.flags(f): - act("update permissions", "e", f, m2.flags(f)) + elif m1.flags(f) != rflags: + act("update permissions", "e", f, rflags) # contents same, check mode bits - elif m1.flags(f) != m2.flags(f): - # are we clobbering? - # is remote's version newer? - # or are we going back? - if overwrite or fmerge(f) != m1.flags(f) or backwards: - act("update permissions", "e", f, m2.flags(f)) + elif m1.flags(f) != rflags: + act("update permissions", "e", f, rflags) elif f in copied: continue elif f in copy: @@ -498,7 +459,7 @@ removed += 1 elif m == "m": # merge f2, fd, flags, move = a[2:] - r = filemerge(repo, f, fd, f2, wctx, mctx) + r = filemerge.filemerge(repo, f, fd, f2, wctx, mctx) if r > 0: unresolved += 1 else: @@ -506,7 +467,7 @@ updated += 1 else: merged += 1 - util.set_exec(repo.wjoin(fd), "x" in flags) + util.set_flags(repo.wjoin(fd), flags) if f != fd and move and util.lexists(repo.wjoin(f)): repo.ui.debug(_("removing %s\n") % f) os.unlink(repo.wjoin(f)) @@ -535,7 +496,7 @@ repo.ui.warn(" %s\n" % nf) elif m == "e": # exec flags = a[2] - util.set_exec(repo.wjoin(f), flags) + util.set_flags(repo.wjoin(f), flags) return updated, merged, removed, unresolved @@ -612,7 +573,10 @@ try: node = repo.branchtags()[wc.branch()] except KeyError: - raise util.Abort(_("branch %s not found") % wc.branch()) + if wc.branch() == "default": # no default branch! 
+ node = repo.lookup("tip") # update to tip + else: + raise util.Abort(_("branch %s not found") % wc.branch()) overwrite = force and not branchmerge forcemerge = force and branchmerge pl = wc.parents()
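The reworked fmerge() above performs a three-way merge of a file's 'x'/'l' flags between the ancestor (a), the local manifest (m) and the remote manifest (n), prompting only when both sides changed the flag in different ways. A testable sketch of the same decision table, with the interactive prompt replaced by a parameter:

    def mergeflags(a, m, n, on_conflict=''):
        if m == n:                 # flags agree
            return m               # unchanged
        if m and n:                # both set, but they disagree
            if not a:              # both sides changed away from the ancestor
                return on_conflict # real code asks: (n)one, e(x)ec or sym(l)ink
            if m == a:
                return n           # only the remote side changed
            return m               # only the local side changed
        if m and m != a:           # local added or changed a flag
            return m
        if n and n != a:           # remote added or changed a flag
            return n
        return ''                  # the flag was cleared on one side

    assert mergeflags('', 'x', '') == 'x'    # local set the exec bit
    assert mergeflags('x', 'x', '') == ''    # remote cleared the exec bit
    assert mergeflags('x', 'l', 'x') == 'l'  # local turned it into a symlink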
--- a/mercurial/patch.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/patch.py Fri Feb 08 11:55:17 2008 +0100 @@ -302,24 +302,26 @@ contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)') class patchfile: - def __init__(self, ui, fname): + def __init__(self, ui, fname, missing=False): self.fname = fname self.ui = ui - try: - fp = file(fname, 'rb') - self.lines = fp.readlines() - self.exists = True - except IOError: + self.lines = [] + self.exists = False + self.missing = missing + if not missing: + try: + fp = file(fname, 'rb') + self.lines = fp.readlines() + self.exists = True + except IOError: + pass + else: + self.ui.warn(_("unable to find '%s' for patching\n") % self.fname) + + if not self.exists: dirname = os.path.dirname(fname) if dirname and not os.path.isdir(dirname): - dirs = dirname.split(os.path.sep) - d = "" - for x in dirs: - d = os.path.join(d, x) - if not os.path.isdir(d): - os.mkdir(d) - self.lines = [] - self.exists = False + os.makedirs(dirname) self.hash = {} self.dirty = 0 @@ -427,6 +429,10 @@ if reverse: h.reverse() + if self.missing: + self.rej.append(h) + return -1 + if self.exists and h.createfile(): self.ui.warn(_("file %s already exists\n") % self.fname) self.rej.append(h) @@ -799,31 +805,32 @@ nulla = afile_orig == "/dev/null" nullb = bfile_orig == "/dev/null" afile = pathstrip(afile_orig, strip) - gooda = os.path.exists(afile) and not nulla + gooda = not nulla and os.path.exists(afile) bfile = pathstrip(bfile_orig, strip) if afile == bfile: goodb = gooda else: - goodb = os.path.exists(bfile) and not nullb + goodb = not nullb and os.path.exists(bfile) createfunc = hunk.createfile if reverse: createfunc = hunk.rmfile - if not goodb and not gooda and not createfunc(): - raise PatchError(_("unable to find %s or %s for patching") % - (afile, bfile)) - if gooda and goodb: - fname = bfile - if afile in bfile: + missing = not goodb and not gooda and not createfunc() + fname = None + if not missing: + if gooda and goodb: + fname = (afile in bfile) and afile or bfile + elif gooda: fname = afile - elif gooda: - fname = afile - elif not nullb: - fname = bfile - if afile in bfile: + + if not fname: + if not nullb: + fname = (afile in bfile) and afile or bfile + elif not nulla: fname = afile - elif not nulla: - fname = afile - return fname + else: + raise PatchError(_("undefined source and destination files")) + + return fname, missing class linereader: # simple class to allow pushing lines back into the input stream @@ -841,14 +848,16 @@ return l return self.fp.readline() -def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False, - rejmerge=None, updatedir=None): - """reads a patch from fp and tries to apply it. The dict 'changed' is - filled in with all of the filenames changed by the patch. Returns 0 - for a clean patch, -1 if any rejects were found and 1 if there was - any fuzz.""" +def iterhunks(ui, fp, sourcefile=None): + """Read a patch and yield the following events: + - ("file", afile, bfile, firsthunk): select a new target file. + - ("hunk", hunk): a new hunk is ready to be applied, follows a + "file" event. + - ("git", gitchanges): current diff is in git format, gitchanges + maps filenames to gitpatch records. Unique event. + """ - def scangitpatch(fp, firstline, cwd=None): + def scangitpatch(fp, firstline): '''git patches can modify a file, then copy that file to a new file, but expect the source to be the unmodified form. 
So we scan the patch looking for that case so we can do @@ -861,46 +870,28 @@ fp = cStringIO.StringIO(fp.read()) (dopatch, gitpatches) = readgitpatch(fp, firstline) - for gp in gitpatches: - if gp.op in ('COPY', 'RENAME'): - copyfile(gp.oldpath, gp.path, basedir=cwd) - fp.seek(pos) return fp, dopatch, gitpatches + changed = {} current_hunk = None - current_file = None afile = "" bfile = "" state = None hunknum = 0 - rejects = 0 + emitfile = False git = False gitre = re.compile('diff --git (a/.*) (b/.*)') # our states BFILE = 1 - err = 0 context = None lr = linereader(fp) dopatch = True gitworkdone = False - def getpatchfile(afile, bfile, hunk): - try: - if sourcefile: - targetfile = patchfile(ui, sourcefile) - else: - targetfile = selectfile(afile, bfile, hunk, - strip, reverse) - targetfile = patchfile(ui, targetfile) - return targetfile - except PatchError, err: - ui.warn(str(err) + '\n') - return None - while True: newfile = False x = lr.readline() @@ -909,11 +900,7 @@ if current_hunk: if x.startswith('\ '): current_hunk.fix_newline() - ret = current_file.apply(current_hunk, reverse) - if ret >= 0: - changed.setdefault(current_file.fname, (None, None)) - if ret > 0: - err = 1 + yield 'hunk', current_hunk current_hunk = None gitworkdone = False if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or @@ -928,21 +915,15 @@ current_hunk = None continue hunknum += 1 - if not current_file: - current_file = getpatchfile(afile, bfile, current_hunk) - if not current_file: - current_file, current_hunk = None, None - rejects += 1 - continue + if emitfile: + emitfile = False + yield 'file', (afile, bfile, current_hunk) elif state == BFILE and x.startswith('GIT binary patch'): current_hunk = binhunk(changed[bfile[2:]][1]) hunknum += 1 - if not current_file: - current_file = getpatchfile(afile, bfile, current_hunk) - if not current_file: - current_file, current_hunk = None, None - rejects += 1 - continue + if emitfile: + emitfile = False + yield 'file', (afile, bfile, current_hunk) current_hunk.extract(fp) elif x.startswith('diff --git'): # check for git diff, scanning the whole patch file if needed @@ -952,6 +933,7 @@ if not git: git = True fp, dopatch, gitpatches = scangitpatch(fp, x) + yield 'git', gitpatches for gp in gitpatches: changed[gp.path] = (gp.op, gp) # else error? @@ -988,36 +970,79 @@ bfile = parsefilename(l2) if newfile: - if current_file: - current_file.close() - if rejmerge: - rejmerge(current_file) - rejects += len(current_file.rej) + emitfile = True state = BFILE - current_file = None hunknum = 0 if current_hunk: if current_hunk.complete(): + yield 'hunk', current_hunk + else: + raise PatchError(_("malformed patch %s %s") % (afile, + current_hunk.desc)) + + if hunknum == 0 and dopatch and not gitworkdone: + raise NoHunks + +def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False, + rejmerge=None, updatedir=None): + """reads a patch from fp and tries to apply it. The dict 'changed' is + filled in with all of the filenames changed by the patch. 
Returns 0 + for a clean patch, -1 if any rejects were found and 1 if there was + any fuzz.""" + + rejects = 0 + err = 0 + current_file = None + gitpatches = None + + def closefile(): + if not current_file: + return 0 + current_file.close() + if rejmerge: + rejmerge(current_file) + return len(current_file.rej) + + for state, values in iterhunks(ui, fp, sourcefile): + if state == 'hunk': + if not current_file: + continue + current_hunk = values ret = current_file.apply(current_hunk, reverse) if ret >= 0: changed.setdefault(current_file.fname, (None, None)) if ret > 0: err = 1 + elif state == 'file': + rejects += closefile() + afile, bfile, first_hunk = values + try: + if sourcefile: + current_file = patchfile(ui, sourcefile) + else: + current_file, missing = selectfile(afile, bfile, first_hunk, + strip, reverse) + current_file = patchfile(ui, current_file, missing) + except PatchError, err: + ui.warn(str(err) + '\n') + current_file, current_hunk = None, None + rejects += 1 + continue + elif state == 'git': + gitpatches = values + for gp in gitpatches: + if gp.op in ('COPY', 'RENAME'): + copyfile(gp.oldpath, gp.path) + changed[gp.path] = (gp.op, gp) else: - fname = current_file and current_file.fname or None - raise PatchError(_("malformed patch %s %s") % (fname, - current_hunk.desc)) - if current_file: - current_file.close() - if rejmerge: - rejmerge(current_file) - rejects += len(current_file.rej) - if updatedir and git: + raise util.Abort(_('unsupported parser state: %s') % state) + + rejects += closefile() + + if updatedir and gitpatches: updatedir(gitpatches) if rejects: return -1 - if hunknum == 0 and dopatch and not gitworkdone: - raise NoHunks return err def diffopts(ui, opts={}, untrusted=False): @@ -1031,7 +1056,8 @@ showfunc=get('show_function', 'showfunc'), ignorews=get('ignore_all_space', 'ignorews'), ignorewsamount=get('ignore_space_change', 'ignorewsamount'), - ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines')) + ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'), + context=get('unified')) def updatedir(ui, repo, patches): '''Update dirstate after patch application according to metadata''' @@ -1061,16 +1087,17 @@ for f in patches: ctype, gp = patches[f] if gp and gp.mode: - x = gp.mode & 0100 != 0 - l = gp.mode & 020000 != 0 + flags = '' + if gp.mode & 0100: + flags = 'x' + elif gp.mode & 020000: + flags = 'l' dst = os.path.join(repo.root, gp.path) # patch won't create empty files if ctype == 'ADD' and not os.path.exists(dst): - repo.wwrite(gp.path, '', x and 'x' or '') + repo.wwrite(gp.path, '', flags) else: - util.set_link(dst, l) - if not l: - util.set_exec(dst, x) + util.set_flags(dst, flags) cmdutil.addremove(repo, cfiles) files = patches.keys() files.extend([r for r in removes if r not in files]) @@ -1346,7 +1373,8 @@ try: p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name) try: - for line in patchlines: print >> p.tochild, line + for line in patchlines: + p.tochild.write(line + "\n") p.tochild.close() if p.wait(): return fp = os.fdopen(fd, 'r')
--- a/mercurial/repair.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/repair.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,71 +6,86 @@ # This software may be used and distributed according to the terms # of the GNU General Public License, incorporated herein by reference. -import changegroup, revlog, os, commands +import changegroup, os +from node import * -def strip(ui, repo, rev, backup="all"): - def limitheads(chlog, stop): - """return the list of all nodes that have no children""" - p = {} - h = [] - stoprev = 0 - if stop in chlog.nodemap: - stoprev = chlog.rev(stop) +def _limitheads(cl, stoprev): + """return the list of all revs >= stoprev that have no children""" + seen = {} + heads = [] + + for r in xrange(cl.count() - 1, stoprev - 1, -1): + if r not in seen: + heads.append(r) + for p in cl.parentrevs(r): + seen[p] = 1 + return heads - for r in xrange(chlog.count() - 1, -1, -1): - n = chlog.node(r) - if n not in p: - h.append(n) - if n == stop: - break - if r < stoprev: - break - for pn in chlog.parents(n): - p[pn] = 1 - return h +def _bundle(repo, bases, heads, node, suffix, extranodes=None): + """create a bundle with the specified revisions as a backup""" + cg = repo.changegroupsubset(bases, heads, 'strip', extranodes) + backupdir = repo.join("strip-backup") + if not os.path.isdir(backupdir): + os.mkdir(backupdir) + name = os.path.join(backupdir, "%s-%s" % (short(node), suffix)) + repo.ui.warn("saving bundle to %s\n" % name) + return changegroup.writebundle(cg, name, "HG10BZ") - def bundle(repo, bases, heads, rev, suffix): - cg = repo.changegroupsubset(bases, heads, 'strip') - backupdir = repo.join("strip-backup") - if not os.path.isdir(backupdir): - os.mkdir(backupdir) - name = os.path.join(backupdir, "%s-%s" % (revlog.short(rev), suffix)) - ui.warn("saving bundle to %s\n" % name) - return changegroup.writebundle(cg, name, "HG10BZ") +def _collectfiles(repo, striprev): + """find out the filelogs affected by the strip""" + files = {} + + for x in xrange(striprev, repo.changelog.count()): + for name in repo.changectx(x).files(): + if name in files: + continue + files[name] = 1 + + files = files.keys() + files.sort() + return files - def stripall(revnum): - mm = repo.changectx(rev).manifest() - seen = {} +def _collectextranodes(repo, files, link): + """return the nodes that have to be saved before the strip""" + def collectone(revlog): + extra = [] + startrev = count = revlog.count() + # find the truncation point of the revlog + for i in xrange(0, count): + node = revlog.node(i) + lrev = revlog.linkrev(node) + if lrev >= link: + startrev = i + 1 + break + + # see if any revision after that point has a linkrev less than link + # (we have to manually save these guys) + for i in xrange(startrev, count): + node = revlog.node(i) + lrev = revlog.linkrev(node) + if lrev < link: + extra.append((node, cl.node(lrev))) - for x in xrange(revnum, repo.changelog.count()): - for f in repo.changectx(x).files(): - if f in seen: - continue - seen[f] = 1 - if f in mm: - filerev = mm[f] - else: - filerev = 0 - seen[f] = filerev - # we go in two steps here so the strip loop happens in a - # sensible order. When stripping many files, this helps keep - # our disk access patterns under control. 
- seen_list = seen.keys() - seen_list.sort() - for f in seen_list: - ff = repo.file(f) - filerev = seen[f] - if filerev != 0: - if filerev in ff.nodemap: - filerev = ff.rev(filerev) - else: - filerev = 0 - ff.strip(filerev, revnum) + return extra - chlog = repo.changelog + extranodes = {} + cl = repo.changelog + extra = collectone(repo.manifest) + if extra: + extranodes[1] = extra + for fname in files: + f = repo.file(fname) + extra = collectone(f) + if extra: + extranodes[fname] = extra + + return extranodes + +def strip(ui, repo, node, backup="all"): + cl = repo.changelog # TODO delete the undo files, and handle undo of merge sets - pp = chlog.parents(rev) - revnum = chlog.rev(rev) + pp = cl.parents(node) + striprev = cl.rev(node) # save is a list of all the branches we are truncating away # that we actually want to keep. changegroup will be used @@ -78,7 +93,7 @@ saveheads = [] savebases = {} - heads = limitheads(chlog, rev) + heads = [cl.node(r) for r in _limitheads(cl, striprev)] seen = {} # search through all the heads, finding those where the revision @@ -89,39 +104,48 @@ n = h while True: seen[n] = 1 - pp = chlog.parents(n) - if pp[1] != revlog.nullid: + pp = cl.parents(n) + if pp[1] != nullid: for p in pp: - if chlog.rev(p) > revnum and p not in seen: + if cl.rev(p) > striprev and p not in seen: heads.append(p) - if pp[0] == revlog.nullid: + if pp[0] == nullid: break - if chlog.rev(pp[0]) < revnum: + if cl.rev(pp[0]) < striprev: break n = pp[0] - if n == rev: + if n == node: break - r = chlog.reachable(h, rev) - if rev not in r: + r = cl.reachable(h, node) + if node not in r: saveheads.append(h) for x in r: - if chlog.rev(x) > revnum: + if cl.rev(x) > striprev: savebases[x] = 1 + files = _collectfiles(repo, striprev) + + extranodes = _collectextranodes(repo, files, striprev) + # create a changegroup for all the branches we need to keep if backup == "all": - bundle(repo, [rev], chlog.heads(), rev, 'backup') - if saveheads: - chgrpfile = bundle(repo, savebases.keys(), saveheads, rev, 'temp') - - stripall(revnum) + _bundle(repo, [node], cl.heads(), node, 'backup') + if saveheads or extranodes: + chgrpfile = _bundle(repo, savebases.keys(), saveheads, node, 'temp', + extranodes) - change = chlog.read(rev) - chlog.strip(revnum, revnum) - repo.manifest.strip(repo.manifest.rev(change[0]), revnum) - if saveheads: + cl.strip(striprev) + repo.manifest.strip(striprev) + for name in files: + f = repo.file(name) + f.strip(striprev) + + if saveheads or extranodes: ui.status("adding branch\n") - commands.unbundle(ui, repo, "file:%s" % chgrpfile, update=False) + f = open(chgrpfile, "rb") + gen = changegroup.readbundle(f, chgrpfile) + repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True) + f.close() if backup != "strip": os.unlink(chgrpfile)
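[Editor's note] For reference, the new _limitheads() walks the changelog from tip down to stoprev and keeps every revision that never appeared as a parent of a later one. The same idea over a plain parent table, as a standalone sketch (the parents list below is made up):

    def limitheads(parentrevs, stoprev):
        # parentrevs[r] lists the parent revisions of revision r
        seen = {}
        heads = []
        for r in xrange(len(parentrevs) - 1, stoprev - 1, -1):
            if r not in seen:
                heads.append(r)
            for p in parentrevs[r]:
                seen[p] = 1
        return heads

    # linear history 0-1-2 plus a branch 3 off revision 1:
    # revisions 2 and 3 have no children, so both are heads
    parents = [(-1, -1), (0, -1), (1, -1), (1, -1)]
    assert limitheads(parents, 0) == [3, 2]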
--- a/mercurial/revlog.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/revlog.py Fri Feb 08 11:55:17 2008 +0100 @@ -31,8 +31,13 @@ class RevlogError(Exception): pass + class LookupError(RevlogError): - pass + def __init__(self, name, message=None): + if message is None: + message = _('not found: %s') % name + RevlogError.__init__(self, message) + self.name = name def getoffset(q): return int(q >> 16) @@ -107,8 +112,6 @@ # lazyparser is not safe to use on windows if win32 extensions not # available. it keeps file handle open, which make it not possible # to break hardlinks on local cloned repos. - safe_to_use = os.name != 'nt' or (not util.is_win_9x() and - hasattr(util, 'win32api')) def __init__(self, dataf, size): self.dataf = dataf @@ -321,7 +324,7 @@ e = _unpack(indexformatv0, cur) # transform to revlogv1 format e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3], - nodemap[e[4]], nodemap[e[5]], e[6]) + nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6]) index.append(e2) nodemap[e[6]] = n n += 1 @@ -357,7 +360,7 @@ except AttributeError: size = 0 - if lazyparser.safe_to_use and not inline and size > 1000000: + if util.openhardlinks() and not inline and size > 1000000: # big index, let's parse it on demand parser = lazyparser(fp, size) index = lazyindex(parser) @@ -516,7 +519,7 @@ try: return self.nodemap[node] except KeyError: - raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node))) + raise LookupError(hex(node), _('%s: no node %s') % (self.indexfile, hex(node))) def node(self, rev): return self.index[rev][7] def linkrev(self, node): @@ -836,7 +839,8 @@ for n in self.nodemap: if n.startswith(bin_id) and hex(n).startswith(id): if node is not None: - raise LookupError(_("Ambiguous identifier")) + raise LookupError(hex(node), + _("Ambiguous identifier")) node = n if node is not None: return node @@ -855,7 +859,7 @@ if n: return n - raise LookupError(_("No match found")) + raise LookupError(id, _("No match found")) def cmp(self, node, text): """compare text with a given file revision""" @@ -1166,13 +1170,13 @@ for p in (p1, p2): if not p in self.nodemap: - raise LookupError(_("unknown parent %s") % short(p)) + raise LookupError(hex(p), _("unknown parent %s") % short(p)) if not chain: # retrieve the parent revision of the delta chain chain = p1 if not chain in self.nodemap: - raise LookupError(_("unknown base %s") % short(chain[:4])) + raise LookupError(hex(chain), _("unknown base %s") % short(chain[:4])) # full versions are inserted when the needed deltas become # comparable to the uncompressed text or when the previous @@ -1233,21 +1237,31 @@ return node - def strip(self, rev, minlink): - if self.count() == 0 or rev >= self.count(): + def strip(self, minlink): + """truncate the revlog on the first revision with a linkrev >= minlink + + This function is called when we're stripping revision minlink and + its descendants from the repository. + + We have to remove all revisions with linkrev >= minlink, because + the equivalent changelog revisions will be renumbered after the + strip. + + So we truncate the revlog on the first of these revisions, and + trust that the caller has saved the revisions that shouldn't be + removed and that it'll readd them after this truncation. + """ + if self.count() == 0: return if isinstance(self.index, lazyindex): self._loadindexmap() - # When stripping away a revision, we need to make sure it - # does not actually belong to an older changeset. - # The minlink parameter defines the oldest revision - # we're allowed to strip away. 
- while minlink > self.index[rev][4]: - rev += 1 - if rev >= self.count(): - return + for rev in xrange(0, self.count()): + if self.index[rev][4] >= minlink: + break + else: + return # first truncate the files on disk end = self.start(rev)
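[Editor's note] The reworked LookupError keeps the identifier that failed to resolve in a .name attribute, so callers no longer have to parse it out of the message. A minimal sketch of the pattern (the class below is a simplified stand-in for revlog.LookupError and the node value is made up):

    class LookupError(Exception):
        def __init__(self, name, message=None):
            if message is None:
                message = 'not found: %s' % name
            Exception.__init__(self, message)
            self.name = name

    try:
        raise LookupError('1234567890ab', '00changelog.i: no node 1234567890ab')
    except LookupError, err:
        # err.name is '1234567890ab', str(err) is the full message
        print err.name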
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/simplemerge.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,456 @@ +#!/usr/bin/env python +# Copyright (C) 2004, 2005 Canonical Ltd +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +# mbp: "you know that thing where cvs gives you conflict markers?" +# s: "i hate that." + +from i18n import _ +import util, mdiff, fancyopts, sys, os + +class CantReprocessAndShowBase(Exception): + pass + +def warn(message): + sys.stdout.flush() + sys.stderr.write(message) + sys.stderr.flush() + +def intersect(ra, rb): + """Given two ranges return the range where they intersect or None. + + >>> intersect((0, 10), (0, 6)) + (0, 6) + >>> intersect((0, 10), (5, 15)) + (5, 10) + >>> intersect((0, 10), (10, 15)) + >>> intersect((0, 9), (10, 15)) + >>> intersect((0, 9), (7, 15)) + (7, 9) + """ + assert ra[0] <= ra[1] + assert rb[0] <= rb[1] + + sa = max(ra[0], rb[0]) + sb = min(ra[1], rb[1]) + if sa < sb: + return sa, sb + else: + return None + +def compare_range(a, astart, aend, b, bstart, bend): + """Compare a[astart:aend] == b[bstart:bend], without slicing. + """ + if (aend-astart) != (bend-bstart): + return False + for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)): + if a[ia] != b[ib]: + return False + else: + return True + +class Merge3Text(object): + """3-way merge of texts. + + Given strings BASE, OTHER, THIS, tries to produce a combined text + incorporating the changes from both BASE->OTHER and BASE->THIS.""" + def __init__(self, basetext, atext, btext, base=None, a=None, b=None): + self.basetext = basetext + self.atext = atext + self.btext = btext + if base is None: + base = mdiff.splitnewlines(basetext) + if a is None: + a = mdiff.splitnewlines(atext) + if b is None: + b = mdiff.splitnewlines(btext) + self.base = base + self.a = a + self.b = b + + def merge_lines(self, + name_a=None, + name_b=None, + name_base=None, + start_marker='<<<<<<<', + mid_marker='=======', + end_marker='>>>>>>>', + base_marker=None, + reprocess=False): + """Return merge in cvs-like form. 
+ """ + self.conflicts = False + newline = '\n' + if len(self.a) > 0: + if self.a[0].endswith('\r\n'): + newline = '\r\n' + elif self.a[0].endswith('\r'): + newline = '\r' + if base_marker and reprocess: + raise CantReprocessAndShowBase() + if name_a: + start_marker = start_marker + ' ' + name_a + if name_b: + end_marker = end_marker + ' ' + name_b + if name_base and base_marker: + base_marker = base_marker + ' ' + name_base + merge_regions = self.merge_regions() + if reprocess is True: + merge_regions = self.reprocess_merge_regions(merge_regions) + for t in merge_regions: + what = t[0] + if what == 'unchanged': + for i in range(t[1], t[2]): + yield self.base[i] + elif what == 'a' or what == 'same': + for i in range(t[1], t[2]): + yield self.a[i] + elif what == 'b': + for i in range(t[1], t[2]): + yield self.b[i] + elif what == 'conflict': + self.conflicts = True + yield start_marker + newline + for i in range(t[3], t[4]): + yield self.a[i] + if base_marker is not None: + yield base_marker + newline + for i in range(t[1], t[2]): + yield self.base[i] + yield mid_marker + newline + for i in range(t[5], t[6]): + yield self.b[i] + yield end_marker + newline + else: + raise ValueError(what) + + def merge_annotated(self): + """Return merge with conflicts, showing origin of lines. + + Most useful for debugging merge. + """ + for t in self.merge_regions(): + what = t[0] + if what == 'unchanged': + for i in range(t[1], t[2]): + yield 'u | ' + self.base[i] + elif what == 'a' or what == 'same': + for i in range(t[1], t[2]): + yield what[0] + ' | ' + self.a[i] + elif what == 'b': + for i in range(t[1], t[2]): + yield 'b | ' + self.b[i] + elif what == 'conflict': + yield '<<<<\n' + for i in range(t[3], t[4]): + yield 'A | ' + self.a[i] + yield '----\n' + for i in range(t[5], t[6]): + yield 'B | ' + self.b[i] + yield '>>>>\n' + else: + raise ValueError(what) + + def merge_groups(self): + """Yield sequence of line groups. Each one is a tuple: + + 'unchanged', lines + Lines unchanged from base + + 'a', lines + Lines taken from a + + 'same', lines + Lines taken from a (and equal to b) + + 'b', lines + Lines taken from b + + 'conflict', base_lines, a_lines, b_lines + Lines from base were changed to either a or b and conflict. + """ + for t in self.merge_regions(): + what = t[0] + if what == 'unchanged': + yield what, self.base[t[1]:t[2]] + elif what == 'a' or what == 'same': + yield what, self.a[t[1]:t[2]] + elif what == 'b': + yield what, self.b[t[1]:t[2]] + elif what == 'conflict': + yield (what, + self.base[t[1]:t[2]], + self.a[t[3]:t[4]], + self.b[t[5]:t[6]]) + else: + raise ValueError(what) + + def merge_regions(self): + """Return sequences of matching and conflicting regions. + + This returns tuples, where the first value says what kind we + have: + + 'unchanged', start, end + Take a region of base[start:end] + + 'same', astart, aend + b and a are different from base but give the same result + + 'a', start, end + Non-clashing insertion from a[start:end] + + Method is as follows: + + The two sequences align only on regions which match the base + and both descendents. These are found by doing a two-way diff + of each one against the base, and then finding the + intersections between those regions. These "sync regions" + are by definition unchanged in both and easily dealt with. + + The regions in between can be in any of three cases: + conflicted, or changed on only one side. 
+ """ + + # section a[0:ia] has been disposed of, etc + iz = ia = ib = 0 + + for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions(): + #print 'match base [%d:%d]' % (zmatch, zend) + + matchlen = zend - zmatch + assert matchlen >= 0 + assert matchlen == (aend - amatch) + assert matchlen == (bend - bmatch) + + len_a = amatch - ia + len_b = bmatch - ib + len_base = zmatch - iz + assert len_a >= 0 + assert len_b >= 0 + assert len_base >= 0 + + #print 'unmatched a=%d, b=%d' % (len_a, len_b) + + if len_a or len_b: + # try to avoid actually slicing the lists + equal_a = compare_range(self.a, ia, amatch, + self.base, iz, zmatch) + equal_b = compare_range(self.b, ib, bmatch, + self.base, iz, zmatch) + same = compare_range(self.a, ia, amatch, + self.b, ib, bmatch) + + if same: + yield 'same', ia, amatch + elif equal_a and not equal_b: + yield 'b', ib, bmatch + elif equal_b and not equal_a: + yield 'a', ia, amatch + elif not equal_a and not equal_b: + yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch + else: + raise AssertionError("can't handle a=b=base but unmatched") + + ia = amatch + ib = bmatch + iz = zmatch + + # if the same part of the base was deleted on both sides + # that's OK, we can just skip it. + + + if matchlen > 0: + assert ia == amatch + assert ib == bmatch + assert iz == zmatch + + yield 'unchanged', zmatch, zend + iz = zend + ia = aend + ib = bend + + def reprocess_merge_regions(self, merge_regions): + """Where there are conflict regions, remove the agreed lines. + + Lines where both A and B have made the same changes are + eliminated. + """ + for region in merge_regions: + if region[0] != "conflict": + yield region + continue + type, iz, zmatch, ia, amatch, ib, bmatch = region + a_region = self.a[ia:amatch] + b_region = self.b[ib:bmatch] + matches = mdiff.get_matching_blocks(''.join(a_region), + ''.join(b_region)) + next_a = ia + next_b = ib + for region_ia, region_ib, region_len in matches[:-1]: + region_ia += ia + region_ib += ib + reg = self.mismatch_region(next_a, region_ia, next_b, + region_ib) + if reg is not None: + yield reg + yield 'same', region_ia, region_len+region_ia + next_a = region_ia + region_len + next_b = region_ib + region_len + reg = self.mismatch_region(next_a, amatch, next_b, bmatch) + if reg is not None: + yield reg + + def mismatch_region(next_a, region_ia, next_b, region_ib): + if next_a < region_ia or next_b < region_ib: + return 'conflict', None, None, next_a, region_ia, next_b, region_ib + mismatch_region = staticmethod(mismatch_region) + + def find_sync_regions(self): + """Return a list of sync regions, where both descendents match the base. + + Generates a list of (base1, base2, a1, a2, b1, b2). There is + always a zero-length sync region at the end of all the files. + """ + + ia = ib = 0 + amatches = mdiff.get_matching_blocks(self.basetext, self.atext) + bmatches = mdiff.get_matching_blocks(self.basetext, self.btext) + len_a = len(amatches) + len_b = len(bmatches) + + sl = [] + + while ia < len_a and ib < len_b: + abase, amatch, alen = amatches[ia] + bbase, bmatch, blen = bmatches[ib] + + # there is an unconflicted block at i; how long does it + # extend? until whichever one ends earlier. 
+ i = intersect((abase, abase+alen), (bbase, bbase+blen)) + if i: + intbase = i[0] + intend = i[1] + intlen = intend - intbase + + # found a match of base[i[0], i[1]]; this may be less than + # the region that matches in either one + assert intlen <= alen + assert intlen <= blen + assert abase <= intbase + assert bbase <= intbase + + asub = amatch + (intbase - abase) + bsub = bmatch + (intbase - bbase) + aend = asub + intlen + bend = bsub + intlen + + assert self.base[intbase:intend] == self.a[asub:aend], \ + (self.base[intbase:intend], self.a[asub:aend]) + + assert self.base[intbase:intend] == self.b[bsub:bend] + + sl.append((intbase, intend, + asub, aend, + bsub, bend)) + + # advance whichever one ends first in the base text + if (abase + alen) < (bbase + blen): + ia += 1 + else: + ib += 1 + + intbase = len(self.base) + abase = len(self.a) + bbase = len(self.b) + sl.append((intbase, intbase, abase, abase, bbase, bbase)) + + return sl + + def find_unconflicted(self): + """Return a list of ranges in base that are not conflicted.""" + am = mdiff.get_matching_blocks(self.basetext, self.atext) + bm = mdiff.get_matching_blocks(self.basetext, self.btext) + + unc = [] + + while am and bm: + # there is an unconflicted block at i; how long does it + # extend? until whichever one ends earlier. + a1 = am[0][0] + a2 = a1 + am[0][2] + b1 = bm[0][0] + b2 = b1 + bm[0][2] + i = intersect((a1, a2), (b1, b2)) + if i: + unc.append(i) + + if a2 < b2: + del am[0] + else: + del bm[0] + + return unc + +def simplemerge(local, base, other, **opts): + def readfile(filename): + f = open(filename, "rb") + text = f.read() + f.close() + if util.binary(text): + msg = _("%s looks like a binary file.") % filename + if not opts.get('text'): + raise util.Abort(msg) + elif not opts.get('quiet'): + warn(_('warning: %s\n') % msg) + return text + + name_a = local + name_b = other + labels = opts.get('label', []) + if labels: + name_a = labels.pop(0) + if labels: + name_b = labels.pop(0) + if labels: + raise util.Abort(_("can only specify two labels.")) + + localtext = readfile(local) + basetext = readfile(base) + othertext = readfile(other) + + orig = local + local = os.path.realpath(local) + if not opts.get('print'): + opener = util.opener(os.path.dirname(local)) + out = opener(os.path.basename(local), "w", atomictemp=True) + else: + out = sys.stdout + + reprocess = not opts.get('no_minimal') + + m3 = Merge3Text(basetext, localtext, othertext) + for line in m3.merge_lines(name_a=name_a, name_b=name_b, + reprocess=reprocess): + out.write(line) + + if not opts.get('print'): + out.rename() + + if m3.conflicts: + if not opts.get('quiet'): + warn(_("warning: conflicts during merge.\n")) + return 1
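[Editor's note] Merge3Text needs only the three texts; merge_lines() then yields the merged lines with cvs-style conflict markers and records whether any conflicts were hit. A small usage sketch with made-up inputs, assuming the mercurial package and its C extensions are importable:

    from mercurial.simplemerge import Merge3Text

    base  = "one\ntwo\nthree\nfour\nfive\n"
    local = "one\nTWO\nthree\nfour\nfive\n"
    other = "one\ntwo\nthree\nFOUR\nfive\n"

    m3 = Merge3Text(base, local, other)
    merged = "".join(m3.merge_lines(name_a='local', name_b='other'))
    # the two sides changed different, non-adjacent lines, so no markers
    assert not m3.conflicts
    assert merged == "one\nTWO\nthree\nFOUR\nfive\n"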
--- a/mercurial/sshrepo.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/sshrepo.py Fri Feb 08 11:55:17 2008 +0100 @@ -24,12 +24,11 @@ self.port = m.group(5) self.path = m.group(7) or "." - args = self.user and ("%s@%s" % (self.user, self.host)) or self.host - args = self.port and ("%s -p %s") % (args, self.port) or args - sshcmd = self.ui.config("ui", "ssh", "ssh") remotecmd = self.ui.config("ui", "remotecmd", "hg") + args = util.sshargs(sshcmd, self.host, self.user, self.port) + if create: cmd = '%s %s "%s init %s"' cmd = cmd % (sshcmd, args, remotecmd, self.path) @@ -203,7 +202,7 @@ r = self._recv() if r: # remote may send "unsynced changes" - self.raise_(hg.RepoError(_("push failed: %s") % r)) + self.raise_(repo.RepoError(_("push failed: %s") % r)) r = self._recv() try:
--- a/mercurial/sshserver.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/sshserver.py Fri Feb 08 11:55:17 2008 +0100 @@ -8,7 +8,7 @@ from i18n import _ from node import * -import os, streamclone, sys, tempfile, util +import os, streamclone, sys, tempfile, util, hook class sshserver(object): def __init__(self, ui, repo): @@ -18,6 +18,7 @@ self.fin = sys.stdin self.fout = sys.stdout + hook.redirect(True) sys.stdout = sys.stderr # Prevent insertion/deletion of CRs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/templatefilters.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,155 @@ +# template-filters.py - common template expansion filters +# +# Copyright 2005-2008 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. + +import cgi, re, os, time, urllib, textwrap +import util, templater + +agescales = [("second", 1), + ("minute", 60), + ("hour", 3600), + ("day", 3600 * 24), + ("week", 3600 * 24 * 7), + ("month", 3600 * 24 * 30), + ("year", 3600 * 24 * 365)] + +agescales.reverse() + +def age(date): + '''turn a (timestamp, tzoff) tuple into an age string.''' + + def plural(t, c): + if c == 1: + return t + return t + "s" + def fmt(t, c): + return "%d %s" % (c, plural(t, c)) + + now = time.time() + then = date[0] + delta = max(1, int(now - then)) + + for t, s in agescales: + n = delta / s + if n >= 2 or s == 1: + return fmt(t, n) + +para_re = None +space_re = None + +def fill(text, width): + '''fill many paragraphs.''' + global para_re, space_re + if para_re is None: + para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M) + space_re = re.compile(r' +') + + def findparas(): + start = 0 + while True: + m = para_re.search(text, start) + if not m: + w = len(text) + while w > start and text[w-1].isspace(): w -= 1 + yield text[start:w], text[w:] + break + yield text[start:m.start(0)], m.group(1) + start = m.end(1) + + return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest + for para, rest in findparas()]) + +def firstline(text): + '''return the first line of text''' + try: + return text.splitlines(1)[0].rstrip('\r\n') + except IndexError: + return '' + +def isodate(date): + '''turn a (timestamp, tzoff) tuple into an iso 8631 date and time.''' + return util.datestr(date, format='%Y-%m-%d %H:%M') + +def hgdate(date): + '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.''' + return "%d %d" % date + +def nl2br(text): + '''replace raw newlines with xhtml line breaks.''' + return text.replace('\n', '<br/>\n') + +def obfuscate(text): + text = unicode(text, util._encoding, 'replace') + return ''.join(['&#%d;' % ord(c) for c in text]) + +def domain(author): + '''get domain of author, or empty string if none.''' + f = author.find('@') + if f == -1: return '' + author = author[f+1:] + f = author.find('>') + if f >= 0: author = author[:f] + return author + +def person(author): + '''get name of author, or else username.''' + f = author.find('<') + if f == -1: return util.shortuser(author) + return author[:f].rstrip() + +def shortdate(date): + '''turn (timestamp, tzoff) tuple into iso 8631 date.''' + return util.datestr(date, format='%Y-%m-%d', timezone=False) + +def indent(text, prefix): + '''indent each non-empty line of text after first with prefix.''' + lines = text.splitlines() + num_lines = len(lines) + def indenter(): + for i in xrange(num_lines): + l = lines[i] + if i and l.strip(): + yield prefix + yield l + if i < num_lines - 1 or text.endswith('\n'): + yield '\n' + return "".join(indenter()) + +def permissions(flags): + if "l" in flags: + return "lrwxrwxrwx" + if "x" in flags: + return "-rwxr-xr-x" + return "-rw-r--r--" + +filters = { + "addbreaks": nl2br, + "basename": os.path.basename, + "age": age, + "date": lambda x: util.datestr(x), + "domain": domain, + "email": util.email, + "escape": lambda x: cgi.escape(x, True), + "fill68": lambda x: fill(x, width=68), + "fill76": lambda x: fill(x, width=76), + 
"firstline": firstline, + "tabindent": lambda x: indent(x, '\t'), + "hgdate": hgdate, + "isodate": isodate, + "obfuscate": obfuscate, + "permissions": permissions, + "person": person, + "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"), + "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S", True, "%+03d:%02d"), + "short": lambda x: x[:12], + "shortdate": shortdate, + "stringify": templater.stringify, + "strip": lambda x: x.strip(), + "urlescape": lambda x: urllib.quote(x), + "user": lambda x: util.shortuser(x), + "stringescape": lambda x: x.encode('string_escape'), + } +
--- a/mercurial/templater.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/templater.py Fri Feb 08 11:55:17 2008 +0100 @@ -6,8 +6,7 @@ # of the GNU General Public License, incorporated herein by reference. from i18n import _ -from node import * -import cgi, re, sys, os, time, urllib, util, textwrap +import re, sys, os def parsestring(s, quoted=True): '''parse a string using simple c-like syntax. @@ -82,7 +81,7 @@ '''perform expansion. t is name of map element to expand. map is added elements to use during expansion.''' - if not self.cache.has_key(t): + if not t in self.cache: try: self.cache[t] = file(self.map[t]).read() except IOError, inst: @@ -123,163 +122,6 @@ v = self.filters[f](v) yield v -agescales = [("second", 1), - ("minute", 60), - ("hour", 3600), - ("day", 3600 * 24), - ("week", 3600 * 24 * 7), - ("month", 3600 * 24 * 30), - ("year", 3600 * 24 * 365)] - -agescales.reverse() - -def age(date): - '''turn a (timestamp, tzoff) tuple into an age string.''' - - def plural(t, c): - if c == 1: - return t - return t + "s" - def fmt(t, c): - return "%d %s" % (c, plural(t, c)) - - now = time.time() - then = date[0] - delta = max(1, int(now - then)) - - for t, s in agescales: - n = delta / s - if n >= 2 or s == 1: - return fmt(t, n) - -def stringify(thing): - '''turn nested template iterator into string.''' - if hasattr(thing, '__iter__'): - return "".join([stringify(t) for t in thing if t is not None]) - return str(thing) - -para_re = None -space_re = None - -def fill(text, width): - '''fill many paragraphs.''' - global para_re, space_re - if para_re is None: - para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M) - space_re = re.compile(r' +') - - def findparas(): - start = 0 - while True: - m = para_re.search(text, start) - if not m: - w = len(text) - while w > start and text[w-1].isspace(): w -= 1 - yield text[start:w], text[w:] - break - yield text[start:m.start(0)], m.group(1) - start = m.end(1) - - return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest - for para, rest in findparas()]) - -def firstline(text): - '''return the first line of text''' - try: - return text.splitlines(1)[0].rstrip('\r\n') - except IndexError: - return '' - -def isodate(date): - '''turn a (timestamp, tzoff) tuple into an iso 8631 date and time.''' - return util.datestr(date, format='%Y-%m-%d %H:%M') - -def hgdate(date): - '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.''' - return "%d %d" % date - -def nl2br(text): - '''replace raw newlines with xhtml line breaks.''' - return text.replace('\n', '<br/>\n') - -def obfuscate(text): - text = unicode(text, util._encoding, 'replace') - return ''.join(['&#%d;' % ord(c) for c in text]) - -def domain(author): - '''get domain of author, or empty string if none.''' - f = author.find('@') - if f == -1: return '' - author = author[f+1:] - f = author.find('>') - if f >= 0: author = author[:f] - return author - -def email(author): - '''get email of author.''' - r = author.find('>') - if r == -1: r = None - return author[author.find('<')+1:r] - -def person(author): - '''get name of author, or else username.''' - f = author.find('<') - if f == -1: return util.shortuser(author) - return author[:f].rstrip() - -def shortdate(date): - '''turn (timestamp, tzoff) tuple into iso 8631 date.''' - return util.datestr(date, format='%Y-%m-%d', timezone=False) - -def indent(text, prefix): - '''indent each non-empty line of text after first with prefix.''' - lines = text.splitlines() - num_lines = len(lines) - def indenter(): - for i in xrange(num_lines): - l = 
lines[i] - if i and l.strip(): - yield prefix - yield l - if i < num_lines - 1 or text.endswith('\n'): - yield '\n' - return "".join(indenter()) - -def permissions(flags): - if "l" in flags: - return "lrwxrwxrwx" - if "x" in flags: - return "-rwxr-xr-x" - return "-rw-r--r--" - -common_filters = { - "addbreaks": nl2br, - "basename": os.path.basename, - "age": age, - "date": lambda x: util.datestr(x), - "domain": domain, - "email": email, - "escape": lambda x: cgi.escape(x, True), - "fill68": lambda x: fill(x, width=68), - "fill76": lambda x: fill(x, width=76), - "firstline": firstline, - "tabindent": lambda x: indent(x, '\t'), - "hgdate": hgdate, - "isodate": isodate, - "obfuscate": obfuscate, - "permissions": permissions, - "person": person, - "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"), - "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S", True, "%+03d:%02d"), - "short": lambda x: x[:12], - "shortdate": shortdate, - "stringify": stringify, - "strip": lambda x: x.strip(), - "urlescape": lambda x: urllib.quote(x), - "user": lambda x: util.shortuser(x), - "stringescape": lambda x: x.encode('string_escape'), - } - def templatepath(name=None): '''return location of template file or directory (if no name). returns None if not found.''' @@ -296,3 +138,9 @@ if (name and os.path.exists(p)) or os.path.isdir(p): return os.path.normpath(p) +def stringify(thing): + '''turn nested template iterator into string.''' + if hasattr(thing, '__iter__'): + return "".join([stringify(t) for t in thing if t is not None]) + return str(thing) +
--- a/mercurial/transaction.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/transaction.py Fri Feb 08 11:55:17 2008 +0100 @@ -18,10 +18,6 @@ def __init__(self, report, opener, journal, after=None): self.journal = None - # abort here if the journal already exists - if os.path.exists(journal): - raise AssertionError(_("journal already exists - run hg recover")) - self.count = 1 self.report = report self.opener = opener
--- a/mercurial/ui.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/ui.py Fri Feb 08 11:55:17 2008 +0100 @@ -204,7 +204,8 @@ pathsitems = items for n, path in pathsitems: if path and "://" not in path and not os.path.isabs(path): - cdata.set("paths", n, os.path.join(root, path)) + cdata.set("paths", n, + os.path.normpath(os.path.join(root, path))) # update verbosity/interactive/report_untrusted settings if section is None or section == 'ui': @@ -403,14 +404,25 @@ readline.read_history_file except ImportError: pass - return raw_input(prompt) + line = raw_input(prompt) + # When stdin is in binary mode on Windows, it can cause + # raw_input() to emit an extra trailing carriage return + if os.linesep == '\r\n' and line and line[-1] == '\r': + line = line[:-1] + return line - def prompt(self, msg, pat=None, default="y", matchflags=0): + def prompt(self, msg, pat=None, default="y"): + """Prompt user with msg, read response, and ensure it matches pat + + If not interactive -- the default is returned + """ if not self.interactive: return default while True: try: r = self._readline(msg + ' ') - if not pat or re.match(pat, r, matchflags): + if not r: + return default + if not pat or re.match(pat, r): return r else: self.write(_("unrecognized response\n")) @@ -436,9 +448,7 @@ f.write(text) f.close() - editor = (os.environ.get("HGEDITOR") or - self.config("ui", "editor") or - os.environ.get("EDITOR", "vi")) + editor = self.geteditor() util.system("%s \"%s\"" % (editor, name), environ={'HGUSER': user}, @@ -460,3 +470,11 @@ if self.traceback: traceback.print_exc() return self.traceback + + def geteditor(self): + '''return editor to use''' + return (os.environ.get("HGEDITOR") or + self.config("ui", "editor") or + os.environ.get("VISUAL") or + os.environ.get("EDITOR", "vi")) +
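[Editor's note] The new geteditor() just walks a fixed fallback chain, with VISUAL newly inserted before EDITOR. The same resolution order as a standalone sketch (configured_editor stands in for the ui.editor value read from hgrc):

    import os

    def geteditor(configured_editor=None):
        # HGEDITOR, then [ui] editor from hgrc, then VISUAL, then EDITOR, then vi
        return (os.environ.get("HGEDITOR") or
                configured_editor or
                os.environ.get("VISUAL") or
                os.environ.get("EDITOR", "vi"))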
--- a/mercurial/util.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/util.py Fri Feb 08 11:55:17 2008 +0100 @@ -81,18 +81,6 @@ """Find the length in characters of a local string""" return len(s.decode(_encoding, "replace")) -def localsub(s, a, b=None): - try: - u = s.decode(_encoding, _encodingmode) - if b is not None: - u = u[a:b] - else: - u = u[:a] - return u.encode(_encoding, _encodingmode) - except UnicodeDecodeError, inst: - sub = s[max(0, inst.start-10), inst.start+10] - raise Abort(_("decoding near '%s': %s!") % (sub, inst)) - # used by parsedate defaultdateformats = ( '%Y-%m-%d %H:%M:%S', @@ -274,7 +262,7 @@ "convert a glob pattern into a regexp" i, n = 0, len(pat) res = '' - group = False + group = 0 def peek(): return i < n and pat[i] while i < n: c = pat[i] @@ -304,11 +292,11 @@ stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) elif c == '{': - group = True + group += 1 res += '(?:' elif c == '}' and group: res += ')' - group = False + group -= 1 elif c == ',' and group: res += '|' elif c == '\\': @@ -340,7 +328,7 @@ if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: return os.path.join(root, localpath(n2)) n2 = '/'.join((pconvert(root), n2)) - a, b = n1.split(os.sep), n2.split('/') + a, b = splitpath(n1), n2.split('/') a.reverse() b.reverse() while a and b and a[-1] == b[-1]: @@ -353,7 +341,7 @@ """return the canonical path of myname, given cwd and root""" if root == os.sep: rootsep = os.sep - elif root.endswith(os.sep): + elif endswithsep(root): rootsep = root else: rootsep = root + os.sep @@ -704,7 +692,7 @@ if path in self.audited: return normpath = os.path.normcase(path) - parts = normpath.split(os.sep) + parts = splitpath(normpath) if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '') or os.pardir in parts): raise Abort(_("path contains illegal component: %s") % path) @@ -725,14 +713,15 @@ os.path.isdir(os.path.join(curpath, '.hg'))): raise Abort(_('path %r is inside repo %r') % (path, prefix)) - + parts.pop() prefixes = [] - for c in strutil.rfindall(normpath, os.sep): - prefix = normpath[:c] + for n in range(len(parts)): + prefix = os.sep.join(parts) if prefix in self.auditeddir: break check(prefix) prefixes.append(prefix) + parts.pop() self.audited.add(path) # only add prefixes to the cache after checking everything: we don't @@ -766,12 +755,9 @@ posixfile = file -def is_win_9x(): - '''return true if run on windows 95, 98 or me.''' - try: - return sys.getwindowsversion()[3] == 1 - except AttributeError: - return os.name == 'nt' and 'command' in os.environ.get('comspec', '') +def openhardlinks(): + '''return true if it is safe to hold open file handles to hardlinks''' + return True getuser_fallback = None @@ -845,18 +831,23 @@ Requires a directory (like /foo/.hg) """ + + # VFAT on some Linux versions can flip mode but it doesn't persist + # a FS remount. Frequently we can detect it if files are created + # with exec bit on. + try: EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH fh, fn = tempfile.mkstemp("", "", path) - os.close(fh) - m = os.stat(fn).st_mode - # VFAT on Linux can flip mode but it doesn't persist a FS remount. - # frequently we can detect it if files are created with exec bit on. 
- new_file_has_exec = m & EXECFLAGS - os.chmod(fn, m ^ EXECFLAGS) - exec_flags_cannot_flip = (os.stat(fn).st_mode == m) - os.unlink(fn) - except (IOError,OSError): + try: + os.close(fh) + m = os.stat(fn).st_mode & 0777 + new_file_has_exec = m & EXECFLAGS + os.chmod(fn, m ^ EXECFLAGS) + exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m) + finally: + os.unlink(fn) + except (IOError, OSError): # we don't care, the user probably won't be able to commit anyway return False return not (new_file_has_exec or exec_flags_cannot_flip) @@ -892,6 +883,22 @@ """return True if patches should be applied in binary mode by default.""" return os.name == 'nt' +def endswithsep(path): + '''Check path ends with os.sep or os.altsep.''' + return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep) + +def splitpath(path): + '''Split path by os.sep. + Note that this function does not use os.altsep because this is + an alternative of simple "xxx.split(os.sep)". + It is recommended to use os.path.normpath() before using this + function if need.''' + return path.split(os.sep) + +def gui(): + '''Are we running in a GUI?''' + return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY") + # Platform specific variants if os.name == 'nt': import msvcrt @@ -913,7 +920,15 @@ def write(self, s): try: - return self.fp.write(s) + # This is workaround for "Not enough space" error on + # writing large size of data to console. + limit = 16000 + l = len(s) + start = 0 + while start < l: + end = start + limit + self.fp.write(s[start:end]) + start = end except IOError, inst: if inst.errno != 0: raise self.close() @@ -929,6 +944,16 @@ sys.stdout = winstdout(sys.stdout) + def _is_win_9x(): + '''return true if run on windows 95, 98 or me.''' + try: + return sys.getwindowsversion()[3] == 1 + except AttributeError: + return 'command' in os.environ.get('comspec', '') + + def openhardlinks(): + return not _is_win_9x and "win32api" in locals() + def system_rcpath(): try: return system_rcpath_win32() @@ -954,21 +979,24 @@ pf = pf[1:-1] # Remove the quotes return pf + def sshargs(sshcmd, host, user, port): + '''Build argument list for ssh or Plink''' + pflag = 'plink' in sshcmd.lower() and '-P' or '-p' + args = user and ("%s@%s" % (user, host)) or host + return port and ("%s %s %s" % (args, pflag, port)) or args + def testpid(pid): '''return False if pid dead, True if running or not known''' return True - def set_exec(f, mode): - pass - - def set_link(f, mode): + def set_flags(f, flags): pass def set_binary(fd): msvcrt.setmode(fd.fileno(), os.O_BINARY) def pconvert(path): - return path.replace("\\", "/") + return '/'.join(splitpath(path)) def localpath(path): return path.replace('/', '\\') @@ -1054,7 +1082,7 @@ try: # override functions with win32 versions if possible from util_win32 import * - if not is_win_9x(): + if not _is_win_9x(): posixfile = posixfile_nt except ImportError: pass @@ -1062,6 +1090,9 @@ else: nulldev = '/dev/null' + def lookup_reg(key, name=None, scope=None): + return None + def rcfiles(path): rcs = [os.path.join(path, 'hgrc')] rcdir = os.path.join(path, 'hgrc.d') @@ -1096,51 +1127,42 @@ pf = pf[1:-1] # Remove the quotes return pf + def sshargs(sshcmd, host, user, port): + '''Build argument list for ssh''' + args = user and ("%s@%s" % (user, host)) or host + return port and ("%s -p %s" % (args, port)) or args + def is_exec(f): """check whether a file is executable""" return (os.lstat(f).st_mode & 0100 != 0) - def force_chmod(f, s): - try: - os.chmod(f, s) - except OSError, inst: - if 
inst.errno != errno.EPERM: - raise - # maybe we don't own the file, try copying it - new_f = mktempcopy(f) - os.chmod(new_f, s) - os.rename(new_f, f) - - def set_exec(f, mode): + def set_flags(f, flags): s = os.lstat(f).st_mode - if stat.S_ISLNK(s) or (s & 0100 != 0) == mode: + x = "x" in flags + l = "l" in flags + if l: + if not stat.S_ISLNK(s): + # switch file to link + data = file(f).read() + os.unlink(f) + os.symlink(data, f) + # no chmod needed at this point return - if mode: - # Turn on +x for every +r bit when making a file executable - # and obey umask. - force_chmod(f, s | (s & 0444) >> 2 & ~_umask) - else: - force_chmod(f, s & 0666) - - def set_link(f, mode): - """make a file a symbolic link/regular file - - if a file is changed to a link, its contents become the link data - if a link is changed to a file, its link data become its contents - """ - - m = os.path.islink(f) - if m == bool(mode): - return - - if mode: # switch file to link - data = file(f).read() - os.unlink(f) - os.symlink(data, f) - else: + if stat.S_ISLNK(s): + # switch link to file data = os.readlink(f) os.unlink(f) file(f, "w").write(data) + s = 0666 & ~_umask # avoid restatting for chmod + + sx = s & 0100 + if x and not sx: + # Turn on +x for every +r bit when making a file executable + # and obey umask. + os.chmod(f, s | (s & 0444) >> 2 & ~_umask) + elif not x and sx: + # Turn off all +x bits + os.chmod(f, s & 0666) def set_binary(fd): pass @@ -1293,7 +1315,7 @@ # what we want. If the original file already exists, just copy # its mode. Otherwise, manually obey umask. try: - st_mode = os.lstat(name).st_mode + st_mode = os.lstat(name).st_mode & 0777 except OSError, inst: if inst.errno != errno.ENOENT: raise @@ -1621,6 +1643,12 @@ user = user[:f] return user +def email(author): + '''get email of author.''' + r = author.find('>') + if r == -1: r = None + return author[author.find('<')+1:r] + def ellipsis(text, maxlength=400): """Trim string to at most maxlength (default: 400) characters.""" if len(text) <= maxlength: @@ -1706,31 +1734,13 @@ return repr(s).replace('\\\\', '\\') def hidepassword(url): - '''replaces the password in the url string by three asterisks (***) - - >>> hidepassword('http://www.example.com/some/path#fragment') - 'http://www.example.com/some/path#fragment' - >>> hidepassword('http://me@www.example.com/some/path#fragment') - 'http://me@www.example.com/some/path#fragment' - >>> hidepassword('http://me:simplepw@www.example.com/path#frag') - 'http://me:***@www.example.com/path#frag' - >>> hidepassword('http://me:complex:pw@www.example.com/path#frag') - 'http://me:***@www.example.com/path#frag' - >>> hidepassword('/path/to/repo') - '/path/to/repo' - >>> hidepassword('relative/path/to/repo') - 'relative/path/to/repo' - >>> hidepassword('c:\\\\path\\\\to\\\\repo') - 'c:\\\\path\\\\to\\\\repo' - >>> hidepassword('c:/path/to/repo') - 'c:/path/to/repo' - >>> hidepassword('bundle://path/to/bundle') - 'bundle://path/to/bundle' - ''' - url_parts = list(urlparse.urlparse(url)) - host_with_pw_pattern = re.compile('^([^:]*):([^@]*)@(.*)$') - if host_with_pw_pattern.match(url_parts[1]): - url_parts[1] = re.sub(host_with_pw_pattern, r'\1:***@\3', - url_parts[1]) - return urlparse.urlunparse(url_parts) + '''hide user credential in a url string''' + scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) + netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc) + return urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) +def removeauth(url): + '''remove all authentication 
information from a url string''' + scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) + netloc = netloc[netloc.find('@')+1:] + return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
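[Editor's note] hidepassword() and removeauth() now operate on the netloc component returned by urlparse instead of a hand-written pattern over the whole URL, and sshargs() centralises the user@host / port formatting used by sshrepo. A small sketch exercising them (URL, host and user are made up; the Windows variant of sshargs additionally switches to plink's -P flag for the port):

    from mercurial import util

    print util.hidepassword('http://me:secret@example.com/repo')
    # http://me:***@example.com/repo
    print util.removeauth('http://me:secret@example.com/repo')
    # http://example.com/repo
    print util.sshargs('ssh', 'example.com', 'me', '2222')
    # me@example.com -p 2222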
--- a/mercurial/util_win32.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/util_win32.py Fri Feb 08 11:55:17 2008 +0100 @@ -16,6 +16,7 @@ from i18n import _ import errno, os, pywintypes, win32con, win32file, win32process import cStringIO, winerror +import osutil from win32com.shell import shell,shellcon class WinError: @@ -186,6 +187,37 @@ return details[0] != winerror.ERROR_INVALID_PARAMETER return True +def lookup_reg(key, valname=None, scope=None): + ''' Look up a key/value name in the Windows registry. + + valname: value name. If unspecified, the default value for the key + is used. + scope: optionally specify scope for registry lookup, this can be + a sequence of scopes to look up in order. Default (CURRENT_USER, + LOCAL_MACHINE). + ''' + try: + from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \ + QueryValueEx, OpenKey + except ImportError: + return None + + def query_val(scope, key, valname): + try: + keyhandle = OpenKey(scope, key) + return QueryValueEx(keyhandle, valname)[0] + except EnvironmentError: + return None + + if scope is None: + scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE) + elif not isinstance(scope, (list, tuple)): + scope = (scope,) + for s in scope: + val = query_val(s, key, valname) + if val is not None: + return val + def system_rcpath_win32(): '''return default os-specific hgrc search path''' proc = win32api.GetCurrentProcess() @@ -194,7 +226,25 @@ filename = win32process.GetModuleFileNameEx(proc, 0) except: filename = win32api.GetModuleFileName(0) - return [os.path.join(os.path.dirname(filename), 'mercurial.ini')] + # Use mercurial.ini found in directory with hg.exe + progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') + if os.path.isfile(progrc): + return [progrc] + # else look for a system rcpath in the registry + try: + value = win32api.RegQueryValue( + win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial') + rcpath = [] + for p in value.split(os.pathsep): + if p.lower().endswith('mercurial.ini'): + rcpath.append(p) + elif os.path.isdir(p): + for f, kind in osutil.listdir(p): + if f.endswith('.rc'): + rcpath.append(os.path.join(p, f)) + return rcpath + except pywintypes.error: + return [] def user_rcpath_win32(): '''return os-specific hgrc search path to the user dir''' @@ -217,6 +267,9 @@ # but does not work at all. wrap win32 file api instead. def __init__(self, name, mode='rb'): + self.closed = False + self.name = name + self.mode = mode access = 0 if 'r' in mode or '+' in mode: access |= win32file.GENERIC_READ @@ -240,9 +293,6 @@ 0) except pywintypes.error, err: raise WinIOError(err, name) - self.closed = False - self.name = name - self.mode = mode def __iter__(self): for line in self.read().splitlines(True): @@ -275,6 +325,10 @@ except pywintypes.error, err: raise WinIOError(err) + def writelines(self, sequence): + for s in sequence: + self.write(s) + def seek(self, pos, whence=0): try: win32file.SetFilePointer(self.handle, int(pos), whence)
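[Editor's note] lookup_reg() hides the _winreg plumbing behind a single call that, by default, tries HKEY_CURRENT_USER and then HKEY_LOCAL_MACHINE. A hedged usage sketch (Windows only, needs the pywin32 modules; the SOFTWARE\Mercurial key is the one system_rcpath_win32() also consults):

    from mercurial.util_win32 import lookup_reg

    # default value of the key, searched in HKCU and then HKLM;
    # returns None when the key is missing from both hives
    path = lookup_reg(r'SOFTWARE\Mercurial')

    # a value name and a single hive can also be given:
    # lookup_reg(key, valname, scope=HKEY_LOCAL_MACHINE)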
--- a/mercurial/verify.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/verify.py Fri Feb 08 11:55:17 2008 +0100 @@ -62,9 +62,14 @@ repo.ui.status(_("repository uses revlog format %d\n") % (revlogv1 and 1 or 0)) + havecl = havemf = 1 seen = {} repo.ui.status(_("checking changesets\n")) - checksize(repo.changelog, "changelog") + if repo.changelog.count() == 0 and repo.manifest.count() > 1: + havecl = 0 + err(0, _("empty or missing 00changelog.i")) + else: + checksize(repo.changelog, "changelog") for i in xrange(repo.changelog.count()): changesets += 1 @@ -96,14 +101,18 @@ seen = {} repo.ui.status(_("checking manifests\n")) - checkversion(repo.manifest, "manifest") - checksize(repo.manifest, "manifest") + if repo.changelog.count() > 0 and repo.manifest.count() == 0: + havemf = 0 + err(0, _("empty or missing 00manifest.i")) + else: + checkversion(repo.manifest, "manifest") + checksize(repo.manifest, "manifest") for i in xrange(repo.manifest.count()): n = repo.manifest.node(i) l = repo.manifest.linkrev(n) - if l < 0 or l >= repo.changelog.count(): + if l < 0 or (havecl and l >= repo.changelog.count()): err(None, _("bad link (%d) at manifest revision %d") % (l, i)) if n in neededmanifests: @@ -132,38 +141,51 @@ repo.ui.status(_("crosschecking files in changesets and manifests\n")) - nm = neededmanifests.items() - nm.sort() - for m, c in nm: - err(m, _("changeset refers to unknown manifest %s") % short(c)) - del neededmanifests, nm + if havemf > 0: + nm = [(c, m) for m, c in neededmanifests.items()] + nm.sort() + for c, m in nm: + err(c, _("changeset refers to unknown manifest %s") % short(m)) + del neededmanifests, nm - for f in filenodes: - if f not in filelinkrevs: - lrs = [repo.manifest.linkrev(n) for n in filenodes[f]] - lrs.sort() - err(lrs[0], _("in manifest but not in changeset"), f) + if havecl: + fl = filenodes.keys() + fl.sort() + for f in fl: + if f not in filelinkrevs: + lrs = [repo.manifest.linkrev(n) for n in filenodes[f]] + lrs.sort() + err(lrs[0], _("in manifest but not in changeset"), f) + del fl - for f in filelinkrevs: - if f not in filenodes: - lr = filelinkrevs[f][0] - err(lr, _("in changeset but not in manifest"), f) + if havemf: + fl = filelinkrevs.keys() + fl.sort() + for f in fl: + if f not in filenodes: + lr = filelinkrevs[f][0] + err(lr, _("in changeset but not in manifest"), f) + del fl repo.ui.status(_("checking files\n")) - ff = filenodes.keys() + ff = dict.fromkeys(filenodes.keys() + filelinkrevs.keys()).keys() ff.sort() for f in ff: if f == "/dev/null": continue files += 1 if not f: - lr = repo.manifest.linkrev(filenodes[f][0]) - err(lr, _("file without name in manifest %s") % short(ff[n])) + lr = filelinkrevs[f][0] + err(lr, _("file without name in manifest")) continue fl = repo.file(f) checkversion(fl, f) checksize(fl, f) + if fl.count() == 0: + err(filelinkrevs[f][0], _("empty or missing revlog"), f) + continue + seen = {} nodes = {nullid: 1} for i in xrange(fl.count()): @@ -171,7 +193,7 @@ n = fl.node(i) flr = fl.linkrev(n) - if flr not in filelinkrevs.get(f, []): + if flr < 0 or (havecl and flr not in filelinkrevs.get(f, [])): if flr < 0 or flr >= repo.changelog.count(): err(None, _("rev %d point to nonexistent changeset %d") % (i, flr), f) @@ -182,14 +204,16 @@ warn(_(" (expected %s)") % filelinkrevs[f][0]) flr = None # can't be trusted else: - filelinkrevs[f].remove(flr) + if havecl: + filelinkrevs[f].remove(flr) if n in seen: err(flr, _("duplicate revision %d") % i, f) - if n not in filenodes[f]: - err(flr, _("%s not in manifests") % (short(n)), f) - 
else: - del filenodes[f][n] + if f in filenodes: + if havemf and n not in filenodes[f]: + err(flr, _("%s not in manifests") % (short(n)), f) + else: + del filenodes[f][n] # verify contents try: @@ -230,11 +254,12 @@ (short(n), inst), f) # cross-check - fns = [(repo.manifest.linkrev(filenodes[f][n]), n) - for n in filenodes[f]] - fns.sort() - for lr, node in fns: - err(lr, _("%s in manifests not found") % short(node), f) + if f in filenodes: + fns = [(repo.manifest.linkrev(filenodes[f][n]), n) + for n in filenodes[f]] + fns.sort() + for lr, node in fns: + err(lr, _("%s in manifests not found") % short(node), f) repo.ui.status(_("%d files, %d changesets, %d total revisions\n") % (files, changesets, revisions)) @@ -247,4 +272,3 @@ repo.ui.warn(_("(first damaged changeset appears to be %d)\n") % firstbad[0]) return 1 -
--- a/mercurial/version.py Fri Feb 08 11:50:37 2008 +0100 +++ b/mercurial/version.py Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,4 @@ -# Copyright (C) 2005, 2006 by Intevation GmbH +# Copyright (C) 2005, 2006, 2008 by Intevation GmbH # Author(s): # Thomas Arendsen Hein <thomas@intevation.de> # @@ -10,7 +10,6 @@ """ import os -import os.path import re import time import util
--- a/setup.py Fri Feb 08 11:50:37 2008 +0100 +++ b/setup.py Fri Feb 08 11:55:17 2008 +0100 @@ -72,13 +72,13 @@ url='http://selenic.com/mercurial', description='Scalable distributed SCM', license='GNU GPL', + scripts=['hg'], packages=['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert'], ext_modules=ext_modules, data_files=[(os.path.join('mercurial', root), [os.path.join(root, file_) for file_ in files]) for root, dirs, files in os.walk('templates')], cmdclass=cmdclass, - scripts=['hg', 'hgmerge'], options=dict(py2exe=dict(packages=['hgext']), bdist_mpkg=dict(zipdist=True, license='COPYING',
--- a/templates/atom/header.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/atom/header.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,2 @@ -Content-type: application/atom+xml; charset={encoding} - <?xml version="1.0" encoding="{encoding}"?> <feed xmlns="http://www.w3.org/2005/Atom"> \ No newline at end of file
--- a/templates/atom/map Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/atom/map Fri Feb 08 11:55:17 2008 +0100 @@ -1,5 +1,6 @@ default = 'changelog' feedupdated = '<updated>#date|rfc3339date#</updated>' +mimetype = 'application/atom+xml; charset={encoding}' header = header.tmpl changelog = changelog.tmpl changelogentry = changelogentry.tmpl
--- a/templates/error.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/error.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -6,7 +6,7 @@ <h2>Mercurial Error</h2> <p> -An error occured while processing your request: +An error occurred while processing your request: </p> <p> #error|escape#
--- a/templates/gitweb/changelog.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/changelog.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: Changelog</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / changelog +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / changelog </div> <form action="{url}log"> @@ -17,7 +17,6 @@ <input type="text" name="rev" /> </div> </form> -</div> <div class="page_nav"> <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | changelog | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
--- a/templates/gitweb/changeset.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/changeset.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>{repo|escape}: changeset {rev}:{node|short}</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="#url#summary{sessionvars%urlparameter}">#repo|escape#</a> / changeset +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="#url#summary{sessionvars%urlparameter}">#repo|escape#</a> / changeset </div> <div class="page_nav">
--- a/templates/gitweb/error.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/error.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: Error</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / error +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / error </div> <div class="page_nav"> @@ -17,7 +17,7 @@ <div class="page_body"> <br/> -<i>An error occured while processing your request</i><br/> +<i>An error occurred while processing your request</i><br/> <br/> {error|escape} </div>
--- a/templates/gitweb/fileannotate.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/fileannotate.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>{repo|escape}: {file|escape}@{node|short} (annotated)</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / annotate +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / annotate </div> <div class="page_nav">
--- a/templates/gitweb/filediff.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/filediff.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ {header} <title>{repo|escape}: diff {file|escape}</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for {repo|escape}"> + href="{url}rss-log" title="RSS feed for {repo|escape}"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / diff +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / diff </div> <div class="page_nav">
--- a/templates/gitweb/filelog.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/filelog.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: File revisions</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revisions +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revisions </div> <div class="page_nav">
--- a/templates/gitweb/filerevision.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/filerevision.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>{repo|escape}: {file|escape}@{node|short}</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revision +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revision </div> <div class="page_nav">
--- a/templates/gitweb/header.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/header.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,10 +1,8 @@ -Content-type: text/html; charset={encoding} - <?xml version="1.0" encoding="{encoding}"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US"> <head> -<link rel="icon" href="{staticurl}hgicon.png" type="image/png"> +<link rel="icon" href="{staticurl}hgicon.png" type="image/png" /> <meta name="robots" content="index, nofollow"/> <link rel="stylesheet" href="{staticurl}style-gitweb.css" type="text/css" />
--- a/templates/gitweb/index.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/index.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -4,7 +4,8 @@ <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a>Repositories list + <a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a> + Repositories list </div> <table cellspacing="0"> @@ -15,7 +16,7 @@ <td><a href="?sort=#sort_lastchange#">Last change</a></td> <td> </td> <td> </td> - <tr> + </tr> #entries%indexentry# </table> <div class="page_footer">
--- a/templates/gitweb/manifest.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/manifest.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: Manifest</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / manifest +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / manifest </div> <div class="page_nav">
--- a/templates/gitweb/map Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/map Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,5 @@ default = 'summary' +mimetype = 'text/html; charset={encoding}' header = header.tmpl footer = footer.tmpl search = search.tmpl @@ -24,10 +25,10 @@ filelog = filelog.tmpl fileline = '<div style="font-family:monospace" class="parity#parity#"><pre><span class="linenr"> #linenumber#</span> #line|escape#</pre></div>' annotateline = '<tr style="font-family:monospace" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#author|obfuscate#@#rev#</a></td><td><pre>#line|escape#</pre></td></tr>' -difflineplus = '<div style="color:#008800;">#line|escape#</div>' -difflineminus = '<div style="color:#cc0000;">#line|escape#</div>' -difflineat = '<div style="color:#990099;">#line|escape#</div>' -diffline = '<div>#line|escape#</div>' +difflineplus = '<span style="color:#008800;">#line|escape#</span>' +difflineminus = '<span style="color:#cc0000;">#line|escape#</span>' +difflineat = '<span style="color:#990099;">#line|escape#</span>' +diffline = '<span>#line|escape#</span>' changelogparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="#url#rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>' changesetparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>' filerevparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>' @@ -40,7 +41,7 @@ fileannotatechild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' tags = tags.tmpl tagentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>#tag|escape#</b></a></td><td class="link"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/#node|short#{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>' -branchentry = '<tr class="parity{parity}"><td class="age"><i>{date|age} ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></td><td>{branch|escape}</td><td class="link"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">manifest</a></td></tr>' +branchentry = '<tr class="parity{parity}"><td class="age"><i>{date|age} ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></a></td><td>{branch|escape}</td><td class="link"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">manifest</a></td></tr>' diffblock = '<pre>#lines#</pre>' filediffparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" 
href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>' filelogparent = '<tr><td align="right">parent #rev#: </td><td><a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
--- a/templates/gitweb/notfound.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/notfound.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,12 +1,11 @@ {header} -<title>Mercurial repositories index</title> +<title>Mercurial repository not found</title> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div - style="float:right;">Mercurial</div></a> Not found: {repo|escape} +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a> Not found: {repo|escape} </div> <div class="page_body">
--- a/templates/gitweb/search.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/search.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: Search</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / search +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / search <form action="{url}log"> {sessionvars%hiddenformentry}
--- a/templates/gitweb/shortlog.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/shortlog.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: Shortlog</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / shortlog +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / shortlog </div> <form action="{url}log"> @@ -17,7 +17,6 @@ <input type="text" name="rev" /> </div> </form> -</div> <div class="page_nav"> <a href="{url}summary{sessionvars%urlparameter}">summary</a> | shortlog |
--- a/templates/gitweb/summary.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/summary.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: Summary</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / summary +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / summary <form action="{url}log"> {sessionvars%hiddenformentry} @@ -30,7 +30,7 @@ <div class="title"> </div> <table cellspacing="0"> <tr><td>description</td><td>#desc#</td></tr> -<tr><td>owner</td><td>#owner|escape#</td></tr> +<tr><td>owner</td><td>#owner|obfuscate#</td></tr> <tr><td>last change</td><td>#lastchange|rfc822date#</td></tr> </table>
--- a/templates/gitweb/tags.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/gitweb/tags.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,14 +1,14 @@ #header# <title>#repo|escape#: Tags</title> <link rel="alternate" type="application/atom+xml" - href="{url}atom-log" title="Atom feed for #repo|escape#"> + href="{url}atom-log" title="Atom feed for #repo|escape#"/> <link rel="alternate" type="application/rss+xml" - href="{url}rss-log" title="RSS feed for #repo|escape#"> + href="{url}rss-log" title="RSS feed for #repo|escape#"/> </head> <body> <div class="page_header"> -<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / tags +<a href="http://www.selenic.com/mercurial/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / tags </div> <div class="page_nav">
--- a/templates/header.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/header.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,5 +1,3 @@ -Content-type: text/html; charset={encoding} - <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> <html> <head>
--- a/templates/map Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/map Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,5 @@ default = 'shortlog' +mimetype = 'text/html; charset={encoding}' header = header.tmpl footer = footer.tmpl search = search.tmpl
--- a/templates/map-cmdline.default Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/map-cmdline.default Fri Feb 08 11:55:17 2008 +0100 @@ -1,10 +1,13 @@ changeset = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n' changeset_quiet = '{rev}:{node|short}\n' -changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{files}{file_adds}{file_dels}{file_copies}description:\n{desc|strip}\n\n\n' -changeset_debug = 'changeset: {rev}:{node}\n{branches}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{files}{file_adds}{file_dels}{file_copies}{extras}description:\n{desc|strip}\n\n\n' +changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies}description:\n{desc|strip}\n\n\n' +changeset_debug = 'changeset: {rev}:{node}\n{branches}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies}{extras}description:\n{desc|strip}\n\n\n' start_files = 'files: ' file = ' {file}' end_files = '\n' +start_file_mods = 'files: ' +file_mod = ' {file_mod}' +end_file_mods = '\n' start_file_adds = 'files+: ' file_add = ' {file_add}' end_file_adds = '\n'
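The new file_mods entries above follow the command-line templater's start/item/end convention: for a list keyword, the start_* string is emitted once, the item template once per element, and the end_* string once at the end; an empty list emits nothing. A minimal illustrative sketch of that expansion (plain Python, not hg's actual templater code):

def expand(start, item_tmpl, end, values, key):
    # Emit nothing for an empty list, otherwise start + one item per value + end.
    if not values:
        return ''
    return start + ''.join(item_tmpl.replace('{%s}' % key, v) for v in values) + end

# 'files: ' / ' {file_mod}' / '\n' as in map-cmdline.default above
print(repr(expand('files: ', ' {file_mod}', '\n', ['a.txt', 'b.txt'], 'file_mod')))
# 'files:  a.txt b.txt\n'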
--- a/templates/notfound.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/notfound.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,9 +1,9 @@ #header# -<title>Mercurial repositories index</title> +<title>Mercurial repository not found</title> </head> <body> -<h2>Mercurial Repositories</h2> +<h2>Mercurial repository not found</h2> The specified repository "#repo|escape#" is unknown, sorry.
--- a/templates/old/header.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/old/header.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,5 +1,3 @@ -Content-type: text/html - <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> <html> <head>
--- a/templates/old/map Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/old/map Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,5 @@ default = 'changelog' +mimetype = 'text/html' header = header.tmpl footer = footer.tmpl search = search.tmpl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/templates/raw/error.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,2 @@ +#header# +error: #error#
--- a/templates/raw/header.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -Content-type: text/plain; charset={encoding} -
--- a/templates/raw/map Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/raw/map Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,5 @@ -header = header.tmpl +mimetype = 'text/plain; charset={encoding}' +header = '' footer = '' changeset = changeset.tmpl difflineplus = '#line#' @@ -8,7 +9,6 @@ changesetparent = '# Parent #node#' changesetchild = '# Child #node#' filenodelink = '' -filerevision = '#rawfileheader##raw#' fileline = '#line#' diffblock = '#lines#' filediff = filediff.tmpl @@ -18,4 +18,6 @@ manifestdirentry = 'drwxr-xr-x {basename}\n' manifestfileentry = '{permissions|permissions} {size} {basename}\n' index = index.tmpl +notfound = notfound.tmpl +error = error.tmpl indexentry = '#url#\n'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/templates/raw/notfound.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,2 @@ +#header# +error: repository #repo# not found
--- a/templates/rss/header.tmpl Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/rss/header.tmpl Fri Feb 08 11:55:17 2008 +0100 @@ -1,5 +1,3 @@ -Content-type: text/xml; charset={encoding} - <?xml version="1.0" encoding="{encoding}"?> <rss version="2.0"> <channel>
--- a/templates/rss/map Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/rss/map Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,5 @@ default = 'changelog' +mimetype = 'text/xml; charset={encoding}' header = header.tmpl changelog = changelog.tmpl changelogentry = changelogentry.tmpl
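Taken together with the header.tmpl changes above, the 'mimetype' entries added to each style's map move the HTTP Content-type out of the template body and into template data, with {encoding} filled in at serving time. A minimal sketch of the idea, using a hypothetical helper rather than hgweb's actual interface:

# templatemap mirrors a style map such as templates/rss/map above
templatemap = {'mimetype': 'text/xml; charset={encoding}'}

def content_type(tmplmap, encoding, default='text/plain'):
    # Look up the style's mimetype entry and expand the {encoding} placeholder,
    # falling back to a plain default when the style does not define one.
    return tmplmap.get('mimetype', default).replace('{encoding}', encoding)

print(content_type(templatemap, 'UTF-8'))   # text/xml; charset=UTF-8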
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/templates/static/highlight.css Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,59 @@ +.c { color: #808080 } /* Comment */ +.err { color: #F00000; background-color: #F0A0A0 } /* Error */ +.k { color: #008000; font-weight: bold } /* Keyword */ +.o { color: #303030 } /* Operator */ +.cm { color: #808080 } /* Comment.Multiline */ +.cp { color: #507090 } /* Comment.Preproc */ +.c1 { color: #808080 } /* Comment.Single */ +.cs { color: #cc0000; font-weight: bold } /* Comment.Special */ +.gd { color: #A00000 } /* Generic.Deleted */ +.ge { font-style: italic } /* Generic.Emph */ +.gr { color: #FF0000 } /* Generic.Error */ +.gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.gi { color: #00A000 } /* Generic.Inserted */ +.go { color: #808080 } /* Generic.Output */ +.gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.gs { font-weight: bold } /* Generic.Strong */ +.gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.gt { color: #0040D0 } /* Generic.Traceback */ +.kc { color: #008000; font-weight: bold } /* Keyword.Constant */ +.kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ +.kp { color: #003080; font-weight: bold } /* Keyword.Pseudo */ +.kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ +.kt { color: #303090; font-weight: bold } /* Keyword.Type */ +.m { color: #6000E0; font-weight: bold } /* Literal.Number */ +.s { background-color: #fff0f0 } /* Literal.String */ +.na { color: #0000C0 } /* Name.Attribute */ +.nb { color: #007020 } /* Name.Builtin */ +.nc { color: #B00060; font-weight: bold } /* Name.Class */ +.no { color: #003060; font-weight: bold } /* Name.Constant */ +.nd { color: #505050; font-weight: bold } /* Name.Decorator */ +.ni { color: #800000; font-weight: bold } /* Name.Entity */ +.ne { color: #F00000; font-weight: bold } /* Name.Exception */ +.nf { color: #0060B0; font-weight: bold } /* Name.Function */ +.nl { color: #907000; font-weight: bold } /* Name.Label */ +.nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.nt { color: #007000 } /* Name.Tag */ +.nv { color: #906030 } /* Name.Variable */ +.ow { color: #000000; font-weight: bold } /* Operator.Word */ +.w { color: #bbbbbb } /* Text.Whitespace */ +.mf { color: #6000E0; font-weight: bold } /* Literal.Number.Float */ +.mh { color: #005080; font-weight: bold } /* Literal.Number.Hex */ +.mi { color: #0000D0; font-weight: bold } /* Literal.Number.Integer */ +.mo { color: #4000E0; font-weight: bold } /* Literal.Number.Oct */ +.sb { background-color: #fff0f0 } /* Literal.String.Backtick */ +.sc { color: #0040D0 } /* Literal.String.Char */ +.sd { color: #D04020 } /* Literal.String.Doc */ +.s2 { background-color: #fff0f0 } /* Literal.String.Double */ +.se { color: #606060; font-weight: bold; background-color: #fff0f0 } /* Literal.String.Escape */ +.sh { background-color: #fff0f0 } /* Literal.String.Heredoc */ +.si { background-color: #e0e0e0 } /* Literal.String.Interpol */ +.sx { color: #D02000; background-color: #fff0f0 } /* Literal.String.Other */ +.sr { color: #000000; background-color: #fff0ff } /* Literal.String.Regex */ +.s1 { background-color: #fff0f0 } /* Literal.String.Single */ +.ss { color: #A06000 } /* Literal.String.Symbol */ +.bp { color: #007020 } /* Name.Builtin.Pseudo */ +.vc { color: #306090 } /* Name.Variable.Class */ +.vg { color: #d07000; font-weight: bold } /* Name.Variable.Global */ +.vi { color: #3030B0 } /* Name.Variable.Instance */ +.il { color: #0000D0; font-weight: bold } /* 
Literal.Number.Integer.Long */
--- a/templates/static/style-gitweb.css Fri Feb 08 11:50:37 2008 +0100 +++ b/templates/static/style-gitweb.css Fri Feb 08 11:55:17 2008 +0100 @@ -40,7 +40,7 @@ div.search { margin:4px 8px; position:absolute; top:56px; right:12px } .linenr { color:#999999; text-decoration:none } a.rss_logo { - float:right; padding:3px 0px; width:35px; line-height:10px; + float:right; padding:3px 6px; line-height:10px; border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; color:#ffffff; background-color:#ff6600; font-weight:bold; font-family:sans-serif; font-size:10px;
--- a/tests/coverage.py Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/coverage.py Fri Feb 08 11:55:17 2008 +0100 @@ -22,15 +22,20 @@ # interface and limitations. See [GDR 2001-12-04b] for requirements and # design. -"""Usage: +r"""Usage: -coverage.py -x MODULE.py [ARG1 ARG2 ...] +coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...] Execute module, passing the given command-line arguments, collecting - coverage data. + coverage data. With the -p option, write to a temporary file containing + the machine name and process ID. coverage.py -e Erase collected coverage data. +coverage.py -c + Collect data from multiple coverage files (as created by -p option above) + and store it into a single file representing the union of the coverage. + coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ... Report on the statement coverage for the given files. With the -m option, show line numbers of the statements that weren't executed. @@ -49,16 +54,26 @@ Coverage data is saved in the file .coverage by default. Set the COVERAGE_FILE environment variable to save it somewhere else.""" -__version__ = "2.5.20051204" # see detailed history at the end of this file. +__version__ = "2.77.20070729" # see detailed history at the end of this file. import compiler import compiler.visitor +import glob import os import re import string +import symbol import sys import threading +import token import types +from socket import gethostname + +# Python version compatibility +try: + strclass = basestring # new to 2.3 +except: + strclass = str # 2. IMPLEMENTATION # @@ -81,6 +96,9 @@ # names to increase speed. class StatementFindingAstVisitor(compiler.visitor.ASTVisitor): + """ A visitor for a parsed Abstract Syntax Tree which finds executable + statements. + """ def __init__(self, statements, excluded, suite_spots): compiler.visitor.ASTVisitor.__init__(self) self.statements = statements @@ -89,7 +107,6 @@ self.excluding_suite = 0 def doRecursive(self, node): - self.recordNodeLine(node) for n in node.getChildNodes(): self.dispatch(n) @@ -98,7 +115,9 @@ def doCode(self, node): if hasattr(node, 'decorators') and node.decorators: self.dispatch(node.decorators) - self.doSuite(node, node.code) + self.recordAndDispatch(node.code) + else: + self.doSuite(node, node.code) visitFunction = visitClass = doCode @@ -123,12 +142,35 @@ def doStatement(self, node): self.recordLine(self.getFirstLine(node)) - visitAssert = visitAssign = visitAssTuple = visitDiscard = visitPrint = \ + visitAssert = visitAssign = visitAssTuple = visitPrint = \ visitPrintnl = visitRaise = visitSubscript = visitDecorators = \ doStatement + def visitPass(self, node): + # Pass statements have weird interactions with docstrings. If this + # pass statement is part of one of those pairs, claim that the statement + # is on the later of the two lines. + l = node.lineno + if l: + lines = self.suite_spots.get(l, [l,l]) + self.statements[lines[1]] = 1 + + def visitDiscard(self, node): + # Discard nodes are statements that execute an expression, but then + # discard the results. This includes function calls, so we can't + # ignore them all. But if the expression is a constant, the statement + # won't be "executed", so don't count it now. + if node.expr.__class__.__name__ != 'Const': + self.doStatement(node) + def recordNodeLine(self, node): - return self.recordLine(node.lineno) + # Stmt nodes often have None, but shouldn't claim the first line of + # their children (because the first child might be an ignorable line + # like "global a"). 
+ if node.__class__.__name__ != 'Stmt': + return self.recordLine(self.getFirstLine(node)) + else: + return 0 def recordLine(self, lineno): # Returns a bool, whether the line is included or excluded. @@ -137,16 +179,16 @@ # keyword. if lineno in self.suite_spots: lineno = self.suite_spots[lineno][0] - # If we're inside an exluded suite, record that this line was + # If we're inside an excluded suite, record that this line was # excluded. if self.excluding_suite: self.excluded[lineno] = 1 return 0 # If this line is excluded, or suite_spots maps this line to # another line that is exlcuded, then we're excluded. - elif self.excluded.has_key(lineno) or \ - self.suite_spots.has_key(lineno) and \ - self.excluded.has_key(self.suite_spots[lineno][1]): + elif lineno in self.excluded or \ + lineno in self.suite_spots and \ + self.suite_spots[lineno][1] in self.excluded: return 0 # Otherwise, this is an executable line. else: @@ -175,8 +217,8 @@ lastprev = self.getLastLine(prevsuite) firstelse = self.getFirstLine(suite) for l in range(lastprev+1, firstelse): - if self.suite_spots.has_key(l): - self.doSuite(None, suite, exclude=self.excluded.has_key(l)) + if l in self.suite_spots: + self.doSuite(None, suite, l in exclude=self.excluded) break else: self.doSuite(None, suite) @@ -189,6 +231,8 @@ self.doSuite(node, node.body) self.doElse(node.body, node) + visitWhile = visitFor + def visitIf(self, node): # The first test has to be handled separately from the rest. # The first test is credited to the line with the "if", but the others @@ -198,10 +242,6 @@ self.doSuite(t, n) self.doElse(node.tests[-1][1], node) - def visitWhile(self, node): - self.doSuite(node, node.body) - self.doElse(node.body, node) - def visitTryExcept(self, node): self.doSuite(node, node.body) for i in range(len(node.handlers)): @@ -221,6 +261,9 @@ self.doSuite(node, node.body) self.doPlainWordSuite(node.body, node.final) + def visitWith(self, node): + self.doSuite(node, node.body) + def visitGlobal(self, node): # "global" statements don't execute like others (they don't call the # trace function), so don't record their line numbers. @@ -228,9 +271,9 @@ the_coverage = None +class CoverageException(Exception): pass + class coverage: - error = "coverage error" - # Name of the cache file (unless environment variable is set). cache_default = ".coverage" @@ -257,14 +300,16 @@ def __init__(self): global the_coverage if the_coverage: - raise self.error, "Only one coverage object allowed." + raise CoverageException, "Only one coverage object allowed." self.usecache = 1 self.cache = None + self.parallel_mode = False self.exclude_re = '' self.nesting = 0 self.cstack = [] self.xstack = [] - self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.path.sep) + self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.sep) + self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]') # t(f, x, y). This method is passed to sys.settrace as a trace function. # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and @@ -272,102 +317,120 @@ # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code # objects. 
- def t(self, f, w, a): #pragma: no cover - #print w, f.f_code.co_filename, f.f_lineno + def t(self, f, w, unused): #pragma: no cover if w == 'line': + #print "Executing %s @ %d" % (f.f_code.co_filename, f.f_lineno) self.c[(f.f_code.co_filename, f.f_lineno)] = 1 for c in self.cstack: c[(f.f_code.co_filename, f.f_lineno)] = 1 return self.t - def help(self, error=None): + def help(self, error=None): #pragma: no cover if error: print error print print __doc__ sys.exit(1) - def command_line(self): + def command_line(self, argv, help_fn=None): import getopt + help_fn = help_fn or self.help settings = {} optmap = { '-a': 'annotate', + '-c': 'collect', '-d:': 'directory=', '-e': 'erase', '-h': 'help', '-i': 'ignore-errors', '-m': 'show-missing', + '-p': 'parallel-mode', '-r': 'report', '-x': 'execute', - '-o': 'omit=', + '-o:': 'omit=', } short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '') long_opts = optmap.values() - options, args = getopt.getopt(sys.argv[1:], short_opts, long_opts) + options, args = getopt.getopt(argv, short_opts, long_opts) for o, a in options: - if optmap.has_key(o): + if o in optmap: settings[optmap[o]] = 1 - elif optmap.has_key(o + ':'): + elif o + ':' in optmap: settings[optmap[o + ':']] = a elif o[2:] in long_opts: settings[o[2:]] = 1 elif o[2:] + '=' in long_opts: - settings[o[2:]] = a - else: - self.help("Unknown option: '%s'." % o) + settings[o[2:]+'='] = a + else: #pragma: no cover + pass # Can't get here, because getopt won't return anything unknown. + if settings.get('help'): - self.help() + help_fn() + for i in ['erase', 'execute']: - for j in ['annotate', 'report']: + for j in ['annotate', 'report', 'collect']: if settings.get(i) and settings.get(j): - self.help("You can't specify the '%s' and '%s' " + help_fn("You can't specify the '%s' and '%s' " "options at the same time." % (i, j)) + args_needed = (settings.get('execute') or settings.get('annotate') or settings.get('report')) - action = settings.get('erase') or args_needed + action = (settings.get('erase') + or settings.get('collect') + or args_needed) if not action: - self.help("You must specify at least one of -e, -x, -r, or -a.") + help_fn("You must specify at least one of -e, -x, -c, -r, or -a.") if not args_needed and args: - self.help("Unexpected arguments %s." % args) + help_fn("Unexpected arguments: %s" % " ".join(args)) + self.parallel_mode = settings.get('parallel-mode') self.get_ready() - self.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]') if settings.get('erase'): self.erase() if settings.get('execute'): if not args: - self.help("Nothing to do.") + help_fn("Nothing to do.") sys.argv = args self.start() import __main__ sys.path[0] = os.path.dirname(sys.argv[0]) execfile(sys.argv[0], __main__.__dict__) + if settings.get('collect'): + self.collect() if not args: args = self.cexecuted.keys() + ignore_errors = settings.get('ignore-errors') show_missing = settings.get('show-missing') - directory = settings.get('directory') - omit = filter(None, settings.get('omit', '').split(',')) - omit += ['/<'] # Always skip /<string> etc. 
+ directory = settings.get('directory=') + + omit = settings.get('omit=') + if omit is not None: + omit = omit.split(',') + else: + omit = [] if settings.get('report'): self.report(args, show_missing, ignore_errors, omit_prefixes=omit) if settings.get('annotate'): self.annotate(args, directory, ignore_errors, omit_prefixes=omit) - def use_cache(self, usecache): + def use_cache(self, usecache, cache_file=None): self.usecache = usecache + if cache_file and not self.cache: + self.cache_default = cache_file - def get_ready(self): + def get_ready(self, parallel_mode=False): if self.usecache and not self.cache: - self.cache = os.path.abspath(os.environ.get(self.cache_env, - self.cache_default)) + self.cache = os.environ.get(self.cache_env, self.cache_default) + if self.parallel_mode: + self.cache += "." + gethostname() + "." + str(os.getpid()) self.restore() self.analysis_cache = {} - def start(self): + def start(self, parallel_mode=False): self.get_ready() if self.nesting == 0: #pragma: no cover sys.settrace(self.t) @@ -383,12 +446,12 @@ threading.settrace(None) def erase(self): + self.get_ready() self.c = {} self.analysis_cache = {} self.cexecuted = {} if self.cache and os.path.exists(self.cache): os.remove(self.cache) - self.exclude_re = "" def exclude(self, re): if self.exclude_re: @@ -406,8 +469,6 @@ # save(). Save coverage data to the coverage cache. def save(self): - # move to directory that must exist. - os.chdir(os.sep) if self.usecache and self.cache: self.canonicalize_filenames() cache = open(self.cache, 'wb') @@ -421,24 +482,52 @@ self.c = {} self.cexecuted = {} assert self.usecache - if not os.path.exists(self.cache): - return + if os.path.exists(self.cache): + self.cexecuted = self.restore_file(self.cache) + + def restore_file(self, file_name): try: - cache = open(self.cache, 'rb') + cache = open(file_name, 'rb') import marshal cexecuted = marshal.load(cache) cache.close() if isinstance(cexecuted, types.DictType): - self.cexecuted = cexecuted + return cexecuted + else: + return {} except: - pass + return {} + + # collect(). Collect data in multiple files produced by parallel mode + + def collect(self): + cache_dir, local = os.path.split(self.cache) + for f in os.listdir(cache_dir or '.'): + if not f.startswith(local): + continue + + full_path = os.path.join(cache_dir, f) + cexecuted = self.restore_file(full_path) + self.merge_data(cexecuted) + + def merge_data(self, new_data): + for file_name, file_data in new_data.items(): + if file_name in self.cexecuted: + self.merge_file_data(self.cexecuted[file_name], file_data) + else: + self.cexecuted[file_name] = file_data + + def merge_file_data(self, cache_data, new_data): + for line_number in new_data.keys(): + if not line_number in cache_data: + cache_data[line_number] = new_data[line_number] # canonical_filename(filename). Return a canonical filename for the # file (that is, an absolute path with no redundant components and # normalized case). See [GDR 2001-12-04b, 3.3]. def canonical_filename(self, filename): - if not self.canonical_filename_cache.has_key(filename): + if not filename in self.canonical_filename_cache: f = filename if os.path.isabs(f) and not os.path.exists(f): f = os.path.basename(f) @@ -457,8 +546,11 @@ def canonicalize_filenames(self): for filename, lineno in self.c.keys(): + if filename == '<string>': + # Can't do anything useful with exec'd strings, so skip them. 
+ continue f = self.canonical_filename(filename) - if not self.cexecuted.has_key(f): + if not f in self.cexecuted: self.cexecuted[f] = {} self.cexecuted[f][lineno] = 1 self.c = {} @@ -468,49 +560,90 @@ def morf_filename(self, morf): if isinstance(morf, types.ModuleType): if not hasattr(morf, '__file__'): - raise self.error, "Module has no __file__ attribute." - file = morf.__file__ + raise CoverageException, "Module has no __file__ attribute." + f = morf.__file__ else: - file = morf - return self.canonical_filename(file) + f = morf + return self.canonical_filename(f) # analyze_morf(morf). Analyze the module or filename passed as # the argument. If the source code can't be found, raise an error. # Otherwise, return a tuple of (1) the canonical filename of the # source code for the module, (2) a list of lines of statements - # in the source code, and (3) a list of lines of excluded statements. + # in the source code, (3) a list of lines of excluded statements, + # and (4), a map of line numbers to multi-line line number ranges, for + # statements that cross lines. def analyze_morf(self, morf): - if self.analysis_cache.has_key(morf): + if morf in self.analysis_cache: return self.analysis_cache[morf] filename = self.morf_filename(morf) ext = os.path.splitext(filename)[1] if ext == '.pyc': if not os.path.exists(filename[0:-1]): - raise self.error, ("No source for compiled code '%s'." + raise CoverageException, ("No source for compiled code '%s'." % filename) filename = filename[0:-1] elif ext != '.py': - raise self.error, "File '%s' not Python source." % filename + raise CoverageException, "File '%s' not Python source." % filename source = open(filename, 'r') - lines, excluded_lines = self.find_executable_statements( + lines, excluded_lines, line_map = self.find_executable_statements( source.read(), exclude=self.exclude_re ) source.close() - result = filename, lines, excluded_lines + result = filename, lines, excluded_lines, line_map self.analysis_cache[morf] = result return result + def first_line_of_tree(self, tree): + while True: + if len(tree) == 3 and type(tree[2]) == type(1): + return tree[2] + tree = tree[1] + + def last_line_of_tree(self, tree): + while True: + if len(tree) == 3 and type(tree[2]) == type(1): + return tree[2] + tree = tree[-1] + + def find_docstring_pass_pair(self, tree, spots): + for i in range(1, len(tree)): + if self.is_string_constant(tree[i]) and self.is_pass_stmt(tree[i+1]): + first_line = self.first_line_of_tree(tree[i]) + last_line = self.last_line_of_tree(tree[i+1]) + self.record_multiline(spots, first_line, last_line) + + def is_string_constant(self, tree): + try: + return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt + except: + return False + + def is_pass_stmt(self, tree): + try: + return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt + except: + return False + + def record_multiline(self, spots, i, j): + for l in range(i, j+1): + spots[l] = (i, j) + def get_suite_spots(self, tree, spots): - import symbol, token + """ Analyze a parse tree to find suite introducers which span a number + of lines. + """ for i in range(1, len(tree)): - if isinstance(tree[i], tuple): + if type(tree[i]) == type(()): if tree[i][0] == symbol.suite: # Found a suite, look back for the colon and keyword. lineno_colon = lineno_word = None for j in range(i-1, 0, -1): if tree[j][0] == token.COLON: - lineno_colon = tree[j][2] + # Colons are never executed themselves: we want the + # line number of the last token before the colon. 
+ lineno_colon = self.last_line_of_tree(tree[j-1]) elif tree[j][0] == token.NAME: if tree[j][1] == 'elif': # Find the line number of the first non-terminal @@ -532,8 +665,18 @@ if lineno_colon and lineno_word: # Found colon and keyword, mark all the lines # between the two with the two line numbers. - for l in range(lineno_word, lineno_colon+1): - spots[l] = (lineno_word, lineno_colon) + self.record_multiline(spots, lineno_word, lineno_colon) + + # "pass" statements are tricky: different versions of Python + # treat them differently, especially in the common case of a + # function with a doc string and a single pass statement. + self.find_docstring_pass_pair(tree[i], spots) + + elif tree[i][0] == symbol.simple_stmt: + first_line = self.first_line_of_tree(tree[i]) + last_line = self.last_line_of_tree(tree[i]) + if first_line != last_line: + self.record_multiline(spots, first_line, last_line) self.get_suite_spots(tree[i], spots) def find_executable_statements(self, text, exclude=None): @@ -547,9 +690,12 @@ if reExclude.search(lines[i]): excluded[i+1] = 1 + # Parse the code and analyze the parse tree to find out which statements + # are multiline, and where suites begin and end. import parser tree = parser.suite(text+'\n\n').totuple(1) self.get_suite_spots(tree, suite_spots) + #print "Suite spots:", suite_spots # Use the compiler module to parse the text and find the executable # statements. We add newlines to be impervious to final partial lines. @@ -562,7 +708,7 @@ lines.sort() excluded_lines = excluded.keys() excluded_lines.sort() - return lines, excluded_lines + return lines, excluded_lines, suite_spots # format_lines(statements, lines). Format a list of line numbers # for printing by coalescing groups of lines as long as the lines @@ -595,7 +741,8 @@ return "%d" % start else: return "%d-%d" % (start, end) - return string.join(map(stringify, pairs), ", ") + ret = string.join(map(stringify, pairs), ", ") + return ret # Backward compatibility with version 1. def analysis(self, morf): @@ -603,13 +750,17 @@ return f, s, m, mf def analysis2(self, morf): - filename, statements, excluded = self.analyze_morf(morf) + filename, statements, excluded, line_map = self.analyze_morf(morf) self.canonicalize_filenames() - if not self.cexecuted.has_key(filename): + if not filename in self.cexecuted: self.cexecuted[filename] = {} missing = [] for line in statements: - if not self.cexecuted[filename].has_key(line): + lines = line_map.get(line, [line, line]) + for l in range(lines[0], lines[1]+1): + if l in self.cexecuted[filename]: + break + else: missing.append(line) return (filename, statements, excluded, missing, self.format_lines(statements, missing)) @@ -647,6 +798,15 @@ def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]): if not isinstance(morfs, types.ListType): morfs = [morfs] + # On windows, the shell doesn't expand wildcards. Do it here. + globbed = [] + for morf in morfs: + if isinstance(morf, strclass): + globbed.extend(glob.glob(morf)) + else: + globbed.append(morf) + morfs = globbed + morfs = self.filter_by_prefix(morfs, omit_prefixes) morfs.sort(self.morf_name_compare) @@ -684,8 +844,8 @@ raise except: if not ignore_errors: - type, msg = sys.exc_info()[0:2] - print >>file, fmt_err % (name, type, msg) + typ, msg = sys.exc_info()[0:2] + print >>file, fmt_err % (name, typ, msg) if len(morfs) > 1: print >>file, "-" * len(header) if total_statements > 0: @@ -765,18 +925,41 @@ the_coverage = coverage() # Module functions call methods in the singleton object. 
-def use_cache(*args, **kw): return the_coverage.use_cache(*args, **kw) -def start(*args, **kw): return the_coverage.start(*args, **kw) -def stop(*args, **kw): return the_coverage.stop(*args, **kw) -def erase(*args, **kw): return the_coverage.erase(*args, **kw) -def begin_recursive(*args, **kw): return the_coverage.begin_recursive(*args, **kw) -def end_recursive(*args, **kw): return the_coverage.end_recursive(*args, **kw) -def exclude(*args, **kw): return the_coverage.exclude(*args, **kw) -def analysis(*args, **kw): return the_coverage.analysis(*args, **kw) -def analysis2(*args, **kw): return the_coverage.analysis2(*args, **kw) -def report(*args, **kw): return the_coverage.report(*args, **kw) -def annotate(*args, **kw): return the_coverage.annotate(*args, **kw) -def annotate_file(*args, **kw): return the_coverage.annotate_file(*args, **kw) +def use_cache(*args, **kw): + return the_coverage.use_cache(*args, **kw) + +def start(*args, **kw): + return the_coverage.start(*args, **kw) + +def stop(*args, **kw): + return the_coverage.stop(*args, **kw) + +def erase(*args, **kw): + return the_coverage.erase(*args, **kw) + +def begin_recursive(*args, **kw): + return the_coverage.begin_recursive(*args, **kw) + +def end_recursive(*args, **kw): + return the_coverage.end_recursive(*args, **kw) + +def exclude(*args, **kw): + return the_coverage.exclude(*args, **kw) + +def analysis(*args, **kw): + return the_coverage.analysis(*args, **kw) + +def analysis2(*args, **kw): + return the_coverage.analysis2(*args, **kw) + +def report(*args, **kw): + return the_coverage.report(*args, **kw) + +def annotate(*args, **kw): + return the_coverage.annotate(*args, **kw) + +def annotate_file(*args, **kw): + return the_coverage.annotate_file(*args, **kw) # Save coverage data when Python exits. (The atexit module wasn't # introduced until Python 2.0, so use sys.exitfunc when it's not @@ -789,7 +972,7 @@ # Command-line interface. if __name__ == '__main__': - the_coverage.command_line() + the_coverage.command_line(sys.argv[1:]) # A. REFERENCES @@ -858,10 +1041,46 @@ # 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames, # and sorting and omitting files to report on. # +# 2006-07-23 NMB Applied Joseph Tate's patch for function decorators. +# +# 2006-08-21 NMB Applied Sigve Tjora and Mark van der Wal's fixes for argument +# handling. +# +# 2006-08-22 NMB Applied Geoff Bache's parallel mode patch. +# +# 2006-08-23 NMB Refactorings to improve testability. Fixes to command-line +# logic for parallel mode and collect. +# +# 2006-08-25 NMB "#pragma: nocover" is excluded by default. +# +# 2006-09-10 NMB Properly ignore docstrings and other constant expressions that +# appear in the middle of a function, a problem reported by Tim Leslie. +# Minor changes to avoid lint warnings. +# +# 2006-09-17 NMB coverage.erase() shouldn't clobber the exclude regex. +# Change how parallel mode is invoked, and fix erase() so that it erases the +# cache when called programmatically. +# +# 2007-07-21 NMB In reports, ignore code executed from strings, since we can't +# do anything useful with it anyway. +# Better file handling on Linux, thanks Guillaume Chazarain. +# Better shell support on Windows, thanks Noel O'Boyle. +# Python 2.2 support maintained, thanks Catherine Proulx. +# +# 2007-07-22 NMB Python 2.5 now fully supported. The method of dealing with +# multi-line statements is now less sensitive to the exact line that Python +# reports during execution. 
Pass statements are handled specially so that their +# disappearance during execution won't throw off the measurement. +# +# 2007-07-23 NMB Now Python 2.5 is *really* fully supported: the body of the +# new with statement is counted as executable. +# +# 2007-07-29 NMB Better packaging. + # C. COPYRIGHT AND LICENCE # # Copyright 2001 Gareth Rees. All rights reserved. -# Copyright 2004-2005 Ned Batchelder. All rights reserved. +# Copyright 2004-2007 Ned Batchelder. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are @@ -888,4 +1107,4 @@ # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. # -# $Id: coverage.py 26 2005-12-04 18:42:44Z ned $ +# $Id: coverage.py 74 2007-07-29 22:28:35Z nedbat $
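The coverage.py additions above (the -p/parallel-mode and -c/collect options, plus collect(), merge_data() and merge_file_data()) let several processes write separate .coverage.<hostname>.<pid> files and then union them into a single data set. A standalone sketch of that union merge, assuming the same {filename: {lineno: 1}} shape the module stores in memory (the on-disk files are marshal-serialized):

def merge_data(combined, new_data):
    # Union of executed line numbers per file, as merge_data/merge_file_data do.
    for filename, lines in new_data.items():
        combined.setdefault(filename, {}).update(lines)

total = {}
merge_data(total, {'mymodule.py': {1: 1, 2: 1}})
merge_data(total, {'mymodule.py': {2: 1, 5: 1}})
print(sorted(total['mymodule.py']))   # [1, 2, 5]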
--- a/tests/get-with-headers.py Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/get-with-headers.py Fri Feb 08 11:55:17 2008 +0100 @@ -14,3 +14,7 @@ print "%s: %s" % (h, response.getheader(h)) print sys.stdout.write(response.read()) + +if 200 <= response.status <= 299: + sys.exit(0) +sys.exit(1)
--- a/tests/hghave Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/hghave Fri Feb 08 11:55:17 2008 +0100 @@ -133,14 +133,14 @@ feature = feature[3:] if feature not in checks: - error('hghave: unknown feature: ' + feature) + error('skipped: unknown feature: ' + feature) continue check, desc = checks[feature] if not negate and not check(): - error('hghave: missing feature: ' + desc) + error('skipped: missing feature: ' + desc) elif negate and check(): - error('hghave: system supports %s' % desc) + error('skipped: system supports %s' % desc) if failures != 0: sys.exit(1)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/readlink.py Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,12 @@ +#!/usr/bin/env python + +import errno, os, sys + +for f in sys.argv[1:]: + try: + print f, '->', os.readlink(f) + except OSError, err: + if err.errno != errno.EINVAL: raise + print f, 'not a symlink' + +sys.exit(0)
--- a/tests/run-tests.py Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/run-tests.py Fri Feb 08 11:55:17 2008 +0100 @@ -19,8 +19,9 @@ import tempfile import time -# hghave reserved exit code to skip test +# reserved exit code to skip test (used by hghave) SKIPPED_STATUS = 80 +SKIPPED_PREFIX = 'skipped: ' required_tools = ["python", "diff", "grep", "unzip", "gunzip", "bunzip2", "sed"] @@ -99,10 +100,10 @@ '''Extract missing/unknown features log lines as a list''' missing = [] for line in lines: - if not line.startswith('hghave: '): + if not line.startswith(SKIPPED_PREFIX): continue line = line.splitlines()[0] - missing.append(line[8:]) + missing.append(line[len(SKIPPED_PREFIX):]) return missing @@ -411,6 +412,7 @@ # the tests produce repeatable output. os.environ['LANG'] = os.environ['LC_ALL'] = 'C' os.environ['TZ'] = 'GMT' +os.environ["EMAIL"] = "Foo Bar <foo.bar@example.com>" TESTDIR = os.environ["TESTDIR"] = os.getcwd() HGTMP = os.environ['HGTMP'] = tempfile.mkdtemp('', 'hgtests.', options.tmpdir) @@ -418,9 +420,7 @@ HGRCPATH = None os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"' -os.environ["HGMERGE"] = ('python "%s" -L my -L other' - % os.path.join(TESTDIR, os.path.pardir, - 'contrib', 'simplemerge')) +os.environ["HGMERGE"] = "internal:merge" os.environ["HGUSER"] = "test" os.environ["HGENCODING"] = "ascii" os.environ["HGENCODINGMODE"] = "strict" @@ -547,7 +547,7 @@ fp = os.fdopen(options.child, 'w') fp.write('%d\n%d\n%d\n' % (tested, skipped, failed)) for s in skips: - fp.write("%s %s\n" % s) + fp.write("%s %s\n" % s) fp.close() else: print
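With the hghave and run-tests.py changes above, skip reporting uses one shared convention: a test is marked skipped by printing a line that starts with 'skipped: ', and the harness collects the text after that prefix as the missing-feature reason. A small self-contained sketch of that extraction, mirroring the loop shown in the run-tests.py diff:

SKIPPED_PREFIX = 'skipped: '

def extract_missing_features(lines):
    # Keep only the first physical line of each 'skipped: ...' message.
    missing = []
    for line in lines:
        if line.startswith(SKIPPED_PREFIX):
            missing.append(line.splitlines()[0][len(SKIPPED_PREFIX):])
    return missing

sample = ['checking for unzip...', 'skipped: missing feature: git command line client']
print(extract_missing_features(sample))   # ['missing feature: git command line client']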
--- a/tests/test-add Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-add Fri Feb 08 11:55:17 2008 +0100 @@ -11,7 +11,7 @@ echo b > b hg add -n b hg st -hg add b +hg add b || echo "failed to add b" hg st echo % should fail hg add b @@ -40,3 +40,9 @@ echo a > a hg add a hg st + +hg add c && echo "unexpected addition of missing file" +echo c > c +hg add d c && echo "unexpected addition of missing file" +hg st +
--- a/tests/test-add.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-add.out Fri Feb 08 11:55:17 2008 +0100 @@ -13,17 +13,26 @@ % should fail a already tracked! 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +merging a warning: conflicts during merge. -merging a merging a failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved There are unresolved merges, you can redo the full merge using: hg update -C 2 hg merge 1 M a +? a.orig % should fail a already tracked! M a +? a.orig % issue683 R a +? a.orig M a +? a.orig +c does not exist! +d does not exist! +M a +A c +? a.orig
--- a/tests/test-archive Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-archive Fri Feb 08 11:55:17 2008 +0100 @@ -13,7 +13,7 @@ echo "[web]" >> .hg/hgrc echo "name = test-archive" >> .hg/hgrc echo "allow_archive = gz bz2, zip" >> .hg/hgrc -hg serve -p $HGPORT -d --pid-file=hg.pid +hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log cat hg.pid >> $DAEMON_PIDS TIP=`hg id -v | cut -f1 -d' '` @@ -71,8 +71,12 @@ hg archive -t bogus test.bogus +echo % server errors +cat errors.log + echo '% empty repo' hg init ../empty cd ../empty hg archive ../test-empty + exit 0
--- a/tests/test-archive-symlinks Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-archive-symlinks Fri Feb 08 11:55:17 2008 +0100 @@ -4,14 +4,6 @@ origdir=`pwd` -cat >> readlink.py <<EOF -import os -import sys - -for f in sys.argv[1:]: - print f, '->', os.readlink(f) -EOF - hg init repo cd repo ln -s nothing dangling @@ -25,16 +17,16 @@ echo '% files' cd "$origdir" cd archive -python ../readlink.py dangling +$TESTDIR/readlink.py dangling echo '% tar' cd "$origdir" tar xf archive.tar cd tar -python ../readlink.py dangling +$TESTDIR/readlink.py dangling echo '% zip' cd "$origdir" unzip archive.zip > /dev/null cd zip -python ../readlink.py dangling +$TESTDIR/readlink.py dangling
--- a/tests/test-archive.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-archive.out Fri Feb 08 11:55:17 2008 +0100 @@ -40,5 +40,6 @@ test-TIP/foo rev-0.tar created abort: unknown archive type 'bogus' +% server errors % empty repo abort: repository has no revisions
--- a/tests/test-backout Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-backout Fri Feb 08 11:55:17 2008 +0100 @@ -37,6 +37,22 @@ hg backout -d '3 0' --merge tip cat a 2>/dev/null || echo cat: a: No such file or directory +echo '# across branch' +cd .. +hg init branch +cd branch +echo a > a +hg ci -Am0 -d '0 0' +echo b > b +hg ci -Am1 -d '0 0' +hg co -C 0 +# should fail +hg backout -d '0 0' 1 +echo c > c +hg ci -Am2 -d '0 0' +# should fail +hg backout -d '0 0' 1 + echo '# backout with merge' cd .. hg init merge
--- a/tests/test-backout.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-backout.out Fri Feb 08 11:55:17 2008 +0100 @@ -15,6 +15,13 @@ removing a changeset 3:7f6d0f120113 backs out changeset 2:de31bdc76c0d cat: a: No such file or directory +# across branch +adding a +adding b +0 files updated, 0 files merged, 1 files removed, 0 files unresolved +abort: cannot back out change on a different branch +adding c +abort: cannot back out change on a different branch # backout with merge adding a reverting a
--- a/tests/test-bisect Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-bisect Fri Feb 08 11:55:17 2008 +0100 @@ -2,9 +2,6 @@ set -e -echo "[extensions]" >> $HGRCPATH -echo "hbisect=" >> $HGRCPATH - echo % init hg init @@ -26,11 +23,34 @@ hg up -C echo % bisect test -hg bisect init -hg bisect bad -hg bisect good 1 -hg bisect good -hg bisect good -hg bisect good -hg bisect bad -hg bisect good +hg bisect -r +hg bisect -b +hg bisect -g 1 +hg bisect -g +echo skip +hg bisect -s +hg bisect -g +hg bisect -g +hg bisect -b +hg bisect -g + +echo % bisect reverse test +hg bisect -r +hg bisect -b null +hg bisect -g tip +hg bisect -g +echo skip +hg bisect -s +hg bisect -g +hg bisect -g +hg bisect -b +hg bisect -g + +hg bisect -r +hg bisect -g tip +hg bisect -b tip || echo error + +hg bisect -r +hg bisect -g null +hg bisect -bU tip +hg id \ No newline at end of file
--- a/tests/test-bisect.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-bisect.out Fri Feb 08 11:55:17 2008 +0100 @@ -202,7 +202,10 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Testing changeset 23:5ec79163bff4 (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved -Testing changeset 27:288867a866e9 (8 changesets remaining, ~3 tests) +skip +Testing changeset 24:10e0acd3809e (15 changesets remaining, ~3 tests) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +Testing changeset 27:288867a866e9 (7 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Testing changeset 29:b5bd63375ab9 (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -214,3 +217,27 @@ date: Thu Jan 01 00:00:29 1970 +0000 summary: msg 29 +% bisect reverse test +Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +Testing changeset 7:03750880c6b5 (16 changesets remaining, ~4 tests) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +skip +Testing changeset 6:a3d5c6fdf0d3 (16 changesets remaining, ~4 tests) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +Testing changeset 2:db07c04beaca (7 changesets remaining, ~2 tests) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +Testing changeset 0:b99c7b9c8e11 (3 changesets remaining, ~1 tests) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +The first good revision is: +changeset: 1:5cd978ea5149 +user: test +date: Thu Jan 01 00:00:01 1970 +0000 +summary: msg 1 + +abort: Inconsistent state, 31:58c80a7c8a40 is good and bad +error +Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) +5cd978ea5149
--- a/tests/test-bundle Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-bundle Fri Feb 08 11:55:17 2008 +0100 @@ -2,6 +2,7 @@ cp "$TESTDIR"/printenv.py . +echo "====== Setting up test" hg init test cd test echo 0 > afile @@ -30,20 +31,40 @@ hg verify cd .. hg init empty + +echo "====== Bundle test to full.hg" hg -R test bundle full.hg empty +echo "====== Unbundle full.hg in test" hg -R test unbundle full.hg +echo "====== Verify empty" hg -R empty heads hg -R empty verify +echo "====== Pull full.hg into test (using --cwd)" hg --cwd test pull ../full.hg +echo "====== Pull full.hg into empty (using --cwd)" hg --cwd empty pull ../full.hg +echo "====== Rollback empty" hg -R empty rollback +echo "====== Pull full.hg into empty again (using --cwd)" hg --cwd empty pull ../full.hg +echo "====== Pull full.hg into test (using -R)" +hg -R test pull full.hg +echo "====== Pull full.hg into empty (using -R)" +hg -R empty pull full.hg +echo "====== Rollback empty" +hg -R empty rollback +echo "====== Pull full.hg into empty again (using -R)" +hg -R empty pull full.hg + +echo "====== Log -R full.hg in fresh empty" rm -r empty hg init empty cd empty hg -R bundle://../full.hg log + +echo "====== Pull ../full.hg into empty (with hook)" echo '[hooks]' >> .hg/hgrc echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc #doesn't work (yet ?) @@ -51,18 +72,24 @@ hg pull bundle://../full.hg cd .. +echo "====== Create partial clones" rm -r empty hg init empty hg clone -r 3 test partial hg clone partial partial2 cd partial +echo "====== Log -R full.hg in partial" hg -R bundle://../full.hg log +echo "====== Incoming full.hg in partial" hg incoming bundle://../full.hg +echo "====== Outgoing -R full.hg vs partial2 in partial" hg -R bundle://../full.hg outgoing ../partial2 +echo "====== Outgoing -R does-not-exist.hg vs partial2 in partial" hg -R bundle://../does-not-exist.hg outgoing ../partial2 cd .. # test for http://www.selenic.com/mercurial/bts/issue216 +echo "====== Unbundle incremental bundles into fresh empty in one go" rm -r empty hg init empty hg -R test bundle --base null -r 0 ../0.hg @@ -70,6 +97,7 @@ hg -R empty unbundle -u ../0.hg ../1.hg # test for 540d1059c802 +echo "====== test for 540d1059c802" hg init orig cd orig echo foo > foo
--- a/tests/test-bundle-r.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-bundle-r.out Fri Feb 08 11:55:17 2008 +0100 @@ -25,6 +25,7 @@ checking files 4 files, 9 changesets, 7 total revisions searching for changes +1 changesets found adding changesets adding manifests adding file changes @@ -37,6 +38,7 @@ 1 files, 1 changesets, 1 total revisions 0:5649c9d34dd8 searching for changes +2 changesets found adding changesets adding manifests adding file changes @@ -49,6 +51,7 @@ 1 files, 2 changesets, 2 total revisions 1:10b2180f755b searching for changes +3 changesets found adding changesets adding manifests adding file changes @@ -61,6 +64,7 @@ 1 files, 3 changesets, 3 total revisions 2:d62976ca1e50 searching for changes +4 changesets found adding changesets adding manifests adding file changes @@ -73,6 +77,7 @@ 1 files, 4 changesets, 4 total revisions 3:ac69c658229d searching for changes +2 changesets found adding changesets adding manifests adding file changes @@ -85,6 +90,7 @@ 1 files, 2 changesets, 2 total revisions 1:5f4f3ceb285e searching for changes +3 changesets found adding changesets adding manifests adding file changes @@ -97,6 +103,7 @@ 1 files, 3 changesets, 3 total revisions 2:024e4e7df376 searching for changes +4 changesets found adding changesets adding manifests adding file changes @@ -109,6 +116,7 @@ 2 files, 4 changesets, 5 total revisions 3:1e3f6b843bd6 searching for changes +5 changesets found adding changesets adding manifests adding file changes @@ -121,6 +129,7 @@ 3 files, 5 changesets, 6 total revisions 4:80fe151401c2 searching for changes +5 changesets found adding changesets adding manifests adding file changes @@ -148,6 +157,11 @@ % should fail abort: --base is incompatible with specifiying a destination abort: repository default-push not found! +2 changesets found +4 changesets found +6 changesets found +1 changesets found +1 changesets found 1 files updated, 0 files merged, 0 files removed, 0 files unresolved % 2 2:d62976ca1e50 @@ -202,6 +216,7 @@ adifferentfile 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) +7 changesets found 1 files updated, 0 files merged, 0 files removed, 0 files unresolved adding changesets adding manifests
--- a/tests/test-bundle.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-bundle.out Fri Feb 08 11:55:17 2008 +0100 @@ -1,3 +1,4 @@ +====== Setting up test 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 1 files updated, 0 files merged, 2 files removed, 0 files unresolved checking changesets @@ -5,12 +6,16 @@ crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions +====== Bundle test to full.hg searching for changes +9 changesets found +====== Unbundle full.hg in test adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 4 files (run 'hg update' to get a working copy) +====== Verify empty changeset: -1:000000000000 tag: tip user: @@ -21,9 +26,11 @@ crosschecking files in changesets and manifests checking files 0 files, 0 changesets, 0 total revisions +====== Pull full.hg into test (using --cwd) pulling from ../full.hg searching for changes no changes found +====== Pull full.hg into empty (using --cwd) pulling from ../full.hg requesting all changes adding changesets @@ -31,7 +38,9 @@ adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) +====== Rollback empty rolling back last transaction +====== Pull full.hg into empty again (using --cwd) pulling from ../full.hg requesting all changes adding changesets @@ -39,6 +48,25 @@ adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) +====== Pull full.hg into test (using -R) +pulling from full.hg +searching for changes +no changes found +====== Pull full.hg into empty (using -R) +pulling from full.hg +searching for changes +no changes found +====== Rollback empty +rolling back last transaction +====== Pull full.hg into empty again (using -R) +pulling from full.hg +requesting all changes +adding changesets +adding manifests +adding file changes +added 9 changesets with 7 changes to 4 files (+1 heads) +(run 'hg heads' to see heads, 'hg merge' to merge) +====== Log -R full.hg in fresh empty changeset: 8:836ac62537ab tag: tip parent: 3:ac69c658229d @@ -87,6 +115,7 @@ date: Mon Jan 12 13:46:40 1970 +0000 summary: 0.0 +====== Pull ../full.hg into empty (with hook) changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:../full.hg pulling from bundle://../full.hg requesting all changes @@ -95,6 +124,7 @@ adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) +====== Create partial clones requesting all changes adding changesets adding manifests @@ -102,6 +132,7 @@ added 4 changesets with 4 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +====== Log -R full.hg in partial changeset: 8:836ac62537ab tag: tip parent: 3:ac69c658229d @@ -150,6 +181,7 @@ date: Mon Jan 12 13:46:40 1970 +0000 summary: 0.0 +====== Incoming full.hg in partial comparing with bundle://../full.hg searching for changes changeset: 4:5f4f3ceb285e @@ -180,6 +212,7 @@ date: Mon Jan 12 13:46:40 1970 +0000 summary: 0.3m +====== Outgoing -R full.hg vs partial2 in partial comparing with ../partial2 searching for changes changeset: 4:5f4f3ceb285e @@ -210,7 +243,11 @@ date: Mon Jan 12 13:46:40 1970 +0000 summary: 0.3m +====== Outgoing -R does-not-exist.hg vs partial2 in partial abort: No such file or directory: ../does-not-exist.hg 
+====== Unbundle incremental bundles into fresh empty in one go +1 changesets found +1 changesets found adding changesets adding manifests adding file changes @@ -220,8 +257,10 @@ adding file changes added 1 changesets with 1 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +====== test for 540d1059c802 1 files updated, 0 files merged, 0 files removed, 0 files unresolved searching for changes +1 changesets found comparing with ../bundle.hg searching for changes changeset: 2:ed1b79f46b9a
--- a/tests/test-command-template Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-command-template Fri Feb 08 11:55:17 2008 +0100 @@ -89,8 +89,8 @@ cat changelog echo "# keys work" -for key in author branches date desc file_adds file_dels files \ - manifest node parents rev tags; do +for key in author branches date desc file_adds file_dels file_mods \ + files manifest node parents rev tags; do for mode in '' --verbose --debug; do hg log $mode --template "$key$mode: {$key}\n" done
--- a/tests/test-command-template.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-command-template.out Fri Feb 08 11:55:17 2008 +0100 @@ -260,22 +260,22 @@ other 3 desc--debug: line 1 line 2 -file_adds: -file_adds: +file_adds: second file_adds: -file_adds: -file_adds: -file_adds: +file_adds: d file_adds: file_adds: +file_adds: c +file_adds: b +file_adds: a +file_adds--verbose: second file_adds--verbose: -file_adds--verbose: +file_adds--verbose: d file_adds--verbose: file_adds--verbose: -file_adds--verbose: -file_adds--verbose: -file_adds--verbose: -file_adds--verbose: +file_adds--verbose: c +file_adds--verbose: b +file_adds--verbose: a file_adds--debug: second file_adds--debug: file_adds--debug: d @@ -308,6 +308,30 @@ file_dels--debug: file_dels--debug: file_dels--debug: +file_mods: +file_mods: +file_mods: +file_mods: +file_mods: c +file_mods: +file_mods: +file_mods: +file_mods--verbose: +file_mods--verbose: +file_mods--verbose: +file_mods--verbose: +file_mods--verbose: c +file_mods--verbose: +file_mods--verbose: +file_mods--verbose: +file_mods--debug: +file_mods--debug: +file_mods--debug: +file_mods--debug: +file_mods--debug: c +file_mods--debug: +file_mods--debug: +file_mods--debug: files: second files: files: d @@ -324,30 +348,30 @@ files--verbose: c files--verbose: b files--verbose: a -files--debug: +files--debug: second files--debug: -files--debug: +files--debug: d files--debug: files--debug: c -files--debug: -files--debug: -files--debug: -manifest: -manifest: -manifest: -manifest: -manifest: -manifest: -manifest: -manifest: -manifest--verbose: -manifest--verbose: -manifest--verbose: -manifest--verbose: -manifest--verbose: -manifest--verbose: -manifest--verbose: -manifest--verbose: +files--debug: c +files--debug: b +files--debug: a +manifest: 7:f2dbc354b94e +manifest: 6:91015e9dbdd7 +manifest: 5:4dc3def4f9b4 +manifest: 4:90ae8dda64e1 +manifest: 3:cb5a1327723b +manifest: 2:6e0e82995c35 +manifest: 1:4e8d705b1e53 +manifest: 0:a0c8bcbbb45c +manifest--verbose: 7:f2dbc354b94e +manifest--verbose: 6:91015e9dbdd7 +manifest--verbose: 5:4dc3def4f9b4 +manifest--verbose: 4:90ae8dda64e1 +manifest--verbose: 3:cb5a1327723b +manifest--verbose: 2:6e0e82995c35 +manifest--verbose: 1:4e8d705b1e53 +manifest--verbose: 0:a0c8bcbbb45c manifest--debug: 7:f2dbc354b94e5ec0b4f10680ee0cee816101d0bf manifest--debug: 6:91015e9dbdd76a6791085d12b0a0ec7fcd22ffbf manifest--debug: 5:4dc3def4f9b4c6e8de820f6ee74737f91e96a216
--- a/tests/test-commit Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-commit Fri Feb 08 11:55:17 2008 +0100
@@ -10,6 +10,7 @@
cd test
echo foo > foo
hg add foo
+HGEDITOR=true hg commit -m ""
hg commit -d '0 0' -m commit-1
echo foo >> foo
hg commit -d '1 4444444' -m commit-3
--- a/tests/test-commit.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-commit.out Fri Feb 08 11:55:17 2008 +0100
@@ -1,6 +1,9 @@
% commit date test
transaction abort!
rollback completed
+abort: empty commit message
+transaction abort!
+rollback completed
abort: impossible time zone offset: 4444444
transaction abort!
rollback completed
--- a/tests/test-conflict.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-conflict.out Fri Feb 08 11:55:17 2008 +0100
@@ -1,15 +1,16 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merging a
warning: conflicts during merge.
-merging a
merging a failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
There are unresolved merges, you can redo the full merge using:
  hg update -C 2
  hg merge 1
e7fe8eb3e180+0d24b7662d3e+ tip
-<<<<<<< my
+<<<<<<< local
something else
=======
something
>>>>>>> other
M a
+? a.orig
--- a/tests/test-convert Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert Fri Feb 08 11:55:17 2008 +0100 @@ -1,7 +1,11 @@ #!/bin/sh -echo "[extensions]" >> $HGRCPATH -echo "convert=" >> $HGRCPATH +cat >> $HGRCPATH <<EOF +[extensions] +convert= +[convert] +hg.saverev=False +EOF hg help convert @@ -35,3 +39,8 @@ echo % should succeed chmod 700 bogusdir hg convert a bogusdir + +echo % test pre and post conversion actions +echo 'include b' > filemap +hg convert --debug --filemap filemap a partialb | \ + grep 'run hg'
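The hgrc fragment written by the test above both enables the convert extension and pins convert.hg.saverev to False so the expected output stays stable. The same settings can be applied globally or to a single run; a minimal sketch, with repository names chosen only for illustration (the filemap usage mirrors the test's own):

cat >> ~/.hgrc <<EOF
[extensions]
convert =
[convert]
hg.saverev = False
EOF
hg convert sourcerepo destrepo
# or, without touching any configuration file:
hg convert --config convert.hg.saverev=False sourcerepo destrepo
echo 'include b' > filemap
hg convert --filemap filemap sourcerepo partialrepo   # convert only what the filemap includes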
--- a/tests/test-convert-cvs Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert-cvs Fri Feb 08 11:55:17 2008 +0100 @@ -2,6 +2,11 @@ "$TESTDIR/hghave" cvs cvsps || exit 80 +cvscall() +{ + cvs -f $@ +} + echo "[extensions]" >> $HGRCPATH echo "convert = " >> $HGRCPATH @@ -9,9 +14,10 @@ mkdir cvsrepo cd cvsrepo export CVSROOT=`pwd` +export CVS_OPTIONS=-f cd .. -cvs -q -d "$CVSROOT" init +cvscall -q -d "$CVSROOT" init echo % create source directory mkdir src-temp @@ -23,16 +29,16 @@ cd .. echo % import source directory -cvs -q import -m import src INITIAL start +cvscall -q import -m import src INITIAL start cd .. echo % checkout source directory -cvs -q checkout src +cvscall -q checkout src echo % commit a new revision changing b/c cd src echo c >> b/c -cvs -q commit -mci0 . | grep '<--' |\ +cvscall -q commit -mci0 . | grep '<--' |\ sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g' cd .. @@ -51,7 +57,7 @@ cd src echo a >> a echo c >> b/c -cvs -q commit -mci1 . | grep '<--' |\ +cvscall -q commit -mci1 . | grep '<--' |\ sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g' cd ..
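The cvscall wrapper introduced above runs every cvs command with -f (and exports CVS_OPTIONS=-f) so a user's ~/.cvsrc cannot change the output the test depends on. A sketch of the same pattern for any CVS-driven script; the function name is the one used by the test, everything else is illustrative:

cvscall()
{
    # -f keeps cvs from reading ~/.cvsrc, so local aliases and defaults
    # cannot alter the command output a script relies on
    cvs -f "$@"
}

mkdir cvsrepo
export CVSROOT=`pwd`/cvsrepo
export CVS_OPTIONS=-f
cvscall -q -d "$CVSROOT" init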
--- a/tests/test-convert-cvs.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert-cvs.out Fri Feb 08 11:55:17 2008 +0100 @@ -44,7 +44,6 @@ checking in src/a,v checking in src/b/c,v % convert again -destination src-hg is a Mercurial repository connecting to cvsrepo scanning source... sorting... @@ -56,7 +55,6 @@ c c % convert again with --filemap -destination src-filemap is a Mercurial repository connecting to cvsrepo scanning source... sorting... @@ -75,7 +73,6 @@ T b/c checking in src/b/c,v % convert again -destination src-hg is a Mercurial repository connecting to cvsrepo scanning source... sorting... @@ -86,7 +83,6 @@ c d % convert again with --filemap -destination src-filemap is a Mercurial repository connecting to cvsrepo scanning source... sorting...
--- a/tests/test-convert-darcs Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert-darcs Fri Feb 08 11:55:17 2008 +0100 @@ -13,7 +13,7 @@ mkdir dummy mkdir dummy/_darcs if hg convert dummy 2>&1 | grep ElementTree > /dev/null; then - echo 'hghave: missing feature: elementtree module' + echo 'skipped: missing feature: elementtree module' exit 80 fi
--- a/tests/test-convert-hg-sink Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert-hg-sink Fri Feb 08 11:55:17 2008 +0100 @@ -1,7 +1,11 @@ #!/bin/sh -echo "[extensions]" >> $HGRCPATH -echo "hgext.convert=" >> $HGRCPATH +cat >> $HGRCPATH <<EOF +[extensions] +convert= +[convert] +hg.saverev=False +EOF hg init orig cd orig
--- a/tests/test-convert-hg-sink.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert-hg-sink.out Fri Feb 08 11:55:17 2008 +0100 @@ -34,10 +34,9 @@ % dirstate should be empty: 3 files updated, 0 files merged, 0 files removed, 0 files unresolved % put something in the dirstate: -a 0 -1 unset baz +a 0 -1 unset baz copy: bar -> baz % add a new revision in the original repo -destination new is a Mercurial repository scanning source... sorting... converting...
--- a/tests/test-convert-hg-source Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert-hg-source Fri Feb 08 11:55:17 2008 +0100 @@ -1,7 +1,11 @@ #!/bin/sh -echo "[extensions]" >> $HGRCPATH -echo "hgext.convert=" >> $HGRCPATH +cat >> $HGRCPATH <<EOF +[extensions] +convert= +[convert] +hg.saverev=False +EOF hg init orig cd orig @@ -25,6 +29,9 @@ hg merge 2 hg ci -m 'merge remote copy' -d '4 0' +chmod +x baz +hg ci -m 'mark baz executable' -d '5 0' + cd .. hg convert --datesort orig new 2>&1 | grep -v 'subversion python bindings could not be loaded' cd new
--- a/tests/test-convert-hg-source.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert-hg-source.out Fri Feb 08 11:55:17 2008 +0100 @@ -9,11 +9,12 @@ scanning source... sorting... converting... -4 add foo bar -3 change foo -2 make bar and baz copies of foo -1 merge local copy -0 merge remote copy +5 add foo bar +4 change foo +3 make bar and baz copies of foo +2 merge local copy +1 merge remote copy +0 mark baz executable comparing with ../orig searching for changes no changes found
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-hg-svn Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,73 @@ +#!/bin/sh + +"$TESTDIR/hghave" svn svn-bindings || exit 80 + +fix_path() +{ + tr '\\' / +} + +echo "[extensions]" >> $HGRCPATH +echo "convert = " >> $HGRCPATH + +svnpath=`pwd | fix_path`/svn-repo +svnadmin create $svnpath + +cat > $svnpath/hooks/pre-revprop-change <<'EOF' +#!/bin/sh + +REPOS="$1" +REV="$2" +USER="$3" +PROPNAME="$4" +ACTION="$5" + +if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi + +echo "Changing prohibited revision property" >&2 +exit 1 +EOF +chmod +x $svnpath/hooks/pre-revprop-change + +# SVN wants all paths to start with a slash. Unfortunately, +# Windows ones don't. Handle that. +svnurl=$svnpath +expr $svnurl : "\/" > /dev/null +if [ $? -ne 0 ]; then + svnurl='/'$svnurl +fi +svnurl=file://$svnurl +svn co $svnurl $svnpath-wc + +cd $svnpath-wc +echo a > a +svn add a +svn ci -m'added a' a + +cd .. + +echo % initial roundtrip +hg convert -s svn -d hg $svnpath-wc $svnpath-hg | grep -v initializing +hg convert -s hg -d svn $svnpath-hg $svnpath-wc + +echo % second roundtrip should do nothing +hg convert -s svn -d hg $svnpath-wc $svnpath-hg +hg convert -s hg -d svn $svnpath-hg $svnpath-wc + +echo % new hg rev + +hg clone $svnpath-hg $svnpath-work +echo b > $svnpath-work/b +hg --cwd $svnpath-work add b +hg --cwd $svnpath-work ci -mb + +echo % echo hg to svn +hg --cwd $svnpath-hg pull -q $svnpath-work +hg convert -s hg -d svn $svnpath-hg $svnpath-wc + +echo % svn back to hg should do nothing +hg convert -s svn -d hg $svnpath-wc $svnpath-hg +echo % hg back to svn should do nothing +hg convert -s hg -d svn $svnpath-hg $svnpath-wc
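The new test round-trips history between Subversion and Mercurial with paired convert invocations, and it installs a pre-revprop-change hook because the Subversion sink records hg:convert-branch and hg:convert-rev metadata (and may adjust svn:log) as revision properties, which Subversion rejects by default. Stripped of the test scaffolding, the core of the round trip is the following sketch; the paths are placeholders:

hg convert -s svn -d hg /path/to/svn-wc converted-hg     # Subversion working copy -> Mercurial
hg convert -s hg -d svn converted-hg /path/to/svn-wc     # Mercurial -> back into the same Subversion repository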
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-hg-svn.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,35 @@ +Checked out revision 0. +A a +Adding a +Transmitting file data . +Committed revision 1. +% initial roundtrip +scanning source... +sorting... +converting... +0 added a +scanning source... +sorting... +converting... +% second roundtrip should do nothing +scanning source... +sorting... +converting... +scanning source... +sorting... +converting... +% new hg rev +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +% echo hg to svn +scanning source... +sorting... +converting... +0 b +% svn back to hg should do nothing +scanning source... +sorting... +converting... +% hg back to svn should do nothing +scanning source... +sorting... +converting...
--- a/tests/test-convert-svn Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,118 +0,0 @@ -#!/bin/sh - -"$TESTDIR/hghave" svn svn-bindings || exit 80 - -fix_path() -{ - tr '\\' / -} - -echo "[extensions]" >> $HGRCPATH -echo "convert = " >> $HGRCPATH - -svnadmin create svn-repo - -echo % initial svn import -mkdir t -cd t -echo a > a -cd .. - -svnpath=`pwd | fix_path` -# SVN wants all paths to start with a slash. Unfortunately, -# Windows ones don't. Handle that. -expr $svnpath : "\/" > /dev/null -if [ $? -ne 0 ]; then - svnpath='/'$svnpath -fi - -svnurl=file://$svnpath/svn-repo/trunk -svn import -m init t $svnurl | fix_path - -echo % update svn repository -svn co $svnurl t2 | fix_path -cd t2 -echo b >> a -echo b > b -svn add b -svn ci -m changea -cd .. - -echo % convert to hg once -hg convert $svnurl - -echo % update svn repository again -cd t2 -echo c >> a -echo c >> b -svn ci -m changeb -cd .. - -echo % test incremental conversion -hg convert $svnurl - -echo % test filemap -echo 'include b' > filemap -hg convert --filemap filemap $svnurl fmap -echo '[extensions]' >> $HGRCPATH -echo 'hgext.graphlog =' >> $HGRCPATH -hg glog -R fmap --template '#rev# #desc|firstline# files: #files#\n' - -######################################## - -echo "# now tests that it works with trunk/branches/tags layout" -echo -echo % initial svn import -mkdir projA -cd projA -mkdir trunk -mkdir branches -mkdir tags -cd .. - -svnurl=file://$svnpath/svn-repo/projA -svn import -m "init projA" projA $svnurl | fix_path - - -echo % update svn repository -svn co $svnurl/trunk A | fix_path -cd A -echo hello > letter.txt -svn add letter.txt -svn ci -m hello - -echo world >> letter.txt -svn ci -m world - -svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1 - -echo 'nice day today!' >> letter.txt -svn ci -m "nice day" -cd .. - -echo % convert to hg once -hg convert $svnurl A-hg - -echo % update svn repository again -cd A -echo "see second letter" >> letter.txt -# Put it in a subdirectory to test duplicate file records -# from svn source (issue 714) -mkdir todo -echo "nice to meet you" > todo/letter2.txt -svn add todo -svn ci -m "second letter" - -svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2 - -echo "blah-blah-blah" >> todo/letter2.txt -svn ci -m "work in progress" -cd .. - -echo % test incremental conversion -hg convert $svnurl A-hg - -cd A-hg -hg glog --template '#rev# #desc|firstline# files: #files#\n' -hg tags -q -cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-branches Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,89 @@ +#!/bin/sh + +"$TESTDIR/hghave" svn svn-bindings || exit 80 + +fix_path() +{ + tr '\\' / +} + +echo "[extensions]" >> $HGRCPATH +echo "convert = " >> $HGRCPATH +echo "hgext.graphlog =" >> $HGRCPATH + +svnadmin create svn-repo + +svnpath=`pwd | fix_path` +# SVN wants all paths to start with a slash. Unfortunately, +# Windows ones don't. Handle that. +expr $svnpath : "\/" > /dev/null +if [ $? -ne 0 ]; then + svnpath='/'$svnpath +fi + +echo % initial svn import +mkdir projA +cd projA +mkdir trunk +mkdir branches +mkdir tags +cd .. + +svnurl=file://$svnpath/svn-repo/projA +svn import -m "init projA" projA $svnurl | fix_path + +echo % update svn repository +svn co $svnurl A | fix_path +cd A +echo hello > trunk/letter.txt +echo hey > trunk/letter2.txt +echo ho > trunk/letter3.txt +svn add trunk/letter.txt trunk/letter2.txt trunk/letter3.txt +svn ci -m hello + +echo % branch to old letters +svn copy trunk branches/old +svn rm branches/old/letter3.txt +svn ci -m "branch trunk, remove letter3" +svn up + +echo % update trunk +echo "what can I say ?" >> trunk/letter.txt +svn ci -m "change letter" + +echo % update old branch +echo "what's up ?" >> branches/old/letter2.txt +svn ci -m "change letter2" + +echo % create a cross-branch revision +svn move -m "move letter2" trunk/letter2.txt \ + branches/old/letter3.txt +echo "I am fine" >> branches/old/letter3.txt +svn ci -m "move and update letter3.txt" + +echo % update old branch again +echo "bye" >> branches/old/letter2.txt +svn ci -m "change letter2 again" + +echo % update trunk again +echo "how are you ?" >> trunk/letter.txt +svn ci -m "last change to letter" +cd .. + +echo % convert trunk and branches +hg convert --datesort $svnurl A-hg + +echo % branch again from a converted revision +cd A +svn copy -r 1 $svnurl/trunk branches/old2 +svn ci -m "branch trunk@1 into old2" +cd .. + +echo % convert again +hg convert --datesort $svnurl A-hg + +cd A-hg +hg glog --template '#rev# #desc|firstline# files: #files#\n' +hg branches | sed 's/:.*/:/' +hg tags -q +cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-branches.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,101 @@ +% initial svn import +Adding projA/trunk +Adding projA/branches +Adding projA/tags + +Committed revision 1. +% update svn repository +A A/trunk +A A/branches +A A/tags +Checked out revision 1. +A trunk/letter.txt +A trunk/letter2.txt +A trunk/letter3.txt +Adding trunk/letter.txt +Adding trunk/letter2.txt +Adding trunk/letter3.txt +Transmitting file data ... +Committed revision 2. +% branch to old letters +A branches/old +D branches/old/letter3.txt +Adding branches/old +Adding branches/old/letter.txt +Adding branches/old/letter2.txt +Deleting branches/old/letter3.txt + +Committed revision 3. +At revision 3. +% update trunk +Sending trunk/letter.txt +Transmitting file data . +Committed revision 4. +% update old branch +Sending branches/old/letter2.txt +Transmitting file data . +Committed revision 5. +% create a cross-branch revision +A branches/old/letter3.txt +D trunk/letter2.txt +Adding branches/old/letter3.txt +Deleting trunk/letter2.txt +Transmitting file data . +Committed revision 6. +% update old branch again +Sending branches/old/letter2.txt +Transmitting file data . +Committed revision 7. +% update trunk again +Sending trunk/letter.txt +Transmitting file data . +Committed revision 8. +% convert trunk and branches +initializing destination A-hg repository +scanning source... +sorting... +converting... +8 init projA +7 hello +6 branch trunk, remove letter3 +5 change letter +4 change letter2 +3 move and update letter3.txt +2 move and update letter3.txt +1 change letter2 again +0 last change to letter +% branch again from a converted revision +Checked out revision 1. +A branches/old2 +Adding branches/old2 + +Committed revision 9. +% convert again +scanning source... +sorting... +converting... +0 branch trunk@1 into old2 +o 9 branch trunk@1 into old2 files: +| +| o 8 last change to letter files: letter.txt +| | +| | o 7 change letter2 again files: letter2.txt +| | | +| o | 6 move and update letter3.txt files: letter2.txt +| | | +| | o 5 move and update letter3.txt files: letter3.txt +| | | +| | o 4 change letter2 files: letter2.txt +| | | +| o | 3 change letter files: letter.txt +| | | ++---o 2 branch trunk, remove letter3 files: letter.txt letter2.txt +| | +| o 1 hello files: letter.txt letter2.txt letter3.txt +|/ +o 0 init projA files: + +old2 9: +default 8: +old 7: +tip
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-move Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,65 @@ +#!/bin/sh + +"$TESTDIR/hghave" svn svn-bindings || exit 80 + +fix_path() +{ + tr '\\' / +} + +echo "[extensions]" >> $HGRCPATH +echo "convert = " >> $HGRCPATH +echo "hgext.graphlog =" >> $HGRCPATH + +svnadmin create svn-repo + +svnpath=`pwd | fix_path` +# SVN wants all paths to start with a slash. Unfortunately, +# Windows ones don't. Handle that. +expr $svnpath : "\/" > /dev/null +if [ $? -ne 0 ]; then + svnpath='/'$svnpath +fi + +echo % initial svn import +mkdir projA +cd projA +mkdir trunk +echo a > trunk/a +mkdir trunk/d1 +echo b > trunk/d1/b +echo c > trunk/d1/c +cd .. + +svnurl=file://$svnpath/svn-repo/projA +svn import -m "init projA" projA $svnurl | fix_path + +# Build a module renaming chain which used to confuse the converter. +echo % update svn repository +svn co $svnurl A | fix_path +cd A +echo a >> trunk/a +echo c >> trunk/d1/c +svn ci -m commitbeforemove +svn mv $svnurl/trunk $svnurl/subproject -m movedtrunk +svn up +mkdir subproject/trunk +svn add subproject/trunk +svn ci -m createtrunk +mkdir subproject/branches +svn add subproject/branches +svn ci -m createbranches +svn mv $svnurl/subproject/d1 $svnurl/subproject/trunk/d1 -m moved1 +svn up +echo b >> subproject/trunk/d1/b +svn ci -m changeb +svn mv $svnurl/subproject/trunk/d1 $svnurl/subproject/branches/d1 -m moved1again +cd .. + +echo % convert trunk and branches +hg convert --datesort $svnurl/subproject A-hg + +cd A-hg +hg glog --template '#rev# #desc|firstline# files: #files#\n' +hg branches | sed 's/:.*/:/' +cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-move.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,76 @@ +% initial svn import +Adding projA/trunk +Adding projA/trunk/a +Adding projA/trunk/d1 +Adding projA/trunk/d1/b +Adding projA/trunk/d1/c + +Committed revision 1. +% update svn repository +A A/trunk +A A/trunk/a +A A/trunk/d1 +A A/trunk/d1/b +A A/trunk/d1/c +Checked out revision 1. +Sending trunk/a +Sending trunk/d1/c +Transmitting file data .. +Committed revision 2. + +Committed revision 3. +D trunk +A subproject +A subproject/a +A subproject/d1 +A subproject/d1/b +A subproject/d1/c +Updated to revision 3. +A subproject/trunk +Adding subproject/trunk + +Committed revision 4. +A subproject/branches +Adding subproject/branches + +Committed revision 5. + +Committed revision 6. +A subproject/trunk/d1 +A subproject/trunk/d1/b +A subproject/trunk/d1/c +D subproject/d1 +Updated to revision 6. +Sending subproject/trunk/d1/b +Transmitting file data . +Committed revision 7. + +Committed revision 8. +% convert trunk and branches +initializing destination A-hg repository +scanning source... +sorting... +converting... +6 createtrunk +5 moved1 +4 moved1 +3 changeb +2 changeb +1 moved1again +0 moved1again +o 6 moved1again files: d1/b d1/c +| +| o 5 moved1again files: +| | +o | 4 changeb files: d1/b +| | +| o 3 changeb files: b +| | +o | 2 moved1 files: d1/b d1/c +| | +| o 1 moved1 files: b c +| +o 0 createtrunk files: + +default 6: +d1 5:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-sink Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,135 @@ +#!/bin/sh + +"$TESTDIR/hghave" svn svn-bindings || exit 80 + +fixpath() +{ + tr '\\' / +} + +svnupanddisplay() +{ + ( + cd $1; + svn up; + svn st -v | fixpath + limit='' + if [ $2 -gt 0 ]; then + limit="--limit=$2" + fi + svn log --xml -v $limit | fixpath | sed 's,<date>.*,<date/>,' + ) +} + +echo "[extensions]" >> $HGRCPATH +echo "convert = " >> $HGRCPATH + +hg init a + +echo a > a/a +mkdir -p a/d1/d2 +echo b > a/d1/d2/b +echo % add +hg --cwd a ci -d '0 0' -A -m 'add a file' + +echo a >> a/a +echo % modify +hg --cwd a ci -d '1 0' -m 'modify a file' +hg --cwd a tip -q + +hg convert -d svn a +svnupanddisplay a-hg-wc 2 +ls a a-hg-wc +cmp a/a a-hg-wc/a && echo same || echo different + +hg --cwd a mv a b +echo % rename +hg --cwd a ci -d '2 0' -m 'rename a file' +hg --cwd a tip -q + +hg convert -d svn a +svnupanddisplay a-hg-wc 1 +ls a a-hg-wc + +hg --cwd a cp b c +echo % copy +hg --cwd a ci -d '3 0' -m 'copy a file' +hg --cwd a tip -q + +hg convert -d svn a +svnupanddisplay a-hg-wc 1 +ls a a-hg-wc + +hg --cwd a rm b +echo % remove +hg --cwd a ci -d '4 0' -m 'remove a file' +hg --cwd a tip -q + +hg convert -d svn a +svnupanddisplay a-hg-wc 1 +ls a a-hg-wc + +chmod +x a/c +echo % executable +hg --cwd a ci -d '5 0' -m 'make a file executable' +hg --cwd a tip -q + +hg convert -d svn a +svnupanddisplay a-hg-wc 1 +test -x a-hg-wc/c && echo executable || echo not executable + +echo % executable in new directory + +rm -rf a a-hg a-hg-wc +hg init a + +mkdir a/d1 +echo a > a/d1/a +chmod +x a/d1/a +hg --cwd a ci -d '0 0' -A -m 'add executable file in new directory' + +hg convert -d svn a +svnupanddisplay a-hg-wc 1 +test -x a-hg-wc/d1/a && echo executable || echo not executable + +echo % copy to new directory + +mkdir a/d2 +hg --cwd a cp d1/a d2/a +hg --cwd a ci -d '1 0' -A -m 'copy file to new directory' + +hg convert -d svn a +svnupanddisplay a-hg-wc 1 + +echo % branchy history + +hg init b +echo base > b/b +hg --cwd b ci -d '0 0' -Ambase + +echo left-1 >> b/b +echo left-1 > b/left-1 +hg --cwd b ci -d '1 0' -Amleft-1 + +echo left-2 >> b/b +echo left-2 > b/left-2 +hg --cwd b ci -d '2 0' -Amleft-2 + +hg --cwd b up 0 + +echo right-1 >> b/b +echo right-1 > b/right-1 +hg --cwd b ci -d '3 0' -Amright-1 + +echo right-2 >> b/b +echo right-2 > b/right-2 +hg --cwd b ci -d '4 0' -Amright-2 + +hg --cwd b up -C 2 +hg --cwd b merge +hg --cwd b revert -r 2 b +hg --cwd b ci -d '5 0' -m 'merge' + +hg convert -d svn b +echo % expect 4 changes +svnupanddisplay b-hg-wc 0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-sink.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,338 @@ +% add +adding a +adding d1/d2/b +% modify +1:e0e2b8a9156b +assuming destination a-hg +initializing svn repo 'a-hg' +initializing svn wc 'a-hg-wc' +scanning source... +sorting... +converting... +1 add a file +0 modify a file +At revision 2. + 2 2 test . + 2 2 test a + 2 1 test d1 + 2 1 test d1/d2 + 2 1 test d1/d2/b +<?xml version="1.0"?> +<log> +<logentry + revision="2"> +<author>test</author> +<date/> +<paths> +<path + action="M">/a</path> +</paths> +<msg>modify a file</msg> +</logentry> +<logentry + revision="1"> +<author>test</author> +<date/> +<paths> +<path + action="A">/a</path> +<path + action="A">/d1</path> +<path + action="A">/d1/d2</path> +<path + action="A">/d1/d2/b</path> +</paths> +<msg>add a file</msg> +</logentry> +</log> +a: +a +d1 + +a-hg-wc: +a +d1 +same +% rename +2:7009fc4efb34 +assuming destination a-hg +initializing svn wc 'a-hg-wc' +scanning source... +sorting... +converting... +0 rename a file +At revision 3. + 3 3 test . + 3 3 test b + 3 1 test d1 + 3 1 test d1/d2 + 3 1 test d1/d2/b +<?xml version="1.0"?> +<log> +<logentry + revision="3"> +<author>test</author> +<date/> +<paths> +<path + action="D">/a</path> +<path + copyfrom-path="/a" + copyfrom-rev="2" + action="A">/b</path> +</paths> +<msg>rename a file</msg> +</logentry> +</log> +a: +b +d1 + +a-hg-wc: +b +d1 +% copy +3:56c519973ce6 +assuming destination a-hg +initializing svn wc 'a-hg-wc' +scanning source... +sorting... +converting... +0 copy a file +At revision 4. + 4 4 test . + 4 3 test b + 4 4 test c + 4 1 test d1 + 4 1 test d1/d2 + 4 1 test d1/d2/b +<?xml version="1.0"?> +<log> +<logentry + revision="4"> +<author>test</author> +<date/> +<paths> +<path + copyfrom-path="/b" + copyfrom-rev="3" + action="A">/c</path> +</paths> +<msg>copy a file</msg> +</logentry> +</log> +a: +b +c +d1 + +a-hg-wc: +b +c +d1 +% remove +4:ed4dc9a6f585 +assuming destination a-hg +initializing svn wc 'a-hg-wc' +scanning source... +sorting... +converting... +0 remove a file +At revision 5. + 5 5 test . + 5 4 test c + 5 1 test d1 + 5 1 test d1/d2 + 5 1 test d1/d2/b +<?xml version="1.0"?> +<log> +<logentry + revision="5"> +<author>test</author> +<date/> +<paths> +<path + action="D">/b</path> +</paths> +<msg>remove a file</msg> +</logentry> +</log> +a: +c +d1 + +a-hg-wc: +c +d1 +% executable +5:f205b3636d77 +assuming destination a-hg +initializing svn wc 'a-hg-wc' +scanning source... +sorting... +converting... +0 make a file executable +At revision 6. + 6 6 test . + 6 6 test c + 6 1 test d1 + 6 1 test d1/d2 + 6 1 test d1/d2/b +<?xml version="1.0"?> +<log> +<logentry + revision="6"> +<author>test</author> +<date/> +<paths> +<path + action="M">/c</path> +</paths> +<msg>make a file executable</msg> +</logentry> +</log> +executable +% executable in new directory +adding d1/a +assuming destination a-hg +initializing svn repo 'a-hg' +initializing svn wc 'a-hg-wc' +scanning source... +sorting... +converting... +0 add executable file in new directory +At revision 1. + 1 1 test . + 1 1 test d1 + 1 1 test d1/a +<?xml version="1.0"?> +<log> +<logentry + revision="1"> +<author>test</author> +<date/> +<paths> +<path + action="A">/d1</path> +<path + action="A">/d1/a</path> +</paths> +<msg>add executable file in new directory</msg> +</logentry> +</log> +executable +% copy to new directory +assuming destination a-hg +initializing svn wc 'a-hg-wc' +scanning source... +sorting... +converting... 
+0 copy file to new directory +At revision 2. + 2 2 test . + 2 1 test d1 + 2 1 test d1/a + 2 2 test d2 + 2 2 test d2/a +<?xml version="1.0"?> +<log> +<logentry + revision="2"> +<author>test</author> +<date/> +<paths> +<path + action="A">/d2</path> +<path + copyfrom-path="/d1/a" + copyfrom-rev="1" + action="A">/d2/a</path> +</paths> +<msg>copy file to new directory</msg> +</logentry> +</log> +% branchy history +adding b +adding left-1 +adding left-2 +1 files updated, 0 files merged, 2 files removed, 0 files unresolved +adding right-1 +adding right-2 +3 files updated, 0 files merged, 2 files removed, 0 files unresolved +merging b +warning: conflicts during merge. +merging b failed! +2 files updated, 0 files merged, 0 files removed, 1 files unresolved +There are unresolved merges, you can redo the full merge using: + hg update -C 2 + hg merge 4 +assuming destination b-hg +initializing svn repo 'b-hg' +initializing svn wc 'b-hg-wc' +scanning source... +sorting... +converting... +5 base +4 left-1 +3 left-2 +2 right-1 +1 right-2 +0 merge +% expect 4 changes +At revision 4. + 4 4 test . + 4 3 test b + 4 2 test left-1 + 4 3 test left-2 + 4 4 test right-1 + 4 4 test right-2 +<?xml version="1.0"?> +<log> +<logentry + revision="4"> +<author>test</author> +<date/> +<paths> +<path + action="A">/right-1</path> +<path + action="A">/right-2</path> +</paths> +<msg>merge</msg> +</logentry> +<logentry + revision="3"> +<author>test</author> +<date/> +<paths> +<path + action="M">/b</path> +<path + action="A">/left-2</path> +</paths> +<msg>left-2</msg> +</logentry> +<logentry + revision="2"> +<author>test</author> +<date/> +<paths> +<path + action="M">/b</path> +<path + action="A">/left-1</path> +</paths> +<msg>left-1</msg> +</logentry> +<logentry + revision="1"> +<author>test</author> +<date/> +<paths> +<path + action="A">/b</path> +</paths> +<msg>base</msg> +</logentry> +</log>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-source Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,180 @@ +#!/bin/sh + +"$TESTDIR/hghave" svn svn-bindings || exit 80 + +fix_path() +{ + tr '\\' / +} + +echo "[extensions]" >> $HGRCPATH +echo "convert = " >> $HGRCPATH + +svnadmin create svn-repo + +echo % initial svn import +mkdir t +cd t +echo a > a +cd .. + +svnpath=`pwd | fix_path` +# SVN wants all paths to start with a slash. Unfortunately, +# Windows ones don't. Handle that. +expr $svnpath : "\/" > /dev/null +if [ $? -ne 0 ]; then + svnpath='/'$svnpath +fi + +svnurl=file://$svnpath/svn-repo/trunk/test +svn import -m init t $svnurl | fix_path + +echo % update svn repository +svn co $svnurl t2 | fix_path +cd t2 +echo b >> a +echo b > b +svn add b +svn ci -m changea +cd .. + +echo % convert to hg once +hg convert $svnurl + +echo % update svn repository again +cd t2 +echo c >> a +echo c >> b +svn ci -m changeb +cd .. + +echo % test incremental conversion +hg convert -v $svnurl | sed 's/source:.*/source:/' + +echo % test filemap +echo 'include b' > filemap +hg convert --filemap filemap $svnurl fmap +echo '[extensions]' >> $HGRCPATH +echo 'hgext.graphlog =' >> $HGRCPATH +hg glog -R fmap --template '#rev# #desc|firstline# files: #files#\n' + +echo % test stop revision +hg convert --rev 1 $svnurl stoprev +# Check convert_revision extra-records. +# This is also the only place testing more than one extra field +# in a revision. +hg --cwd stoprev tip --debug | grep extra | sed 's/=.*/=/' + +######################################## + +echo "# now tests that it works with trunk/branches/tags layout" +echo +echo % initial svn import +mkdir projA +cd projA +mkdir trunk +mkdir branches +mkdir tags +cd .. + +svnurl=file://$svnpath/svn-repo/projA +svn import -m "init projA" projA $svnurl | fix_path + + +echo % update svn repository +svn co $svnurl/trunk A | fix_path +cd A +echo hello > letter.txt +svn add letter.txt +svn ci -m hello + +echo world >> letter.txt +svn ci -m world + +svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1 + +echo 'nice day today!' >> letter.txt +svn ci -m "nice day" +cd .. + +echo % convert to hg once +hg convert $svnurl A-hg + +echo % update svn repository again +cd A +echo "see second letter" >> letter.txt +# Put it in a subdirectory to test duplicate file records +# from svn source (issue 714) +mkdir todo +echo "nice to meet you" > todo/letter2.txt +svn add todo +svn ci -m "second letter" + +svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2 + +echo "blah-blah-blah" >> todo/letter2.txt +svn ci -m "work in progress" +cd .. + +echo % test incremental conversion +hg convert $svnurl A-hg + +cd A-hg +hg glog --template '#rev# #desc|firstline# files: #files#\n' +hg tags -q +cd .. + +######################################## + +echo "# now tests that it works with trunk/tags layout, but no branches yet" +echo +echo % initial svn import +mkdir projB +cd projB +mkdir trunk +mkdir tags +cd .. + +svnurl=file://$svnpath/svn-repo/projB +svn import -m "init projB" projB $svnurl | fix_path + + +echo % update svn repository +svn co $svnurl/trunk B | fix_path +cd B +echo hello > letter.txt +svn add letter.txt +svn ci -m hello + +echo world >> letter.txt +svn ci -m world + +svn copy -m "tag v0.1" $svnurl/trunk $svnurl/tags/v0.1 + +echo 'nice day today!' >> letter.txt +svn ci -m "nice day" +cd .. 
+ +echo % convert to hg once +hg convert $svnurl B-hg + +echo % update svn repository again +cd B +echo "see second letter" >> letter.txt +echo "nice to meet you" > letter2.txt +svn add letter2.txt +svn ci -m "second letter" + +svn copy -m "tag v0.2" $svnurl/trunk $svnurl/tags/v0.2 + +echo "blah-blah-blah" >> letter2.txt +svn ci -m "work in progress" +cd .. + +echo % test incremental conversion +hg convert $svnurl B-hg + +cd B-hg +hg glog --template '#rev# #desc|firstline# files: #files#\n' +hg tags -q +cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-convert-svn-source.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,195 @@ +% initial svn import +Adding t/a + +Committed revision 1. +% update svn repository +A t2/a +Checked out revision 1. +A b +Sending a +Adding b +Transmitting file data .. +Committed revision 2. +% convert to hg once +assuming destination test-hg +initializing destination test-hg repository +scanning source... +sorting... +converting... +1 init +0 changea +% update svn repository again +Sending a +Sending b +Transmitting file data .. +Committed revision 3. +% test incremental conversion +assuming destination test-hg +scanning source... +fetching revision log for "/trunk/test" from 3 to 2 +sorting... +converting... +0 changeb +source: +a +b +no tags found at revision 3 +% test filemap +initializing destination fmap repository +scanning source... +sorting... +converting... +2 init +1 changea +0 changeb +o 1 changeb files: b +| +o 0 changea files: b + +% test stop revision +initializing destination stoprev repository +scanning source... +sorting... +converting... +0 init +extra: branch= +extra: convert_revision= +# now tests that it works with trunk/branches/tags layout + +% initial svn import +Adding projA/trunk +Adding projA/branches +Adding projA/tags + +Committed revision 4. +% update svn repository +Checked out revision 4. +A letter.txt +Adding letter.txt +Transmitting file data . +Committed revision 5. +Sending letter.txt +Transmitting file data . +Committed revision 6. + +Committed revision 7. +Sending letter.txt +Transmitting file data . +Committed revision 8. +% convert to hg once +initializing destination A-hg repository +scanning source... +sorting... +converting... +3 init projA +2 hello +1 world +0 nice day +updating tags +% update svn repository again +A todo +A todo/letter2.txt +Sending letter.txt +Adding todo +Adding todo/letter2.txt +Transmitting file data .. +Committed revision 9. + +Committed revision 10. +Sending todo/letter2.txt +Transmitting file data . +Committed revision 11. +% test incremental conversion +scanning source... +sorting... +converting... +1 second letter +0 work in progress +updating tags +o 7 update tags files: .hgtags +| +o 6 work in progress files: todo/letter2.txt +| +o 5 second letter files: letter.txt todo/letter2.txt +| +o 4 update tags files: .hgtags +| +o 3 nice day files: letter.txt +| +o 2 world files: letter.txt +| +o 1 hello files: letter.txt +| +o 0 init projA files: + +tip +v0.2 +v0.1 +# now tests that it works with trunk/tags layout, but no branches yet + +% initial svn import +Adding projB/trunk +Adding projB/tags + +Committed revision 12. +% update svn repository +Checked out revision 12. +A letter.txt +Adding letter.txt +Transmitting file data . +Committed revision 13. +Sending letter.txt +Transmitting file data . +Committed revision 14. + +Committed revision 15. +Sending letter.txt +Transmitting file data . +Committed revision 16. +% convert to hg once +initializing destination B-hg repository +scanning source... +sorting... +converting... +3 init projB +2 hello +1 world +0 nice day +updating tags +% update svn repository again +A letter2.txt +Sending letter.txt +Adding letter2.txt +Transmitting file data .. +Committed revision 17. + +Committed revision 18. +Sending letter2.txt +Transmitting file data . +Committed revision 19. +% test incremental conversion +scanning source... +sorting... +converting... 
+1 second letter +0 work in progress +updating tags +o 7 update tags files: .hgtags +| +o 6 work in progress files: letter2.txt +| +o 5 second letter files: letter.txt letter2.txt +| +o 4 update tags files: .hgtags +| +o 3 nice day files: letter.txt +| +o 2 world files: letter.txt +| +o 1 hello files: letter.txt +| +o 0 init projB files: + +tip +v0.2 +v0.1
--- a/tests/test-convert-svn.out Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,116 +0,0 @@ -% initial svn import -Adding t/a - -Committed revision 1. -% update svn repository -A t2/a -Checked out revision 1. -A b -Sending a -Adding b -Transmitting file data .. -Committed revision 2. -% convert to hg once -assuming destination trunk-hg -initializing destination trunk-hg repository -scanning source... -sorting... -converting... -1 init -0 changea -% update svn repository again -Sending a -Sending b -Transmitting file data .. -Committed revision 3. -% test incremental conversion -assuming destination trunk-hg -destination trunk-hg is a Mercurial repository -scanning source... -sorting... -converting... -0 changeb -% test filemap -initializing destination fmap repository -scanning source... -sorting... -converting... -2 init -1 changea -0 changeb -o 1 changeb files: b -| -o 0 changea files: b - -# now tests that it works with trunk/branches/tags layout - -% initial svn import -Adding projA/trunk -Adding projA/branches -Adding projA/tags - -Committed revision 4. -% update svn repository -Checked out revision 4. -A letter.txt -Adding letter.txt -Transmitting file data . -Committed revision 5. -Sending letter.txt -Transmitting file data . -Committed revision 6. - -Committed revision 7. -Sending letter.txt -Transmitting file data . -Committed revision 8. -% convert to hg once -initializing destination A-hg repository -scanning source... -sorting... -converting... -3 init projA -2 hello -1 world -0 nice day -updating tags -% update svn repository again -A todo -A todo/letter2.txt -Sending letter.txt -Adding todo -Adding todo/letter2.txt -Transmitting file data .. -Committed revision 9. - -Committed revision 10. -Sending todo/letter2.txt -Transmitting file data . -Committed revision 11. -% test incremental conversion -destination A-hg is a Mercurial repository -scanning source... -sorting... -converting... -1 second letter -0 work in progress -updating tags -o 7 update tags files: .hgtags -| -o 6 work in progress files: todo/letter2.txt -| -o 5 second letter files: letter.txt todo/letter2.txt -| -o 4 update tags files: .hgtags -| -o 3 nice day files: letter.txt -| -o 2 world files: letter.txt -| -o 1 hello files: letter.txt -| -o 0 init projA files: - -tip -v0.2 -v0.1
--- a/tests/test-convert.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-convert.out Fri Feb 08 11:55:17 2008 +0100 @@ -8,9 +8,11 @@ - Darcs - git - Subversion + - GNU Arch Accepted destination formats: - Mercurial + - Subversion (history on branches is not preserved) If no revision is given, all revisions will be converted. Otherwise, convert will only import up to the named revision (given in a format @@ -45,7 +47,7 @@ exclude path/to/file rename from/file to/file - + The 'include' directive causes a file, or all files under a directory, to be included in the destination repository, and the exclusion of all other files and dirs not explicitely included. @@ -54,6 +56,24 @@ subdirectory into the root of the repository, use '.' as the path to rename to. + Back end options: + + --config convert.hg.clonebranches=False (boolean) + hg target: XXX not documented + --config convert.hg.saverev=True (boolean) + hg source: allow target to preserve source revision ID + --config convert.hg.tagsbranch=default (branch name) + hg target: XXX not documented + --config convert.hg.usebranchnames=True (boolean) + hg target: preserve branch names + + --config convert.svn.branches=branches (directory name) + svn source: specify the directory containing branches + --config convert.svn.tags=tags (directory name) + svn source: specify the directory containing tags + --config convert.svn.trunk=trunk (directory name) + svn source: specify the name of the trunk branch + options: -A --authors username mapping filename @@ -93,3 +113,8 @@ 2 c 1 d 0 e +% test pre and post conversion actions +run hg source pre-conversion action +run hg sink pre-conversion action +run hg sink post-conversion action +run hg source post-conversion action
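The help text shown above now documents the convert back end options as --config switches. For instance, a Subversion project whose trunk, branches and tags live under non-standard directory names could be converted with something like the following sketch; the directory names and paths are hypothetical:

hg convert \
    --config convert.svn.trunk=mainline \
    --config convert.svn.branches=releases \
    --config convert.svn.tags=snapshots \
    file:///path/to/svn-repo/projA projA-hg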
--- a/tests/test-copy-move-merge.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-copy-move-merge.out Fri Feb 08 11:55:17 2008 +0100
@@ -14,11 +14,15 @@
a: remote moved to b -> m
copying a to b
copying a to c
+picked tool 'internal:merge' for a (binary False symlink False)
merging a and b
my a@fb3948d97f07+ other b@40da226db0f0 ancestor a@583c7b748052
+ premerge successful
removing a
+picked tool 'internal:merge' for a (binary False symlink False)
merging a and c
my a@fb3948d97f07+ other c@40da226db0f0 ancestor a@583c7b748052
+ premerge successful
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
-- b --
--- a/tests/test-debugcomplete.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-debugcomplete.out Fri Feb 08 11:55:17 2008 +0100 @@ -4,6 +4,7 @@ annotate archive backout +bisect branch branches bundle @@ -62,6 +63,7 @@ debugconfig debugdata debugdate +debugfsinfo debugindex debugindexdot debuginstall @@ -127,6 +129,7 @@ --noninteractive --pid-file --port +--prefix --profile --quiet --repository
--- a/tests/test-diff-hashes Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-diff-hashes Fri Feb 08 11:55:17 2008 +0100
@@ -2,6 +2,7 @@

hg init a
cd a
+hg diff not found
echo bar > foo
hg add foo
hg ci -m 'add foo' -d '1000000 0'
--- a/tests/test-diff-hashes.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-diff-hashes.out Fri Feb 08 11:55:17 2008 +0100 @@ -1,7 +1,9 @@ +found: No such file or directory +not: No such file or directory quiet: --- a/foo Mon Jan 12 13:46:40 1970 +0000 +++ b/foo Mon Jan 12 13:46:41 1970 +0000 -@@ -1,1 +1,1 @@ bar +@@ -1,1 +1,1 @@ -bar +foobar @@ -9,7 +11,7 @@ diff -r 74de3f1392e2 -r b8b5f023a6ad foo --- a/foo Mon Jan 12 13:46:40 1970 +0000 +++ b/foo Mon Jan 12 13:46:41 1970 +0000 -@@ -1,1 +1,1 @@ bar +@@ -1,1 +1,1 @@ -bar +foobar @@ -17,7 +19,7 @@ diff -r 74de3f1392e2 -r b8b5f023a6ad foo --- a/foo Mon Jan 12 13:46:40 1970 +0000 +++ b/foo Mon Jan 12 13:46:41 1970 +0000 -@@ -1,1 +1,1 @@ bar +@@ -1,1 +1,1 @@ -bar +foobar @@ -25,7 +27,7 @@ diff -r 74de3f1392e2d67856fb155963441f2610494e1a -r b8b5f023a6ad77fc378bd95cf3fa00cd1414d107 foo --- a/foo Mon Jan 12 13:46:40 1970 +0000 +++ b/foo Mon Jan 12 13:46:41 1970 +0000 -@@ -1,1 +1,1 @@ bar +@@ -1,1 +1,1 @@ -bar +foobar
--- a/tests/test-diff-ignore-whitespace.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-diff-ignore-whitespace.out Fri Feb 08 11:55:17 2008 +0100 @@ -4,7 +4,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,5 @@ hello world +@@ -1,2 +1,5 @@ + hello world + @@ -14,7 +14,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,5 @@ hello world +@@ -1,2 +1,5 @@ + hello world + @@ -28,7 +28,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world + hello world goodbye world @@ -36,7 +36,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world + hello world goodbye world @@ -44,7 +44,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world + hello world goodbye world @@ -52,7 +52,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world + hello world goodbye world @@ -61,7 +61,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world +hello world goodbye world @@ -69,7 +69,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world +hello world goodbye world @@ -81,7 +81,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ hello world -goodbye world +good bye world @@ -89,7 +89,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ hello world -goodbye world +good bye world @@ -97,7 +97,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ hello world -goodbye world +good bye world @@ -105,7 +105,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ hello world -goodbye world +good bye world @@ -114,7 +114,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ hello world -goodbye world +goodbye world @@ -122,7 +122,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ hello world -goodbye world +goodbye world @@ -134,7 +134,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,3 @@ hello world +@@ -1,2 +1,3 @@ hello world + goodbye world @@ -142,7 +142,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,3 @@ hello world +@@ -1,2 +1,3 @@ hello world + goodbye world @@ -150,7 +150,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,3 @@ hello world +@@ -1,2 +1,3 @@ hello world + goodbye world @@ -160,7 +160,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,3 @@ hello world +@@ -1,2 +1,3 @@ -hello world -goodbye world +hello world @@ -170,7 +170,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,3 @@ hello world +@@ -1,2 +1,3 @@ -hello world -goodbye world +hello world @@ -180,7 +180,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,3 @@ hello world +@@ -1,2 +1,3 @@ -hello world -goodbye world +hello world @@ -192,7 +192,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world -goodbye world +helloworld @@ -201,7 +201,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world -goodbye world +helloworld @@ -210,7 +210,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world -goodbye world +helloworld @@ -219,7 +219,7 
@@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,2 @@ hello world +@@ -1,2 +1,2 @@ -hello world -goodbye world +helloworld @@ -230,7 +230,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,5 @@ hello world +@@ -1,2 +1,5 @@ -hello world -goodbye world +helloworld @@ -242,7 +242,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,5 @@ hello world +@@ -1,2 +1,5 @@ -hello world -goodbye world +helloworld @@ -254,7 +254,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,5 @@ hello world +@@ -1,2 +1,5 @@ -hello world -goodbye world +helloworld @@ -266,7 +266,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,5 @@ hello world +@@ -1,2 +1,5 @@ -hello world -goodbye world +helloworld @@ -278,7 +278,7 @@ diff -r 540c40a65b78 foo --- a/foo +++ b/foo -@@ -1,2 +1,5 @@ hello world +@@ -1,2 +1,5 @@ -hello world -goodbye world +helloworld
--- a/tests/test-diff-newlines.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-diff-newlines.out Fri Feb 08 11:55:17 2008 +0100 @@ -2,7 +2,7 @@ diff -r 107ba6f817b5 -r 310ce7989cdc a --- a/a Thu Jan 01 00:00:01 1970 +0000 +++ b/a Thu Jan 01 00:00:02 1970 +0000 -@@ -1,2 +1,3 @@ confuse str.splitlines +@@ -1,2 +1,3 @@ confuse str.splitlines embedded newline +clean diff
--- a/tests/test-dispatch.py.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-dispatch.py.out Fri Feb 08 11:55:17 2008 +0100
@@ -1,7 +1,7 @@
running: init test1
result: None
running: add foo
-result: None
+result: 0
running: commit -m commit1 -d 2000-01-01 foo
result: None
running: commit -m commit2 -d 2000-01-02 foo
--- a/tests/test-double-merge.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-double-merge.out Fri Feb 08 11:55:17 2008 +0100
@@ -10,10 +10,14 @@
foo: versions differ -> m
foo: remote copied to bar -> m
copying foo to bar
+picked tool 'internal:merge' for foo (binary False symlink False)
merging foo and bar
my foo@2092631ce82b+ other bar@7731dad1c2b9 ancestor foo@310fd17130da
+ premerge successful
+picked tool 'internal:merge' for foo (binary False symlink False)
merging foo
my foo@2092631ce82b+ other foo@7731dad1c2b9 ancestor foo@310fd17130da
+ premerge successful
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
-- foo --
--- a/tests/test-execute-bit Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-execute-bit Fri Feb 08 11:55:17 2008 +0100
@@ -1,5 +1,7 @@
#!/bin/sh

+"$TESTDIR/hghave" execbit || exit 80
+
hg init
echo a > a
hg ci -d'0 0' -Am'not executable'
--- a/tests/test-fetch Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-fetch Fri Feb 08 11:55:17 2008 +0100 @@ -20,5 +20,20 @@ echo c > c/c hg --cwd c commit -d '3 0' -Amc + +hg clone c d +hg clone c e + hg --cwd c fetch -d '4 0' -m 'automated merge' ../a ls c + +hg --cwd a serve -a localhost -p $HGPORT -d --pid-file=hg.pid +cat a/hg.pid >> "$DAEMON_PIDS" + +echo '% fetch over http, no auth' +hg --cwd d fetch -d '5 0' http://localhost:$HGPORT/ +hg --cwd d tip --template '{desc}\n' + +echo '% fetch over http with auth (should be hidden in desc)' +hg --cwd e fetch -d '5 0' http://user:password@localhost:$HGPORT/ +hg --cwd e tip --template '{desc}\n'
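The additions above extend the fetch test to pull over HTTP, including a URL with embedded credentials that must not leak into the generated merge description. A stripped-down sketch of the same setup, assuming "a" is an existing repository and "b" a clone of it with at least one divergent local commit; the port and names are illustrative:

hg --cwd a serve -a localhost -p 8000 -d --pid-file=hg.pid   # serve repository "a" in the background
hg --cwd b fetch -m 'automated merge' http://localhost:8000/ # fetch = pull, plus merge and commit when the pull adds a new head
hg --cwd b tip --template '{desc}\n'                         # when a merge is committed, credentials in the URL are left out of its description
kill `cat a/hg.pid`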
--- a/tests/test-fetch.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-fetch.out Fri Feb 08 11:55:17 2008 +0100 @@ -13,6 +13,8 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 1:97d72e5f12c7 adding c +2 files updated, 0 files merged, 0 files removed, 0 files unresolved +2 files updated, 0 files merged, 0 files removed, 0 files unresolved pulling from ../a searching for changes adding changesets @@ -25,3 +27,25 @@ a b c +% fetch over http, no auth +pulling from http://localhost:20059/ +searching for changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 1 changes to 1 files (+1 heads) +merging with new head 2:97d72e5f12c7 +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +new changeset 3:0b6439e938f9 merges remote changes with local +Automated merge with http://localhost:20059/ +% fetch over http with auth (should be hidden in desc) +pulling from http://user:***@localhost:20059/ +searching for changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 1 changes to 1 files (+1 heads) +merging with new head 2:97d72e5f12c7 +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +new changeset 3:0b6439e938f9 merges remote changes with local +Automated merge with http://localhost:20059/
--- a/tests/test-filebranch Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-filebranch Fri Feb 08 11:55:17 2008 +0100 @@ -37,7 +37,7 @@ hg commit -m "branch b" -d "1000000 0" echo "we shouldn't have anything but n state here" -hg debugstate | cut -b 1-16,35- +hg debugstate | cut -b 1-16,37- echo merging hg pull ../a @@ -48,7 +48,7 @@ echo new > quux echo "we shouldn't have anything but foo in merge state here" -hg debugstate | cut -b 1-16,35- | grep "^m" +hg debugstate | cut -b 1-16,37- | grep "^m" hg ci -m "merge" -d "1000000 0"
--- a/tests/test-git-export.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-git-export.out Fri Feb 08 11:55:17 2008 +0100 @@ -35,7 +35,7 @@ rename to dst --- a/src +++ b/dst -@@ -3,3 +3,4 @@ 3 +@@ -3,3 +3,4 @@ 3 4 5 @@ -86,7 +86,7 @@ rename to bar --- a/foo +++ b/bar -@@ -1,2 +1,3 @@ a +@@ -1,2 +1,3 @@ a b +c @@ -96,7 +96,7 @@ rename to foo --- a/bar +++ b/foo -@@ -1,3 +1,2 @@ a +@@ -1,3 +1,2 @@ a b -c @@ -107,7 +107,7 @@ rename to bar --- a/foo +++ b/bar -@@ -1,1 +1,3 @@ a +@@ -1,1 +1,3 @@ a +b +c @@ -117,7 +117,7 @@ rename to foo --- a/bar +++ b/foo -@@ -1,3 +1,1 @@ a +@@ -1,3 +1,1 @@ a -b -c
--- a/tests/test-globalopts.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-globalopts.out Fri Feb 08 11:55:17 2008 +0100 @@ -147,6 +147,7 @@ annotate show changeset information per file line archive create unversioned archive of a repository revision backout reverse effect of earlier changeset + bisect subdivision search of changesets branch set or show the current branch name branches list repository named branches bundle create a changegroup file @@ -175,8 +176,8 @@ recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove - revert revert files or dirs to their states as of some revision - rollback roll back the last transaction in this repository + revert restore individual files or dirs to an earlier state + rollback roll back the last transaction root print the root (top) of the current working dir serve export the repository via HTTP showconfig show combined config settings from all hgrc files @@ -199,6 +200,7 @@ annotate show changeset information per file line archive create unversioned archive of a repository revision backout reverse effect of earlier changeset + bisect subdivision search of changesets branch set or show the current branch name branches list repository named branches bundle create a changegroup file @@ -227,8 +229,8 @@ recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove - revert revert files or dirs to their states as of some revision - rollback roll back the last transaction in this repository + revert restore individual files or dirs to an earlier state + rollback roll back the last transaction root print the root (top) of the current working dir serve export the repository via HTTP showconfig show combined config settings from all hgrc files
--- a/tests/test-glog Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-glog Fri Feb 08 11:55:17 2008 +0100 @@ -139,5 +139,8 @@ echo % glog hg glog +echo % file glog +hg glog 5 + echo % unused arguments -hg glog -q foo || echo failed +hg glog -q foo bar || echo failed
--- a/tests/test-glog.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-glog.out Fri Feb 08 11:55:17 2008 +0100 @@ -307,9 +307,17 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: (0) root +% file glog +o changeset: 5:3589c3c477ab + parent: 3:02173ffbf857 + parent: 4:e2cad8233c77 + user: test + date: Thu Jan 01 00:00:05 1970 +0000 + summary: (5) expand + % unused arguments hg glog: invalid arguments -hg glog [OPTION]... +hg glog [OPTION]... [FILE] show revision history alongside an ASCII revision graph failed
--- a/tests/test-help.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-help.out Fri Feb 08 11:55:17 2008 +0100 @@ -15,7 +15,6 @@ pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit - revert revert files or dirs to their states as of some revision serve export the repository via HTTP status show changed files in the working directory update update working directory @@ -34,7 +33,6 @@ pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit - revert revert files or dirs to their states as of some revision serve export the repository via HTTP status show changed files in the working directory update update working directory @@ -47,6 +45,7 @@ annotate show changeset information per file line archive create unversioned archive of a repository revision backout reverse effect of earlier changeset + bisect subdivision search of changesets branch set or show the current branch name branches list repository named branches bundle create a changegroup file @@ -75,8 +74,8 @@ recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove - revert revert files or dirs to their states as of some revision - rollback roll back the last transaction in this repository + revert restore individual files or dirs to an earlier state + rollback roll back the last transaction root print the root (top) of the current working dir serve export the repository via HTTP showconfig show combined config settings from all hgrc files @@ -95,6 +94,7 @@ annotate show changeset information per file line archive create unversioned archive of a repository revision backout reverse effect of earlier changeset + bisect subdivision search of changesets branch set or show the current branch name branches list repository named branches bundle create a changegroup file @@ -123,8 +123,8 @@ recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove - revert revert files or dirs to their states as of some revision - rollback roll back the last transaction in this repository + revert restore individual files or dirs to an earlier state + rollback roll back the last transaction root print the root (top) of the current working dir serve export the repository via HTTP showconfig show combined config settings from all hgrc files @@ -205,17 +205,22 @@ -w --ignore-all-space ignore white space when comparing lines -b --ignore-space-change ignore changes in the amount of white space -B --ignore-blank-lines ignore changes whose lines are all blank + -U --unified number of lines of context to show (default: 3) -I --include include names matching the given patterns -X --exclude exclude names matching the given patterns use "hg -v help diff" to show global options hg status [OPTION]... [FILE]... +aliases: st + show changed files in the working directory Show status of files in the repository. If names are given, only - files that match are shown. Files that are clean or ignored, are - not listed unless -c (clean), -i (ignored) or -A is given. + files that match are shown. Files that are clean or ignored or + source of a copy/move operation, are not listed unless -c (clean), + -i (ignored), -C (copies) or -A is given. Unless options described + with "show only ..." are given, the options -mardu are used. 
NOTE: status may appear to disagree with diff if permissions have changed or a merge has occurred. The standard diff format does not @@ -232,11 +237,9 @@ C = clean ! = deleted, but still tracked ? = not tracked - I = ignored (not shown by default) + I = ignored = the previous added file was copied from here -aliases: st - options: -A --all show status of all files @@ -276,7 +279,6 @@ pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit - revert revert files or dirs to their states as of some revision serve export the repository via HTTP status show changed files in the working directory update update working directory @@ -300,7 +302,6 @@ pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit - revert revert files or dirs to their states as of some revision serve export the repository via HTTP status show changed files in the working directory update update working directory
--- a/tests/test-hgweb Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-hgweb Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,5 @@ #!/bin/sh +# Some tests for hgweb. Tests static files, plain files and different 404's. hg init test cd test @@ -6,8 +7,36 @@ echo foo > da/foo echo foo > foo hg ci -Ambase -d '0 0' -hg serve -p $HGPORT -d --pid-file=hg.pid +hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log +cat hg.pid >> $DAEMON_PIDS echo % manifest ("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/?style=raw') ("$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/da?style=raw') + +echo % plain file +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/foo?style=raw' + +echo % should give a 404 - static file that does not exist +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/static/bogus' + +echo % should give a 404 - bad revision +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/spam/foo?style=raw' + +echo % should give a 400 - bad command +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/foo?cmd=spam&style=raw' | sed 's/400.*/400/' + +echo % should give a 404 - file does not exist +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/file/tip/bork?style=raw' + +echo % stop and restart kill `cat hg.pid` +hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log +cat hg.pid >> $DAEMON_PIDS +# Test the access/error files are opened in append mode +python -c "print len(file('access.log').readlines()), 'log lines written'" + +echo % static file +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/static/style-gitweb.css' + +echo % errors +cat errors.log
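The stop/restart at the end of this script only proves something if the server reopens its logs in append mode; otherwise the second "hg serve -A access.log" would truncate the lines written during the first run and the "log lines written" count checked below would drop. A minimal sketch of the distinction, not hgweb's code:

  # hypothetical helper: 'a' preserves entries across a restart, 'w' would not
  def openlog(path):
      return open(path, 'a')

  log = openlog('access.log')
  log.write('second run\n')
  log.close()
  print len(open('access.log').readlines()), 'log lines written'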
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-hgweb-commands Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,54 @@ +#!/bin/sh +# An attempt at more fully testing the hgweb web interface. +# The following things are tested elsewhere and are therefore omitted: +# - archive, tested in test-archive +# - unbundle, tested in test-push-http +# - changegroupsubset, tested in test-pull + +echo % Set up the repo +hg init test +cd test +mkdir da +echo foo > da/foo +echo foo > foo +hg ci -d'0 0' -Ambase +hg tag 1.0 +hg serve -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log +cat hg.pid >> $DAEMON_PIDS + +echo % Logs and changes +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//" +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//" +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/foo/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//" +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/shortlog/' | sed "s/[0-9]* years/many years/" +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/1/?style=raw' + +echo % File-related +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo/?style=raw' +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/annotate/1/foo/?style=raw' +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/?style=raw' +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo' | sed "s/[0-9]* years/many years/" +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/filediff/1/foo/?style=raw' + +echo % Overviews +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/tags/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//" +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb' | sed "s/[0-9]* years ago/long ago/g" + +echo % capabilities +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/capabilities' +echo % heads +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/heads' +echo % lookup +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/lookup/1' +echo % branches +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/branches' +echo % changegroup +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/changegroup' +echo % stream_out +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/stream_out' + +echo % Static files +"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css' + +echo % ERRORS ENCOUNTERED +cat errors.log
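Besides the HTML and raw views, this script exercises the wire-protocol entry points (capabilities, heads, lookup, branches, changegroup, stream_out). Outside the test harness the same queries can be made with any HTTP client; roughly equivalent to the get-with-headers.py calls above, minus the header dump (urllib2 from Python 2's standard library, the port number is only illustrative):

  import urllib2
  print urllib2.urlopen('http://127.0.0.1:8000/capabilities').read()
  print urllib2.urlopen('http://127.0.0.1:8000/heads').read()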
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-hgweb-no-request-uri Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,77 @@ +#!/bin/sh +# This tests if hgweb and hgwebdir still work if the REQUEST_URI variable is +# no longer passed with the request. Instead, SCRIPT_NAME and PATH_INFO +# should be used from d74fc8dec2b4 onward to route the request. + +mkdir repo +cd repo +hg init +echo foo > bar +hg add bar +hg commit -m "test" -d "0 0" -u "Testing" +hg tip + +cat > request.py <<EOF +from mercurial.hgweb import hgweb, hgwebdir +from StringIO import StringIO +import os, sys + +errors = StringIO() +input = StringIO() + +def startrsp(headers, data): + print '---- HEADERS' + print headers + print '---- DATA' + print data + return output.write + +env = { + 'wsgi.version': (1, 0), + 'wsgi.url_scheme': 'http', + 'wsgi.errors': errors, + 'wsgi.input': input, + 'wsgi.multithread': False, + 'wsgi.multiprocess': False, + 'wsgi.run_once': False, + 'REQUEST_METHOD': 'GET', + 'SCRIPT_NAME': '', + 'SERVER_NAME': '127.0.0.1', + 'SERVER_PORT': os.environ['HGPORT'], + 'SERVER_PROTOCOL': 'HTTP/1.0' +} + +output = StringIO() +env['PATH_INFO'] = '/' +env['QUERY_STRING'] = 'style=atom' +hgweb('.', name = 'repo')(env, startrsp) +print output.getvalue() +print '---- ERRORS' +print errors.getvalue() + +output = StringIO() +env['PATH_INFO'] = '/file/tip/' +env['QUERY_STRING'] = 'style=raw' +hgweb('.', name = 'repo')(env, startrsp) +print output.getvalue() +print '---- ERRORS' +print errors.getvalue() + +output = StringIO() +env['PATH_INFO'] = '/' +env['QUERY_STRING'] = 'style=raw' +hgwebdir({'repo': '.'})(env, startrsp) +print output.getvalue() +print '---- ERRORS' +print errors.getvalue() + +output = StringIO() +env['PATH_INFO'] = '/repo/file/tip/' +env['QUERY_STRING'] = 'style=raw' +hgwebdir({'repo': '.'})(env, startrsp) +print output.getvalue() +print '---- ERRORS' +print errors.getvalue() +EOF + +python request.py | sed "s/http:\/\/127\.0\.0\.1:[0-9]*\//http:\/\/127.0.0.1\//"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-hgweb-no-request-uri.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,72 @@ +changeset: 0:4cbec7e6f8c4 +tag: tip +user: Testing +date: Thu Jan 01 00:00:00 1970 +0000 +summary: test + +---- HEADERS +200 Script output follows +---- DATA +[('Content-Type', 'application/atom+xml; charset=ascii')] +<?xml version="1.0" encoding="ascii"?> +<feed xmlns="http://www.w3.org/2005/Atom"> + <!-- Changelog --> + <id>http://127.0.0.1/</id> + <link rel="self" href="http://127.0.0.1/atom-log"/> + <link rel="alternate" href="http://127.0.0.1/"/> + <title>repo Changelog</title> + <updated>1970-01-01T00:00:00+00:00</updated> + + <entry> + <title>test</title> + <id>http://www.selenic.com/mercurial/#changeset-4cbec7e6f8c42eb52b6b52670e1f7560ae9a101e</id> + <link href="http://127.0.0.1/rev/4cbec7e6f8c42eb52b6b52670e1f7560ae9a101e"/> + <author> + <name>Testing</name> + <email>Testing</email> + </author> + <updated>1970-01-01T00:00:00+00:00</updated> + <published>1970-01-01T00:00:00+00:00</published> + <content type="xhtml"> + <div xmlns="http://www.w3.org/1999/xhtml"> + <pre xml:space="preserve">test</pre> + </div> + </content> + </entry> + +</feed> + +---- ERRORS + +---- HEADERS +200 Script output follows +---- DATA +[('Content-Type', 'text/plain; charset=ascii')] + +-rw-r--r-- 4 bar + + + +---- ERRORS + +---- HEADERS +200 Script output follows +---- DATA +[('Content-Type', 'text/plain; charset=ascii')] + +/repo/ + + +---- ERRORS + +---- HEADERS +200 Script output follows +---- DATA +[('Content-Type', 'text/plain; charset=ascii')] + +-rw-r--r-- 4 bar + + + +---- ERRORS +
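What the new test pins down is the WSGI routing rule: with REQUEST_URI absent, the path to dispatch on has to be reconstructed from SCRIPT_NAME plus PATH_INFO (PEP 333). A condensed sketch of that rule, separate from hgweb's real request handling:

  def requestpath(env):
      # SCRIPT_NAME is where the application is mounted, PATH_INFO the rest
      return env.get('SCRIPT_NAME', '') + env.get('PATH_INFO', '')

  env = {'SCRIPT_NAME': '', 'PATH_INFO': '/repo/file/tip/'}
  print requestpath(env)   # '/repo/file/tip/': hgwebdir consumes '/repo',
                           # the repository's hgweb then sees '/file/tip/'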
--- a/tests/test-hgweb.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-hgweb.out Fri Feb 08 11:55:17 2008 +0100 @@ -14,3 +14,126 @@ -rw-r--r-- 4 foo +% plain file +200 Script output follows + +foo +% should give a 404 - static file that does not exist +404 Not Found + +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> +<html> +<head> +<link rel="icon" href="/static/hgicon.png" type="image/png" /> +<meta name="robots" content="index, nofollow" /> +<link rel="stylesheet" href="/static/style.css" type="text/css" /> + +<title>Mercurial Error</title> +</head> +<body> + +<h2>Mercurial Error</h2> + +<p> +An error occurred while processing your request: +</p> +<p> +Not Found +</p> + + +<div class="logo"> +powered by<br/> +<a href="http://www.selenic.com/mercurial/">mercurial</a> +</div> + +</body> +</html> + +% should give a 404 - bad revision +404 Not Found + + +error: revision not found: spam +% should give a 400 - bad command +400 + + +error: No such method: spam +% should give a 404 - file does not exist +404 Not Found + + +error: Path not found: bork/ +% stop and restart +7 log lines written +% static file +200 Script output follows + +body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; } +a { color:#0000cc; } +a:hover, a:visited, a:active { color:#880000; } +div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } +div.page_header a:visited { color:#0000cc; } +div.page_header a:hover { color:#880000; } +div.page_nav { padding:8px; } +div.page_nav a:visited { color:#0000cc; } +div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} +div.page_footer { padding:4px 8px; background-color: #d9d8d1; } +div.page_footer_text { float:left; color:#555555; font-style:italic; } +div.page_body { padding:8px; } +div.title, a.title { + display:block; padding:6px 8px; + font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; +} +a.title:hover { background-color: #d9d8d1; } +div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } +div.log_body { padding:8px 8px 8px 150px; } +.age { white-space:nowrap; } +span.age { position:relative; float:left; width:142px; font-style:italic; } +div.log_link { + padding:0px 8px; + font-size:10px; font-family:sans-serif; font-style:normal; + position:relative; float:left; width:136px; +} +div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } +a.list { text-decoration:none; color:#000000; } +a.list:hover { text-decoration:underline; color:#880000; } +table { padding:8px 4px; } +th { padding:2px 5px; font-size:12px; text-align:left; } +tr.light:hover, .parity0:hover { background-color:#edece6; } +tr.dark, .parity1 { background-color:#f6f6f0; } +tr.dark:hover, .parity1:hover { background-color:#edece6; } +td { padding:2px 5px; font-size:12px; vertical-align:top; } +td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } +div.pre { font-family:monospace; font-size:12px; white-space:pre; } +div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } +div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } +div.search { margin:4px 8px; position:absolute; top:56px; right:12px } +.linenr { color:#999999; text-decoration:none } +a.rss_logo { + float:right; padding:3px 6px; line-height:10px; + border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 
#ff954e; + color:#ffffff; background-color:#ff6600; + font-weight:bold; font-family:sans-serif; font-size:10px; + text-align:center; text-decoration:none; +} +a.rss_logo:hover { background-color:#ee5500; } +pre { margin: 0; } +span.logtags span { + padding: 0px 4px; + font-size: 10px; + font-weight: normal; + border: 1px solid; + background-color: #ffaaff; + border-color: #ffccff #ff00ee #ff00ee #ffccff; +} +span.logtags span.tagtag { + background-color: #ffffaa; + border-color: #ffffcc #ffee00 #ffee00 #ffffcc; +} +span.logtags span.branchtag { + background-color: #aaffaa; + border-color: #ccffcc #00cc33 #00cc33 #ccffcc; +} +% errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-hgwebdir Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,84 @@ +#!/bin/sh +# Tests some basic hgwebdir functionality. Tests setting up paths and +# collection, different forms of 404s and the subdirectory support. + +mkdir webdir +cd webdir + +hg init a +echo a > a/a +hg --cwd a ci -Ama -d'1 0' + +hg init b +echo b > b/b +hg --cwd b ci -Amb -d'2 0' + +hg init c +echo c > c/c +hg --cwd c ci -Amc -d'3 0' +root=`pwd` + +cd .. + +cat > paths.conf <<EOF +[paths] +a=$root/a +b=$root/b +EOF + +hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf paths.conf \ + -A access-paths.log -E error-paths-1.log +cat hg.pid >> $DAEMON_PIDS + +echo % should give a 404 - file does not exist +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/a/file/tip/bork?style=raw' + +echo % should succeed +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/a/file/tip/a?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/b/file/tip/b?style=raw' + +echo % should give a 404 - repo is not published +"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/c/file/tip/c?style=raw' + +cat > paths.conf <<EOF +[paths] +t/a/=$root/a +b=$root/b +EOF + +hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ + -A access-paths.log -E error-paths-2.log +cat hg.pid >> $DAEMON_PIDS + +echo % should succeed, slashy names +"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/a?style=atom' \ + | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//" +"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/a/?style=atom' \ + | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//" +"$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/a/file/tip/a?style=raw' + +cat > collections.conf <<EOF +[collections] +$root=$root +EOF + +hg serve -p $HGPORT2 -d --pid-file=hg.pid --webdir-conf collections.conf \ + -A access-collections.log -E error-collections.log +cat hg.pid >> $DAEMON_PIDS + +echo % should succeed +"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/file/tip/a?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/b/file/tip/b?style=raw' +"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/c/file/tip/c?style=raw' + +echo % paths errors 1 +cat error-paths-1.log +echo % paths errors 2 +cat error-paths-2.log +echo % collections errors +cat error-collections.log
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-hgwebdir.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,124 @@ +adding a +adding b +adding c +% should give a 404 - file does not exist +404 Not Found + + +error: Path not found: bork/ +% should succeed +200 Script output follows + + +/a/ +/b/ + +200 Script output follows + +a +200 Script output follows + +b +% should give a 404 - repo is not published +404 Not Found + + +error: repository c not found +% should succeed, slashy names +200 Script output follows + + +/b/ +/t/a/ + +200 Script output follows + + +/t/a/ + +200 Script output follows + + +/t/a/ + +200 Script output follows + +<?xml version="1.0" encoding="ascii"?> +<feed xmlns="http://127.0.0.1/2005/Atom"> + <!-- Changelog --> + <id>http://127.0.0.1/t/a/</id> + <link rel="self" href="http://127.0.0.1/t/a/atom-log"/> + <link rel="alternate" href="http://127.0.0.1/t/a/"/> + <title>t/a Changelog</title> + <updated>1970-01-01T00:00:01+00:00</updated> + + <entry> + <title>a</title> + <id>http://127.0.0.1/mercurial/#changeset-8580ff50825a50c8f716709acdf8de0deddcd6ab</id> + <link href="http://127.0.0.1/t/a/rev/8580ff50825a50c8f716709acdf8de0deddcd6ab"/> + <author> + <name>test</name> + <email>test</email> + </author> + <updated>1970-01-01T00:00:01+00:00</updated> + <published>1970-01-01T00:00:01+00:00</published> + <content type="xhtml"> + <div xmlns="http://127.0.0.1/1999/xhtml"> + <pre xml:space="preserve">a</pre> + </div> + </content> + </entry> + +</feed> +200 Script output follows + +<?xml version="1.0" encoding="ascii"?> +<feed xmlns="http://127.0.0.1/2005/Atom"> + <!-- Changelog --> + <id>http://127.0.0.1/t/a/</id> + <link rel="self" href="http://127.0.0.1/t/a/atom-log"/> + <link rel="alternate" href="http://127.0.0.1/t/a/"/> + <title>t/a Changelog</title> + <updated>1970-01-01T00:00:01+00:00</updated> + + <entry> + <title>a</title> + <id>http://127.0.0.1/mercurial/#changeset-8580ff50825a50c8f716709acdf8de0deddcd6ab</id> + <link href="http://127.0.0.1/t/a/rev/8580ff50825a50c8f716709acdf8de0deddcd6ab"/> + <author> + <name>test</name> + <email>test</email> + </author> + <updated>1970-01-01T00:00:01+00:00</updated> + <published>1970-01-01T00:00:01+00:00</published> + <content type="xhtml"> + <div xmlns="http://127.0.0.1/1999/xhtml"> + <pre xml:space="preserve">a</pre> + </div> + </content> + </entry> + +</feed> +200 Script output follows + +a +% should succeed +200 Script output follows + + +/a/ +/b/ +/c/ + +200 Script output follows + +a +200 Script output follows + +b +200 Script output follows + +c +% paths errors 1 +% paths errors 2 +% collections errors
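The same paths.conf and collections.conf files can drive hgwebdir outside "hg serve" as well; request.py in test-hgweb-no-request-uri already treats hgwebdir as a WSGI callable, so a standalone server is just a matter of handing that callable to any WSGI server. A sketch using wsgiref purely as an example server (Python 2.5+), not what "hg serve" itself does:

  from wsgiref.simple_server import make_server
  from mercurial.hgweb import hgwebdir

  # hgwebdir accepts a config file name or a {name: path} dict, as in request.py
  app = hgwebdir('paths.conf')
  make_server('localhost', 8000, app).serve_forever()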
--- a/tests/test-hup.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-hup.out Fri Feb 08 11:55:17 2008 +0100 @@ -4,4 +4,4 @@ transaction abort! rollback completed killed! -.hg/00changelog.i .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a +.hg/00changelog.i .hg/journal.branch .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a
--- a/tests/test-import Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-import Fri Feb 08 11:55:17 2008 +0100 @@ -32,6 +32,20 @@ hg --cwd b import -mpatch ../tip.patch rm -r b +echo % import of plain diff with specific date and user +hg clone -r0 a b +hg --cwd a diff -r0:1 > tip.patch +hg --cwd b import -mpatch -d '1 0' -u 'user@nowhere.net' ../tip.patch +hg -R b tip -pv +rm -r b + +echo % import of plain diff should be ok with --no-commit +hg clone -r0 a b +hg --cwd a diff -r0:1 > tip.patch +hg --cwd b import --no-commit ../tip.patch +hg --cwd b diff --nodates +rm -r b + echo % hg -R repo import # put the clone in a subdir - having a directory named "a" # used to hide a bug. @@ -57,7 +71,7 @@ cat > mkmsg.py <<EOF import email.Message, sys msg = email.Message.Message() -msg.set_payload('email commit message\n' + open('tip.patch').read()) +msg.set_payload('email commit message\n' + open('tip.patch', 'rb').read()) msg['Subject'] = 'email patch' msg['From'] = 'email patcher' sys.stdout.write(msg.as_string())
--- a/tests/test-import.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-import.out Fri Feb 08 11:55:17 2008 +0100 @@ -22,6 +22,7 @@ applying ../tip.patch transaction abort! rollback completed +abort: empty commit message % import of plain diff should be ok with message requesting all changes adding changesets @@ -30,6 +31,44 @@ added 1 changesets with 2 changes to 2 files 2 files updated, 0 files merged, 0 files removed, 0 files unresolved applying ../tip.patch +% import of plain diff with specific date and user +requesting all changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 2 changes to 2 files +2 files updated, 0 files merged, 0 files removed, 0 files unresolved +applying ../tip.patch +changeset: 1:ca68f19f3a40 +tag: tip +user: user@nowhere.net +date: Thu Jan 01 00:00:01 1970 +0000 +files: a +description: +patch + + +diff -r 80971e65b431 -r ca68f19f3a40 a +--- a/a Thu Jan 01 00:00:00 1970 +0000 ++++ b/a Thu Jan 01 00:00:01 1970 +0000 +@@ -1,1 +1,2 @@ + line 1 ++line 2 + +% import of plain diff should be ok with --no-commit +requesting all changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 2 changes to 2 files +2 files updated, 0 files merged, 0 files removed, 0 files unresolved +applying ../tip.patch +diff -r 80971e65b431 a +--- a/a ++++ b/a +@@ -1,1 +1,2 @@ + line 1 ++line 2 % hg -R repo import requesting all changes adding changesets @@ -91,6 +130,7 @@ applying patch from stdin transaction abort! rollback completed +abort: empty commit message % hg export in email, should use patch header requesting all changes adding changesets
--- a/tests/test-install.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-install.out Fri Feb 08 11:55:17 2008 +0100 @@ -2,7 +2,6 @@ Checking extensions... Checking templates... Checking patch... -Checking merge helper... Checking commit editor... Checking username... No problems detected
--- a/tests/test-issue522.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-issue522.out Fri Feb 08 11:55:17 2008 +0100 @@ -11,7 +11,7 @@ getting foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -n 0 -2 unset foo +n 0 -2 unset foo M foo c6fc755d7e68f49f880599da29f15add41f42f5a 644 foo rev offset length base linkrev nodeid p1 p2
--- a/tests/test-issue612.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-issue612.out Fri Feb 08 11:55:17 2008 +0100 @@ -1,6 +1,5 @@ adding src/a.c -copying src/a.c to source/a.c -removing src/a.c +moving src/a.c to source/a.c 1 files updated, 0 files merged, 1 files removed, 0 files unresolved ? src/a.o merging src/a.c and source/a.c
--- a/tests/test-issue672.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-issue672.out Fri Feb 08 11:55:17 2008 +0100 @@ -28,8 +28,10 @@ 1a -> 1 * checking for directory renames 1a: local moved to 1 -> m +picked tool 'internal:merge' for 1a (binary False symlink False) merging 1a and 1 my 1a@ac7575e3c052+ other 1@746e9549ea96 ancestor 1@81f4b099af3d + premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) 1 files updated, 0 files merged, 1 files removed, 0 files unresolved @@ -44,8 +46,10 @@ checking for directory renames 1: remote moved to 1a -> m copying 1 to 1a +picked tool 'internal:merge' for 1 (binary False symlink False) merging 1 and 1a -my 1@746e9549ea96+ other 1a@2f8037f47a5c ancestor 1@81f4b099af3d +my 1@746e9549ea96+ other 1a@ac7575e3c052 ancestor 1@81f4b099af3d + premerge successful removing 1 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-journal-exists Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,12 @@ +#!/bin/sh + +hg init +echo a > a +hg ci -Am0 + +touch .hg/store/journal + +echo foo > a +hg ci -Am0 + +hg recover
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-journal-exists.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,8 @@ +adding a +abort: journal already exists - run hg recover! +rolling back interrupted transaction +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 1 changesets, 1 total revisions
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-keyword Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,253 @@ +#!/bin/sh + +cat <<EOF >> $HGRCPATH +[extensions] +hgext.keyword = +hgext.mq = +[keyword] +* = +b = ignore +[hooks] +commit= +commit.test=cp a hooktest +EOF + +echo % help +hg help keyword + +echo % hg kwdemo +hg --quiet kwdemo --default \ +| sed -e 's![^ ][^ ]*demo.txt,v!/TMP/demo.txt,v!' \ + -e 's/,v [a-z0-9][a-z0-9]* /,v xxxxxxxxxxxx /' \ + -e '/[$]Revision/ s/: [a-z0-9][a-z0-9]* /: xxxxxxxxxxxx /' \ + -e 's! 20[0-9][0-9]/[01][0-9]/[0-3][0-9] [0-2][0-9]:[0-6][0-9]:[0-6][0-9]! 2000/00/00 00:00:00!' + +hg --quiet kwdemo "Branch = {branches}" + +hg init Test +cd Test + +echo % kwshrink should exit silently in empty/invalid repo +hg kwshrink + +echo 'expand $Id$' > a +echo 'do not process $Id:' >> a +echo 'xxx $' >> a +echo 'ignore $Id$' > b +ln -s a sym +echo % cat +cat sym a b + +echo % addremove +hg addremove +echo % status +hg status + +echo % default keyword expansion including commit hook +echo % interrupted commit should not change state or run commit hook +hg --debug commit +echo % status +hg status + +echo % commit +hg --debug commit -mabsym -d '0 0' -u 'User Name <user@example.com>' +echo % status +hg status +echo % identify +hg --quiet identify +echo % cat +cat sym a b +echo % hg cat +hg cat sym a b + +echo +echo % diff a hooktest +diff a hooktest + +echo % removing commit hook from config +sed -e '/\[hooks\]/,$ d' $HGRCPATH > $HGRCPATH.nohook +mv $HGRCPATH.nohook $HGRCPATH +rm hooktest + +echo % touch +touch a b +echo % status +hg status + +rm sym a b +echo % update +hg update +echo % cat +cat sym a b + +echo % check whether expansion is filewise +echo '$Id$' > c +echo 'tests for different changenodes' >> c +echo % commit c +hg commit -A -mcndiff -d '1 0' -u 'User Name <user@example.com>' +echo % force expansion +hg -v kwexpand +echo % compare changenodes in a c +cat a c + +echo % qimport +hg qimport -r tip -n mqtest.diff +echo % keywords should not be expanded in patch +cat .hg/patches/mqtest.diff +echo % qpop +hg qpop +echo % qgoto - should imply qpush +hg qgoto mqtest.diff +echo % cat +cat c +echo % qpop and move on +hg qpop + +echo % copy +hg cp a c + +echo % kwfiles added +hg kwfiles + +echo % commit +hg --debug commit -ma2c -d '1 0' -u 'User Name <user@example.com>' +echo % cat a c +cat a c +echo % touch copied c after 1 second +sleep 1 +touch c +echo % status +hg status + +echo % kwfiles +hg kwfiles + +echo % diff --rev +hg diff --rev 0 | grep -v 'b/c' + +echo % rollback +hg rollback +echo % status +hg status +echo % update -C +hg update --clean + +echo % custom keyword expansion +echo % try with kwdemo +hg --quiet kwdemo "Xinfo = {author}: {desc}" + +cat <<EOF >>$HGRCPATH +[keywordmaps] +Id = {file} {node|short} {date|rfc822date} {author|user} +Xinfo = {author}: {desc} +EOF + +echo % cat +cat sym a b +echo % hg cat +hg cat sym a b + +echo +echo '$Xinfo$' >> a +cat <<EOF >> log +firstline +secondline +EOF + +echo % interrupted commit should not change state +hg commit +echo % status +hg status + +echo % commit +hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>' +rm log +echo % status +hg status + +echo % cat +cat sym a b +echo % hg cat +hg cat sym a b +echo + +echo % remove +hg remove a +hg --debug commit -m rma +echo % status +hg status +echo % rollback +hg rollback +echo % status +hg status +echo % revert a +hg revert --no-backup --rev tip a +echo % cat a +cat a + +echo % clone to test incoming +cd .. 
+hg clone -r0 Test Test-a +cd Test-a +cat <<EOF >> .hg/hgrc +[paths] +default = ../Test +EOF +echo % incoming +# remove path to temp dir +hg incoming | sed -e 's/^\(comparing with \).*\(test-keyword.*\)/\1\2/' + +sed -e 's/Id.*/& rejecttest/' a > a.new +mv a.new a +echo % commit rejecttest +hg --debug commit -m'rejects?' -d '3 0' -u 'User Name <user@example.com>' +echo % export +hg export -o ../rejecttest.diff tip + +cd ../Test +echo % import +hg import ../rejecttest.diff +echo % cat +cat sym a b +echo +echo % rollback +hg rollback +echo % clean update +hg update --clean + +echo % kwexpand/kwshrink on selected files +mkdir x +echo % copy a x/a +hg copy a x/a +echo % kwexpand a +hg --verbose kwexpand a +echo % kwexpand x/a should abort +hg --verbose kwexpand x/a +cd x +hg --debug commit -m xa -d '3 0' -u 'User Name <user@example.com>' +echo % cat a +cat a +echo % kwshrink a inside directory x +hg --verbose kwshrink a +echo % cat a +cat a +cd .. + +echo % kwexpand nonexistent +hg kwexpand nonexistent + +echo % switch off expansion +echo % kwshrink with unknown file u +cp a u +hg --verbose kwshrink +echo % cat +cat sym a b +echo % hg cat +hg cat sym a b +echo +rm $HGRCPATH +echo % cat +cat sym a b +echo % hg cat +hg cat sym a b +echo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-keyword.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,385 @@ +% help +keyword extension - keyword expansion in local repositories + +This extension expands RCS/CVS-like or self-customized $Keywords$ +in tracked text files selected by your configuration. + +Keywords are only expanded in local repositories and not stored in +the change history. The mechanism can be regarded as a convenience +for the current user or for archive distribution. + +Configuration is done in the [keyword] and [keywordmaps] sections +of hgrc files. + +Example: + + [keyword] + # expand keywords in every python file except those matching "x*" + **.py = + x* = ignore + +Note: the more specific you are in your filename patterns + the less you lose speed in huge repos. + +For [keywordmaps] template mapping and expansion demonstration and +control run "hg kwdemo". + +An additional date template filter {date|utcdate} is provided. + +The default template mappings (view with "hg kwdemo -d") can be replaced +with customized keywords and templates. +Again, run "hg kwdemo" to control the results of your config changes. + +Before changing/disabling active keywords, run "hg kwshrink" to avoid +the risk of inadvertedly storing expanded keywords in the change history. + +To force expansion after enabling it, or a configuration change, run +"hg kwexpand". + +Also, when committing with the record extension or using mq's qrecord, be aware +that keywords cannot be updated. Again, run "hg kwexpand" on the files in +question to update keyword expansions after all changes have been checked in. + +Expansions spanning more than one line and incremental expansions, +like CVS' $Log$, are not supported. A keyword template map +"Log = {desc}" expands to the first line of the changeset description. + +list of commands: + + kwdemo print [keywordmaps] configuration and an expansion example + kwexpand expand keywords in working directory + kwfiles print files currently configured for keyword expansion + kwshrink revert expanded keywords in working directory + +use "hg -v help keyword" to show aliases and global options +% hg kwdemo +[extensions] +hgext.keyword = +[keyword] +* = +b = ignore +demo.txt = +[keywordmaps] +RCSFile = {file|basename},v +Author = {author|user} +Header = {root}/{file},v {node|short} {date|utcdate} {author|user} +Source = {root}/{file},v +Date = {date|utcdate} +Id = {file|basename},v {node|short} {date|utcdate} {author|user} +Revision = {node|short} +$RCSFile: demo.txt,v $ +$Author: test $ +$Header: /TMP/demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $ +$Source: /TMP/demo.txt,v $ +$Date: 2000/00/00 00:00:00 $ +$Id: demo.txt,v xxxxxxxxxxxx 2000/00/00 00:00:00 test $ +$Revision: xxxxxxxxxxxx $ +[extensions] +hgext.keyword = +[keyword] +* = +b = ignore +demo.txt = +[keywordmaps] +Branch = {branches} +$Branch: demobranch $ +% kwshrink should exit silently in empty/invalid repo +% cat +expand $Id$ +do not process $Id: +xxx $ +expand $Id$ +do not process $Id: +xxx $ +ignore $Id$ +% addremove +adding a +adding b +adding sym +% status +A a +A b +A sym +% default keyword expansion including commit hook +% interrupted commit should not change state or run commit hook +a +b +sym +transaction abort! +rollback completed +abort: empty commit message +% status +A a +A b +A sym +% commit +a +b +sym +overwriting a expanding keywords +running hook commit.test: cp a hooktest +% status +? 
hooktest +% identify +f782df5f9602 +% cat +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +ignore $Id$ +% hg cat +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +ignore $Id$ +a +% diff a hooktest +% removing commit hook from config +% touch +% status +% update +3 files updated, 0 files merged, 0 files removed, 0 files unresolved +% cat +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +ignore $Id$ +% check whether expansion is filewise +% commit c +adding c +% force expansion +overwriting a expanding keywords +overwriting c expanding keywords +% compare changenodes in a c +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +$Id: c,v ba4426d1938e 1970/01/01 00:00:01 user $ +tests for different changenodes +% qimport +% keywords should not be expanded in patch +# HG changeset patch +# User User Name <user@example.com> +# Date 1 0 +# Node ID ba4426d1938ec9673e03ab274d88c44e24618f7f +# Parent f782df5f9602483b4e51c31a12315f353bba380c +cndiff + +diff -r f782df5f9602 -r ba4426d1938e c +--- /dev/null Thu Jan 01 00:00:00 1970 +0000 ++++ b/c Thu Jan 01 00:00:01 1970 +0000 +@@ -0,0 +1,2 @@ ++$Id$ ++tests for different changenodes +% qpop +Patch queue now empty +% qgoto - should imply qpush +applying mqtest.diff +Now at: mqtest.diff +% cat +$Id: c,v ba4426d1938e 1970/01/01 00:00:01 user $ +tests for different changenodes +% qpop and move on +Patch queue now empty +% copy +% kwfiles added +a +c +% commit +c + c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292 +overwriting c expanding keywords +% cat a c +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +expand $Id: c,v 0ba462c0f077 1970/01/01 00:00:01 user $ +do not process $Id: +xxx $ +% touch copied c after 1 second +% status +% kwfiles +a +c +% diff --rev +diff -r f782df5f9602 c +--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +@@ -0,0 +1,3 @@ ++expand $Id$ ++do not process $Id: ++xxx $ +% rollback +rolling back last transaction +% status +A c +% update -C +0 files updated, 0 files merged, 1 files removed, 0 files unresolved +% custom keyword expansion +% try with kwdemo +[extensions] +hgext.keyword = +[keyword] +* = +b = ignore +demo.txt = +[keywordmaps] +Xinfo = {author}: {desc} +$Xinfo: test: hg keyword config and expansion example $ +% cat +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +expand $Id: a,v f782df5f9602 1970/01/01 00:00:00 user $ +do not process $Id: +xxx $ +ignore $Id$ +% hg cat +expand $Id: a f782df5f9602 Thu, 01 Jan 1970 00:00:00 +0000 user $ +do not process $Id: +xxx $ +ignore $Id$ +a +% interrupted commit should not change state +transaction abort! +rollback completed +abort: empty commit message +% status +M a +? 
log +% commit +a +overwriting a expanding keywords +% status +% cat +expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $ +do not process $Id: +xxx $ +$Xinfo: User Name <user@example.com>: firstline $ +expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $ +do not process $Id: +xxx $ +$Xinfo: User Name <user@example.com>: firstline $ +ignore $Id$ +% hg cat +expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $ +do not process $Id: +xxx $ +$Xinfo: User Name <user@example.com>: firstline $ +ignore $Id$ +a +% remove +% status +% rollback +rolling back last transaction +% status +R a +% revert a +% cat a +expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $ +do not process $Id: +xxx $ +$Xinfo: User Name <user@example.com>: firstline $ +% clone to test incoming +requesting all changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 3 changes to 3 files +3 files updated, 0 files merged, 0 files removed, 0 files unresolved +% incoming +comparing with test-keyword/Test +searching for changes +changeset: 1:0729690beff6 +tag: tip +user: User Name <user@example.com> +date: Thu Jan 01 00:00:02 1970 +0000 +summary: firstline + +% commit rejecttest +a +overwriting a expanding keywords +% export +% import +applying ../rejecttest.diff +% cat +expand $Id: a 82983f13f138 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest +do not process $Id: rejecttest +xxx $ +$Xinfo: User Name <user@example.com>: rejects? $ +expand $Id: a 82983f13f138 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest +do not process $Id: rejecttest +xxx $ +$Xinfo: User Name <user@example.com>: rejects? $ +ignore $Id$ + +% rollback +rolling back last transaction +% clean update +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +% kwexpand/kwshrink on selected files +% copy a x/a +% kwexpand a +overwriting a expanding keywords +% kwexpand x/a should abort +abort: outstanding uncommitted changes in given files +x/a + x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e +overwriting x/a expanding keywords +% cat a +expand $Id: x/a f27c134d2d9b Thu, 01 Jan 1970 00:00:03 +0000 user $ +do not process $Id: +xxx $ +$Xinfo: User Name <user@example.com>: xa $ +% kwshrink a inside directory x +overwriting x/a shrinking keywords +% cat a +expand $Id$ +do not process $Id: +xxx $ +$Xinfo$ +% kwexpand nonexistent +nonexistent: No such file or directory +% switch off expansion +% kwshrink with unknown file u +overwriting a shrinking keywords +overwriting x/a shrinking keywords +% cat +expand $Id$ +do not process $Id: +xxx $ +$Xinfo$ +expand $Id$ +do not process $Id: +xxx $ +$Xinfo$ +ignore $Id$ +% hg cat +expand $Id: a 0729690beff6 Thu, 01 Jan 1970 00:00:02 +0000 user $ +do not process $Id: +xxx $ +$Xinfo: User Name <user@example.com>: firstline $ +ignore $Id$ +a +% cat +expand $Id$ +do not process $Id: +xxx $ +$Xinfo$ +expand $Id$ +do not process $Id: +xxx $ +$Xinfo$ +ignore $Id$ +% hg cat +expand $Id$ +do not process $Id: +xxx $ +$Xinfo$ +ignore $Id$ +a
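As the help text and the expanded files above show, the extension rewrites each configured $Keyword$ into "$Keyword: <expanded template> $" on commit or kwexpand, and kwshrink reverses it. A toy, re-based illustration of just that substitution step (the extension itself expands the right-hand sides through the changeset templater; the values here are simply copied from the output above):

  import re

  templates = {'Id': 'a,v f782df5f9602 1970/01/01 00:00:00 user',
               'Xinfo': 'User Name <user@example.com>: firstline'}
  kwre = re.compile(r'\$(%s)(: [^$\n]*)?\$' % '|'.join(templates))

  def kwexpand(text):
      return kwre.sub(lambda m: '$%s: %s $' % (m.group(1), templates[m.group(1)]), text)

  def kwshrink(text):
      return kwre.sub(lambda m: '$%s$' % m.group(1), text)

  print kwexpand('expand $Id$')            # expand $Id: a,v f782df5f9602 ... user $
  print kwshrink(kwexpand('expand $Id$'))  # back to: expand $Id$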
--- a/tests/test-log Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-log Fri Feb 08 11:55:17 2008 +0100 @@ -41,6 +41,11 @@ hg ci -Ame2 -d '6 0' hg log -vC --template '{rev} {file_copies%filecopy}\n' -r 5 +echo % log copies, execute bit set +chmod +x e +hg ci -me3 -d '7 0' +hg log -vC --template '{rev} {file_copies%filecopy}\n' -r 6 + echo '% log -p d' hg log -pv d
--- a/tests/test-log.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-log.out Fri Feb 08 11:55:17 2008 +0100 @@ -86,6 +86,8 @@ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved adding foo 5 e (dir/b) +% log copies, execute bit set +6 % log -p d changeset: 3:16b60bf3f99a user: test
--- a/tests/test-merge-commit.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-merge-commit.out Fri Feb 08 11:55:17 2008 +0100 @@ -25,8 +25,10 @@ ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 7d3b554bfdf1 searching for copies back to rev 1 bar: versions differ -> m +picked tool 'internal:merge' for bar (binary False symlink False) merging bar my bar@2d2f9a22c82b+ other bar@7d3b554bfdf1 ancestor bar@0a3ab4856510 + premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) % contents of bar should be line1 line2 @@ -71,8 +73,10 @@ ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 96ab80c60897 searching for copies back to rev 1 bar: versions differ -> m +picked tool 'internal:merge' for bar (binary False symlink False) merging bar my bar@2d2f9a22c82b+ other bar@96ab80c60897 ancestor bar@0a3ab4856510 + premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) % contents of bar should be line1 line2
--- a/tests/test-merge-local.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-merge-local.out Fri Feb 08 11:55:17 2008 +0100 @@ -18,27 +18,28 @@ M zzz2_merge_bad # local merge with bad merge tool merging zzz1_merge_ok -merging zzz1_merge_ok failed! merging zzz2_merge_bad merging zzz2_merge_bad failed! -3 files updated, 0 files merged, 2 files removed, 2 files unresolved +3 files updated, 1 files merged, 2 files removed, 1 files unresolved There are unresolved merges with locally modified files. -You can redo the full merge using: +You can finish the partial merge using: hg update 0 hg update 1 2 files updated, 0 files merged, 3 files removed, 0 files unresolved --- a/zzz1_merge_ok +++ b/zzz1_merge_ok ++new first line +new last line --- a/zzz2_merge_bad +++ b/zzz2_merge_bad +another last line M zzz1_merge_ok M zzz2_merge_bad +? zzz2_merge_bad.orig # local merge with conflicts -warning: conflicts during merge. merging zzz1_merge_ok merging zzz2_merge_bad +warning: conflicts during merge. merging zzz2_merge_bad failed! 3 files updated, 1 files merged, 2 files removed, 1 files unresolved There are unresolved merges with locally modified files. @@ -57,6 +58,7 @@ +new last line M zzz1_merge_ok M zzz2_merge_bad +? zzz2_merge_bad.orig # local merge without conflicts merging zzz1_merge_ok 4 files updated, 1 files merged, 2 files removed, 0 files unresolved
--- a/tests/test-merge-revert2.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-merge-revert2.out Fri Feb 08 11:55:17 2008 +0100 @@ -9,8 +9,8 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved f248da0d4c3e tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +merging file1 warning: conflicts during merge. -merging file1 merging file1 failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved There are unresolved merges with locally modified files. @@ -20,18 +20,22 @@ diff -r f248da0d4c3e file1 --- a/file1 +++ b/file1 -@@ -1,3 +1,7 @@ added file1 +@@ -1,3 +1,7 @@ added file1 another line of text -+<<<<<<< my ++<<<<<<< local +changed file1 different +======= changed file1 +>>>>>>> other M file1 +? file1.orig f248da0d4c3e+ tip reverting file1 +? file1.orig f248da0d4c3e tip +? file1.orig f248da0d4c3e tip 0 files updated, 0 files merged, 0 files removed, 0 files unresolved +? file1.orig f248da0d4c3e tip
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-merge-types Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,39 @@ +#!/bin/sh + +hg init +echo a > a +hg ci -Amadd # 0 + +chmod +x a +hg ci -mexecutable # 1 + +hg up 0 +rm a +ln -s symlink a +hg ci -msymlink # 2 +hg merge --debug + +echo % symlink is local parent, executable is other + +if [ -h a ]; then + echo a is a symlink + $TESTDIR/readlink.py a +elif [ -x a ]; then + echo a is executable +else + echo "a has no flags (default for conflicts)" +fi + +hg update -C 1 +hg merge --debug + +echo % symlink is other parent, executable is local + +if [ -h a ]; then + echo a is a symlink + $TESTDIR/readlink.py a +elif [ -x a ]; then + echo a is executable +else + echo "a has no flags (default for conflicts)" +fi
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-merge-types.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,22 @@ +adding a +0 files updated, 0 files merged, 0 files removed, 0 files unresolved +resolving manifests + overwrite None partial False + ancestor c334dc3be0da local 521a1e40188f+ remote 3574f3e69b1c + searching for copies back to rev 1 + a: update permissions -> e +0 files updated, 0 files merged, 0 files removed, 0 files unresolved +(branch merge, don't forget to commit) +% symlink is local parent, executable is other +a has no flags (default for conflicts) +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +resolving manifests + overwrite None partial False + ancestor c334dc3be0da local 3574f3e69b1c+ remote 521a1e40188f + searching for copies back to rev 1 + a: remote is newer -> g +getting a +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +(branch merge, don't forget to commit) +% symlink is other parent, executable is local +a has no flags (default for conflicts)
--- a/tests/test-merge1.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-merge1.out Fri Feb 08 11:55:17 2008 +0100 @@ -34,7 +34,7 @@ diff -r c1dd73cbf59f b --- a/b +++ b/b -@@ -1,1 +1,1 @@ This is file b1 +@@ -1,1 +1,1 @@ -This is file b1 +This is file b22 M b @@ -49,7 +49,7 @@ diff -r c1dd73cbf59f b --- a/b +++ b/b -@@ -1,1 +1,1 @@ This is file b1 +@@ -1,1 +1,1 @@ -This is file b1 +This is file b33 M b
--- a/tests/test-merge10.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-merge10.out Fri Feb 08 11:55:17 2008 +0100 @@ -14,6 +14,6 @@ diff -r d02b3fc32762 testdir/subdir/a --- a/testdir/subdir/a +++ b/testdir/subdir/a -@@ -1,1 +1,1 @@ a +@@ -1,1 +1,1 @@ -a +alpha
--- a/tests/test-merge7.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-merge7.out Fri Feb 08 11:55:17 2008 +0100 @@ -6,8 +6,8 @@ adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) +merging test.txt warning: conflicts during merge. -merging test.txt merging test.txt failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved There are unresolved merges, you can redo the full merge using: @@ -20,21 +20,22 @@ adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) -warning: conflicts during merge. resolving manifests overwrite None partial False ancestor faaea63e63a9 local 451c744aabcc+ remote a070d41e8360 searching for copies back to rev 1 test.txt: versions differ -> m +picked tool 'internal:merge' for test.txt (binary False symlink False) merging test.txt my test.txt@451c744aabcc+ other test.txt@a070d41e8360 ancestor test.txt@faaea63e63a9 +warning: conflicts during merge. merging test.txt failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved There are unresolved merges, you can redo the full merge using: hg update -C 3 hg merge 4 one -<<<<<<< my +<<<<<<< local two-point-five ======= two-point-one
--- a/tests/test-merge9.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-merge9.out Fri Feb 08 11:55:17 2008 +0100 @@ -5,8 +5,7 @@ merging bar merging bar failed! merging foo and baz -merging baz failed! -1 files updated, 0 files merged, 0 files removed, 2 files unresolved +1 files updated, 1 files merged, 0 files removed, 1 files unresolved There are unresolved merges, you can redo the full merge using: hg update -C 2 hg merge 1 @@ -14,8 +13,7 @@ merging bar merging bar failed! merging baz and foo -merging baz failed! -1 files updated, 0 files merged, 0 files removed, 2 files unresolved +1 files updated, 1 files merged, 0 files removed, 1 files unresolved There are unresolved merges, you can redo the full merge using: hg update -C 1 hg merge 2
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-header-date Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,177 @@ +#!/bin/sh + +echo "[extensions]" >> $HGRCPATH +echo "mq=" >> $HGRCPATH +echo "[diff]" >> $HGRCPATH +echo "nodates=true" >> $HGRCPATH + + +catpatch() { + cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /" +} + +catlog() { + catpatch $1 + hg log --template "{rev}: {desc} - {author}\n" +} + +catlogd() { + catpatch $1 + hg log --template "{rev}: {desc} - {author} - {date}\n" +} + +drop() { + hg qpop + hg qdel $1.patch +} + + +echo ==== init +hg init a +cd a +hg qinit + + +echo ==== qnew -d +hg qnew -d '3 0' 1.patch +catlogd 1 + +echo ==== qref +echo "1" >1 +hg add +hg qref +catlogd 1 + +echo ==== qref -d +hg qref -d '4 0' +catlogd 1 + + +echo ==== qnew +hg qnew 2.patch +echo "2" >2 +hg add +hg qref +catlog 2 + +echo ==== qref -d +hg qref -d '5 0' +catlog 2 + +drop 2 + + +echo ==== qnew -d -m +hg qnew -d '6 0' -m "Three" 3.patch +catlogd 3 + +echo ==== qref +echo "3" >3 +hg add +hg qref +catlogd 3 + +echo ==== qref -m +hg qref -m "Drei" +catlogd 3 + +echo ==== qref -d +hg qref -d '7 0' +catlogd 3 + +echo ==== qref -d -m +hg qref -d '8 0' -m "Three (again)" +catlogd 3 + + +echo ==== qnew -m +hg qnew -m "Four" 4.patch +echo "4" >4 +hg add +hg qref +catlog 4 + +echo ==== qref -d +hg qref -d '9 0' +catlog 4 + +drop 4 + + +echo ==== qnew with HG header +hg qnew 5.patch +hg qpop +echo "# HG changeset patch" >>.hg/patches/5.patch +echo "# Date 10 0" >>.hg/patches/5.patch +hg qpush 2>&1 | grep 'Now at' +catlogd 5 + +echo ==== hg qref +echo "5" >5 +hg add +hg qref +catlogd 5 + +echo ==== hg qref -d +hg qref -d '11 0' +catlogd 5 + + +echo ==== qnew -u +hg qnew -u jane 6.patch +echo "6" >6 +hg add +hg qref +catlog 6 + +echo ==== qref -d +hg qref -d '12 0' +catlog 6 + +drop 6 + + +echo ==== qnew -d +hg qnew -d '13 0' 7.patch +echo "7" >7 +hg add +hg qref +catlog 7 + +echo ==== qref -u +hg qref -u john +catlogd 7 + + +echo ==== qnew +hg qnew 8.patch +echo "8" >8 +hg add +hg qref +catlog 8 + +echo ==== qref -u -d +hg qref -u john -d '14 0' +catlog 8 + +drop 8 + + +echo ==== qnew -m +hg qnew -m "Nine" 9.patch +echo "9" >9 +hg add +hg qref +catlog 9 + +echo ==== qref -u -d +hg qref -u john -d '15 0' +catlog 9 + +drop 9 + + +echo ==== "qpop -a / qpush -a" +hg qpop -a +hg qpush -a +hg log --template "{rev}: {desc} - {author} - {date}\n"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-header-date.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,284 @@ +==== init +==== qnew -d +# HG changeset patch +# Date 3 0 + +0: [mq]: 1.patch - test - 3.00 +==== qref +adding 1 +# HG changeset patch +# Date 3 0 + +diff -r ... 1 +--- /dev/null ++++ b/1 +@@ -0,0 +1,1 @@ ++1 +0: [mq]: 1.patch - test - 3.00 +==== qref -d +# HG changeset patch +# Date 4 0 + +diff -r ... 1 +--- /dev/null ++++ b/1 +@@ -0,0 +1,1 @@ ++1 +0: [mq]: 1.patch - test - 4.00 +==== qnew +adding 2 +diff -r ... 2 +--- /dev/null ++++ b/2 +@@ -0,0 +1,1 @@ ++2 +1: [mq]: 2.patch - test +0: [mq]: 1.patch - test +==== qref -d +diff -r ... 2 +--- /dev/null ++++ b/2 +@@ -0,0 +1,1 @@ ++2 +1: [mq]: 2.patch - test +0: [mq]: 1.patch - test +Now at: 1.patch +==== qnew -d -m +# HG changeset patch +# Date 6 0 + +Three +1: Three - test - 6.00 +0: [mq]: 1.patch - test - 4.00 +==== qref +adding 3 +# HG changeset patch +# Date 6 0 + +Three + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +1: Three - test - 6.00 +0: [mq]: 1.patch - test - 4.00 +==== qref -m +# HG changeset patch +# Date 6 0 + +Drei + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +1: Drei - test - 6.00 +0: [mq]: 1.patch - test - 4.00 +==== qref -d +# HG changeset patch +# Date 7 0 + +Drei + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +1: Drei - test - 7.00 +0: [mq]: 1.patch - test - 4.00 +==== qref -d -m +# HG changeset patch +# Date 8 0 + +Three (again) + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +1: Three (again) - test - 8.00 +0: [mq]: 1.patch - test - 4.00 +==== qnew -m +adding 4 +Four + +diff -r ... 4 +--- /dev/null ++++ b/4 +@@ -0,0 +1,1 @@ ++4 +2: Four - test +1: Three (again) - test +0: [mq]: 1.patch - test +==== qref -d +Four + +diff -r ... 4 +--- /dev/null ++++ b/4 +@@ -0,0 +1,1 @@ ++4 +2: Four - test +1: Three (again) - test +0: [mq]: 1.patch - test +Now at: 3.patch +==== qnew with HG header +Now at: 3.patch +Now at: 5.patch +# HG changeset patch +# Date 10 0 +2: imported patch 5.patch - test - 10.00 +1: Three (again) - test - 8.00 +0: [mq]: 1.patch - test - 4.00 +==== hg qref +adding 5 +# HG changeset patch +# Date 10 0 + +diff -r ... 5 +--- /dev/null ++++ b/5 +@@ -0,0 +1,1 @@ ++5 +2: [mq]: 5.patch - test - 10.00 +1: Three (again) - test - 8.00 +0: [mq]: 1.patch - test - 4.00 +==== hg qref -d +# HG changeset patch +# Date 11 0 + +diff -r ... 5 +--- /dev/null ++++ b/5 +@@ -0,0 +1,1 @@ ++5 +2: [mq]: 5.patch - test - 11.00 +1: Three (again) - test - 8.00 +0: [mq]: 1.patch - test - 4.00 +==== qnew -u +adding 6 +From: jane + +diff -r ... 6 +--- /dev/null ++++ b/6 +@@ -0,0 +1,1 @@ ++6 +3: [mq]: 6.patch - jane +2: [mq]: 5.patch - test +1: Three (again) - test +0: [mq]: 1.patch - test +==== qref -d +From: jane + +diff -r ... 6 +--- /dev/null ++++ b/6 +@@ -0,0 +1,1 @@ ++6 +3: [mq]: 6.patch - jane +2: [mq]: 5.patch - test +1: Three (again) - test +0: [mq]: 1.patch - test +Now at: 5.patch +==== qnew -d +adding 7 +# HG changeset patch +# Date 13 0 + +diff -r ... 7 +--- /dev/null ++++ b/7 +@@ -0,0 +1,1 @@ ++7 +3: [mq]: 7.patch - test +2: [mq]: 5.patch - test +1: Three (again) - test +0: [mq]: 1.patch - test +==== qref -u +# HG changeset patch +# User john +# Date 13 0 + +diff -r ... 7 +--- /dev/null ++++ b/7 +@@ -0,0 +1,1 @@ ++7 +3: [mq]: 7.patch - john - 13.00 +2: [mq]: 5.patch - test - 11.00 +1: Three (again) - test - 8.00 +0: [mq]: 1.patch - test - 4.00 +==== qnew +adding 8 +diff -r ... 
8 +--- /dev/null ++++ b/8 +@@ -0,0 +1,1 @@ ++8 +4: [mq]: 8.patch - test +3: [mq]: 7.patch - john +2: [mq]: 5.patch - test +1: Three (again) - test +0: [mq]: 1.patch - test +==== qref -u -d +From: john + + +diff -r ... 8 +--- /dev/null ++++ b/8 +@@ -0,0 +1,1 @@ ++8 +4: [mq]: 8.patch - john +3: [mq]: 7.patch - john +2: [mq]: 5.patch - test +1: Three (again) - test +0: [mq]: 1.patch - test +Now at: 7.patch +==== qnew -m +adding 9 +Nine + +diff -r ... 9 +--- /dev/null ++++ b/9 +@@ -0,0 +1,1 @@ ++9 +4: Nine - test +3: [mq]: 7.patch - john +2: [mq]: 5.patch - test +1: Three (again) - test +0: [mq]: 1.patch - test +==== qref -u -d +From: john + +Nine + +diff -r ... 9 +--- /dev/null ++++ b/9 +@@ -0,0 +1,1 @@ ++9 +4: Nine - john +3: [mq]: 7.patch - john +2: [mq]: 5.patch - test +1: Three (again) - test +0: [mq]: 1.patch - test +Now at: 7.patch +==== qpop -a / qpush -a +Patch queue now empty +applying 1.patch +applying 3.patch +applying 5.patch +applying 7.patch +Now at: 7.patch +3: imported patch 7.patch - john - 13.00 +2: imported patch 5.patch - test - 11.00 +1: Three (again) - test - 8.00 +0: imported patch 1.patch - test - 4.00
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-header-from Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,107 @@ +#!/bin/sh + +echo "[extensions]" >> $HGRCPATH +echo "mq=" >> $HGRCPATH +echo "[diff]" >> $HGRCPATH +echo "nodates=true" >> $HGRCPATH + + +catlog() { + cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /" + hg log --template "{rev}: {desc} - {author}\n" +} + + +echo ==== init +hg init a +cd a +hg qinit + + +echo ==== qnew -U +hg qnew -U 1.patch +catlog 1 + +echo ==== qref +echo "1" >1 +hg add +hg qref +catlog 1 + +echo ==== qref -u +hg qref -u mary +catlog 1 + +echo ==== qnew +hg qnew 2.patch +echo "2" >2 +hg add +hg qref +catlog 2 + +echo ==== qref -u +hg qref -u jane +catlog 2 + + +echo ==== qnew -U -m +hg qnew -U -m "Three" 3.patch +catlog 3 + +echo ==== qref +echo "3" >3 +hg add +hg qref +catlog 3 + +echo ==== qref -m +hg qref -m "Drei" +catlog 3 + +echo ==== qref -u +hg qref -u mary +catlog 3 + +echo ==== qref -u -m +hg qref -u maria -m "Three (again)" +catlog 3 + +echo ==== qnew -m +hg qnew -m "Four" 4.patch +echo "4" >4 +hg add +hg qref +catlog 4 + +echo ==== qref -u +hg qref -u jane +catlog 4 + + +echo ==== qnew with HG header +hg qnew 5.patch +hg qpop +echo "# HG changeset patch" >>.hg/patches/5.patch +echo "# User johndoe" >>.hg/patches/5.patch +hg qpush 2>&1 | grep 'Now at' +catlog 5 + +echo ==== hg qref +echo "5" >5 +hg add +hg qref +catlog 5 + +echo ==== hg qref -U +hg qref -U +catlog 5 + +echo ==== hg qref -u +hg qref -u johndeere +catlog 5 + + +echo ==== "qpop -a / qpush -a" +hg qpop -a +hg qpush -a +hg log --template "{rev}: {desc} - {author}\n"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-header-from.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,197 @@ +==== init +==== qnew -U +From: test + +0: [mq]: 1.patch - test +==== qref +adding 1 +From: test + +diff -r ... 1 +--- /dev/null ++++ b/1 +@@ -0,0 +1,1 @@ ++1 +0: [mq]: 1.patch - test +==== qref -u +From: mary + +diff -r ... 1 +--- /dev/null ++++ b/1 +@@ -0,0 +1,1 @@ ++1 +0: [mq]: 1.patch - mary +==== qnew +adding 2 +diff -r ... 2 +--- /dev/null ++++ b/2 +@@ -0,0 +1,1 @@ ++2 +1: [mq]: 2.patch - test +0: [mq]: 1.patch - mary +==== qref -u +From: jane + + +diff -r ... 2 +--- /dev/null ++++ b/2 +@@ -0,0 +1,1 @@ ++2 +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qnew -U -m +From: test + +Three +2: Three - test +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qref +adding 3 +From: test + +Three + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +2: Three - test +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qref -m +From: test + +Drei + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +2: Drei - test +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qref -u +From: mary + +Drei + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +2: Drei - mary +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qref -u -m +From: maria + +Three (again) + +diff -r ... 3 +--- /dev/null ++++ b/3 +@@ -0,0 +1,1 @@ ++3 +2: Three (again) - maria +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qnew -m +adding 4 +Four + +diff -r ... 4 +--- /dev/null ++++ b/4 +@@ -0,0 +1,1 @@ ++4 +3: Four - test +2: Three (again) - maria +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qref -u +From: jane + +Four + +diff -r ... 4 +--- /dev/null ++++ b/4 +@@ -0,0 +1,1 @@ ++4 +3: Four - jane +2: Three (again) - maria +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qnew with HG header +Now at: 4.patch +Now at: 5.patch +# HG changeset patch +# User johndoe +4: imported patch 5.patch - johndoe +3: Four - jane +2: Three (again) - maria +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== hg qref +adding 5 +# HG changeset patch +# User johndoe + +diff -r ... 5 +--- /dev/null ++++ b/5 +@@ -0,0 +1,1 @@ ++5 +4: [mq]: 5.patch - johndoe +3: Four - jane +2: Three (again) - maria +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== hg qref -U +# HG changeset patch +# User test + +diff -r ... 5 +--- /dev/null ++++ b/5 +@@ -0,0 +1,1 @@ ++5 +4: [mq]: 5.patch - test +3: Four - jane +2: Three (again) - maria +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== hg qref -u +# HG changeset patch +# User johndeere + +diff -r ... 5 +--- /dev/null ++++ b/5 +@@ -0,0 +1,1 @@ ++5 +4: [mq]: 5.patch - johndeere +3: Four - jane +2: Three (again) - maria +1: [mq]: 2.patch - jane +0: [mq]: 1.patch - mary +==== qpop -a / qpush -a +Patch queue now empty +applying 1.patch +applying 2.patch +applying 3.patch +applying 4.patch +applying 5.patch +Now at: 5.patch +4: imported patch 5.patch - johndeere +3: Four - jane +2: Three (again) - maria +1: imported patch 2.patch - jane +0: imported patch 1.patch - mary
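Taken together, test-mq-header-date and test-mq-header-from pin down the two header styles an mq patch can carry: plain mail-style "From:" / "Date:" lines, or "# User" / "# Date" lines underneath a "# HG changeset patch" marker, with qrefresh -u/-d rewriting whichever form the patch already uses. The helper below is purely illustrative (it is not mq's implementation) and only sketches how those header lines could be read back, in the Python 2 style used elsewhere in this changeset:

    # Hypothetical reader for the two mq header styles exercised above.
    def readheader(patchpath):
        user = date = None
        for line in file(patchpath):
            line = line.rstrip('\n')
            if not line:                       # a blank line ends the header block
                break
            if line.startswith('# User '):
                user = line[len('# User '):]   # "# HG changeset patch" style
            elif line.startswith('# Date '):
                date = line[len('# Date '):]
            elif line.startswith('From: '):
                user = line[len('From: '):]    # plain mail-style header
            elif line.startswith('Date: '):
                date = line[len('Date: '):]
        return user, date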
--- a/tests/test-mq-missingfiles Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-mq-missingfiles Fri Feb 08 11:55:17 2008 +0100 @@ -41,6 +41,8 @@ echo % display added files cat a cat c +echo % display rejections +cat b.rej cd .. @@ -65,5 +67,7 @@ echo % display added files cat a cat c +echo % display rejections +cat b.rej cd ..
--- a/tests/test-mq-missingfiles.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-mq-missingfiles.out Fri Feb 08 11:55:17 2008 +0100 @@ -2,24 +2,48 @@ Patch queue now empty % push patch with missing target applying changeb -unable to find b or b for patching -unable to find b or b for patching +unable to find 'b' for patching +2 out of 2 hunks FAILED -- saving rejects to file b.rej patch failed, unable to continue (try -v) patch failed, rejects left in working dir Errors during apply, please fix and refresh changeb % display added files a c +% display rejections +--- b ++++ b +@@ -1,3 +1,5 @@ ++b ++b + a + a + a +@@ -8,3 +10,5 @@ + a + a + a ++c ++c adding b Patch queue now empty % push git patch with missing target applying changeb -unable to find b or b for patching +unable to find 'b' for patching +1 out of 1 hunk FAILED -- saving rejects to file b.rej patch failed, unable to continue (try -v) b: No such file or directory b not tracked! patch failed, rejects left in working dir Errors during apply, please fix and refresh changeb +? b.rej % display added files a c +% display rejections +--- b ++++ b +GIT binary patch +literal 2 +Jc${No0000400IC2 +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-pull-from-bundle Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,86 @@ +#!/bin/sh + +cat <<EOF >> $HGRCPATH +[extensions] +mq= +[defaults] +log = --template "{rev}: {desc}\\n" +heads = --template "{rev}: {desc}\\n" +incoming = --template "{rev}: {desc}\\n" +EOF + +echo "====== .hgrc" +cat $HGRCPATH + +echo "====== Setup main" +hg init base +cd base +echo "One" > one +hg add +hg ci -m "main: one added." +echo "++" >> one +hg ci -m "main: one updated." + +echo "====== Bundle main" +hg bundle --base=null ../main.hg +cd .. + +echo "====== Incoming to fresh repo" +hg init fresh +echo ">> hg -R fresh incoming main.hg" +hg -R fresh incoming main.hg +echo ">> hg -R fresh incoming bundle:fresh+main.hg" +hg -R fresh incoming bundle:fresh+main.hg + + +echo "====== Setup queue" +cd base +hg qinit -c +hg qnew -m "patch: two added." two.patch +echo two > two +hg add +hg qrefresh +hg qcommit -m "queue: two.patch added." +hg qpop -a + +echo "====== Bundle queue" +hg -R .hg/patches bundle --base=null ../queue.hgq +cd .. + + +echo "====== Clone base" +hg clone base copy +cd copy +hg qinit -c + +echo "====== Incoming queue bundle" +echo ">> hg -R .hg/patches incoming ../queue.hgq" +hg -R .hg/patches incoming ../queue.hgq + +echo "====== Pull queue bundle" +echo ">> hg -R .hg/patches pull --update ../queue.hgq" +hg -R .hg/patches pull --update ../queue.hgq +echo ">> hg -R .hg/patches heads" +hg -R .hg/patches heads +echo ">> hg -R .hg/patches log" +hg -R .hg/patches log +echo ">> hg qseries" +hg qseries +cd .. + + +echo "====== Clone base again" +hg clone base copy2 +cd copy2 +hg qinit -c + +echo "====== Unbundle queue bundle" +echo ">> hg -R .hg/patches unbundle --update ../queue.hgq" +hg -R .hg/patches unbundle --update ../queue.hgq +echo ">> hg -R .hg/patches heads" +hg -R .hg/patches heads +echo ">> hg -R .hg/patches log" +hg -R .hg/patches log +echo ">> hg qseries" +hg qseries +cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-pull-from-bundle.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,70 @@ +====== .hgrc +[ui] +slash = True +[defaults] +backout = -d "0 0" +commit = -d "0 0" +debugrawcommit = -d "0 0" +tag = -d "0 0" +[extensions] +mq= +[defaults] +log = --template "{rev}: {desc}\n" +heads = --template "{rev}: {desc}\n" +incoming = --template "{rev}: {desc}\n" +====== Setup main +adding one +====== Bundle main +2 changesets found +====== Incoming to fresh repo +>> hg -R fresh incoming main.hg +comparing with main.hg +0: main: one added. +1: main: one updated. +>> hg -R fresh incoming bundle:fresh+main.hg +comparing with bundle:fresh+main.hg +0: main: one added. +1: main: one updated. +====== Setup queue +adding two +Patch queue now empty +====== Bundle queue +1 changesets found +====== Clone base +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +====== Incoming queue bundle +>> hg -R .hg/patches incoming ../queue.hgq +comparing with ../queue.hgq +0: queue: two.patch added. +====== Pull queue bundle +>> hg -R .hg/patches pull --update ../queue.hgq +pulling from ../queue.hgq +requesting all changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 3 changes to 3 files +merging series +2 files updated, 1 files merged, 0 files removed, 0 files unresolved +>> hg -R .hg/patches heads +0: queue: two.patch added. +>> hg -R .hg/patches log +0: queue: two.patch added. +>> hg qseries +two.patch +====== Clone base again +1 files updated, 0 files merged, 0 files removed, 0 files unresolved +====== Unbundle queue bundle +>> hg -R .hg/patches unbundle --update ../queue.hgq +adding changesets +adding manifests +adding file changes +added 1 changesets with 3 changes to 3 files +merging series +2 files updated, 1 files merged, 0 files removed, 0 files unresolved +>> hg -R .hg/patches heads +0: queue: two.patch added. +>> hg -R .hg/patches log +0: queue: two.patch added. +>> hg qseries +two.patch
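The test above leans on the fact that a queue created with "hg qinit -c" is itself a Mercurial repository rooted at .hg/patches, which is why incoming, pull and unbundle all work on it through -R. A rough sketch of the same idea through the Python API of this era follows; the path is made up for illustration and the calls (hg.repository, changelog.read) are assumptions about the 2008-era API, not part of the test:

    # Open the versioned patch queue like any other repository and show who
    # committed its tip.  Sketch only; not part of the test suite.
    from mercurial import hg, ui

    u = ui.ui()
    queue = hg.repository(u, 'copy/.hg/patches')   # hypothetical path from the test above
    tip = queue.changelog.tip()
    print queue.changelog.read(tip)[1]             # read() returns (manifest, user, date, files, desc)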
--- a/tests/test-mq-qdiff.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-mq-qdiff.out Fri Feb 08 11:55:17 2008 +0100
@@ -7,13 +7,13 @@
 diff -r 67e992f2c4f3 base
 --- a/base
 +++ b/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
 % qdiff dirname
 diff -r 67e992f2c4f3 base
 --- a/base
 +++ b/base
-@@ -1,1 +1,1 @@ base
+@@ -1,1 +1,1 @@
 -base
 +patched
--- a/tests/test-mq-qrefresh.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-mq-qrefresh.out Fri Feb 08 11:55:17 2008 +0100 @@ -8,26 +8,26 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % qdiff dirname diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % patch file contents @@ -36,13 +36,13 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % qrefresh 1 @@ -50,26 +50,26 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % qdiff dirname diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % patch file contents @@ -78,7 +78,7 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % qrefresh . in subdir @@ -86,26 +86,26 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % qdiff dirname diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % patch file contents @@ -114,7 +114,7 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % qrefresh in hg-root again @@ -122,26 +122,26 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % qdiff dirname diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched % patch file contents @@ -150,12 +150,12 @@ diff -r b55ecdccb5cf 1/base --- a/1/base +++ b/1/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched diff -r b55ecdccb5cf 2/base --- a/2/base +++ b/2/base -@@ -1,1 +1,1 @@ base +@@ -1,1 +1,1 @@ -base +patched
--- a/tests/test-mq-symlinks Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-mq-symlinks Fri Feb 08 11:55:17 2008 +0100 @@ -3,17 +3,6 @@ echo "[extensions]" >> $HGRCPATH echo "mq=" >> $HGRCPATH -cat >> readlink.py <<EOF -import errno, os, sys - -for f in sys.argv[1:]: - try: - print f, '->', os.readlink(f) - except OSError, err: - if err.errno != errno.EINVAL: raise - print f, 'not a symlink' -EOF - hg init hg qinit hg qnew base.patch @@ -21,14 +10,14 @@ echo b > b hg add a b hg qrefresh -python readlink.py a +$TESTDIR/readlink.py a hg qnew symlink.patch rm a ln -s b a hg qrefresh --git -python readlink.py a +$TESTDIR/readlink.py a hg qpop hg qpush -python readlink.py a +$TESTDIR/readlink.py a
--- a/tests/test-mq.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-mq.out Fri Feb 08 11:55:17 2008 +0100 @@ -104,7 +104,7 @@ diff -r xa --- a/a +++ b/a -@@ -1,1 +1,2 @@ a +@@ -1,1 +1,2 @@ a +a % empty qrefresh @@ -115,7 +115,7 @@ working dir diff: --- a/a +++ b/a -@@ -1,1 +1,2 @@ a +@@ -1,1 +1,2 @@ a +a % qpop @@ -254,7 +254,7 @@ diff -r cb9a9f314b8b a --- a/a +++ b/a -@@ -1,1 +1,2 @@ a +@@ -1,1 +1,2 @@ a +a diff -r cb9a9f314b8b b/f @@ -327,7 +327,6 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files -(run 'hg update' to get a working copy) Patch queue now empty applying bar Now at: bar @@ -361,7 +360,6 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files -(run 'hg update' to get a working copy) Patch queue now empty applying bar Now at: bar @@ -436,7 +434,6 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files -(run 'hg update' to get a working copy) changeset: 1:20cbbe65cff7 tag: tip user: test
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-newcgi Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,91 @@ +#!/bin/sh +# This tests if CGI files from after d0db3462d568 but +# before d74fc8dec2b4 still work. + +hg init test + +cat >hgweb.cgi <<HGWEB +#!/usr/bin/env python +# +# An example CGI script to use hgweb, edit as necessary + +import cgitb +cgitb.enable() + +from mercurial import demandimport; demandimport.enable() +from mercurial.hgweb import hgweb +from mercurial.hgweb import wsgicgi +from mercurial.hgweb.request import wsgiapplication + +def make_web_app(): + return hgweb("test", "Empty test repository") + +wsgicgi.launch(wsgiapplication(make_web_app)) +HGWEB +chmod 755 hgweb.cgi + +cat >hgweb.config <<HGWEBDIRCONF +[paths] +test = test +HGWEBDIRCONF + +cat >hgwebdir.cgi <<HGWEBDIR +#!/usr/bin/env python +# +# An example CGI script to export multiple hgweb repos, edit as necessary + +import cgitb +cgitb.enable() + +from mercurial import demandimport; demandimport.enable() +from mercurial.hgweb import hgwebdir +from mercurial.hgweb import wsgicgi +from mercurial.hgweb.request import wsgiapplication + +def make_web_app(): + return hgwebdir("hgweb.config") + +wsgicgi.launch(wsgiapplication(make_web_app)) +HGWEBDIR +chmod 755 hgwebdir.cgi + +DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT +GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE +HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT +HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET +HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING +HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE +HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL +HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION +HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST +HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE +HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT +PATH_INFO="/"; export PATH_INFO +PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED +QUERY_STRING=""; export QUERY_STRING +REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR +REMOTE_PORT="44703"; export REMOTE_PORT +REQUEST_METHOD="GET"; export REQUEST_METHOD +REQUEST_URI="/test/"; export REQUEST_URI +SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME +SCRIPT_NAME="/test"; export SCRIPT_NAME +SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI +SCRIPT_URL="/test/"; export SCRIPT_URL +SERVER_ADDR="127.0.0.1"; export SERVER_ADDR +SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN +SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME +SERVER_PORT="80"; export SERVER_PORT +SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL +SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>\; export SERVER_SIGNATURE +" +SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE +python hgweb.cgi >page1 2>&1 ; echo $? +python hgwebdir.cgi >page2 2>&1 ; echo $? +PATH_INFO="/test/" +PATH_TRANSLATED="/var/something/test.cgi" +REQUEST_URI="/test/test/" +SCRIPT_URI="http://hg.omnifarious.org/test/test/" +SCRIPT_URL="/test/test/" +python hgwebdir.cgi >page3 2>&1 ; echo $? +fgrep -i error page1 page2 page3 && exit 1 +exit 0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-newcgi.out Fri Feb 08 11:55:17 2008 +0100
@@ -0,0 +1,3 @@
+0
+0
+0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-newercgi Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,84 @@ +#!/bin/sh +# This is a rudimentary test of the CGI files as of d74fc8dec2b4. + +hg init test + +cat >hgweb.cgi <<HGWEB +#!/usr/bin/env python +# +# An example CGI script to use hgweb, edit as necessary + +import cgitb +cgitb.enable() + +from mercurial import demandimport; demandimport.enable() +from mercurial.hgweb import hgweb +from mercurial.hgweb import wsgicgi + +application = hgweb("test", "Empty test repository") +wsgicgi.launch(application) +HGWEB +chmod 755 hgweb.cgi + +cat >hgweb.config <<HGWEBDIRCONF +[paths] +test = test +HGWEBDIRCONF + +cat >hgwebdir.cgi <<HGWEBDIR +#!/usr/bin/env python +# +# An example CGI script to export multiple hgweb repos, edit as necessary + +import cgitb +cgitb.enable() + +from mercurial import demandimport; demandimport.enable() +from mercurial.hgweb import hgwebdir +from mercurial.hgweb import wsgicgi + +application = hgwebdir("hgweb.config") +wsgicgi.launch(application) +HGWEBDIR +chmod 755 hgwebdir.cgi + +DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT +GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE +HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT +HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET +HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING +HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE +HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL +HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION +HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST +HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE +HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT +PATH_INFO="/"; export PATH_INFO +PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED +QUERY_STRING=""; export QUERY_STRING +REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR +REMOTE_PORT="44703"; export REMOTE_PORT +REQUEST_METHOD="GET"; export REQUEST_METHOD +REQUEST_URI="/test/"; export REQUEST_URI +SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME +SCRIPT_NAME="/test"; export SCRIPT_NAME +SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI +SCRIPT_URL="/test/"; export SCRIPT_URL +SERVER_ADDR="127.0.0.1"; export SERVER_ADDR +SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN +SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME +SERVER_PORT="80"; export SERVER_PORT +SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL +SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>\; export SERVER_SIGNATURE +" +SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE +python hgweb.cgi >page1 2>&1 ; echo $? +python hgwebdir.cgi >page2 2>&1 ; echo $? +PATH_INFO="/test/" +PATH_TRANSLATED="/var/something/test.cgi" +REQUEST_URI="/test/test/" +SCRIPT_URI="http://hg.omnifarious.org/test/test/" +SCRIPT_URL="/test/test/" +python hgwebdir.cgi >page3 2>&1 ; echo $? +fgrep -i error page1 page2 page3 && exit 1 +exit 0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-newercgi.out Fri Feb 08 11:55:17 2008 +0100
@@ -0,0 +1,3 @@
+0
+0
+0
--- a/tests/test-no-symlinks.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-no-symlinks.out Fri Feb 08 11:55:17 2008 +0100
@@ -6,6 +6,7 @@
 a
 d/b
 % bundle
+2 changesets found
 pulling from ../symlinks.hg
 requesting all changes
 adding changesets
--- a/tests/test-non-interactive-wsgi Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-non-interactive-wsgi Fri Feb 08 11:55:17 2008 +0100 @@ -1,4 +1,6 @@ #!/bin/sh +# Tests if hgweb can run without touching sys.stdin, as is required +# by the WSGI standard and strictly implemented by mod_wsgi. mkdir repo cd repo @@ -11,7 +13,6 @@ cat > request.py <<EOF from mercurial import dispatch from mercurial.hgweb.hgweb_mod import hgweb -from mercurial.hgweb.request import _wsgirequest from mercurial.ui import ui from mercurial import hg from StringIO import StringIO @@ -62,7 +63,7 @@ 'SERVER_PROTOCOL': 'HTTP/1.0' } -_wsgirequest(hgweb('.'), env, startrsp) +hgweb('.')(env, startrsp) print '---- ERRORS' print errors.getvalue() EOF
--- a/tests/test-non-interactive-wsgi.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-non-interactive-wsgi.out Fri Feb 08 11:55:17 2008 +0100
@@ -7,6 +7,6 @@
 ---- HEADERS
 200 Script output follows
 ---- DATA
-[('content-type', 'text/html; charset=ascii')]
+[('Content-Type', 'text/html; charset=ascii')]
 ---- ERRORS
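With the _wsgirequest wrapper gone, hgweb('.') is invoked directly as a WSGI callable taking (environ, start_response), which is exactly what request.py in the test above now does by hand. A minimal hosting sketch under that assumption, using wsgiref from the standard library (Python 2.5+); the repository path and port are made up for illustration:

    # Serve the repository in the current directory through any WSGI server.
    from wsgiref.simple_server import make_server
    from mercurial.hgweb.hgweb_mod import hgweb

    application = hgweb('.')        # now a plain WSGI application object
    make_server('localhost', 8000, application).serve_forever()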
--- a/tests/test-notfound Fri Feb 08 11:50:37 2008 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-hg init
-
-echo "Is there an error message when trying to diff non-existing files?"
-hg diff not found
-
-echo "Is there an error message when trying to add non-existing files?"
-hg add not found
--- a/tests/test-notfound.out Fri Feb 08 11:50:37 2008 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-Is there an error message when trying to diff non-existing files?
-found: No such file or directory
-not: No such file or directory
-Is there an error message when trying to add non-existing files?
-found: No such file or directory
-not: No such file or directory
--- a/tests/test-notify.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-notify.out Fri Feb 08 11:55:17 2008 +0100
@@ -30,7 +30,7 @@
 diff -r cb9a9f314b8b -r 0647d048b600 a
 --- a/a Thu Jan 01 00:00:00 1970 +0000
 +++ b/a Thu Jan 01 00:00:01 1970 +0000
-@@ -1,1 +1,2 @@ a
+@@ -1,1 +1,2 @@
  a
 +a
 (run 'hg update' to get a working copy)
@@ -60,7 +60,7 @@
 diff -r cb9a9f314b8b -r 0647d048b600 a
 --- a/a Thu Jan 01 00:00:00 1970 +0000
 +++ b/a Thu Jan 01 00:00:01 1970 +0000
-@@ -1,1 +1,2 @@ a
+@@ -1,1 +1,2 @@
  a
 +a
 (run 'hg update' to get a working copy)
--- a/tests/test-oldcgi Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-oldcgi Fri Feb 08 11:55:17 2008 +0100
@@ -1,4 +1,5 @@
 #!/bin/sh
+# This tests if CGI files from before d0db3462d568 still work.
 
 hg init test
 
--- a/tests/test-patchbomb.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-patchbomb.out Fri Feb 08 11:55:17 2008 +0100 @@ -110,6 +110,7 @@ adding c % test bundle and description searching for changes +1 changesets found Displaying test ... Content-Type: multipart/mixed; boundary="===
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-paths Fri Feb 08 11:55:17 2008 +0100
@@ -0,0 +1,10 @@
+#!/bin/sh
+hg init a
+hg clone a b
+cd a
+echo '[paths]' >> .hg/hgrc
+echo 'dupe = ../b' >> .hg/hgrc
+hg in dupe | fgrep '../'
+cd ..
+hg -R a in dupe | fgrep '../'
+true
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-paths.out Fri Feb 08 11:55:17 2008 +0100
@@ -0,0 +1,1 @@
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-permissions.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-permissions.out Fri Feb 08 11:55:17 2008 +0100
@@ -17,6 +17,6 @@
 diff -r c1fab96507ef a
 --- a/a
 +++ b/a
-@@ -1,1 +1,1 @@ foo
+@@ -1,1 +1,1 @@
 -foo
 +barber
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-qrecord Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,100 @@ +#!/bin/sh + +echo "[ui]" >> $HGRCPATH +echo "interactive=true" >> $HGRCPATH +echo "[extensions]" >> $HGRCPATH +echo "record=" >> $HGRCPATH + +echo "% help (no mq, so no qrecord)" + +hg help qrecord + +echo "mq=" >> $HGRCPATH + +echo "% help (mq present)" + +hg help qrecord + +hg init a +cd a + +echo % base commit + +cat > 1.txt <<EOF +1 +2 +3 +4 +5 +EOF +cat > 2.txt <<EOF +a +b +c +d +e +f +EOF +mkdir dir +cat > dir/a.txt <<EOF +hello world + +someone +up +there +loves +me +EOF + +hg add 1.txt 2.txt dir/a.txt +hg commit -d '0 0' -m 'initial checkin' + +echo % changing files + +sed -e 's/2/2 2/;s/4/4 4/' 1.txt > 1.txt.new +sed -e 's/b/b b/' 2.txt > 2.txt.new +sed -e 's/hello world/hello world!/' dir/a.txt > dir/a.txt.new + +mv -f 1.txt.new 1.txt +mv -f 2.txt.new 2.txt +mv -f dir/a.txt.new dir/a.txt + +echo % whole diff + +hg diff --nodates + +echo % qrecord a.patch + +hg qrecord -d '0 0' -m aaa a.patch <<EOF +y +y +n +y +y +n +EOF + +echo +echo % "after qrecord a.patch 'tip'" +hg tip -p +echo +echo % "after qrecord a.patch 'diff'" +hg diff --nodates + +echo % qrecord b.patch +hg qrecord -d '0 0' -m bbb b.patch <<EOF +y +y +y +y +EOF + +echo +echo % "after qrecord b.patch 'tip'" +hg tip -p +echo +echo % "after qrecord b.patch 'diff'" +hg diff --nodates + +echo +echo % --- end ---
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-qrecord.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,207 @@ +% help (no mq, so no qrecord) +hg: unknown command 'qrecord' +Mercurial Distributed SCM + +basic commands: + + add add the specified files on the next commit + annotate show changeset information per file line + clone make a copy of an existing repository + commit commit the specified files or all outstanding changes + diff diff repository (or selected files) + export dump the header and diffs for one or more changesets + init create a new repository in the given directory + log show revision history of entire repository or files + merge merge working directory with another revision + parents show the parents of the working dir or revision + pull pull changes from the specified source + push push changes to the specified destination + remove remove the specified files on the next commit + serve export the repository via HTTP + status show changed files in the working directory + update update working directory + +use "hg help" for the full list of commands or "hg -v" for details +% help (mq present) +hg qrecord [OPTION]... PATCH [FILE]... + +interactively record a new patch + + see 'hg help qnew' & 'hg help record' for more information and usage + +options: + + -e --edit edit commit message + -g --git use git extended diff format + -I --include include names matching the given patterns + -X --exclude exclude names matching the given patterns + -m --message use <text> as commit message + -l --logfile read commit message from <file> + -U --currentuser add "From: <current user>" to patch + -u --user add "From: <given user>" to patch + -D --currentdate add "Date: <current date>" to patch + -d --date add "Date: <given date>" to patch + +use "hg -v help qrecord" to show global options +% base commit +% changing files +% whole diff +diff -r 1057167b20ef 1.txt +--- a/1.txt ++++ b/1.txt +@@ -1,5 +1,5 @@ + 1 +-2 ++2 2 + 3 +-4 ++4 4 + 5 +diff -r 1057167b20ef 2.txt +--- a/2.txt ++++ b/2.txt +@@ -1,5 +1,5 @@ + a +-b ++b b + c + d + e +diff -r 1057167b20ef dir/a.txt +--- a/dir/a.txt ++++ b/dir/a.txt +@@ -1,4 +1,4 @@ +-hello world ++hello world! + + someone + up +% qrecord a.patch +diff --git a/1.txt b/1.txt +2 hunks, 4 lines changed +examine changes to '1.txt'? [Ynsfdaq?] @@ -1,3 +1,3 @@ + 1 +-2 ++2 2 + 3 +record this change to '1.txt'? [Ynsfdaq?] @@ -3,3 +3,3 @@ + 3 +-4 ++4 4 + 5 +record this change to '1.txt'? [Ynsfdaq?] diff --git a/2.txt b/2.txt +1 hunks, 2 lines changed +examine changes to '2.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@ + a +-b ++b b + c + d + e +record this change to '2.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt +1 hunks, 2 lines changed +examine changes to 'dir/a.txt'? [Ynsfdaq?] +% after qrecord a.patch 'tip' +changeset: 1:5d1ca63427ee +tag: qtip +tag: tip +tag: a.patch +tag: qbase +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +summary: aaa + +diff -r 1057167b20ef -r 5d1ca63427ee 1.txt +--- a/1.txt Thu Jan 01 00:00:00 1970 +0000 ++++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 +@@ -1,5 +1,5 @@ + 1 +-2 ++2 2 + 3 + 4 + 5 +diff -r 1057167b20ef -r 5d1ca63427ee 2.txt +--- a/2.txt Thu Jan 01 00:00:00 1970 +0000 ++++ b/2.txt Thu Jan 01 00:00:00 1970 +0000 +@@ -1,5 +1,5 @@ + a +-b ++b b + c + d + e + + +% after qrecord a.patch 'diff' +diff -r 5d1ca63427ee 1.txt +--- a/1.txt ++++ b/1.txt +@@ -1,5 +1,5 @@ + 1 + 2 2 + 3 +-4 ++4 4 + 5 +diff -r 5d1ca63427ee dir/a.txt +--- a/dir/a.txt ++++ b/dir/a.txt +@@ -1,4 +1,4 @@ +-hello world ++hello world! 
+ + someone + up +% qrecord b.patch +diff --git a/1.txt b/1.txt +1 hunks, 2 lines changed +examine changes to '1.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@ + 1 + 2 2 + 3 +-4 ++4 4 + 5 +record this change to '1.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt +1 hunks, 2 lines changed +examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -1,4 +1,4 @@ +-hello world ++hello world! + + someone + up +record this change to 'dir/a.txt'? [Ynsfdaq?] +% after qrecord b.patch 'tip' +changeset: 2:b056198bf878 +tag: qtip +tag: tip +tag: b.patch +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +summary: bbb + +diff -r 5d1ca63427ee -r b056198bf878 1.txt +--- a/1.txt Thu Jan 01 00:00:00 1970 +0000 ++++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 +@@ -1,5 +1,5 @@ + 1 + 2 2 + 3 +-4 ++4 4 + 5 +diff -r 5d1ca63427ee -r b056198bf878 dir/a.txt +--- a/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 ++++ b/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 +@@ -1,4 +1,4 @@ +-hello world ++hello world! + + someone + up + + +% after qrecord b.patch 'diff' + +% --- end ---
--- a/tests/test-rebuildstate Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rebuildstate Fri Feb 08 11:55:17 2008 +0100 @@ -12,13 +12,13 @@ hg rm bar echo '% state dump' -hg debugstate | cut -b 1-16,35- | sort +hg debugstate | cut -b 1-16,37- | sort echo '% status' hg st -A hg debugrebuildstate echo '% state dump' -hg debugstate | cut -b 1-16,35- | sort +hg debugstate | cut -b 1-16,37- | sort echo '% status' hg st -A
--- a/tests/test-record.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-record.out Fri Feb 08 11:55:17 2008 +0100 @@ -50,6 +50,7 @@ new file mode 100644 examine changes to 'empty-rw'? [Ynsfdaq?] transaction abort! rollback completed +abort: empty commit message changeset: -1:000000000000 tag: tip @@ -104,6 +105,7 @@ % add binary file +1 changesets found diff --git a/tip.bundle b/tip.bundle new file mode 100644 this is a binary file @@ -118,6 +120,7 @@ Binary file tip.bundle has changed % change binary file +1 changesets found diff --git a/tip.bundle b/tip.bundle this modifies a binary file (all or nothing) examine changes to 'tip.bundle'? [Ynsfdaq?] @@ -131,6 +134,7 @@ Binary file tip.bundle has changed % rename and change binary file +1 changesets found diff --git a/tip.bundle b/top.bundle rename from tip.bundle rename to top.bundle @@ -175,7 +179,7 @@ % modify end of plain file diff --git a/plain b/plain 1 hunks, 1 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -8,3 +8,4 @@ 8 +examine changes to 'plain'? [Ynsfdaq?] @@ -8,3 +8,4 @@ 8 9 10 @@ -183,7 +187,7 @@ record this change to 'plain'? [Ynsfdaq?] % modify end of plain file, no EOL diff --git a/plain b/plain 1 hunks, 1 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -9,3 +9,4 @@ 9 +examine changes to 'plain'? [Ynsfdaq?] @@ -9,3 +9,4 @@ 9 10 11 @@ -192,7 +196,7 @@ record this change to 'plain'? [Ynsfdaq?] % modify end of plain file, add EOL diff --git a/plain b/plain 1 hunks, 2 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -9,4 +9,4 @@ 9 +examine changes to 'plain'? [Ynsfdaq?] @@ -9,4 +9,4 @@ 9 10 11 @@ -202,13 +206,13 @@ record this change to 'plain'? [Ynsfdaq?] % modify beginning, trim end, record both diff --git a/plain b/plain 2 hunks, 4 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -1,4 +1,4 @@ 1 +examine changes to 'plain'? [Ynsfdaq?] @@ -1,4 +1,4 @@ -1 +2 2 3 4 -record this change to 'plain'? [Ynsfdaq?] @@ -8,5 +8,3 @@ 8 +record this change to 'plain'? [Ynsfdaq?] @@ -8,5 +8,3 @@ 8 9 10 @@ -224,13 +228,13 @@ diff -r e2ecd9b0b78d -r d09ab1967dab plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:10 1970 +0000 -@@ -1,4 +1,4 @@ 1 +@@ -1,4 +1,4 @@ -1 +2 2 3 4 -@@ -8,5 +8,3 @@ 8 +@@ -8,5 +8,3 @@ 8 9 10 @@ -241,7 +245,7 @@ % record end diff --git a/plain b/plain 2 hunks, 5 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -1,9 +1,6 @@ 2 +examine changes to 'plain'? [Ynsfdaq?] @@ -1,9 +1,6 @@ -2 -2 -3 @@ -270,7 +274,7 @@ diff -r d09ab1967dab -r 44516c9708ae plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:11 1970 +0000 -@@ -7,4 +7,4 @@ 7 +@@ -7,4 +7,4 @@ 7 8 9 @@ -280,7 +284,7 @@ % record beginning diff --git a/plain b/plain 1 hunks, 3 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -1,6 +1,3 @@ 2 +examine changes to 'plain'? [Ynsfdaq?] @@ -1,6 +1,3 @@ -2 -2 -3 @@ -297,7 +301,7 @@ diff -r 44516c9708ae -r 3ebbace64a8d plain --- a/plain Thu Jan 01 00:00:11 1970 +0000 +++ b/plain Thu Jan 01 00:00:12 1970 +0000 -@@ -1,6 +1,3 @@ 2 +@@ -1,6 +1,3 @@ -2 -2 -3 @@ -309,7 +313,7 @@ % record end diff --git a/plain b/plain 2 hunks, 4 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -1,6 +1,9 @@ 4 +examine changes to 'plain'? [Ynsfdaq?] @@ -1,6 +1,9 @@ +1 +2 +3 @@ -331,7 +335,7 @@ % record beginning, middle diff --git a/plain b/plain 3 hunks, 7 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -1,2 +1,5 @@ 4 +examine changes to 'plain'? [Ynsfdaq?] 
@@ -1,2 +1,5 @@ +1 +2 +3 @@ -363,7 +367,7 @@ diff -r efc0dad7bd9f -r c1c639d8b268 plain --- a/plain Thu Jan 01 00:00:13 1970 +0000 +++ b/plain Thu Jan 01 00:00:14 1970 +0000 -@@ -1,5 +1,10 @@ 4 +@@ -1,5 +1,10 @@ +1 +2 +3 @@ -378,7 +382,7 @@ % record end diff --git a/plain b/plain 1 hunks, 2 lines changed -examine changes to 'plain'? [Ynsfdaq?] @@ -9,3 +9,5 @@ 7 +examine changes to 'plain'? [Ynsfdaq?] @@ -9,3 +9,5 @@ 7 8 9 @@ -394,7 +398,7 @@ diff -r c1c639d8b268 -r 80b74bbc7808 plain --- a/plain Thu Jan 01 00:00:14 1970 +0000 +++ b/plain Thu Jan 01 00:00:15 1970 +0000 -@@ -9,3 +9,5 @@ 7 +@@ -9,3 +9,5 @@ 7 8 9 @@ -404,7 +408,7 @@ adding subdir/a diff --git a/subdir/a b/subdir/a 1 hunks, 1 lines changed -examine changes to 'subdir/a'? [Ynsfdaq?] @@ -1,1 +1,2 @@ a +examine changes to 'subdir/a'? [Ynsfdaq?] @@ -1,1 +1,2 @@ a +a record this change to 'subdir/a'? [Ynsfdaq?] @@ -417,7 +421,7 @@ diff -r aecf2b2ea83c -r 33ff5c4fb017 subdir/a --- a/subdir/a Thu Jan 01 00:00:16 1970 +0000 +++ b/subdir/a Thu Jan 01 00:00:16 1970 +0000 -@@ -1,1 +1,2 @@ a +@@ -1,1 +1,2 @@ a +a @@ -466,7 +470,7 @@ diff -r f9e855cd9374 -r 094183e04b7c subdir/f2 --- a/subdir/f2 Thu Jan 01 00:00:17 1970 +0000 +++ b/subdir/f2 Thu Jan 01 00:00:18 1970 +0000 -@@ -1,1 +1,2 @@ b +@@ -1,1 +1,2 @@ b +b @@ -483,7 +487,7 @@ diff -r 094183e04b7c -r 38164785b0ef subdir/f1 --- a/subdir/f1 Thu Jan 01 00:00:18 1970 +0000 +++ b/subdir/f1 Thu Jan 01 00:00:19 1970 +0000 -@@ -1,1 +1,2 @@ a +@@ -1,1 +1,2 @@ a +a
--- a/tests/test-rename-after-merge Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rename-after-merge Fri Feb 08 11:55:17 2008 +0100 @@ -24,6 +24,7 @@ echo % merge repositories hg pull ../t2 hg merge +hg st echo % rename b as c hg mv b c
--- a/tests/test-rename-after-merge.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-rename-after-merge.out Fri Feb 08 11:55:17 2008 +0100
@@ -14,7 +14,9 @@
 (run 'hg heads' to see heads, 'hg merge' to merge)
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
+M b
 % rename b as c
 A c
 R b
 % rename back c as b
+M b
--- a/tests/test-rename-dir-merge Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rename-dir-merge Fri Feb 08 11:55:17 2008 +0100 @@ -7,9 +7,7 @@ mkdir a echo foo > a/a echo bar > a/b - -hg add a -hg ci -m "0" -d "0 0" +hg ci -Am "0" -d "0 0" hg co -C 0 hg mv a b @@ -17,6 +15,7 @@ hg co -C 0 echo baz > a/c +echo quux > a/d hg add a/c hg ci -m "2 add a/c" -d "0 0"
--- a/tests/test-rename-dir-merge.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rename-dir-merge.out Fri Feb 08 11:55:17 2008 +0100 @@ -1,10 +1,8 @@ adding a/a adding a/b 0 files updated, 0 files merged, 0 files removed, 0 files unresolved -copying a/a to b/a -copying a/b to b/b -removing a/a -removing a/b +moving a/a to b/a +moving a/b to b/b 2 files updated, 0 files merged, 2 files removed, 0 files unresolved resolving manifests overwrite None partial False @@ -12,6 +10,7 @@ searching for copies back to rev 1 unmatched files in local: a/c + a/d unmatched files in other: b/a b/b @@ -21,6 +20,8 @@ checking for directory renames dir a/ -> b/ file a/c -> b/c + file a/d -> b/d + a/d: remote renamed directory to b/d -> d a/c: remote renamed directory to b/c -> d a/b: other deleted -> r a/a: other deleted -> r @@ -29,11 +30,12 @@ removing a/a removing a/b moving a/c to b/c +moving a/d to b/d getting b/a getting b/b -3 files updated, 0 files merged, 2 files removed, 0 files unresolved +4 files updated, 0 files merged, 2 files removed, 0 files unresolved (branch merge, don't forget to commit) -a/* b/a b/b b/c +a/* b/a b/b b/c b/d M b/a M b/b A b/c @@ -41,6 +43,7 @@ R a/a R a/b R a/c +? b/d b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88 0 files updated, 0 files merged, 1 files removed, 0 files unresolved resolving manifests @@ -50,6 +53,7 @@ unmatched files in local: b/a b/b + b/d unmatched files in other: a/c all copies found (* = to merge, ! = divergent): @@ -62,7 +66,8 @@ getting a/c to b/c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -a/* b/a b/b b/c +a/* b/a b/b b/c b/d A b/c a/c +? b/d b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88
--- a/tests/test-rename-dir-merge2.out Fri Feb 08 11:50:37 2008 +0100
+++ b/tests/test-rename-dir-merge2.out Fri Feb 08 11:55:17 2008 +0100
@@ -1,7 +1,6 @@
 adding a/f
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-copying a/f to b/f
-removing a/f
+moving a/f to b/f
 adding a/aa/g
 pulling from ../r2
 searching for changes
--- a/tests/test-rename-merge1.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rename-merge1.out Fri Feb 08 11:55:17 2008 +0100 @@ -11,16 +11,18 @@ b b2 all copies found (* = to merge, ! = divergent): - c2 -> a2 + c2 -> a2 ! b -> a * - b2 -> a2 + b2 -> a2 ! checking for directory renames a2: divergent renames -> dr a: remote moved to b -> m b2: remote created -> g copying a to b +picked tool 'internal:merge' for a (binary False symlink False) merging a and b my a@f26ec4fc3fa3+ other b@8e765a822af2 ancestor a@af1939970a1c + premerge successful removing a warning: detected divergent renames of a2 to: c2
--- a/tests/test-rename-merge2.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rename-merge2.out Fri Feb 08 11:55:17 2008 +0100 @@ -13,8 +13,11 @@ rev: versions differ -> m a: remote copied to b -> m copying a to b +picked tool 'python ../merge' for a (binary False symlink False) merging a and b my a@e300d1c794ec+ other b@735846fee2d7 ancestor a@924404dff337 + premerge successful +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@e300d1c794ec+ other rev@735846fee2d7 ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -41,8 +44,11 @@ b: local copied to a -> m rev: versions differ -> m getting a +picked tool 'python ../merge' for b (binary False symlink False) merging b and a my b@ac809aeed39a+ other a@f4db7e329e71 ancestor a@924404dff337 + premerge successful +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ac809aeed39a+ other rev@f4db7e329e71 ancestor rev@924404dff337 1 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -68,9 +74,12 @@ rev: versions differ -> m a: remote moved to b -> m copying a to b +picked tool 'python ../merge' for a (binary False symlink False) merging a and b my a@e300d1c794ec+ other b@e03727d2d66b ancestor a@924404dff337 + premerge successful removing a +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@e300d1c794ec+ other rev@e03727d2d66b ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -94,8 +103,11 @@ checking for directory renames b: local moved to a -> m rev: versions differ -> m +picked tool 'python ../merge' for b (binary False symlink False) merging b and a my b@ecf3cb2a4219+ other a@f4db7e329e71 ancestor a@924404dff337 + premerge successful +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ecf3cb2a4219+ other rev@f4db7e329e71 ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -120,6 +132,7 @@ rev: versions differ -> m b: remote created -> g getting b +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@94b33a1b7f2d+ other rev@735846fee2d7 ancestor rev@924404dff337 1 files updated, 1 files merged, 0 files removed, 0 files unresolved @@ -142,6 +155,7 @@ b -> a checking for directory renames rev: versions differ -> m +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ac809aeed39a+ other rev@97c705ade336 ancestor rev@924404dff337 0 files updated, 1 files merged, 0 files removed, 0 files unresolved @@ -168,6 +182,7 @@ b: remote created -> g removing a getting b +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@94b33a1b7f2d+ other rev@e03727d2d66b ancestor rev@924404dff337 1 files updated, 1 files merged, 1 files removed, 0 files unresolved @@ -189,6 +204,7 @@ b -> a checking for directory renames rev: versions differ -> m +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ecf3cb2a4219+ other rev@97c705ade336 ancestor rev@924404dff337 0 files updated, 1 files merged, 0 files removed, 0 files unresolved @@ -206,8 +222,10 @@ searching for copies back to rev 1 b: versions differ -> m rev: versions differ -> m +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@ec03c2ca8642+ other b@79cc6877a3b7 ancestor a@924404dff337 +picked tool 'python ../merge' for rev (binary False symlink False) 
merging rev my rev@ec03c2ca8642+ other rev@79cc6877a3b7 ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -228,8 +246,8 @@ unmatched files in other: c all copies found (* = to merge, ! = divergent): - c -> a - b -> a + c -> a ! + b -> a ! checking for directory renames a: divergent renames -> dr rev: versions differ -> m @@ -238,6 +256,7 @@ b c getting c +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ecf3cb2a4219+ other rev@e6abcc1a30c2 ancestor rev@924404dff337 1 files updated, 1 files merged, 0 files removed, 0 files unresolved @@ -256,8 +275,10 @@ searching for copies back to rev 1 b: versions differ -> m rev: versions differ -> m +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@ac809aeed39a+ other b@af30c7647fc7 ancestor b@000000000000 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ac809aeed39a+ other rev@af30c7647fc7 ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -278,8 +299,10 @@ b: versions differ -> m rev: versions differ -> m removing a +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@59318016310c+ other b@e03727d2d66b ancestor b@000000000000 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@59318016310c+ other rev@e03727d2d66b ancestor rev@924404dff337 0 files updated, 2 files merged, 1 files removed, 0 files unresolved @@ -299,8 +322,10 @@ b: versions differ -> m rev: versions differ -> m getting a +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@ac809aeed39a+ other b@8dbce441892a ancestor b@000000000000 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ac809aeed39a+ other rev@8dbce441892a ancestor rev@924404dff337 1 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -321,8 +346,10 @@ b: versions differ -> m rev: versions differ -> m removing a +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@59318016310c+ other b@e03727d2d66b ancestor b@000000000000 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@59318016310c+ other rev@e03727d2d66b ancestor rev@924404dff337 0 files updated, 2 files merged, 1 files removed, 0 files unresolved @@ -342,8 +369,10 @@ b: versions differ -> m rev: versions differ -> m getting a +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@ac809aeed39a+ other b@8dbce441892a ancestor b@000000000000 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ac809aeed39a+ other rev@8dbce441892a ancestor rev@924404dff337 1 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -362,8 +391,10 @@ searching for copies back to rev 1 b: versions differ -> m rev: versions differ -> m +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@0b76e65c8289+ other b@735846fee2d7 ancestor b@000000000000 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@0b76e65c8289+ other rev@735846fee2d7 ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -384,8 +415,10 @@ rev: versions differ -> m a: prompt recreating -> g getting a +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@ecf3cb2a4219+ other b@8dbce441892a ancestor b@000000000000 +picked tool 
'python ../merge' for rev (binary False symlink False) merging rev my rev@ecf3cb2a4219+ other rev@8dbce441892a ancestor rev@924404dff337 1 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -404,8 +437,10 @@ searching for copies back to rev 1 b: versions differ -> m rev: versions differ -> m +picked tool 'python ../merge' for b (binary False symlink False) merging b my b@0b76e65c8289+ other b@e03727d2d66b ancestor b@000000000000 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@0b76e65c8289+ other rev@e03727d2d66b ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -430,9 +465,11 @@ rev: versions differ -> m a: remote moved to b -> m copying a to b +picked tool 'python ../merge' for a (binary False symlink False) merging a and b my a@e300d1c794ec+ other b@79cc6877a3b7 ancestor a@924404dff337 removing a +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@e300d1c794ec+ other rev@79cc6877a3b7 ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -456,8 +493,10 @@ checking for directory renames b: local moved to a -> m rev: versions differ -> m +picked tool 'python ../merge' for b (binary False symlink False) merging b and a my b@ec03c2ca8642+ other a@f4db7e329e71 ancestor a@924404dff337 +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ec03c2ca8642+ other rev@f4db7e329e71 ancestor rev@924404dff337 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -484,9 +523,12 @@ b: local moved to a -> m rev: versions differ -> m c: remote created -> g +picked tool 'python ../merge' for b (binary False symlink False) merging b and a my b@ecf3cb2a4219+ other a@2b958612230f ancestor a@924404dff337 + premerge successful getting c +picked tool 'python ../merge' for rev (binary False symlink False) merging rev my rev@ecf3cb2a4219+ other rev@2b958612230f ancestor rev@924404dff337 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-rename.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rename.out Fri Feb 08 11:55:17 2008 +0100 @@ -29,14 +29,10 @@ R d2/b 1 files updated, 0 files merged, 1 files removed, 0 files unresolved # rename directory d1 as d3 -copying d1/a to d3/a -copying d1/b to d3/b -copying d1/ba to d3/ba -copying d1/d11/a1 to d3/d11/a1 -removing d1/a -removing d1/b -removing d1/ba -removing d1/d11/a1 +moving d1/a to d3/a +moving d1/b to d3/b +moving d1/ba to d3/ba +moving d1/d11/a1 to d3/d11/a1 A d3/a d1/a A d3/b @@ -51,14 +47,10 @@ R d1/d11/a1 4 files updated, 0 files merged, 4 files removed, 0 files unresolved # rename --after directory d1 as d3 -copying d1/a to d3/a -copying d1/b to d3/b -copying d1/ba to d3/ba -copying d1/d11/a1 to d3/d11/a1 -removing d1/a -removing d1/b -removing d1/ba -removing d1/d11/a1 +moving d1/a to d3/a +moving d1/b to d3/b +moving d1/ba to d3/ba +moving d1/d11/a1 to d3/d11/a1 A d3/a d1/a A d3/b @@ -73,37 +65,29 @@ R d1/d11/a1 4 files updated, 0 files merged, 4 files removed, 0 files unresolved # move a directory using a relative path -copying ../d1/d11/a1 to d3/d11/a1 -removing ../d1/d11/a1 +moving ../d1/d11/a1 to d3/d11/a1 A d2/d3/d11/a1 d1/d11/a1 R d1/d11/a1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved # move --after a directory using a relative path -copying ../d1/d11/a1 to d3/d11/a1 -removing ../d1/d11/a1 +moving ../d1/d11/a1 to d3/d11/a1 A d2/d3/d11/a1 d1/d11/a1 R d1/d11/a1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved # move directory d1/d11 to an existing directory d2 (removes empty d1) -copying d1/d11/a1 to d2/d11/a1 -removing d1/d11/a1 +moving d1/d11/a1 to d2/d11/a1 A d2/d11/a1 d1/d11/a1 R d1/d11/a1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved # move directories d1 and d2 to a new directory d3 -copying d1/a to d3/d1/a -copying d1/b to d3/d1/b -copying d1/ba to d3/d1/ba -copying d1/d11/a1 to d3/d1/d11/a1 -copying d2/b to d3/d2/b -removing d1/a -removing d1/b -removing d1/ba -removing d1/d11/a1 -removing d2/b +moving d1/a to d3/d1/a +moving d1/b to d3/d1/b +moving d1/ba to d3/d1/ba +moving d1/d11/a1 to d3/d1/d11/a1 +moving d2/b to d3/d2/b A d3/d1/a d1/a A d3/d1/b @@ -121,16 +105,11 @@ R d2/b 5 files updated, 0 files merged, 5 files removed, 0 files unresolved # move --after directories d1 and d2 to a new directory d3 -copying d1/a to d3/d1/a -copying d1/b to d3/d1/b -copying d1/ba to d3/d1/ba -copying d1/d11/a1 to d3/d1/d11/a1 -copying d2/b to d3/d2/b -removing d1/a -removing d1/b -removing d1/ba -removing d1/d11/a1 -removing d2/b +moving d1/a to d3/d1/a +moving d1/b to d3/d1/b +moving d1/ba to d3/d1/ba +moving d1/d11/a1 to d3/d1/d11/a1 +moving d2/b to d3/d2/b A d3/d1/a d1/a A d3/d1/b @@ -150,8 +129,7 @@ # move everything under directory d1 to existing directory d2, do not # overwrite existing files (d2/b) d2/b: not overwriting - file exists -copying d1/d11/a1 to d2/d11/a1 -removing d1/d11/a1 +moving d1/d11/a1 to d2/d11/a1 A d2/a d1/a A d2/ba @@ -173,14 +151,10 @@ # directory abort: with multiple sources, destination must be an existing directory # move every file under d1 to d2/d21 (glob) -copying d1/a to d2/d21/a -copying d1/b to d2/d21/b -copying d1/ba to d2/d21/ba -copying d1/d11/a1 to d2/d21/a1 -removing d1/a -removing d1/b -removing d1/ba -removing d1/d11/a1 +moving d1/a to d2/d21/a +moving d1/b to d2/d21/b +moving d1/ba to d2/d21/ba +moving d1/d11/a1 to d2/d21/a1 A d2/d21/a d1/a A d2/d21/a1 @@ -195,10 +169,8 @@ R d1/d11/a1 4 files updated, 0 files merged, 4 files removed, 0 
files unresolved # move --after some files under d1 to d2/d21 (glob) -copying d1/a to d2/d21/a -copying d1/d11/a1 to d2/d21/a1 -removing d1/a -removing d1/d11/a1 +moving d1/a to d2/d21/a +moving d1/d11/a1 to d2/d21/a1 A d2/d21/a d1/a A d2/d21/a1 @@ -207,10 +179,8 @@ R d1/d11/a1 2 files updated, 0 files merged, 2 files removed, 0 files unresolved # move every file under d1 starting with an 'a' to d2/d21 (regexp) -copying d1/a to d2/d21/a -copying d1/d11/a1 to d2/d21/a1 -removing d1/a -removing d1/d11/a1 +moving d1/a to d2/d21/a +moving d1/d11/a1 to d2/d21/a1 A d2/d21/a d1/a A d2/d21/a1 @@ -233,9 +203,8 @@ R d1/ba 1 files updated, 0 files merged, 1 files removed, 0 files unresolved # do not copy more than one source file to the same destination file -copying d1/d11/a1 to d3/d11/a1 +moving d1/d11/a1 to d3/d11/a1 d3/b: not overwriting - d2/b collides with d1/b -removing d1/d11/a1 A d3/a d1/a A d3/b @@ -250,14 +219,10 @@ R d1/d11/a1 4 files updated, 0 files merged, 4 files removed, 0 files unresolved # move a whole subtree with "hg rename ." -copying a to ../d3/d1/a -copying b to ../d3/d1/b -copying ba to ../d3/d1/ba -copying d11/a1 to ../d3/d1/d11/a1 -removing a -removing b -removing ba -removing d11/a1 +moving a to ../d3/d1/a +moving b to ../d3/d1/b +moving ba to ../d3/d1/ba +moving d11/a1 to ../d3/d1/d11/a1 A d3/d1/a d1/a A d3/d1/b @@ -272,14 +237,10 @@ R d1/d11/a1 4 files updated, 0 files merged, 4 files removed, 0 files unresolved # move a whole subtree with "hg rename --after ." -copying a to ../d3/a -copying b to ../d3/b -copying ba to ../d3/ba -copying d11/a1 to ../d3/d11/a1 -removing a -removing b -removing ba -removing d11/a1 +moving a to ../d3/a +moving b to ../d3/b +moving ba to ../d3/ba +moving d11/a1 to ../d3/d11/a1 A d3/a d1/a A d3/b @@ -294,14 +255,10 @@ R d1/d11/a1 4 files updated, 0 files merged, 4 files removed, 0 files unresolved # move the parent tree with "hg rename .." -copying ../a to ../../d3/a -copying ../b to ../../d3/b -copying ../ba to ../../d3/ba -copying a1 to ../../d3/d11/a1 -removing ../a -removing ../b -removing ../ba -removing a1 +moving ../a to ../../d3/a +moving ../b to ../../d3/b +moving ../ba to ../../d3/ba +moving a1 to ../../d3/d11/a1 A d3/a d1/a A d3/b @@ -316,12 +273,9 @@ R d1/d11/a1 4 files updated, 0 files merged, 4 files removed, 0 files unresolved # skip removed files -copying d1/a to d3/a -copying d1/ba to d3/ba -copying d1/d11/a1 to d3/d11/a1 -removing d1/a -removing d1/ba -removing d1/d11/a1 +moving d1/a to d3/a +moving d1/ba to d3/ba +moving d1/d11/a1 to d3/d11/a1 A d3/a d1/a A d3/ba
--- a/tests/test-ro-message Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -#!/bin/sh -HG=hg -"$HG" init -mkdir b -echo 'Bouncy' >b/bouncy -echo 'tricycle' >b/vehicle -"$HG" add b/bouncy -"$HG" add b/vehicle -"$HG" commit -m 'Adding bouncy' -echo 'bouncy' >>b/bouncy -"$HG" commit -m 'Making it bouncier' -"$HG" update -C 0 -echo 'stationary' >>b/vehicle -"$HG" commit -m 'Clarifying the vehicle.' -"$HG" update -C 1 -chmod a-w b/vehicle -"$HG" merge 2 2>&1 | sed 's|^\(.*[ ]\).*/\([^/]*/[^/]*/[^/]*\)$|\1\2|g'
--- a/tests/test-ro-message.out Fri Feb 08 11:50:37 2008 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -1 files updated, 0 files merged, 0 files removed, 0 files unresolved -2 files updated, 0 files merged, 0 files removed, 0 files unresolved -abort: Permission denied: test-ro-message/b/vehicle
--- a/tests/test-rollback Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rollback Fri Feb 08 11:55:17 2008 +0100 @@ -13,3 +13,9 @@ hg verify hg parents hg status + +# Test issue 902 +hg commit -m "test" +hg branch test +hg rollback +hg branch
--- a/tests/test-rollback.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-rollback.out Fri Feb 08 11:55:17 2008 +0100 @@ -16,3 +16,6 @@ checking files 0 files, 0 changesets, 0 total revisions A a +marked working directory as branch test +rolling back last transaction +default
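The new test exercises issue 902: after a rollback the working directory should report the branch it had before the rolled-back commit, hence the final "default" line above. A rough sketch of the idea, assuming the branch name is saved to an undo file alongside the transaction (file names here are illustrative, not necessarily Mercurial's actual layout):

    import os

    def rollback_branch(repo_root):
        # Restore the dirstate branch recorded before the last transaction.
        undo = os.path.join(repo_root, '.hg', 'undo.branch')   # assumed name
        branch = os.path.join(repo_root, '.hg', 'branch')
        if os.path.exists(undo):
            data = open(undo, 'rb').read()
            open(branch, 'wb').write(data)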
--- a/tests/test-serve Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-serve Fri Feb 08 11:55:17 2008 +0100 @@ -16,3 +16,31 @@ echo % With -v hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v | sed -e 's,:[0-9][0-9]*/,/,' cat hg.pid >> "$DAEMON_PIDS" +sleep 1 +kill `cat hg.pid` +sleep 1 + +echo % With --prefix foo +hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix foo | sed -e 's,:[0-9][0-9]*/,/,' +cat hg.pid >> "$DAEMON_PIDS" +sleep 1 +kill `cat hg.pid` +sleep 1 + +echo % With --prefix /foo +hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix /foo | sed -e 's,:[0-9][0-9]*/,/,' +cat hg.pid >> "$DAEMON_PIDS" +sleep 1 +kill `cat hg.pid` +sleep 1 + +echo % With --prefix foo/ +hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix foo/ | sed -e 's,:[0-9][0-9]*/,/,' +cat hg.pid >> "$DAEMON_PIDS" +sleep 1 +kill `cat hg.pid` +sleep 1 + +echo % With --prefix /foo/ +hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -v --prefix /foo/ | sed -e 's,:[0-9][0-9]*/,/,' +cat hg.pid >> "$DAEMON_PIDS"
--- a/tests/test-serve.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-serve.out Fri Feb 08 11:55:17 2008 +0100 @@ -2,3 +2,11 @@ access log created - .hg/hgrc respected % With -v listening at http://localhost/ +% With --prefix foo +listening at http://localhost/foo/ +% With --prefix /foo +listening at http://localhost/foo/ +% With --prefix foo/ +listening at http://localhost/foo/ +% With --prefix /foo/ +listening at http://localhost/foo/
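All four spellings of the prefix collapse to the same mount point, as the expected output above shows. A small sketch of that normalization, under the assumption that the server reduces the option to a stripped path and serves under /<prefix>/ (illustrative only, not hgweb's code):

    def normalize_prefix(prefix):
        # 'foo', '/foo', 'foo/' and '/foo/' should all serve under /foo/.
        prefix = prefix.strip('/')
        return '/%s/' % prefix if prefix else '/'

    for spelling in ('foo', '/foo', 'foo/', '/foo/'):
        assert normalize_prefix(spelling) == '/foo/'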
--- a/tests/test-simplemerge.py Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-simplemerge.py Fri Feb 08 11:55:17 2008 +0100 @@ -19,14 +19,24 @@ from unittest import TestCase import imp import shutil -from mercurial import util +from mercurial import util, simplemerge + +# bzr compatible interface, for the tests +class Merge3(simplemerge.Merge3Text): + """3-way merge of texts. -# copy simplemerge to the cwd to avoid creating a .pyc file in the source tree -shutil.copyfile(os.path.join(os.environ['TESTDIR'], os.path.pardir, - 'contrib', 'simplemerge'), - 'simplemerge.py') -simplemerge = imp.load_source('simplemerge', 'simplemerge.py') -Merge3 = simplemerge.Merge3 + Given BASE, OTHER, THIS, tries to produce a combined text + incorporating the changes from both BASE->OTHER and BASE->THIS. + All three will typically be sequences of lines.""" + def __init__(self, base, a, b): + basetext = '\n'.join([i.strip('\n') for i in base] + ['']) + atext = '\n'.join([i.strip('\n') for i in a] + ['']) + btext = '\n'.join([i.strip('\n') for i in b] + ['']) + if util.binary(basetext) or util.binary(atext) or util.binary(btext): + raise util.Abort("don't know how to merge binary files") + simplemerge.Merge3Text.__init__(self, basetext, atext, btext, + base, a, b) + CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase def split_lines(t):
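The test now builds its bzr-compatible Merge3 wrapper directly on top of mercurial.simplemerge.Merge3Text instead of loading the contrib script. A hypothetical usage of that shim, assuming the bzr-style merge_lines() generator that the rest of this test exercises:

    base = ['common line\n', 'old value\n']
    a    = ['common line\n', 'value from a\n']
    b    = ['common line\n', 'value from b\n']
    m3 = Merge3(base, a, b)
    # Conflicting middle lines come back between <<<<<<< a and >>>>>>> b markers.
    print(''.join(m3.merge_lines(name_a='a', name_b='b')))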
--- a/tests/test-strict.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-strict.out Fri Feb 08 11:55:17 2008 +0100 @@ -18,7 +18,6 @@ pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit - revert revert files or dirs to their states as of some revision serve export the repository via HTTP status show changed files in the working directory update update working directory
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-strip-cross Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,68 @@ +#!/bin/sh + +# test stripping of filelogs where the linkrev doesn't always increase + +echo '[extensions]' >> $HGRCPATH +echo 'hgext.mq =' >> $HGRCPATH + +hg init orig +cd orig + +hidefilename() +{ + sed -e 's/saving bundle to .*strip-backup/saving bundle to strip-backup/' +} + +commit() +{ + hg up -qC null + count=1 + for i in "$@"; do + for f in $i; do + echo $count > $f + done + count=`expr $count + 1` + done + hg commit -qAm "$*" +} + +# 2 1 0 2 0 1 2 +commit '201 210' + +commit '102 120' '210' + +commit '021' + +commit '201' '021 120' + +commit '012 021' '102 201' '120 210' + +commit 'manifest-file' + +commit '102 120' '012 210' '021 201' + +commit '201 210' '021 120' '012 102' + +HGUSER=another-user; export HGUSER +commit 'manifest-file' + +commit '012' 'manifest-file' + +cd .. +hg clone -q -U -r -1 -r -2 -r -3 -r -4 -r -6 orig crossed + +for i in crossed/.hg/store/00manifest.i crossed/.hg/store/data/*.i; do + echo $i + hg debugindex $i + echo +done + +for i in 0 1 2 3 4; do + hg clone -q -U --pull crossed $i + echo "% Trying to strip revision $i" + hg --cwd $i strip $i 2>&1 | hidefilename + echo "% Verifying" + hg --cwd $i verify + echo +done +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-strip-cross.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,118 @@ +crossed/.hg/store/00manifest.i + rev offset length base linkrev nodeid p1 p2 + 0 0 112 0 0 6f105cbb914d 000000000000 000000000000 + 1 112 56 1 3 1b55917b3699 000000000000 000000000000 + 2 168 123 1 1 8f3d04e263e5 000000000000 000000000000 + 3 291 122 1 2 f0ef8726ac4f 000000000000 000000000000 + 4 413 87 4 4 0b76e38b4070 000000000000 000000000000 + +crossed/.hg/store/data/012.i + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 0 b8e02f643373 000000000000 000000000000 + 1 3 3 1 1 5d9299349fc0 000000000000 000000000000 + 2 6 3 2 2 2661d26c6496 000000000000 000000000000 + +crossed/.hg/store/data/021.i + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 0 b8e02f643373 000000000000 000000000000 + 1 3 3 1 2 5d9299349fc0 000000000000 000000000000 + 2 6 3 2 1 2661d26c6496 000000000000 000000000000 + +crossed/.hg/store/data/102.i + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 1 b8e02f643373 000000000000 000000000000 + 1 3 3 1 0 5d9299349fc0 000000000000 000000000000 + 2 6 3 2 2 2661d26c6496 000000000000 000000000000 + +crossed/.hg/store/data/120.i + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 1 b8e02f643373 000000000000 000000000000 + 1 3 3 1 2 5d9299349fc0 000000000000 000000000000 + 2 6 3 2 0 2661d26c6496 000000000000 000000000000 + +crossed/.hg/store/data/201.i + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 2 b8e02f643373 000000000000 000000000000 + 1 3 3 1 0 5d9299349fc0 000000000000 000000000000 + 2 6 3 2 1 2661d26c6496 000000000000 000000000000 + +crossed/.hg/store/data/210.i + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 2 b8e02f643373 000000000000 000000000000 + 1 3 3 1 1 5d9299349fc0 000000000000 000000000000 + 2 6 3 2 0 2661d26c6496 000000000000 000000000000 + +crossed/.hg/store/data/manifest-file.i + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 3 b8e02f643373 000000000000 000000000000 + 1 3 3 1 4 5d9299349fc0 000000000000 000000000000 + +% Trying to strip revision 0 +saving bundle to strip-backup/cbb8c2f0a2e3-backup +saving bundle to strip-backup/cbb8c2f0a2e3-temp +adding branch +adding changesets +adding manifests +adding file changes +added 4 changesets with 15 changes to 7 files (+3 heads) +% Verifying +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +7 files, 4 changesets, 15 total revisions + +% Trying to strip revision 1 +saving bundle to strip-backup/124ecc0cbec9-backup +saving bundle to strip-backup/124ecc0cbec9-temp +adding branch +adding changesets +adding manifests +adding file changes +added 3 changesets with 12 changes to 7 files (+3 heads) +% Verifying +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +7 files, 4 changesets, 14 total revisions + +% Trying to strip revision 2 +saving bundle to strip-backup/f6439b304a1a-backup +saving bundle to strip-backup/f6439b304a1a-temp +adding branch +adding changesets +adding manifests +adding file changes +added 2 changesets with 8 changes to 6 files (+2 heads) +% Verifying +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +7 files, 4 changesets, 14 total revisions + +% Trying to strip revision 3 +saving bundle to strip-backup/6e54ec5db740-backup +saving bundle to strip-backup/6e54ec5db740-temp +adding branch +adding changesets +adding manifests +adding file changes +added 1 changesets 
with 1 changes to 2 files (+1 heads) +% Verifying +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +7 files, 4 changesets, 19 total revisions + +% Trying to strip revision 4 +saving bundle to strip-backup/9147ea23c156-backup +% Verifying +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +7 files, 4 changesets, 19 total revisions +
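The new test covers filelogs whose linkrevs are not monotonically increasing, so strip cannot simply cut every revlog at the strip point: revisions past the cut whose linkrev survives must be saved and re-applied, which is why the output shows a -temp bundle being added back. A rough sketch of that plan with assumed names (not Mercurial's repair.strip):

    def truncation_plan(linkrevs, striprev):
        # A revlog can only be truncated, so cut at the first rev whose
        # linkrev is being stripped...
        cutpoint = len(linkrevs)
        for rev, linkrev in enumerate(linkrevs):
            if linkrev >= striprev:
                cutpoint = rev
                break
        # ...and remember later revs that still belong to surviving
        # changesets; they are re-added from the temporary bundle.
        collateral = [rev for rev in range(cutpoint, len(linkrevs))
                      if linkrevs[rev] < striprev]
        return cutpoint, collateral

    # Crossed linkrevs as in data/201.i above (filelog revs 0,1,2 -> csets 2,0,1):
    print(truncation_plan([2, 0, 1], 1))   # (0, [1]) -- rev 1 must be re-added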
--- a/tests/test-symlink-basic Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-symlink-basic Fri Feb 08 11:55:17 2008 +0100 @@ -7,14 +7,6 @@ sed -e "s:/.*\(/test-symlink-basic/.*\):...\1:" } -cat >> readlink.py <<EOF -import os -import sys - -for f in sys.argv[1:]: - print f, '->', os.readlink(f) -EOF - hg init a cd a ln -s nothing dangling @@ -25,29 +17,29 @@ hg tip -v hg manifest --debug echo '% rev 0:' -python ../readlink.py dangling +$TESTDIR/readlink.py dangling rm dangling ln -s void dangling hg commit -m 'change symlink' echo '% rev 1:' -python ../readlink.py dangling +$TESTDIR/readlink.py dangling echo '% modifying link' rm dangling ln -s empty dangling -python ../readlink.py dangling +$TESTDIR/readlink.py dangling echo '% reverting to rev 0:' hg revert -r 0 -a -python ../readlink.py dangling +$TESTDIR/readlink.py dangling echo '% backups:' -python ../readlink.py *.orig +$TESTDIR/readlink.py *.orig rm *.orig hg up -C echo '% copies' hg cp -v dangling dangling2 hg st -Cmard -python ../readlink.py dangling dangling2 +$TESTDIR/readlink.py dangling dangling2
--- a/tests/test-tags Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-tags Fri Feb 08 11:55:17 2008 +0100 @@ -126,3 +126,21 @@ hg tag -m 'retag rev 0' -fr 0 bar # rev 4 bar -> 0, but bar stays at 2 echo % bar should still point to rev 2 hg tags + + +# test that removing global/local tags does not get confused when trying +# to remove a tag of type X which actually only exists as a type Y +cd .. +hg init t5 +cd t5 +echo foo > foo +hg add +hg ci -m 'add foo' # rev 0 + +hg tag -r 0 -l localtag +hg tag --remove localtag + +hg tag -r 0 globaltag +hg tag --remove -l globaltag +hg tags -v +exit 0
--- a/tests/test-tags.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-tags.out Fri Feb 08 11:55:17 2008 +0100 @@ -71,3 +71,9 @@ % bar should still point to rev 2 tip 4:40af5d225513 bar 2:72b852876a42 +adding foo +abort: localtag tag is local +abort: globaltag tag is global +tip 1:a0b6fe111088 +localtag 0:bbd179dfa0a7 local +globaltag 0:bbd179dfa0a7
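The new assertions above check that tag removal refuses to cross tag types: removing a local-only tag without --local, or a global-only tag with --local, aborts instead of silently doing nothing. A minimal sketch of that guard with assumed names (not the actual tag code):

    def removal_error(name, localtags, globaltags, local):
        # Mirror the aborts shown above: a tag can only be removed as the
        # type it actually has.
        if local and name in globaltags and name not in localtags:
            return '%s tag is global' % name
        if not local and name in localtags and name not in globaltags:
            return '%s tag is local' % name
        return None

    print(removal_error('localtag', ['localtag'], [], local=False))   # localtag tag is local
    print(removal_error('globaltag', [], ['globaltag'], local=True))  # globaltag tag is global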
--- a/tests/test-up-local-change.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-up-local-change.out Fri Feb 08 11:55:17 2008 +0100 @@ -4,7 +4,7 @@ diff -r 33aaa84a386b a --- a/a +++ b/a -@@ -1,1 +1,1 @@ a +@@ -1,1 +1,1 @@ -a +abc adding b @@ -22,6 +22,7 @@ b a: versions differ -> m b: remote created -> g +picked tool 'true' for a (binary False symlink False) merging a my a@33aaa84a386b+ other a@802f095af299 ancestor a@33aaa84a386b getting b @@ -58,6 +59,7 @@ b a: versions differ -> m b: remote created -> g +picked tool 'true' for a (binary False symlink False) merging a my a@33aaa84a386b+ other a@802f095af299 ancestor a@33aaa84a386b getting b @@ -88,7 +90,7 @@ diff -r 802f095af299 a --- a/a +++ b/a -@@ -1,1 +1,1 @@ a2 +@@ -1,1 +1,1 @@ -a2 +abc 1 files updated, 0 files merged, 1 files removed, 0 files unresolved @@ -109,8 +111,10 @@ searching for copies back to rev 1 a: versions differ -> m b: versions differ -> m +picked tool 'true' for a (binary False symlink False) merging a my a@802f095af299+ other a@030602aee63d ancestor a@33aaa84a386b +picked tool 'true' for b (binary False symlink False) merging b my b@802f095af299+ other b@030602aee63d ancestor b@000000000000 0 files updated, 2 files merged, 0 files removed, 0 files unresolved @@ -130,7 +134,7 @@ diff -r 802f095af299 a --- a/a +++ b/a -@@ -1,1 +1,1 @@ a2 +@@ -1,1 +1,1 @@ -a2 +abc adding a
--- a/tests/test-walk Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-walk Fri Feb 08 11:55:17 2008 +0100 @@ -58,6 +58,7 @@ debugwalk ../.hg chdir .. debugwalk -Ibeans +debugwalk -I '{*,{b,m}*/*}k' debugwalk 'glob:mammals/../beans/b*' debugwalk '-X*/Procyonidae' mammals debugwalk path:mammals
--- a/tests/test-walk.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-walk.out Fri Feb 08 11:55:17 2008 +0100 @@ -174,6 +174,11 @@ f beans/pinto beans/pinto f beans/turtle beans/turtle +hg debugwalk -I {*,{b,m}*/*}k +f beans/black beans/black +f fenugreek fenugreek +f mammals/skunk mammals/skunk + hg debugwalk glob:mammals/../beans/b* f beans/black beans/black f beans/borlotti beans/borlotti
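The new include pattern exercises nested brace groups in globs: {*,{b,m}*/*}k expands to the alternatives *k, b*/*k and m*/*k, which together pick out exactly the three files listed above. A quick illustration with hand-expanded braces; fnmatch is only an approximation here, since its * also crosses slashes, unlike hg's glob:

    import fnmatch

    patterns = ['*k', 'b*/*k', 'm*/*k']   # manual expansion of {*,{b,m}*/*}k
    files = ['beans/black', 'beans/navy', 'fenugreek', 'mammals/skunk']
    for f in files:
        if any(fnmatch.fnmatch(f, p) for p in patterns):
            print('f %s %s' % (f, f))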
--- a/tests/test-webraw.out Fri Feb 08 11:50:37 2008 +0100 +++ b/tests/test-webraw.out Fri Feb 08 11:55:17 2008 +0100 @@ -1,7 +1,7 @@ 200 Script output follows content-type: text/plain content-length: 157 -content-disposition: filename=sometext.txt +content-disposition: inline; filename=sometext.txt This is just some random text that will go inside the file and take a few lines.
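The raw-file view now sends an explicit inline disposition so browsers render the file instead of prompting to save it. A one-line sketch of building that header value (assumed helper name, not hgweb's code):

    def content_disposition(filename):
        # 'inline' asks the browser to display the body in place.
        return 'inline; filename=%s' % filename

    print('content-disposition: %s' % content_disposition('sometext.txt'))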
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-win32text Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,101 @@ +#!/bin/sh + +cat > unix2dos.py <<EOF +import sys + +for path in sys.argv[1:]: + data = file(path, 'rb').read() + data = data.replace('\n', '\r\n') + file(path, 'wb').write(data) +EOF + +cat > print.py <<EOF +import sys +print(sys.stdin.read().replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>')) +EOF + +hg init +echo '[hooks]' >> .hg/hgrc +echo 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc +echo 'pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc +cat .hg/hgrc +echo + +echo hello > f +hg add f +hg ci -m 1 -d'0 0' +echo + +python unix2dos.py f +hg ci -m 2 -d'0 0' +hg revert -a +echo + +mkdir d +echo hello > d/f2 +python unix2dos.py d/f2 +hg add d/f2 +hg ci -m 3 -d'0 0' +hg revert -a +rm d/f2 +echo + +hg rem f +hg ci -m 4 -d'0 0' +echo + +python -c 'file("bin", "wb").write("hello\x00\x0D\x0A")' +hg add bin +hg ci -m 5 -d'0 0' +hg log -v +echo + +hg clone . dupe +echo +for x in a b c d; do echo content > dupe/$x; done +hg -R dupe add +python unix2dos.py dupe/b dupe/c dupe/d +hg -R dupe ci -m a -d'0 0' dupe/a +hg -R dupe ci -m b/c -d'0 0' dupe/[bc] +hg -R dupe ci -m d -d'0 0' dupe/d +hg -R dupe log -v +echo + +hg pull dupe +echo + +hg log -v +echo + +rm .hg/hgrc +(echo some; echo text) > f3 +python -c 'file("f4.bat", "wb").write("rem empty\x0D\x0A")' +hg add f3 f4.bat +hg ci -m 6 -d'0 0' + +python print.py < bin +python print.py < f3 +python print.py < f4.bat +echo + +echo '[extensions]' >> .hg/hgrc +echo 'win32text = ' >> .hg/hgrc +echo '[decode]' >> .hg/hgrc +echo '** = cleverdecode:' >> .hg/hgrc +echo '[encode]' >> .hg/hgrc +echo '** = cleverencode:' >> .hg/hgrc +cat .hg/hgrc +echo + +rm f3 f4.bat bin +hg co 2>&1 | python -c 'import sys, os; sys.stdout.write(sys.stdin.read().replace(os.getcwd(), "...."))' +python print.py < bin +python print.py < f3 +python print.py < f4.bat +echo + +python -c 'file("f5.sh", "wb").write("# empty\x0D\x0A")' +hg add f5.sh +hg ci -m 7 -d'0 0' +python print.py < f5.sh +hg cat f5.sh | python print.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-win32text.out Fri Feb 08 11:55:17 2008 +0100 @@ -0,0 +1,179 @@ +[hooks] +pretxncommit.crlf = python:hgext.win32text.forbidcrlf +pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf + + +Attempt to commit or push text file(s) using CRLF line endings +in b1aa5cde7ff4: f +transaction abort! +rollback completed +abort: pretxncommit.crlf hook failed +reverting f + +Attempt to commit or push text file(s) using CRLF line endings +in 88b17af74937: d/f2 +transaction abort! +rollback completed +abort: pretxncommit.crlf hook failed +forgetting d/f2 + + +changeset: 2:b67b2dae057a +tag: tip +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: bin +description: +5 + + +changeset: 1:c72a7d1d0907 +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: f +description: +4 + + +changeset: 0:fcf06d5c4e1d +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: f +description: +1 + + + +1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +adding dupe/a +adding dupe/b +adding dupe/c +adding dupe/d +changeset: 5:6e8a7629ff5b +tag: tip +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: d +description: +d + + +changeset: 4:ac30a42ce8bc +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: b c +description: +b/c + + +changeset: 3:a73b85ef1fb7 +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: a +description: +a + + +changeset: 2:b67b2dae057a +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: bin +description: +5 + + +changeset: 1:c72a7d1d0907 +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: f +description: +4 + + +changeset: 0:fcf06d5c4e1d +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: f +description: +1 + + + +pulling from dupe +searching for changes +adding changesets +adding manifests +adding file changes +added 3 changesets with 4 changes to 4 files +Attempt to commit or push text file(s) using CRLF line endings +in ac30a42ce8bc: b +in ac30a42ce8bc: c +in 6e8a7629ff5b: d + +To prevent this mistake in your local repository, +add to Mercurial.ini or .hg/hgrc: + +[hooks] +pretxncommit.crlf = python:hgext.win32text.forbidcrlf + +and also consider adding: + +[extensions] +hgext.win32text = +[encode] +** = cleverencode: +[decode] +** = cleverdecode: +transaction abort! +rollback completed +abort: pretxnchangegroup.crlf hook failed + +changeset: 2:b67b2dae057a +tag: tip +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: bin +description: +5 + + +changeset: 1:c72a7d1d0907 +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: f +description: +4 + + +changeset: 0:fcf06d5c4e1d +user: test +date: Thu Jan 01 00:00:00 1970 +0000 +files: f +description: +1 + + + +hello<NUL><CR><LF> +some<LF>text<LF> +rem empty<CR><LF> + +[extensions] +win32text = +[decode] +** = cleverdecode: +[encode] +** = cleverencode: + +WARNING: f4.bat already has CRLF line endings +and does not need EOL conversion by the win32text plugin. +Before your next commit, please reconsider your encode/decode settings in +Mercurial.ini or ..../.hg/hgrc. +3 files updated, 0 files merged, 0 files removed, 0 files unresolved +hello<NUL><CR><LF> +some<CR><LF>text<CR><LF> +rem empty<CR><LF> + +# empty<CR><LF> +# empty<LF>
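The new win32text test drives the pretxncommit/pretxnchangegroup forbidcrlf hooks and the cleverencode/cleverdecode filters. A hedged sketch of the check the hook is based on (see hgext/win32text.py for the real implementation): CRLF line endings are rejected in text files, while files that look binary, like 'bin' above, pass through untouched.

    def looks_like_crlf_text(data):
        # Files containing NUL bytes are treated as binary and left alone,
        # which is why the 'bin' file commits cleanly in the test.
        if '\0' in data:
            return False
        return '\r\n' in data

    print(looks_like_crlf_text('hello\r\nworld\r\n'))   # True  -> hook would abort
    print(looks_like_crlf_text('hello\x00\r\n'))        # False -> binary is allowed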