Mercurial > hg
changeset 17468:8fea378242e3
Merge with stable
author | Patrick Mezard <patrick@mezard.eu> |
---|---|
date | Sun, 09 Sep 2012 12:35:06 +0200 |
parents | 448d0c452140 (diff) 8085fed2bf0a (current diff) |
children | 89467a7c2132 |
files | mercurial/cmdutil.py tests/test-largefiles.t |
diffstat | 148 files changed, 1718 insertions(+), 600 deletions(-) [+] |
line wrap: on
line diff
--- a/contrib/bash_completion Mon Sep 03 17:25:50 2012 +0100 +++ b/contrib/bash_completion Sun Sep 09 12:35:06 2012 +0200 @@ -386,6 +386,13 @@ fi } +_hg_cmd_rebase() { + if [[ "$prev" = @(-s|--source|-d|--dest|-b|--base|-r|--rev) ]]; then + _hg_labels + return + fi +} + _hg_cmd_strip() { _hg_labels
--- a/contrib/check-code.py Mon Sep 03 17:25:50 2012 +0100 +++ b/contrib/check-code.py Sun Sep 09 12:35:06 2012 +0200 @@ -91,7 +91,7 @@ uprefix = r"^ \$ " utestpats = [ [ - (r'^(\S| $ ).*(\S[ \t]+|^[ \t]+)\n', "trailing whitespace on non-output"), + (r'^(\S.*|| [$>] .*)[ \t]\n', "trailing whitespace on non-output"), (uprefix + r'.*\|\s*sed[^|>\n]*\n', "use regex test output patterns instead of sed"), (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"), @@ -136,7 +136,7 @@ (r'\w[+/*\-<>]\w', "missing whitespace in expression"), (r'^\s+\w+=\w+[^,)\n]$', "missing whitespace in assignment"), (r'(\s+)try:\n((?:\n|\1\s.*\n)+?)\1except.*?:\n' - r'((?:\n|\1\s.*\n)+?)\1finally:', 'no try/except/finally in Py2.4'), + r'((?:\n|\1\s.*\n)+?)\1finally:', 'no try/except/finally in Python 2.4'), (r'.{81}', "line too long"), (r' x+[xo][\'"]\n\s+[\'"]x', 'string join across lines with no space'), (r'[^\n]\Z', "no trailing newline"), @@ -190,8 +190,8 @@ 'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'), (r'opener\([^)]*\).read\(', "use opener.read() instead"), - (r'BaseException', 'not in Py2.4, use Exception'), - (r'os\.path\.relpath', 'os.path.relpath is not in Py2.5'), + (r'BaseException', 'not in Python 2.4, use Exception'), + (r'os\.path\.relpath', 'os.path.relpath is not in Python 2.5'), (r'opener\([^)]*\).write\(', "use opener.write() instead"), (r'[\s\(](open|file)\([^)]*\)\.read\(', @@ -321,7 +321,7 @@ :f: filepath :logfunc: function used to report error logfunc(filename, linenumber, linecontent, errormessage) - :maxerr: number of error to display before arborting. + :maxerr: number of error to display before aborting. Set to false (default) to report all errors return True if no error is found, False otherwise. @@ -365,7 +365,7 @@ p, msg = pat ignore = None - # fix-up regexes for multiline searches + # fix-up regexes for multi-line searches po = p # \s doesn't match \n p = re.sub(r'(?<!\\)\\s', r'[ \\t]', p)
--- a/contrib/vim/hgcommand.vim Mon Sep 03 17:25:50 2012 +0100 +++ b/contrib/vim/hgcommand.vim Sun Sep 09 12:35:06 2012 +0200 @@ -1442,7 +1442,7 @@ *hgcommand-mappings-override* - The default mappings can be overriden by user-provided instead by mapping + The default mappings can be overridden by user-provided instead by mapping to <Plug>CommandName. This is especially useful when these mappings collide with other existing mappings (vim will warn of this during plugin initialization, but will not clobber the existing mappings).
--- a/contrib/vim/patchreview.vim Mon Sep 03 17:25:50 2012 +0100 +++ b/contrib/vim/patchreview.vim Sun Sep 09 12:35:06 2012 +0200 @@ -55,7 +55,7 @@ " 3) Optional (but recommended for speed) " " Install patchutils ( http://cyberelk.net/tim/patchutils/ ) for your -" OS. For windows it is availble from Cygwin +" OS. For windows it is available from Cygwin " " http://www.cygwin.com "
--- a/contrib/win32/hgwebdir_wsgi.py Mon Sep 03 17:25:50 2012 +0100 +++ b/contrib/win32/hgwebdir_wsgi.py Sun Sep 09 12:35:06 2012 +0200 @@ -27,7 +27,7 @@ # On 64-bit systems, make sure it's assigned a 32-bit app pool. # # - In the application, setup a wildcard script handler mapping of type -# IpsapiModule with the shim dll as its executable. This file MUST reside +# IsapiModule with the shim dll as its executable. This file MUST reside # in the same directory as the shim. Remove all other handlers, if you wish. # # - Make sure the ISAPI and CGI restrictions (configured globally on the
--- a/contrib/wix/templates.wxs Mon Sep 03 17:25:50 2012 +0100 +++ b/contrib/wix/templates.wxs Sun Sep 09 12:35:06 2012 +0200 @@ -33,7 +33,6 @@ <File Name="map-cmdline.default" /> <File Name="map-cmdline.bisect" /> <File Name="map-cmdline.xml" /> - <File Name="template-vars.txt" /> </Component> <Directory Id="templates.atomdir" Name="atom">
--- a/contrib/zsh_completion Mon Sep 03 17:25:50 2012 +0100 +++ b/contrib/zsh_completion Sun Sep 09 12:35:06 2012 +0200 @@ -361,6 +361,19 @@ 'urls:URL:_hg_urls' } +_hg_add_help_topics=( + config dates diffs environment extensions filesets glossary hgignore hgweb + merge-tools multirevs obsolescence patterns phases revisions revsets + subrepos templating urls +) + +_hg_help_topics() { + local topics + (( $#_hg_cmd_list )) || _hg_get_commands + topics=($_hg_cmd_list $_hg_add_help_topics) + _describe -t help_topics 'help topics' topics +} + # Common options _hg_global_opts=( '(--repository -R)'{-R+,--repository}'[repository root directory]:repository:_files -/' @@ -385,18 +398,49 @@ '*'{-I+,--include}'[include names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/' '*'{-X+,--exclude}'[exclude names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/') +_hg_clone_opts=( + $_hg_remote_opts + '(--noupdate -U)'{-U,--noupdate}'[do not update the new working directory]' + '--pull[use pull protocol to copy metadata]' + '--uncompressed[use uncompressed transfer (fast over LAN)]') + +_hg_date_user_opts=( + '(--currentdate -D)'{-D,--currentdate}'[record the current date as commit date]' + '(--currentuser -U)'{-U,--currentuser}'[record the current user as committer]' + '(--date -d)'{-d+,--date}'[record the specified date as commit date]:date:' + '(--user -u)'{-u+,--user}'[record the specified user as committer]:user:') + +_hg_gitlike_opts=( + '(--git -g)'{-g,--git}'[use git extended diff format]') + _hg_diff_opts=( + $_hg_gitlike_opts '(--text -a)'{-a,--text}'[treat all files as text]' - '(--git -g)'{-g,--git}'[use git extended diff format]' - "--nodates[omit dates from diff headers]") + '--nodates[omit dates from diff headers]') + +_hg_mergetool_opts=( + '(--tool -t)'{-t+,--tool}'[specify merge tool]:tool:') _hg_dryrun_opts=( '(--dry-run -n)'{-n,--dry-run}'[do not perform actions, just print output]') +_hg_ignore_space_opts=( + '(--ignore-all-space 
-w)'{-w,--ignore-all-space}'[ignore white space when comparing lines]' + '(--ignore-space-change -b)'{-b,--ignore-space-change}'[ignore changes in the amount of white space]' + '(--ignore-blank-lines -B)'{-B,--ignore-blank-lines}'[ignore changes whose lines are all blank]') + _hg_style_opts=( '--style[display using template map file]:' '--template[display with template]:') +_hg_log_opts=( + $_hg_global_opts $_hg_style_opts $_hg_gitlike_opts + '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' + '(--no-merges -M)'{-M,--no-merges}'[do not show merges]' + '(--patch -p)'{-p,--patch}'[show patch]' + '--stat[output diffstat-style summary of changes]' +) + _hg_commit_opts=( '(-m --message -l --logfile --edit -e)'{-e,--edit}'[edit commit message]' '(-e --edit -l --logfile --message -m)'{-m+,--message}'[use <text> as commit message]:message:' @@ -406,12 +450,20 @@ '(--ssh -e)'{-e+,--ssh}'[specify ssh command to use]:' '--remotecmd[specify hg command to run on the remote side]:') +_hg_branch_bmark_opts=( + '(--bookmark -B)'{-B+,--bookmark}'[specify bookmark(s)]:bookmark:_hg_bookmarks' + '(--branch -b)'{-b+,--branch}'[specify branch(es)]:branch:_hg_branches' +) + +_hg_subrepos_opts=( + '(--subrepos -S)'{-S,--subrepos}'[recurse into subrepositories]') + _hg_cmd() { _call_program hg HGPLAIN=1 hg "$_hg_cmd_globals[@]" "$@" 2> /dev/null } _hg_cmd_add() { - _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \ + _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts $_hg_subrepos_opts \ '*:unknown files:_hg_unknown' } @@ -434,7 +486,7 @@ } _hg_cmd_archive() { - _arguments -s -w : $_hg_global_opts $_hg_pat_opts \ + _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \ '--no-decode[do not pass files through decoders]' \ '(--prefix -p)'{-p+,--prefix}'[directory prefix for files in archive]:' \ '(--rev -r)'{-r+,--rev}'[revision to distribute]:revision:_hg_labels' \ @@ -443,7 +495,7 @@ } _hg_cmd_backout() { - _arguments -s -w : 
$_hg_global_opts $_hg_pat_opts \ + _arguments -s -w : $_hg_global_opts $_hg_mergetool_opts $_hg_pat_opts \ '--merge[merge with old dirstate parent after backout]' \ '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \ '--parent[parent to choose when backing out merge]' \ @@ -456,6 +508,7 @@ _hg_cmd_bisect() { _arguments -s -w : $_hg_global_opts \ '(-)'{-r,--reset}'[reset bisect state]' \ + '(--extend -e)'{-e,--extend}'[extend the bisect range]' \ '(--good -g --bad -b --skip -s --reset -r)'{-g,--good}'[mark changeset good]'::revision:_hg_labels \ '(--good -g --bad -b --skip -s --reset -r)'{-b,--bad}'[mark changeset bad]'::revision:_hg_labels \ '(--good -g --bad -b --skip -s --reset -r)'{-s,--skip}'[skip testing changeset]' \ @@ -466,6 +519,7 @@ _hg_cmd_bookmarks() { _arguments -s -w : $_hg_global_opts \ '(--force -f)'{-f,--force}'[force]' \ + '(--inactive -i)'{-i,--inactive}'[mark a bookmark inactive]' \ '(--rev -r --delete -d --rename -m)'{-r+,--rev}'[revision]:revision:_hg_labels' \ '(--rev -r --delete -d --rename -m)'{-d,--delete}'[delete a given bookmark]' \ '(--rev -r --delete -d --rename -m)'{-m+,--rename}'[rename a given bookmark]:bookmark:_hg_bookmarks' \ @@ -480,13 +534,17 @@ _hg_cmd_branches() { _arguments -s -w : $_hg_global_opts \ - '(--active -a)'{-a,--active}'[show only branches that have unmerge heads]' + '(--active -a)'{-a,--active}'[show only branches that have unmerge heads]' \ + '(--closed -c)'{-c,--closed}'[show normal and closed branches]' } _hg_cmd_bundle() { _arguments -s -w : $_hg_global_opts $_hg_remote_opts \ '(--force -f)'{-f,--force}'[run even when remote repository is unrelated]' \ '(2)*--base[a base changeset to specify instead of a destination]:revision:_hg_labels' \ + '(--branch -b)'{-b+,--branch}'[a specific branch to bundle]' \ + '(--rev -r)'{-r+,--rev}'[changeset(s) to bundle]:' \ + '--all[bundle all changesets in the repository]' \ ':output file:_files' \ ':destination repository:_files -/' } @@ -495,26 +553,28 
@@ _arguments -s -w : $_hg_global_opts $_hg_pat_opts \ '(--output -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \ '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \ + '--decode[apply any matching decode filter]' \ '*:file:_hg_files' } _hg_cmd_clone() { - _arguments -s -w : $_hg_global_opts $_hg_remote_opts \ - '(--noupdate -U)'{-U,--noupdate}'[do not update the new working directory]' \ + _arguments -s -w : $_hg_global_opts $_hg_clone_opts \ '(--rev -r)'{-r+,--rev}'[a changeset you would like to have after cloning]:' \ - '--uncompressed[use uncompressed transfer (fast over LAN)]' \ + '(--updaterev -u)'{-u+,--updaterev}'[revision, tag or branch to check out]' \ + '(--branch -b)'{-b+,--branch}'[clone only the specified branch]' \ ':source repository:_hg_remote' \ ':destination:_hg_clone_dest' } _hg_cmd_commit() { - _arguments -s -w : $_hg_global_opts $_hg_pat_opts \ + _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \ '(--addremove -A)'{-A,--addremove}'[mark new/missing files as added/removed before committing]' \ '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \ '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt' \ '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \ '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \ '--amend[amend the parent of the working dir]' \ + '--close-branch[mark a branch as closed]' \ '*:file:_hg_files' } @@ -527,12 +587,15 @@ _hg_cmd_diff() { typeset -A opt_args - _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_diff_opts \ + _arguments -s -w : $_hg_global_opts $_hg_diff_opts $_hg_ignore_space_opts \ + $_hg_pat_opts $_hg_subrepos_opts \ '*'{-r,--rev}'+[revision]:revision:_hg_revrange' \ '(--show-function -p)'{-p,--show-function}'[show which function each change is in]' \ - '(--ignore-all-space -w)'{-w,--ignore-all-space}'[ignore white space when comparing lines]' \ - 
'(--ignore-space-change -b)'{-b,--ignore-space-change}'[ignore changes in the amount of white space]' \ - '(--ignore-blank-lines -B)'{-B,--ignore-blank-lines}'[ignore changes whose lines are all blank]' \ + '(--change -c)'{-c,--change}'[change made by revision]' \ + '(--text -a)'{-a,--text}'[treat all files as text]' \ + '--reverse[produce a diff that undoes the changes]' \ + '(--unified -U)'{-U,--unified}'[number of lines of context to show]' \ + '--stat[output diffstat-style summary of changes]' \ '*:file:->diff_files' if [[ $state == 'diff_files' ]] @@ -550,20 +613,21 @@ _arguments -s -w : $_hg_global_opts $_hg_diff_opts \ '(--outout -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \ '--switch-parent[diff against the second parent]' \ + '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \ '*:revision:_hg_labels' } +_hg_cmd_forget() { + _arguments -s -w : $_hg_global_opts \ + '*:file:_hg_files' +} + _hg_cmd_graft() { - _arguments -s -w : $_hg_global_opts \ + _arguments -s -w : $_hg_global_opts $_hg_dryrun_opts \ + $_hg_date_user_opts $_hg_mergetool_opts \ '(--continue -c)'{-c,--continue}'[resume interrupted graft]' \ '(--edit -e)'{-e,--edit}'[invoke editor on commit messages]' \ '--log[append graft info to log message]' \ - '(--currentdate -D)'{-D,--currentdate}'[record the current date as commit date]' \ - '(--currentuser -U)'{-U,--currentuser}'[record the current user as committer]' \ - '(--date -d)'{-d,--date}'[record the specified date as commit date]' \ - '(--user -u)'{-u,--user}'[record the specified user as committer]' \ - '(--tool -t)'{-t,--tool}'[specify merge tool]' \ - '(--dry-run -n)'{-n,--dry-run}'[do not perform actions, just print output]' \ '*:revision:_hg_labels' } @@ -577,44 +641,55 @@ '(--line-number -n)'{-n,--line-number}'[print matching line numbers]' \ '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \ '(--user -u)'{-u,--user}'[print user who committed change]' \ + '(--date 
-d)'{-d,--date}'[print date of a changeset]' \ '1:search pattern:' \ '*:files:_hg_files' } _hg_cmd_heads() { _arguments -s -w : $_hg_global_opts $_hg_style_opts \ + '(--topo -t)'{-t,--topo}'[show topological heads only]' \ + '(--closed -c)'{-c,--closed}'[show normal and closed branch heads]' \ '(--rev -r)'{-r+,--rev}'[show only heads which are descendants of rev]:revision:_hg_labels' } _hg_cmd_help() { _arguments -s -w : $_hg_global_opts \ - '*:mercurial command:_hg_commands' + '(--extension -e)'{-e,--extension}'[show only help for extensions]' \ + '(--command -c)'{-c,--command}'[show only help for commands]' \ + '(--keyword -k)'{-k+,--keyword}'[show topics matching keyword]' \ + '*:mercurial help topic:_hg_help_topics' } _hg_cmd_identify() { - _arguments -s -w : $_hg_global_opts \ + _arguments -s -w : $_hg_global_opts $_hg_remote_opts \ '(--rev -r)'{-r+,--rev}'[identify the specified rev]:revision:_hg_labels' \ '(--num -n)'{-n+,--num}'[show local revision number]' \ '(--id -i)'{-i+,--id}'[show global revision id]' \ '(--branch -b)'{-b+,--branch}'[show branch]' \ + '(--bookmark -B)'{-B+,--bookmark}'[show bookmarks]' \ '(--tags -t)'{-t+,--tags}'[show tags]' } _hg_cmd_import() { - _arguments -s -w : $_hg_global_opts \ + _arguments -s -w : $_hg_global_opts $_hg_commit_opts \ '(--strip -p)'{-p+,--strip}'[directory strip option for patch (default: 1)]:count:' \ - '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \ '(--force -f)'{-f,--force}'[skip check for outstanding uncommitted changes]' \ '--bypass[apply patch without touching the working directory]' \ + '--no-commit[do not commit, just update the working directory]' \ + '--exact[apply patch to the nodes from which it was generated]' \ + '--import-branch[use any branch information in patch (implied by --exact)]' \ + '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \ + '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \ + '(--similarity -s)'{-s+,--similarity}'[guess 
renamed files by similarity (0<=s<=100)]:' \ '*:patch:_files' } _hg_cmd_incoming() { - _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \ - '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \ + _arguments -s -w : $_hg_log_opts $_hg_branch_bmark_opts $_hg_remote_opts \ + $_hg_subrepos_opts \ '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \ - '(--patch -p)'{-p,--patch}'[show patch]' \ - '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_tags' \ + '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_labels' \ '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \ '--bundle[file to store the bundles into]:bundle file:_files' \ ':source:_hg_remote' @@ -634,42 +709,41 @@ } _hg_cmd_log() { - _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_style_opts \ + _arguments -s -w : $_hg_log_opts $_hg_pat_opts \ '(--follow --follow-first -f)'{-f,--follow}'[follow changeset or history]' \ '(-f --follow)--follow-first[only follow the first parent of merge changesets]' \ '(--copies -C)'{-C,--copies}'[show copied files]' \ '(--keyword -k)'{-k+,--keyword}'[search for a keyword]:' \ - '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \ '*'{-r,--rev}'[show the specified revision or range]:revision:_hg_revrange' \ - '(--no-merges -M)'{-M,--no-merges}'[do not show merges]' \ '(--only-merges -m)'{-m,--only-merges}'[show only merges]' \ - '(--patch -p)'{-p,--patch}'[show patch]' \ '(--prune -P)'{-P+,--prune}'[do not display revision or any of its ancestors]:revision:_hg_labels' \ + '(--graph -G)'{-G+,--graph}'[show the revision DAG]' \ '(--branch -b)'{-b+,--branch}'[show changesets within the given named branch]:branch:_hg_branches' \ + '(--user -u)'{-u+,--user}'[revisions committed by user]:user:' \ + '(--date -d)'{-d+,--date}'[show revisions matching date spec]:date:' \ '*:files:_hg_files' } 
_hg_cmd_manifest() { _arguments -s -w : $_hg_global_opts \ '--all[list files from all revisions]' \ + '(--rev -r)'{-r+,--rev}'[revision to display]:revision:_hg_labels' \ ':revision:_hg_labels' } _hg_cmd_merge() { - _arguments -s -w : $_hg_global_opts \ + _arguments -s -w : $_hg_global_opts $_hg_mergetool_opts \ '(--force -f)'{-f,--force}'[force a merge with outstanding changes]' \ '(--rev -r 1)'{-r,--rev}'[revision to merge]:revision:_hg_mergerevs' \ '(--preview -P)'{-P,--preview}'[review revisions to merge (no merge is performed)]' \ - '(--tool -t)'{-t,--tool}'[specify merge tool]' \ ':revision:_hg_mergerevs' } _hg_cmd_outgoing() { - _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \ - '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \ + _arguments -s -w : $_hg_log_opts $_hg_branch_bmark_opts $_hg_remote_opts \ + $_hg_subrepos_opts \ '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \ - '(--patch -p)'{-p,--patch}'[show patch]' \ - '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]' \ + '*'{-r,--rev}'[a specific revision you would like to push]:revision:_hg_revrange' \ '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \ ':destination:_hg_remote' } @@ -696,7 +770,7 @@ } _hg_cmd_pull() { - _arguments -s -w : $_hg_global_opts $_hg_remote_opts \ + _arguments -s -w : $_hg_global_opts $_hg_branch_bmark_opts $_hg_remote_opts \ '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \ '(--update -u)'{-u,--update}'[update to new tip if changesets were pulled]' \ '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:' \ @@ -704,9 +778,10 @@ } _hg_cmd_push() { - _arguments -s -w : $_hg_global_opts $_hg_remote_opts \ + _arguments -s -w : $_hg_global_opts $_hg_branch_bmark_opts $_hg_remote_opts \ '(--force -f)'{-f,--force}'[force push]' \ '(--rev -r)'{-r+,--rev}'[a specific revision you would like to 
push]:revision:_hg_labels' \ + '--new-branch[allow pushing a new branch]' \ ':destination:_hg_remote' } @@ -728,7 +803,9 @@ local context state line typeset -A opt_args - _arguments -s -w : $_hg_global_opts \ + _arguments -s -w : $_hg_global_opts $_hg_mergetool_opts $_hg_pat_opts \ + '(--all -a)'{-a,--all}'[select all unresolved files]' \ + '(--no-status -n)'{-n,--no-status}'[hide status prefix]' \ '(--list -l --mark -m --unmark -u)'{-l,--list}'[list state of files needing merge]:*:merged files:->resolve_files' \ '(--mark -m --list -l --unmark -u)'{-m,--mark}'[mark files as resolved]:*:unresolved files:_hg_unresolved' \ '(--unmark -u --list -l --mark -m)'{-u,--unmark}'[unmark files as resolved]:*:resolved files:_hg_resolved' \ @@ -749,6 +826,7 @@ '(--all -a :)'{-a,--all}'[revert all changes when no arguments given]' \ '(--rev -r)'{-r+,--rev}'[revision to revert to]:revision:_hg_labels' \ '(--no-backup -C)'{-C,--no-backup}'[do not save backup copies of files]' \ + '(--date -d)'{-d+,--date}'[tipmost revision matching date]:date code:' \ '*:file:->diff_files' if [[ $state == 'diff_files' ]] @@ -764,6 +842,11 @@ fi } +_hg_cmd_rollback() { + _arguments -s -w : $_hg_global_opts $_hg_dryrun_opts \ + '(--force -f)'{-f,--force}'[ignore safety measures]' \ +} + _hg_cmd_serve() { _arguments -s -w : $_hg_global_opts \ '(--accesslog -A)'{-A+,--accesslog}'[name of access log file]:log file:_files' \ @@ -771,10 +854,15 @@ '(--daemon -d)'{-d,--daemon}'[run server in background]' \ '(--port -p)'{-p+,--port}'[listen port]:listen port:' \ '(--address -a)'{-a+,--address}'[interface address]:interface address:' \ + '--prefix[prefix path to serve from]:directory:_files' \ '(--name -n)'{-n+,--name}'[name to show in web pages]:repository name:' \ + '--web-conf[name of the hgweb config file]:webconf_file:_files' \ + '--pid-file[name of file to write process ID to]:pid_file:_files' \ + '--cmdserver[cmdserver mode]:mode:' \ '(--templates -t)'{-t,--templates}'[web template directory]:template 
dir:_files -/' \ '--style[web template style]:style' \ '--stdio[for remote clients]' \ + '--certificate[certificate file]:cert_file:_files' \ '(--ipv6 -6)'{-6,--ipv6}'[use IPv6 in addition to IPv4]' } @@ -785,7 +873,7 @@ } _hg_cmd_status() { - _arguments -s -w : $_hg_global_opts $_hg_pat_opts \ + _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \ '(--all -A)'{-A,--all}'[show status of all files]' \ '(--modified -m)'{-m,--modified}'[show only modified files]' \ '(--added -a)'{-a,--added}'[show only added files]' \ @@ -798,6 +886,7 @@ '(--copies -C)'{-C,--copies}'[show source of copied files]' \ '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \ '--rev[show difference from revision]:revision:_hg_labels' \ + '--change[list the changed files of a revision]:revision:_hg_labels' \ '*:files:_files' } @@ -813,11 +902,14 @@ '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \ '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \ '(--rev -r)'{-r+,--rev}'[revision to tag]:revision:_hg_labels' \ + '(--force -f)'{-f,--force}'[force tag]' \ + '--remove[remove a tag]' \ + '(--edit -e)'{-e,--edit}'[edit commit message]' \ ':tag name:' } _hg_cmd_tip() { - _arguments -s -w : $_hg_global_opts $_hg_style_opts \ + _arguments -s -w : $_hg_global_opts $_hg_gitlike_opts $_hg_style_opts \ '(--patch -p)'{-p,--patch}'[show patch]' } @@ -831,6 +923,8 @@ _arguments -s -w : $_hg_global_opts \ '(--clean -C)'{-C,--clean}'[overwrite locally modified files]' \ '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \ + '(--check -c)'{-c,--check}'[update across branches if no uncommitted changes]' \ + '(--date -d)'{-d+,--date}'[tipmost revision matching date]' \ ':revision:_hg_labels' } @@ -893,7 +987,16 @@ '(--summary -s)'{-s,--summary}'[print first line of patch header]') _hg_cmd_qapplied() { - _arguments -s -w : $_hg_global_opts $_hg_qseries_opts + _arguments -s -w : $_hg_global_opts $_hg_qseries_opts \ + '(--last 
-1)'{-1,--last}'[show only the preceding applied patch]' \ + '*:patch:_hg_qapplied' +} + +_hg_cmd_qclone() { + _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_clone_opts \ + '(--patches -p)'{-p+,--patches}'[location of source patch repository]' \ + ':source repository:_hg_remote' \ + ':destination:_hg_clone_dest' } _hg_cmd_qdelete() { @@ -904,7 +1007,8 @@ } _hg_cmd_qdiff() { - _arguments -s -w : $_hg_global_opts $_hg_pat_opts \ + _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_diff_opts \ + $_hg_ignore_space_opts \ '*:pattern:_hg_files' } @@ -917,12 +1021,15 @@ _hg_cmd_qfold() { _arguments -s -w : $_hg_global_opts $_h_commit_opts \ '(--keep,-k)'{-k,--keep}'[keep folded patch files]' \ + '(--force -f)'{-f,--force}'[overwrite any local changes]' \ + '--no-backup[do not save backup copies of files]' \ '*:unapplied patch:_hg_qunapplied' } _hg_cmd_qgoto() { _arguments -s -w : $_hg_global_opts \ '(--force -f)'{-f,--force}'[overwrite any local changes]' \ + '--keep-changes[tolerate non-conflicting local changes]' \ ':patch:_hg_qseries' } @@ -940,17 +1047,17 @@ } _hg_cmd_qimport() { - _arguments -s -w : $_hg_global_opts \ + _arguments -s -w : $_hg_global_opts $_hg_gitlike_opts \ '(--existing -e)'{-e,--existing}'[import file in patch dir]' \ '(--name -n 2)'{-n+,--name}'[patch file name]:name:' \ '(--force -f)'{-f,--force}'[overwrite existing files]' \ '*'{-r+,--rev}'[place existing revisions under mq control]:revision:_hg_revrange' \ + '(--push -P)'{-P,--push}'[qpush after importing]' \ '*:patch:_files' } _hg_cmd_qnew() { - _arguments -s -w : $_hg_global_opts $_hg_commit_opts \ - '(--force -f)'{-f,--force}'[import uncommitted changes into patch]' \ + _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_date_user_opts $_hg_gitlike_opts \ ':patch:' } @@ -961,8 +1068,9 @@ _hg_cmd_qpop() { _arguments -s -w : $_hg_global_opts \ '(--all -a :)'{-a,--all}'[pop all patches]' \ - '(--name -n)'{-n+,--name}'[queue name to pop]:' \ '(--force 
-f)'{-f,--force}'[forget any local changes]' \ + '--keep-changes[tolerate non-conflicting local changes]' \ + '--no-backup[do not save backup copies of files]' \ ':patch:_hg_qapplied' } @@ -974,24 +1082,23 @@ _arguments -s -w : $_hg_global_opts \ '(--all -a :)'{-a,--all}'[apply all patches]' \ '(--list -l)'{-l,--list}'[list patch name in commit text]' \ - '(--merge -m)'{-m+,--merge}'[merge from another queue]:' \ - '(--name -n)'{-n+,--name}'[merge queue name]:' \ '(--force -f)'{-f,--force}'[apply if the patch has rejects]' \ '(--exact -e)'{-e,--exact}'[apply the target patch to its recorded parent]' \ '--move[reorder patch series and apply only the patch]' \ + '--keep-changes[tolerate non-conflicting local changes]' \ + '--no-backup[do not save backup copies of files]' \ ':patch:_hg_qunapplied' } _hg_cmd_qrefresh() { - _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_commit_opts \ - '(--git -g)'{-g,--git}'[use git extended diff format]' \ + _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_commit_opts $_hg_gitlike_opts \ '(--short -s)'{-s,--short}'[short refresh]' \ '*:files:_hg_files' } _hg_cmd_qrename() { _arguments -s -w : $_hg_global_opts \ - ':patch:_hg_qseries' \ + ':patch:_hg_qunapplied' \ ':destination:' } @@ -1010,7 +1117,8 @@ } _hg_cmd_qunapplied() { - _arguments -s -w : $_hg_global_opts $_hg_qseries_opts + _arguments -s -w : $_hg_global_opts $_hg_qseries_opts \ + '(--first -1)'{-1,--first}'[show only the first patch]' } _hg_cmd_qtop() { @@ -1019,16 +1127,17 @@ _hg_cmd_strip() { _arguments -s -w : $_hg_global_opts \ - '(--force -f)'{-f,--force}'[force multi-head removal]' \ - '(--backup -b)'{-b,--backup}'[bundle unrelated changesets]' \ - '(--nobackup -n)'{-n,--nobackup}'[no backups]' \ + '(--force -f)'{-f,--force}'[force removal, discard uncommitted changes, no backup]' \ + '(--no-backup -n)'{-n,--no-backup}'[no backups]' \ + '(--keep -k)'{-k,--keep}'[do not modify working copy during strip]' \ + '(--bookmark -B)'{-B+,--bookmark}'[remove revs 
only reachable from given bookmark]:bookmark:_hg_bookmarks' \ + '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \ ':revision:_hg_labels' } # Patchbomb _hg_cmd_email() { - _arguments -s -w : $_hg_global_opts $_hg_remote_opts \ - '(--git -g)'{-g,--git}'[use git extended diff format]' \ + _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_gitlike_opts \ '--plain[omit hg patch header]' \ '--body[send patches as inline message text (default)]' \ '(--outgoing -o)'{-o,--outgoing}'[send changes not found in the target repository]' \ @@ -1058,20 +1167,61 @@ # Rebase _hg_cmd_rebase() { - _arguments -s -w : $_hg_global_opts \ + _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_mergetool_opts \ '*'{-r,--rev}'[rebase these revisions]:revision:_hg_revrange' \ - '(--source -s)'{-s,--source}'[rebase from the specified changeset]:revision:_hg_labels' \ - '(--base -b)'{-b,--base}'[rebase from the base of the specified changeset]:revision:_hg_labels' \ - '(--dest -d)'{-d,--dest}'[rebase onto the specified changeset]' \ + '(--source -s)'{-s+,--source}'[rebase from the specified changeset]:revision:_hg_labels' \ + '(--base -b)'{-b+,--base}'[rebase from the base of the specified changeset]:revision:_hg_labels' \ + '(--dest -d)'{-d+,--dest}'[rebase onto the specified changeset]:revision:_hg_labels' \ '--collapse[collapse the rebased changeset]' \ - '(--message -m)'{-m+,--message}'[use <text> as collapse commit message]:text:' \ - '(--edit -e)'{-e,--edit}'[invoke editor on commit messages]' \ - '(--logfile -l)'{-l+,--logfile}'[read collapse commit message from <file>]:log file:_files -g \*.txt' \ '--keep[keep original changeset]' \ '--keepbranches[keep original branch name]' \ - '(--tool -t)'{-t,--tool}'[specify merge tool]' \ '(--continue -c)'{-c,--continue}'[continue an interrupted rebase]' \ '(--abort -a)'{-a,--abort}'[abort an interrupted rebase]' \ } +# Record +_hg_cmd_record() { + _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_pat_opts \ + 
$_hg_ignore_space_opts $_hg_subrepos_opts \ + '(--addremove -A)'{-A,--addremove}'[mark new/missing files as added/removed before committing]' \ + '--close-branch[mark a branch as closed, hiding it from the branch list]' \ + '--amend[amend the parent of the working dir]' \ + '(--date -d)'{-d+,--date}'[record the specified date as commit date]:date:' \ + '(--user -u)'{-u+,--user}'[record the specified user as committer]:user:' +} + +_hg_cmd_qrecord() { + _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_date_user_opts $_hg_gitlike_opts \ + $_hg_pat_opts $_hg_ignore_space_opts $_hg_subrepos_opts +} + +# Convert +_hg_cmd_convert() { +_arguments -s -w : $_hg_global_opts \ + '(--source-type -s)'{-s,--source-type}'[source repository type]' \ + '(--dest-type -d)'{-d,--dest-type}'[destination repository type]' \ + '(--rev -r)'{-r+,--rev}'[import up to target revision]:revision:' \ + '(--authormap -A)'{-A+,--authormap}'[remap usernames using this file]:file:_files' \ + '--filemap[remap file names using contents of file]:file:_files' \ + '--splicemap[splice synthesized history into place]:file:_files' \ + '--branchmap[change branch names while converting]:file:_files' \ + '--branchsort[try to sort changesets by branches]' \ + '--datesort[try to sort changesets by date]' \ + '--sourcesort[preserve source changesets order]' +} + +# Graphlog +_hg_cmd_glog() { + _hg_cmd_log $@ +} + +# Purge +_hg_cmd_purge() { + _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \ + '(--abort-on-err -a)'{-a,--abort-on-err}'[abort if an error occurs]' \ + '--all[purge ignored files too]' \ + '(--print -p)'{-p,--print}'[print filenames instead of deleting them]' \ + '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs (implies -p/--print)]' +} + _hg "$@"
--- a/hgext/bugzilla.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/bugzilla.py Sun Sep 09 12:35:06 2012 +0200 @@ -516,7 +516,7 @@ raise util.Abort(_('unknown database schema')) return ids[0][0] -# Buzgilla via XMLRPC interface. +# Bugzilla via XMLRPC interface. class cookietransportrequest(object): """A Transport request method that retains cookies over its lifetime.
--- a/hgext/color.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/color.py Sun Sep 09 12:35:06 2012 +0200 @@ -499,5 +499,5 @@ orig(m.group(2), **opts) m = re.match(ansire, m.group(3)) finally: - # Explicity reset original attributes + # Explicitly reset original attributes _kernel32.SetConsoleTextAttribute(stdout, origattr)
--- a/hgext/convert/__init__.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/__init__.py Sun Sep 09 12:35:06 2012 +0200 @@ -74,7 +74,7 @@ The authormap is a simple text file that maps each source commit author to a destination commit author. It is handy for source SCMs - that use unix logins to identify authors (eg: CVS). One line per + that use unix logins to identify authors (e.g.: CVS). One line per author mapping and the line format is:: source author = destination author
--- a/hgext/convert/bzr.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/bzr.py Sun Sep 09 12:35:06 2012 +0200 @@ -246,7 +246,7 @@ # register the change as move renames[topath] = frompath - # no futher changes, go to the next change + # no further changes, go to the next change continue # we got unicode paths, need to convert them
--- a/hgext/convert/convcmd.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/convcmd.py Sun Sep 09 12:35:06 2012 +0200 @@ -167,7 +167,7 @@ def toposort(self, parents, sortmode): '''Return an ordering such that every uncommitted changeset is - preceeded by all its uncommitted ancestors.''' + preceded by all its uncommitted ancestors.''' def mapchildren(parents): """Return a (children, roots) tuple where 'children' maps parent
--- a/hgext/convert/cvs.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/cvs.py Sun Sep 09 12:35:06 2012 +0200 @@ -202,7 +202,7 @@ def getfile(self, name, rev): def chunkedread(fp, count): - # file-objects returned by socked.makefile() do not handle + # file-objects returned by socket.makefile() do not handle # large read() requests very well. chunksize = 65536 output = StringIO()
--- a/hgext/convert/cvsps.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/cvsps.py Sun Sep 09 12:35:06 2012 +0200 @@ -156,8 +156,8 @@ # The cvsps cache pickle needs a uniquified name, based on the # repository location. The address may have all sort of nasties # in it, slashes, colons and such. So here we take just the - # alphanumerics, concatenated in a way that does not mix up the - # various components, so that + # alphanumeric characters, concatenated in a way that does not + # mix up the various components, so that # :pserver:user@server:/path # and # /pserver/user/server/path @@ -503,7 +503,7 @@ # Check if log entry belongs to the current changeset or not. - # Since CVS is file centric, two different file revisions with + # Since CVS is file-centric, two different file revisions with # different branchpoints should be treated as belonging to two # different changesets (and the ordering is important and not # honoured by cvsps at this point).
--- a/hgext/convert/gnuarch.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/gnuarch.py Sun Sep 09 12:35:06 2012 +0200 @@ -89,7 +89,7 @@ # Get the complete list of revisions for that tree version output, status = self.runlines('revisions', '-r', '-f', treeversion) - self.checkexit(status, 'failed retrieveing revisions for %s' + self.checkexit(status, 'failed retrieving revisions for %s' % treeversion) # No new iteration unless a revision has a continuation-of header
--- a/hgext/convert/monotone.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/monotone.py Sun Sep 09 12:35:06 2012 +0200 @@ -225,7 +225,6 @@ return [self.rev] def getchanges(self, rev): - #revision = self.mtncmd("get_revision %s" % rev).split("\n\n") revision = self.mtnrun("get_revision", rev).split("\n\n") files = {} ignoremove = {}
--- a/hgext/convert/subversion.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/convert/subversion.py Sun Sep 09 12:35:06 2012 +0200 @@ -887,8 +887,8 @@ io = StringIO() info = svn.ra.get_file(self.ra, file, revnum, io) data = io.getvalue() - # ra.get_files() seems to keep a reference on the input buffer - # preventing collection. Release it explicitely. + # ra.get_file() seems to keep a reference on the input buffer + # preventing collection. Release it explicitly. io.close() if isinstance(info, list): info = info[-1] @@ -923,7 +923,7 @@ # Given the repository url of this wc, say # "http://server/plone/CMFPlone/branches/Plone-2_0-branch" # extract the "entry" portion (a relative path) from what - # svn log --xml says, ie + # svn log --xml says, i.e. # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py" # that is to say "tests/PloneTestCase.py" if path.startswith(module):
--- a/hgext/extdiff.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/extdiff.py Sun Sep 09 12:35:06 2012 +0200 @@ -109,7 +109,7 @@ return dirname, fns_and_mtime def dodiff(ui, repo, diffcmd, diffopts, pats, opts): - '''Do the actuall diff: + '''Do the actual diff: - copy to a temp structure if diffing 2 internal revisions - copy to a temp structure if diffing working revision with
--- a/hgext/hgcia.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/hgcia.py Sun Sep 09 12:35:06 2012 +0200 @@ -22,7 +22,7 @@ # Style to use (optional) #style = foo # The URL of the CIA notification service (optional) - # You can use mailto: URLs to send by email, eg + # You can use mailto: URLs to send by email, e.g. # mailto:cia@cia.vc # Make sure to set email.from if you do this. #url = http://cia.vc/
--- a/hgext/histedit.py	Mon Sep 03 17:25:50 2012 +0100
+++ b/hgext/histedit.py	Sun Sep 09 12:35:06 2012 +0200
@@ -175,6 +175,26 @@
 #
 """)
 
+def foldchanges(ui, repo, node1, node2, opts):
+    """Produce a new changeset that represents the diff from node1 to node2."""
+    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
+    try:
+        fp = os.fdopen(fd, 'w')
+        diffopts = patch.diffopts(ui, opts)
+        diffopts.git = True
+        diffopts.ignorews = False
+        diffopts.ignorewsamount = False
+        diffopts.ignoreblanklines = False
+        gen = patch.diff(repo, node1, node2, opts=diffopts)
+        for chunk in gen:
+            fp.write(chunk)
+        fp.close()
+        files = set()
+        patch.patch(ui, repo, patchfile, files=files, eolmode=None)
+    finally:
+        os.unlink(patchfile)
+    return files
+
 def between(repo, old, new, keep):
     revs = [old]
     current = old
@@ -200,27 +220,12 @@
         ui.debug('node %s unchanged\n' % ha)
         return oldctx, [], [], []
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
     try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-            if not files:
-                ui.warn(_('%s: empty changeset')
-                        % node.hex(ha))
-                return ctx, [], [], []
-        finally:
-            os.unlink(patchfile)
+        files = foldchanges(ui, repo, oldctx.p1().node(), ha, opts)
+        if not files:
+            ui.warn(_('%s: empty changeset')
+                    % node.hex(ha))
+            return ctx, [], [], []
     except Exception:
         raise util.Abort(_('Fix up the change and run '
                            'hg histedit --continue'))
@@ -232,23 +237,8 @@
 def edit(ui, repo, ctx, ha, opts):
     oldctx = repo[ha]
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
     try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-        finally:
-            os.unlink(patchfile)
+        foldchanges(ui, repo, oldctx.p1().node(), ha, opts)
     except Exception:
         pass
     raise util.Abort(_('Make changes as needed, you may commit or record as '
@@ -258,27 +248,12 @@
 def fold(ui, repo, ctx, ha, opts):
     oldctx = repo[ha]
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
     try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-            if not files:
-                ui.warn(_('%s: empty changeset')
-                        % node.hex(ha))
-                return ctx, [], [], []
-        finally:
-            os.unlink(patchfile)
+        files = foldchanges(ui, repo, oldctx.p1().node(), ha, opts)
+        if not files:
+            ui.warn(_('%s: empty changeset')
+                    % node.hex(ha))
+            return ctx, [], [], []
     except Exception:
         raise util.Abort(_('Fix up the change and run '
                            'hg histedit --continue'))
@@ -289,22 +264,7 @@
 def finishfold(ui, repo, ctx, oldctx, newnode, opts, internalchanges):
     parent = ctx.parents()[0].node()
     hg.update(repo, parent)
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, parent, newnode, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
-    files = set()
-    try:
-        patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-    finally:
-        os.unlink(patchfile)
+    foldchanges(ui, repo, parent, newnode, opts)
     newmessage = '\n***\n'.join(
         [ctx.description()] +
         [repo[r].description() for r in internalchanges] +
@@ -326,23 +286,8 @@
 def message(ui, repo, ctx, ha, opts):
     oldctx = repo[ha]
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
     try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-        finally:
-            os.unlink(patchfile)
+        foldchanges(ui, repo, oldctx.p1().node(), ha, opts)
     except Exception:
         raise util.Abort(_('Fix up the change and run '
                            'hg histedit --continue'))
--- a/hgext/inotify/linux/watcher.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/inotify/linux/watcher.py Sun Sep 09 12:35:06 2012 +0200 @@ -282,7 +282,7 @@ callable that takes one parameter. It will be called each time a directory is about to be automatically watched. If it returns True, the directory will be watched if it still exists, - otherwise, it will beb skipped.''' + otherwise, it will be skipped.''' super(autowatcher, self).__init__() self.addfilter = addfilter
--- a/hgext/keyword.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/keyword.py Sun Sep 09 12:35:06 2012 +0200 @@ -7,7 +7,7 @@ # # $Id$ # -# Keyword expansion hack against the grain of a DSCM +# Keyword expansion hack against the grain of a Distributed SCM # # There are many good reasons why this is not needed in a distributed # SCM, still it may be useful in very small projects based on single @@ -168,7 +168,7 @@ return subfunc(r'$\1$', text) def _preselect(wstatus, changed): - '''Retrieves modfied and added files from a working directory state + '''Retrieves modified and added files from a working directory state and returns the subset of each contained in given changed files retrieved from a change context.''' modified, added = wstatus[:2]
--- a/hgext/largefiles/basestore.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/largefiles/basestore.py Sun Sep 09 12:35:06 2012 +0200 @@ -55,7 +55,7 @@ def get(self, files): '''Get the specified largefiles from the store and write to local files under repo.root. files is a list of (filename, hash) - tuples. Return (success, missing), lists of files successfuly + tuples. Return (success, missing), lists of files successfully downloaded and those not found in the store. success is a list of (filename, hash) tuples; missing is a list of filenames that we could not get. (The detailed error message will already have
--- a/hgext/largefiles/lfcommands.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/largefiles/lfcommands.py Sun Sep 09 12:35:06 2012 +0200 @@ -65,7 +65,7 @@ dstlock = rdst.lock() # Get a list of all changesets in the source. The easy way to do this - # is to simply walk the changelog, using changelog.nodesbewteen(). + # is to simply walk the changelog, using changelog.nodesbetween(). # Take a look at mercurial/revlog.py:639 for more details. # Use a generator instead of a list to decrease memory usage ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None, @@ -177,7 +177,7 @@ if f not in lfiles and f not in normalfiles: islfile = _islfile(f, ctx, matcher, size) # If this file was renamed or copied then copy - # the lfileness of its predecessor + # the largefile-ness of its predecessor if f in ctx.manifest(): fctx = ctx.filectx(f) renamed = fctx.renamed() @@ -389,7 +389,7 @@ # If we are mid-merge, then we have to trust the standin that is in the # working copy to have the correct hashvalue. This is because the # original hg.merge() already updated the standin as part of the normal - # merge process -- we just have to udpate the largefile to match. + # merge process -- we just have to update the largefile to match. if (getattr(repo, "_ismerging", False) and os.path.exists(repo.wjoin(lfutil.standin(lfile)))): expectedhash = lfutil.readstandin(repo, lfile) @@ -444,11 +444,13 @@ cachelfiles(ui, repo, '.', lfiles) updated, removed = 0, 0 - for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles): - # increment the appropriate counter according to _updatelfile's - # return value - updated += i > 0 and i or 0 - removed -= i < 0 and i or 0 + for f in lfiles: + i = _updatelfile(repo, lfdirstate, f) + if i: + if i > 0: + updated += i + else: + removed -= i if printmessage and (removed or updated) and not printed: ui.status(_('getting changed largefiles\n')) printed = True
--- a/hgext/largefiles/lfutil.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/largefiles/lfutil.py Sun Sep 09 12:35:06 2012 +0200 @@ -296,8 +296,8 @@ '''Return the repo-relative path to the standin for the specified big file.''' # Notes: - # 1) Most callers want an absolute path, but _createstandin() needs - # it repo-relative so lfadd() can pass it to repoadd(). So leave + # 1) Some callers want an absolute path, but for instance addlargefiles + # needs it repo-relative so it can be passed to repoadd(). So leave # it up to the caller to use repo.wjoin() to get an absolute path. # 2) Join with '/' because that's what dirstate always uses, even on # Windows. Change existing separator to '/' first in case we are
--- a/hgext/largefiles/localstore.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/largefiles/localstore.py Sun Sep 09 12:35:06 2012 +0200 @@ -18,7 +18,7 @@ class localstore(basestore.basestore): '''localstore first attempts to grab files out of the store in the remote - Mercurial repository. Failling that, it attempts to grab the files from + Mercurial repository. Failing that, it attempts to grab the files from the user cache.''' def __init__(self, ui, repo, remote):
--- a/hgext/largefiles/overrides.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/largefiles/overrides.py Sun Sep 09 12:35:06 2012 +0200 @@ -155,7 +155,7 @@ ui.status(_('removing %s\n') % m.rel(f)) # Need to lock because standin files are deleted then removed from the - # repository and we could race inbetween. + # repository and we could race in-between. wlock = repo.wlock() try: lfdirstate = lfutil.openlfdirstate(ui, repo) @@ -251,7 +251,7 @@ # Override needs to refresh standins so that update's normal merge # will go through properly. Then the other update hook (overriding repo.update) -# will get the new files. Filemerge is also overriden so that the merge +# will get the new files. Filemerge is also overridden so that the merge # will merge standins correctly. def overrideupdate(orig, ui, repo, *pats, **opts): lfdirstate = lfutil.openlfdirstate(ui, repo) @@ -696,7 +696,7 @@ result = orig(ui, repo, source, **opts) # If we do not have the new largefiles for any new heads we pulled, we # will run into a problem later if we try to merge or rebase with one of - # these heads, so cache the largefiles now direclty into the system + # these heads, so cache the largefiles now directly into the system # cache. ui.status(_("caching new largefiles\n")) numcached = 0 @@ -912,7 +912,7 @@ ui.status(_('removing %s\n') % m.rel(f)) # Need to lock because standin files are deleted then removed from the - # repository and we could race inbetween. + # repository and we could race in-between. wlock = repo.wlock() try: lfdirstate = lfutil.openlfdirstate(ui, repo)
--- a/hgext/largefiles/remotestore.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/largefiles/remotestore.py Sun Sep 09 12:35:06 2012 +0200 @@ -4,7 +4,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -'''remote largefile store; the base class for servestore''' +'''remote largefile store; the base class for wirestore''' import urllib2
--- a/hgext/largefiles/reposetup.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/largefiles/reposetup.py Sun Sep 09 12:35:06 2012 +0200 @@ -156,7 +156,7 @@ # Create a function that we can use to override what is # normally the ignore matcher. We've already checked # for ignored files on the first dirstate walk, and - # unecessarily re-checking here causes a huge performance + # unnecessarily re-checking here causes a huge performance # hit because lfdirstate only knows about largefiles def _ignoreoverride(self): return False @@ -192,7 +192,7 @@ (unsure, modified, added, removed, missing, unknown, ignored, clean) = s # Replace the list of ignored and unknown files with - # the previously caclulated lists, and strip out the + # the previously calculated lists, and strip out the # largefiles lfiles = set(lfdirstate._map) ignored = set(result[5]).difference(lfiles)
--- a/hgext/mq.py	Mon Sep 03 17:25:50 2012 +0100
+++ b/hgext/mq.py	Sun Sep 09 12:35:06 2012 +0200
@@ -1522,7 +1522,7 @@
             #
             # this should really read:
             #   mm, dd, aa = repo.status(top, patchparent)[:3]
-            # but we do it backwards to take advantage of manifest/chlog
+            # but we do it backwards to take advantage of manifest/changelog
             # caching against the next repo.status call
             mm, aa, dd = repo.status(patchparent, top)[:3]
             changes = repo.changelog.read(top)
@@ -1535,7 +1535,7 @@
                 # if amending a patch, we start with existing
                 # files plus specified files - unfiltered
                 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
-                # filter with inc/exl options
+                # filter with include/exclude options
                 matchfn = scmutil.match(repo[None], opts=opts)
             else:
                 match = scmutil.matchall(repo)
@@ -3185,9 +3185,9 @@
     revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
-    # queue.finish may changes phases but leave the responsability to lock the
+    # queue.finish may change phases but leaves the responsibility to lock the
     # repo to the caller to avoid deadlock with wlock. This command code is
-    # responsability for this locking.
+    # responsible for this locking.
    lock = repo.lock()
    try:
        q.finish(repo, revs)
--- a/hgext/record.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/record.py Sun Sep 09 12:35:06 2012 +0200 @@ -33,7 +33,7 @@ - ('file', [header_lines + fromfile + tofile]) - ('context', [context_lines]) - ('hunk', [hunk_lines]) - - ('range', (-start,len, +start,len, diffp)) + - ('range', (-start,len, +start,len, proc)) """ lr = patch.linereader(fp) @@ -81,7 +81,7 @@ class header(object): """patch header - XXX shoudn't we move this to mercurial/patch.py ? + XXX shouldn't we move this to mercurial/patch.py ? """ diffgit_re = re.compile('diff --git a/(.*) b/(.*)$') diff_re = re.compile('diff -r .* (.*)$') @@ -496,6 +496,9 @@ raise util.Abort(_('running non-interactively, use %s instead') % cmdsuggest) + # make sure username is set before going interactive + ui.username() + def recordfunc(ui, repo, message, match, opts): """This is generic record driver.
--- a/hgext/schemes.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/schemes.py Sun Sep 09 12:35:06 2012 +0200 @@ -61,7 +61,7 @@ return '<ShortRepository: %s>' % self.scheme def instance(self, ui, url, create): - # Should this use urlmod.url(), or is manual parsing better? + # Should this use the util.url class, or is manual parsing better? url = url.split('://', 1)[1] parts = url.split('/', self.parts) if len(parts) > self.parts:
--- a/hgext/win32mbcs.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/win32mbcs.py Sun Sep 09 12:35:06 2012 +0200 @@ -119,7 +119,7 @@ def f(*args, **kwds): return wrapper(func, args, kwds) try: - f.__name__ = func.__name__ # fail with python23 + f.__name__ = func.__name__ # fails with Python 2.3 except Exception: pass setattr(module, name, f)
--- a/hgext/zeroconf/Zeroconf.py Mon Sep 03 17:25:50 2012 +0100 +++ b/hgext/zeroconf/Zeroconf.py Sun Sep 09 12:35:06 2012 +0200 @@ -66,7 +66,7 @@ using select() for socket reads tested on Debian unstable with Python 2.2.2""" -"""0.05 update - ensure case insensitivty on domain names +"""0.05 update - ensure case insensitivity on domain names support for unicast DNS queries""" """0.04 update - added some unit tests @@ -114,7 +114,7 @@ _FLAGS_QR_QUERY = 0x0000 # query _FLAGS_QR_RESPONSE = 0x8000 # response -_FLAGS_AA = 0x0400 # Authorative answer +_FLAGS_AA = 0x0400 # Authoritative answer _FLAGS_TC = 0x0200 # Truncated _FLAGS_RD = 0x0100 # Recursion desired _FLAGS_RA = 0x8000 # Recursion available @@ -335,7 +335,7 @@ raise AbstractMethodException def toString(self, other): - """String representation with addtional information""" + """String representation with additional information""" arg = "%s/%s,%s" % (self.ttl, self.getRemainingTTL(currentTimeMillis()), other) return DNSEntry.toString(self, "record", arg) @@ -650,7 +650,7 @@ if now == 0 or not record.isExpired(now): self.answers.append((record, now)) - def addAuthorativeAnswer(self, record): + def addAuthoritativeAnswer(self, record): """Adds an authoritative answer""" self.authorities.append(record) @@ -904,7 +904,7 @@ to cache information as it arrives. It requires registration with an Engine object in order to have - the read() method called when a socket is availble for reading.""" + the read() method called when a socket is available for reading.""" def __init__(self, zeroconf): self.zeroconf = zeroconf @@ -1140,7 +1140,7 @@ return self.port def getPriority(self): - """Pirority accessor""" + """Priority accessor""" return self.priority def getWeight(self): @@ -1259,7 +1259,7 @@ # SO_REUSEADDR should be equivalent to SO_REUSEPORT for # multicast UDP sockets (p 731, "TCP/IP Illustrated, # Volume 2"), but some BSD-derived systems require - # SO_REUSEPORT to be specified explicity. 
Also, not all + # SO_REUSEPORT to be specified explicitly. Also, not all # versions of Python have SO_REUSEPORT available. So # if you're on a BSD-based system, and haven't upgraded # to Python 2.3 yet, you may find this library doesn't @@ -1272,10 +1272,8 @@ self.socket.bind(self.group) except Exception: # Some versions of linux raise an exception even though - # the SO_REUSE* options have been set, so ignore it - # + # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it pass - #self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.intf) + socket.inet_aton('0.0.0.0')) self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) self.listeners = [] @@ -1433,7 +1431,7 @@ out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA) self.debug = out out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN)) - out.addAuthorativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)) + out.addAuthoritativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)) self.send(out) i += 1 nextTime += _CHECK_TIME
--- a/i18n/ru.po Mon Sep 03 17:25:50 2012 +0100 +++ b/i18n/ru.po Sun Sep 09 12:35:06 2012 +0200 @@ -173,7 +173,7 @@ msgstr "" "Project-Id-Version: Mercurial\n" "Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n" -"POT-Creation-Date: 2012-08-09 13:23+0400\n" +"POT-Creation-Date: 2012-08-13 16:01+0400\n" "PO-Revision-Date: 2011-05-12 23:48+0400\n" "Last-Translator: Alexander Sauta <demosito@gmail.com>\n" "Language-Team: Russian\n" @@ -14542,11 +14542,11 @@ " bookmarks`)." msgid "" -" Update sets the working directory's parent revison to the specified\n" +" Update sets the working directory's parent revision to the specified\n" " changeset (see :hg:`help parents`)." msgstr "" -" Update устанавливает ревизию родителя рабочего каталога в заданный\n" -" набор изменений (см. :hg:`help update`)." +" Update делает заданный набор изменений родительской ревизией рабочего\n" +" каталога (см. :hg:`help parents`)." msgid "" " If the changeset is not a descendant or ancestor of the working\n"
--- a/mercurial/archival.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/archival.py Sun Sep 09 12:35:06 2012 +0200 @@ -13,6 +13,10 @@ import cStringIO, os, tarfile, time, zipfile import zlib, gzip +# from unzip source code: +_UNX_IFREG = 0x8000 +_UNX_IFLNK = 0xa000 + def tidyprefix(dest, kind, prefix): '''choose prefix to use for names in archive. make sure prefix is safe for consumers.''' @@ -173,10 +177,10 @@ # unzip will not honor unix file modes unless file creator is # set to unix (id 3). i.create_system = 3 - ftype = 0x8000 # UNX_IFREG in unzip source code + ftype = _UNX_IFREG if islink: mode = 0777 - ftype = 0xa000 # UNX_IFLNK in unzip source code + ftype = _UNX_IFLNK i.external_attr = (mode | ftype) << 16L self.z.writestr(i, data)
--- a/mercurial/bookmarks.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/bookmarks.py Sun Sep 09 12:35:06 2012 +0200 @@ -58,7 +58,7 @@ raise return None try: - # No readline() in posixfile_nt, reading everything is cheap + # No readline() in osutil.posixfile, reading everything is cheap mark = encoding.tolocal((file.readlines() or [''])[0]) if mark == '' or mark not in repo._bookmarks: mark = None
--- a/mercurial/byterange.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/byterange.py Sun Sep 09 12:35:06 2012 +0200 @@ -32,7 +32,7 @@ This was extremely simple. The Range header is a HTTP feature to begin with so all this class does is tell urllib2 that the - "206 Partial Content" reponse from the HTTP server is what we + "206 Partial Content" response from the HTTP server is what we expected. Example: @@ -64,7 +64,7 @@ class RangeableFileObject(object): """File object wrapper to enable raw range handling. - This was implemented primarilary for handling range + This was implemented primarily for handling range specifications for file:// urls. This object effectively makes a file object look like it consists only of a range of bytes in the stream. @@ -431,7 +431,7 @@ Return a tuple whose first element is guaranteed to be an int and whose second element will be '' (meaning: the last byte) or an int. Finally, return None if the normalized tuple == (0,'') - as that is equivelant to retrieving the entire file. + as that is equivalent to retrieving the entire file. """ if range_tup is None: return None
--- a/mercurial/changelog.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/changelog.py Sun Sep 09 12:35:06 2012 +0200 @@ -183,7 +183,7 @@ nodeid\n : manifest node in ascii user\n : user, no \n or \r allowed time tz extra\n : date (time is int or float, timezone is int) - : extra is metadatas, encoded and separated by '\0' + : extra is metadata, encoded and separated by '\0' : older versions ignore it files\n\n : files modified by the cset, no \n or \r allowed (.*) : comment (free text, ideally utf-8)
--- a/mercurial/cmdutil.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/cmdutil.py Sun Sep 09 12:35:06 2012 +0200 @@ -547,30 +547,37 @@ prev = (parents and parents[0]) or nullid shouldclose = False - if not fp: + if not fp and len(template) > 0: desc_lines = ctx.description().rstrip().split('\n') desc = desc_lines[0] #Commit always has a first line. fp = makefileobj(repo, template, node, desc=desc, total=total, seqno=seqno, revwidth=revwidth, mode='ab') if fp != template: shouldclose = True - if fp != sys.stdout and util.safehasattr(fp, 'name'): + if fp and fp != sys.stdout and util.safehasattr(fp, 'name'): repo.ui.note("%s\n" % fp.name) - fp.write("# HG changeset patch\n") - fp.write("# User %s\n" % ctx.user()) - fp.write("# Date %d %d\n" % ctx.date()) + if not fp: + write = repo.ui.write + else: + def write(s, **kw): + fp.write(s) + + + write("# HG changeset patch\n") + write("# User %s\n" % ctx.user()) + write("# Date %d %d\n" % ctx.date()) if branch and branch != 'default': - fp.write("# Branch %s\n" % branch) - fp.write("# Node ID %s\n" % hex(node)) - fp.write("# Parent %s\n" % hex(prev)) + write("# Branch %s\n" % branch) + write("# Node ID %s\n" % hex(node)) + write("# Parent %s\n" % hex(prev)) if len(parents) > 1: - fp.write("# Parent %s\n" % hex(parents[1])) - fp.write(ctx.description().rstrip()) - fp.write("\n\n") + write("# Parent %s\n" % hex(parents[1])) + write(ctx.description().rstrip()) + write("\n\n") - for chunk in patch.diff(repo, prev, node, opts=opts): - fp.write(chunk) + for chunk, label in patch.diffui(repo, prev, node, opts=opts): + write(chunk, label=label) if shouldclose: fp.close() @@ -1258,7 +1265,7 @@ opts['branch'] = opts.get('branch', []) + opts.get('only_branch', []) opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']] # pats/include/exclude are passed to match.match() directly in - # _matchfile() revset but walkchangerevs() builds its matcher with + # _matchfiles() revset but walkchangerevs() builds its matcher with # 
scmutil.match(). The difference is input pats are globbed on # platforms without shell expansion (windows). pctx = repo[None] @@ -1304,7 +1311,7 @@ fnopats = (('_ancestors', '_fancestors'), ('_descendants', '_fdescendants')) if pats: - # follow() revset inteprets its file argument as a + # follow() revset interprets its file argument as a # manifest entry, so use match.files(), not pats. opts[fpats[followfirst]] = list(match.files()) else:
--- a/mercurial/commands.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/commands.py Sun Sep 09 12:35:06 2012 +0200 @@ -543,7 +543,7 @@ hg bisect --good hg bisect --bad - - mark the current revision, or a known revision, to be skipped (eg. if + - mark the current revision, or a known revision, to be skipped (e.g. if that revision is not usable because of another issue):: hg bisect --skip @@ -1252,7 +1252,7 @@ Returns 0 on success, 1 if nothing changed. """ if opts.get('subrepos'): - # Let --subrepos on the command line overide config setting. + # Let --subrepos on the command line override config setting. ui.setconfig('ui', 'commitsubrepos', True) extra = {} @@ -1352,20 +1352,20 @@ # printed anyway. # # Par Msg Comment - # NN y additional topo root + # N N y additional topo root # - # BN y additional branch root - # CN y additional topo head - # HN n usual case + # B N y additional branch root + # C N y additional topo head + # H N n usual case # - # BB y weird additional branch root - # CB y branch merge - # HB n merge with named branch + # B B y weird additional branch root + # C B y branch merge + # H B n merge with named branch # - # CC y additional head from merge - # CH n merge with a head + # C C y additional head from merge + # C H n merge with a head # - # HH n head merge: head count decreases + # H H n head merge: head count decreases if not opts.get('close_branch'): for r in parents: @@ -4181,7 +4181,7 @@ res.append(fn[plen:-slen]) finally: lock.release() - for f in sorted(res): + for f in res: ui.write("%s\n" % f) return
--- a/mercurial/commandserver.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/commandserver.py Sun Sep 09 12:35:06 2012 +0200 @@ -137,7 +137,7 @@ if logpath: global logfile if logpath == '-': - # write log on a special 'd'ebug channel + # write log on a special 'd' (debug) channel logfile = channeledoutput(sys.stdout, sys.stdout, 'd') else: logfile = open(logpath, 'a')
--- a/mercurial/config.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/config.py Sun Sep 09 12:35:06 2012 +0200 @@ -67,7 +67,7 @@ return self._data.get(section, {}).get(item, default) def backup(self, section, item): - """return a tuple allowing restore to reinstall a previous valuesi + """return a tuple allowing restore to reinstall previous values The main reason we need it is because it handle the "no data" case. """
--- a/mercurial/context.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/context.py Sun Sep 09 12:35:06 2012 +0200 @@ -237,8 +237,8 @@ def extinct(self): """True if the changeset is extinct""" - # We should just compute a cache a check againts it. - # see revset implementation for details + # We should just compute a cache and check against it. + # See revset implementation for details. # # But this naive implementation does not require cache if self.phase() <= phases.public: @@ -885,8 +885,7 @@ p = self._repo.dirstate.parents() if p[1] == nullid: p = p[:-1] - self._parents = [changectx(self._repo, x) for x in p] - return self._parents + return [changectx(self._repo, x) for x in p] def status(self, ignored=False, clean=False, unknown=False): """Explicit status query @@ -1168,7 +1167,7 @@ returns True if different than fctx. """ - # fctx should be a filectx (not a wfctx) + # fctx should be a filectx (not a workingfilectx) # invert comparison to reuse the same code path return fctx.cmp(self)
--- a/mercurial/discovery.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/discovery.py Sun Sep 09 12:35:06 2012 +0200 @@ -21,7 +21,7 @@ any longer. "heads" is either the supplied heads, or else the remote's heads. - If you pass heads and they are all known locally, the reponse lists justs + If you pass heads and they are all known locally, the response lists just these heads in "common" and in "heads". Please use findcommonoutgoing to compute the set of outgoing nodes to give @@ -348,7 +348,7 @@ def visiblebranchmap(repo): """return a branchmap for the visible set""" # XXX Recomputing this data on the fly is very slow. We should build a - # XXX cached version while computin the standard branchmap version. + # XXX cached version while computing the standard branchmap version. sroots = repo._phasecache.phaseroots[phases.secret] if sroots or repo.obsstore: vbranchmap = {}
--- a/mercurial/encoding.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/encoding.py Sun Sep 09 12:35:06 2012 +0200 @@ -223,7 +223,7 @@ Principles of operation: - - ASCII and UTF-8 data sucessfully round-trips and is understood + - ASCII and UTF-8 data successfully round-trips and is understood by Unicode-oriented clients - filenames and file contents in arbitrary other encodings can have be round-tripped or recovered by clueful clients
--- a/mercurial/hbisect.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/hbisect.py Sun Sep 09 12:35:06 2012 +0200 @@ -159,7 +159,7 @@ Return a list of revision(s) that match the given status: - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip - - ``goods``, ``bads`` : csets topologicaly good/bad + - ``goods``, ``bads`` : csets topologically good/bad - ``range`` : csets taking part in the bisection - ``pruned`` : csets that are goods, bads or skipped - ``untested`` : csets whose fate is yet unknown @@ -170,8 +170,8 @@ if status in ('good', 'bad', 'skip', 'current'): return map(repo.changelog.rev, state[status]) else: - # In the floowing sets, we do *not* call 'bisect()' with more - # than one level of recusrsion, because that can be very, very + # In the following sets, we do *not* call 'bisect()' with more + # than one level of recursion, because that can be very, very # time consuming. Instead, we always develop the expression as # much as possible. @@ -200,7 +200,7 @@ # 'ignored' is all csets that were not used during the bisection # due to DAG topology, but may however have had an impact. - # Eg., a branch merged between bads and goods, but whose branch- + # E.g., a branch merged between bads and goods, but whose branch- # point is out-side of the range. iba = '::bisect(bad) - ::bisect(good)' # Ignored bads' ancestors iga = '::bisect(good) - ::bisect(bad)' # Ignored goods' ancestors
--- a/mercurial/help/config.txt Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/help/config.txt Sun Sep 09 12:35:06 2012 +0200 @@ -1431,7 +1431,7 @@ Example: ``http://hgserver/static/``. ``stripes`` - How many lines a "zebra stripe" should span in multiline output. + How many lines a "zebra stripe" should span in multi-line output. Default is 1; set to 0 to disable. ``style``
--- a/mercurial/help/hgweb.txt Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/help/hgweb.txt Sun Sep 09 12:35:06 2012 +0200 @@ -11,7 +11,7 @@ - paths - collections -The ``web`` options are thorougly described in :hg:`help config`. +The ``web`` options are thoroughly described in :hg:`help config`. The ``paths`` section maps URL paths to paths of repositories in the filesystem. hgweb will not expose the filesystem directly - only
--- a/mercurial/hgweb/common.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/hgweb/common.py Sun Sep 09 12:35:06 2012 +0200 @@ -48,7 +48,7 @@ # and replayed scheme = req.env.get('wsgi.url_scheme') if hgweb.configbool('web', 'push_ssl', True) and scheme != 'https': - raise ErrorResponse(HTTP_OK, 'ssl required') + raise ErrorResponse(HTTP_FORBIDDEN, 'ssl required') deny = hgweb.configlist('web', 'deny_push') if deny and (not user or deny == ['*'] or user in deny):
--- a/mercurial/hgweb/server.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/hgweb/server.py Sun Sep 09 12:35:06 2012 +0200 @@ -12,7 +12,7 @@ from mercurial.i18n import _ def _splitURI(uri): - """ Return path and query splited from uri + """Return path and query that has been split from uri Just like CGI environment, the path is unquoted, the query is not.
--- a/mercurial/hgweb/wsgicgi.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/hgweb/wsgicgi.py Sun Sep 09 12:35:06 2012 +0200 @@ -19,7 +19,7 @@ environ = dict(os.environ.iteritems()) environ.setdefault('PATH_INFO', '') if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): - # IIS includes script_name in path_info + # IIS includes script_name in PATH_INFO scriptname = environ['SCRIPT_NAME'] if environ['PATH_INFO'].startswith(scriptname): environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):]
--- a/mercurial/hook.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/hook.py Sun Sep 09 12:35:06 2012 +0200 @@ -154,7 +154,7 @@ oldstdout = os.dup(stdoutno) os.dup2(stderrno, stdoutno) except AttributeError: - # __stdout/err__ doesn't have fileno(), it's not a real file + # __stdout__/__stderr__ doesn't have fileno(), it's not a real file pass try:
--- a/mercurial/httpclient/__init__.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/httpclient/__init__.py Sun Sep 09 12:35:06 2012 +0200 @@ -170,7 +170,7 @@ except socket.sslerror, e: if e.args[0] != socket.SSL_ERROR_WANT_READ: raise - logger.debug('SSL_WANT_READ in _select, should retry later') + logger.debug('SSL_ERROR_WANT_READ in _select, should retry later') return True logger.debug('response read %d data during _select', len(data)) # If the socket was readable and no data was read, that means @@ -293,7 +293,7 @@ host: The host to which we'll connect. port: Optional. The port over which we'll connect. Default 80 for non-ssl, 443 for ssl. - use_ssl: Optional. Wether to use ssl. Defaults to False if port is + use_ssl: Optional. Whether to use ssl. Defaults to False if port is not 443, true if port is 443. ssl_validator: a function(socket) to validate the ssl cert timeout: Optional. Connection timeout, default is TIMEOUT_DEFAULT. @@ -374,7 +374,7 @@ if self.ssl: # This is the default, but in the case of proxied SSL # requests the proxy logic above will have cleared - # blocking mode, so reenable it just to be safe. + # blocking mode, so re-enable it just to be safe. sock.setblocking(1) logger.debug('wrapping socket for ssl with options %r', self.ssl_opts) @@ -414,7 +414,7 @@ """Close the connection to the server. This is a no-op if the connection is already closed. The - connection may automatically close if requessted by the server + connection may automatically close if requested by the server or required by the nature of a response. """ if self.sock is None: @@ -532,7 +532,7 @@ if e.args[0] != socket.SSL_ERROR_WANT_READ: raise logger.debug( - 'SSL_WANT_READ while sending data, retrying...') + 'SSL_ERROR_WANT_READ while sending data, retrying...') continue if not data: logger.info('socket appears closed in read')
--- a/mercurial/httpclient/_readers.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/httpclient/_readers.py Sun Sep 09 12:35:06 2012 +0200 @@ -120,7 +120,7 @@ if data: assert not self._finished, ( 'tried to add data (%r) to a closed reader!' % data) - logger.debug('%s read an addtional %d data', self.name, len(data)) + logger.debug('%s read an additional %d data', self.name, len(data)) self._done_chunks.append(data) @@ -162,7 +162,7 @@ def _load(self, data): assert not self._finished, 'tried to add data to a closed reader!' - logger.debug('chunked read an addtional %d data', len(data)) + logger.debug('chunked read an additional %d data', len(data)) position = 0 if self._leftover_data: logger.debug('chunked reader trying to finish block from leftover data') @@ -188,7 +188,7 @@ return if amt == 0: self._finished = True - logger.debug('closing chunked redaer due to chunk of length 0') + logger.debug('closing chunked reader due to chunk of length 0') return self._done_chunks.append(data[block_start:block_start + amt]) position = block_start + amt + len(self._eol)
--- a/mercurial/httpconnection.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/httpconnection.py Sun Sep 09 12:35:06 2012 +0200 @@ -73,7 +73,7 @@ if '://' in uri: scheme, hostpath = uri.split('://', 1) else: - # py2.4.1 doesn't provide the full URI + # Python 2.4.1 doesn't provide the full URI scheme, hostpath = 'http', uri bestuser = None bestlen = 0
--- a/mercurial/keepalive.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/keepalive.py Sun Sep 09 12:35:06 2012 +0200 @@ -67,8 +67,8 @@ close_connection() - close the connection to the host readlines() - you know, readlines() - status - the return status (ie 404) - reason - english translation of status (ie 'File not found') + status - the return status (i.e. 404) + reason - english translation of status (i.e. 'File not found') If you want the best of both worlds, use this inside an AttributeError-catching try: @@ -297,7 +297,7 @@ # first. We previously got into a nasty loop # where an exception was uncaught, and so the # connection stayed open. On the next try, the - # same exception was raised, etc. The tradeoff is + # same exception was raised, etc. The trade-off is # that it's now possible this call will raise # a DIFFERENT exception if DEBUG: @@ -370,7 +370,7 @@ # so if you THEN do a normal read, you must first take stuff from # the buffer. - # the read method wraps the original to accomodate buffering, + # the read method wraps the original to accommodate buffering, # although read() never adds to the buffer. # Both readline and readlines have been stolen with almost no # modification from socket.py @@ -442,7 +442,7 @@ try: chunk_left = int(line, 16) except ValueError: - # close the connection as protocol synchronisation is + # close the connection as protocol synchronization is # probably lost self.close() raise httplib.IncompleteRead(value) @@ -548,7 +548,7 @@ read = getattr(str, 'read', None) if read is not None: if self.debuglevel > 0: - print "sendIng a read()able" + print "sending a read()able" data = read(blocksize) while data: self.sock.sendall(data) @@ -737,7 +737,7 @@ def test(url, N=10): - print "checking error hander (do this on a non-200)" + print "checking error handler (do this on a non-200)" try: error_handler(url) except IOError: print "exiting - exception will prevent further tests"
--- a/mercurial/localrepo.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/localrepo.py Sun Sep 09 12:35:06 2012 +0200 @@ -300,12 +300,14 @@ """hiddenrevs: revs that should be hidden by command and tools This set is carried on the repo to ease initialisation and lazy - loading it'll probably move back to changelog for efficienty and + loading; it'll probably move back to changelog for efficiency and consistency reason Note that the hiddenrevs will needs invalidations when - a new changesets is added (possible unstable above extinct) - a new obsolete marker is added (possible new extinct changeset) + + hidden changesets cannot have non-hidden descendants """ hidden = set() if self.obsstore: @@ -712,7 +714,7 @@ # Remove candidate heads that no longer are in the repo (e.g., as # the result of a strip that just happened). Avoid using 'node in # self' here because that dives down into branchcache code somewhat - # recrusively. + # recursively. bheadrevs = [self.changelog.rev(node) for node in bheads if self.changelog.hasnode(node)] newheadrevs = [self.changelog.rev(node) for node in newnodes @@ -732,7 +734,7 @@ iterrevs = list(bheadrevs) # This loop prunes out two kinds of heads - heads that are - # superceded by a head in newheadrevs, and newheadrevs that are not + # superseded by a head in newheadrevs, and newheadrevs that are not # heads because an existing head is their descendant. while iterrevs: latest = iterrevs.pop() @@ -1479,7 +1481,7 @@ and you also know the set of candidate new heads that may have resulted from the destruction, you can set newheadnodes. This will enable the code to update the branchheads cache, rather than having future code - decide it's invalid and regenrating it from scratch. + decide it's invalid and regenerating it from scratch. 
''' # If we have info, newheadnodes, on how to update the branch cache, do # it, Otherwise, since nodes were destroyed, the cache is stale and this @@ -1906,7 +1908,7 @@ ret = remote.addchangegroup(cg, 'push', self.url()) if ret: - # push succeed, synchonize target of the push + # push succeed, synchronize target of the push cheads = outgoing.missingheads elif revs is None: # All out push fails. synchronize all common @@ -1925,7 +1927,7 @@ # missing = ((commonheads::missingheads) - commonheads) # # We can pick: - # * missingheads part of comon (::commonheads) + # * missingheads part of common (::commonheads) common = set(outgoing.common) cheads = [node for node in revs if node in common] # and @@ -2539,7 +2541,7 @@ # uncompressed only if compatible. if not stream: - # if the server explicitely prefer to stream (for fast LANs) + # if the server explicitly prefers to stream (for fast LANs) stream = remote.capable('stream-preferred') if stream and not heads:
--- a/mercurial/lock.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/lock.py Sun Sep 09 12:35:06 2012 +0200 @@ -122,8 +122,8 @@ def release(self): """release the lock and execute callback function if any - If the lock have been aquired multiple time, the actual release is - delayed to the last relase call.""" + If the lock has been acquired multiple time, the actual release is + delayed to the last release call.""" if self.held > 1: self.held -= 1 elif self.held == 1:
--- a/mercurial/mail.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/mail.py Sun Sep 09 12:35:06 2012 +0200 @@ -13,7 +13,7 @@ _oldheaderinit = email.Header.Header.__init__ def _unifiedheaderinit(self, *args, **kw): """ - Python2.7 introduces a backwards incompatible change + Python 2.7 introduces a backwards incompatible change (Python issue1974, r70772) in email.Generator.Generator code: pre-2.7 code passed "continuation_ws='\t'" to the Header constructor, and 2.7 removed this parameter. @@ -151,7 +151,7 @@ def mimetextqp(body, subtype, charset): '''Return MIME message. - Qouted-printable transfer encoding will be used if necessary. + Quoted-printable transfer encoding will be used if necessary. ''' enc = None for line in body.splitlines():
--- a/mercurial/manifest.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/manifest.py Sun Sep 09 12:35:06 2012 +0200 @@ -64,9 +64,7 @@ If the string is found m[start:end] are the line containing that string. If start == end the string was not found and - they indicate the proper sorted insertion point. This was - taken from bisect_left, and modified to find line start/end as - it goes along. + they indicate the proper sorted insertion point. m should be a buffer or a string s is a string''' @@ -156,7 +154,7 @@ # combine the changed lists into one list for sorting work = [(x, False) for x in added] work.extend((x, True) for x in removed) - # this could use heapq.merge() (from python2.6+) or equivalent + # this could use heapq.merge() (from Python 2.6+) or equivalent # since the lists are already sorted work.sort()
--- a/mercurial/match.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/match.py Sun Sep 09 12:35:06 2012 +0200 @@ -49,7 +49,7 @@ a pattern is one of: 'glob:<glob>' - a glob relative to cwd 're:<regexp>' - a regular expression - 'path:<path>' - a path relative to canonroot + 'path:<path>' - a path relative to repository root 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs) 'relpath:<path>' - a path relative to cwd 'relre:<regexp>' - a regexp that needn't match the start of a name
--- a/mercurial/minirst.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/minirst.py Sun Sep 09 12:35:06 2012 +0200 @@ -133,7 +133,7 @@ def match(lines, i, itemre, singleline): """Does itemre match an item at line i? - A list item can be followed by an idented line or another list + A list item can be followed by an indented line or another list item (but only if singleline is True). """ line1 = lines[i]
--- a/mercurial/obsolete.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/obsolete.py Sun Sep 09 12:35:06 2012 +0200 @@ -55,14 +55,12 @@ import util, base85 from i18n import _ -# the obsolete feature is not mature enought to be enabled by default. -# you have to rely on third party extension extension to enable this. -_enabled = False - _pack = struct.pack _unpack = struct.unpack -# the obsolete feature is not mature enought to be enabled by default. +_SEEK_END = 2 # os.SEEK_END was introduced in Python 2.5 + +# the obsolete feature is not mature enough to be enabled by default. # you have to rely on third party extension extension to enable this. _enabled = False @@ -211,7 +209,7 @@ # defined. So we must seek to the end before calling tell(), # or we may get a zero offset for non-zero sized files on # some platforms (issue3543). - f.seek(0, 2) # os.SEEK_END + f.seek(0, _SEEK_END) offset = f.tell() transaction.add('obsstore', offset) # offset == 0: new file - add the version header @@ -318,7 +316,7 @@ def anysuccessors(obsstore, node): """Yield every successor of <node> - This this a linear yield unsuitable to detect splitted changeset.""" + This is a linear yield unsuitable to detect split changesets.""" remaining = set([node]) seen = set(remaining) while remaining:
--- a/mercurial/parsers.c Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/parsers.c Sun Sep 09 12:35:06 2012 +0200 @@ -9,6 +9,7 @@ #include <Python.h> #include <ctype.h> +#include <stddef.h> #include <string.h> #include "util.h" @@ -72,7 +73,7 @@ for (start = cur = str, zero = NULL; cur < str + len; cur++) { PyObject *file = NULL, *node = NULL; PyObject *flags = NULL; - int nlen; + ptrdiff_t nlen; if (!*cur) { zero = cur; @@ -94,7 +95,7 @@ nlen = cur - zero - 1; - node = unhexlify(zero + 1, nlen > 40 ? 40 : nlen); + node = unhexlify(zero + 1, nlen > 40 ? 40 : (int)nlen); if (!node) goto bail;
--- a/mercurial/phases.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/phases.py Sun Sep 09 12:35:06 2012 +0200 @@ -195,7 +195,7 @@ return self._phaserevs def phase(self, repo, rev): - # We need a repo argument here to be able to build _phaserev + # We need a repo argument here to be able to build _phaserevs # if necessary. The repository instance is not stored in # phasecache to avoid reference cycles. The changelog instance # is not stored because it is a filecache() property and can @@ -363,7 +363,7 @@ """compute new head of a subset minus another * `heads`: define the first subset - * `rroots`: define the second we substract to the first""" + * `roots`: define the second we subtract from the first""" revset = repo.set('heads((%ln + parents(%ln)) - (%ln::%ln))', heads, roots, roots, heads) return [c.node() for c in revset]
--- a/mercurial/pure/osutil.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/pure/osutil.py Sun Sep 09 12:35:06 2012 +0200 @@ -82,7 +82,7 @@ _FILE_ATTRIBUTE_NORMAL = 0x80 - # _open_osfhandle + # open_osfhandle flags _O_RDONLY = 0x0000 _O_RDWR = 0x0002 _O_APPEND = 0x0008
--- a/mercurial/pure/parsers.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/pure/parsers.py Sun Sep 09 12:35:06 2012 +0200 @@ -70,7 +70,7 @@ def parse_dirstate(dmap, copymap, st): parents = [st[:20], st[20: 40]] - # deref fields so they will be local in loop + # dereference fields so they will be local in loop format = ">cllll" e_size = struct.calcsize(format) pos1 = 40
--- a/mercurial/pvec.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/pvec.py Sun Sep 09 12:35:06 2012 +0200 @@ -57,7 +57,7 @@ _depthbytes = _depthbits / 8 _vecbytes = _bytes - _depthbytes _vecbits = _vecbytes * 8 -_radius = (_vecbits - 30) / 2 # high probability vecs are related +_radius = (_vecbits - 30) / 2 # high probability vectors are related def _bin(bs): '''convert a bytestring to a long'''
--- a/mercurial/py3kcompat.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/py3kcompat.py Sun Sep 09 12:35:06 2012 +0200 @@ -12,7 +12,7 @@ def bytesformatter(format, args): '''Custom implementation of a formatter for bytestrings. - This function currently relias on the string formatter to do the + This function currently relies on the string formatter to do the formatting and always returns bytes objects. >>> bytesformatter(20, 10)
--- a/mercurial/revlog.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/revlog.py Sun Sep 09 12:35:06 2012 +0200 @@ -547,7 +547,7 @@ # Our topologically sorted list of output nodes. orderedout = [] # Don't start at nullid since we don't want nullid in our output list, - # and if nullid shows up in descedents, empty parents will look like + # and if nullid shows up in descendants, empty parents will look like # they're descendants. for r in xrange(max(lowestrev, 0), highestrev + 1): n = self.node(r) @@ -1015,7 +1015,7 @@ see addrevision for argument descriptions. invariants: - text is optional (can be None); if not set, cachedelta must be set. - if both are set, they must correspond to eachother. + if both are set, they must correspond to each other. """ btext = [text] def buildtext():
--- a/mercurial/revset.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/revset.py Sun Sep 09 12:35:06 2012 +0200 @@ -335,7 +335,7 @@ Changesets marked in the specified bisect status: - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip - - ``goods``, ``bads`` : csets topologicaly good/bad + - ``goods``, ``bads`` : csets topologically good/bad - ``range`` : csets taking part in the bisection - ``pruned`` : csets that are goods, bads or skipped - ``untested`` : csets whose fate is yet unknown @@ -594,7 +594,7 @@ # The visited lineage is a match if the current source is in the arg # set. Since every candidate dest is visited by way of iterating - # subset, any dests futher back in the lineage will be tested by a + # subset, any dests further back in the lineage will be tested by a # different iteration over subset. Likewise, if the src was already # selected, the current lineage can be selected without going back # further. @@ -838,6 +838,14 @@ ps = set(parents(repo, subset, x)) return [r for r in s if r not in ps] +def hidden(repo, subset, x): + """``hidden()`` + Hidden changesets. + """ + # i18n: "hidden" is a keyword + getargs(x, 0, 0, _("hidden takes no arguments")) + return [r for r in subset if r in repo.hiddenrevs] + def keyword(repo, subset, x): """``keyword(string)`` Search commit message, user name, and names of changed files for @@ -1484,6 +1492,7 @@ "grep": grep, "head": head, "heads": heads, + "hidden": hidden, "id": node_, "keyword": keyword, "last": last,
--- a/mercurial/setdiscovery.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/setdiscovery.py Sun Sep 09 12:35:06 2012 +0200 @@ -84,9 +84,6 @@ abortwhenunrelated=True): '''Return a tuple (common, anyincoming, remoteheads) used to identify missing nodes from or in remote. - - shortcutlocal determines whether we try use direct access to localrepo if - remote is actually local. ''' roundtrips = 0 cl = local.changelog @@ -109,7 +106,8 @@ srvheadhashes = srvheadhashesref.value yesno = yesnoref.value else: - # compatibitity with pre-batch, but post-known remotes during 1.9 devel + # compatibility with pre-batch, but post-known remotes during 1.9 + # development srvheadhashes = remote.heads() sample = []
--- a/mercurial/subrepo.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/subrepo.py Sun Sep 09 12:35:06 2012 +0200 @@ -888,7 +888,7 @@ def _gitnodir(self, commands, env=None, stream=False, cwd=None): """Calls the git command - The methods tries to call the git command. versions previor to 1.6.0 + The methods tries to call the git command. versions prior to 1.6.0 are not supported and very probably fail. """ self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
--- a/mercurial/tags.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/tags.py Sun Sep 09 12:35:06 2012 +0200 @@ -124,8 +124,8 @@ continue # we prefer alltags[name] if: - # it supercedes us OR - # mutual supercedes and it has a higher rank + # it supersedes us OR + # mutual supersedes and it has a higher rank # otherwise we win because we're tip-most anode, ahist = nodehist bnode, bhist = alltags[name]
--- a/mercurial/templatekw.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/templatekw.py Sun Sep 09 12:35:06 2012 +0200 @@ -275,6 +275,28 @@ """ return ctx.hex() +def showp1rev(repo, ctx, templ, **args): + """:p1rev: Integer. The repository-local revision number of the changeset's + first parent, or -1 if the changeset has no parents.""" + return ctx.p1().rev() + +def showp2rev(repo, ctx, templ, **args): + """:p2rev: Integer. The repository-local revision number of the changeset's + second parent, or -1 if the changeset has no second parent.""" + return ctx.p2().rev() + +def showp1node(repo, ctx, templ, **args): + """:p1node: String. The identification hash of the changeset's first parent, + as a 40 digit hexadecimal string. If the changeset has no parents, all + digits are 0.""" + return ctx.p1().hex() + +def showp2node(repo, ctx, templ, **args): + """:p2node: String. The identification hash of the changeset's second + parent, as a 40 digit hexadecimal string. If the changeset has no second + parent, all digits are 0.""" + return ctx.p2().hex() + def showphase(repo, ctx, templ, **args): """:phase: String. The changeset phase name.""" return ctx.phasestr() @@ -320,6 +342,10 @@ 'latesttagdistance': showlatesttagdistance, 'manifest': showmanifest, 'node': shownode, + 'p1rev': showp1rev, + 'p1node': showp1node, + 'p2rev': showp2rev, + 'p2node': showp2node, 'phase': showphase, 'phaseidx': showphaseidx, 'rev': showrev,
--- a/mercurial/templater.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/templater.py Sun Sep 09 12:35:06 2012 +0200 @@ -146,7 +146,15 @@ def runfilter(context, mapping, data): func, data, filt = data - return filt(func(context, mapping, data)) + try: + return filt(func(context, mapping, data)) + except (ValueError, AttributeError, TypeError): + if isinstance(data, tuple): + dt = data[1] + else: + dt = data + raise util.Abort(_("template filter '%s' is not compatible with " + "keyword '%s'") % (filt.func_name, dt)) def buildmap(exp, context): func, data = compileexp(exp[1], context)
--- a/mercurial/templates/template-vars.txt Mon Sep 03 17:25:50 2012 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,42 +0,0 @@ -repo the name of the repo -rev a changeset.manifest revision -node a changeset node -changesets total number of changesets -file a filename -filerev a file revision -filerevs total number of file revisions -up the directory of the relevant file -path a path in the manifest, starting with "/" -basename a short pathname -date a date string -age age in hours, days, etc -line a line of text (escaped) -desc a description (escaped, with breaks) -shortdesc a short description (escaped) -author a name or email addressv(obfuscated) -parent a list of the parent -child a list of the children -tags a list of tag - -header the global page header -footer the global page footer - -files a list of file links -file_copies a list of pairs of name, source filenames -dirs a set of directory links -diff a diff of one or more files -annotate an annotated file -entries the entries relevant to the page - -url base url of hgweb interface -logourl base url of logo -staticurl base url for static resources - - -Templates and commands: - changelog(rev) - a page for browsing changesets - naventry - a link for jumping to a changeset number - filenodelink - jump to file diff - fileellipses - printed after maxfiles - changelogentry - an entry in the log - manifest - browse a manifest as a directory tree
--- a/mercurial/transaction.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/transaction.py Sun Sep 09 12:35:06 2012 +0200 @@ -1,4 +1,4 @@ -# transaction.py - simple journalling scheme for mercurial +# transaction.py - simple journaling scheme for mercurial # # This transaction scheme is intended to gracefully handle program # errors and interruptions. More serious failures like system crashes
--- a/mercurial/ui.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/ui.py Sun Sep 09 12:35:06 2012 +0200 @@ -713,8 +713,8 @@ With stock hg, this is simply a debug message that is hidden by default, but with extensions or GUI tools it may be visible. 'topic' is the current operation, 'item' is a - non-numeric marker of the current position (ie the currently - in-process file), 'pos' is the current numeric position (ie + non-numeric marker of the current position (i.e. the currently + in-process file), 'pos' is the current numeric position (i.e. revision, bytes, etc.), unit is a corresponding unit label, and total is the highest expected pos.
--- a/mercurial/url.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/url.py Sun Sep 09 12:35:06 2012 +0200 @@ -175,7 +175,7 @@ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.connect((self.host, self.port)) if _generic_proxytunnel(self): - # we do not support client x509 certificates + # we do not support client X.509 certificates self.sock = sslutil.ssl_wrap_socket(self.sock, None, None) else: keepalive.HTTPConnection.connect(self) @@ -278,7 +278,8 @@ res.will_close = res._check_close() # do we have a Content-Length? - # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" + # NOTE: RFC 2616, section 4.4, #3 says we ignore this if + # transfer-encoding is "chunked" length = res.msg.getheader('content-length') if length and not res.chunked: try:
--- a/mercurial/util.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/util.py Sun Sep 09 12:35:06 2012 +0200 @@ -1,4 +1,4 @@ -# util.py - Mercurial utility functions and platform specfic implementations +# util.py - Mercurial utility functions and platform specific implementations # # Copyright 2005 K. Thananchayan <thananck@yahoo.com> # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> @@ -7,7 +7,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -"""Mercurial utility functions and platform specfic implementations. +"""Mercurial utility functions and platform specific implementations. This contains helper routines that are independent of the SCM core and hide platform-specific details from the core. @@ -799,7 +799,7 @@ return temp class atomictempfile(object): - '''writeable file object that atomically updates a file + '''writable file object that atomically updates a file All writes will go to a temporary copy of the original file. Call close() when you are done writing, and atomictempfile will rename @@ -1239,7 +1239,7 @@ so overriding is needed to use width information of each characters. In addition, characters classified into 'ambiguous' width are - treated as wide in east asian area, but as narrow in other. + treated as wide in East Asian area, but as narrow in other. This requires use decision to determine width of such characters. """ @@ -1300,7 +1300,7 @@ width = self.width - len(indent) # First chunk on line is whitespace -- drop it, unless this - # is the very beginning of the text (ie. no lines started yet). + # is the very beginning of the text (i.e. no lines started yet). if self.drop_whitespace and chunks[-1].strip() == '' and lines: del chunks[-1] @@ -1477,7 +1477,11 @@ for a in _hexdig for b in _hexdig) def _urlunquote(s): - """unquote('abc%20def') -> 'abc def'.""" + """Decode HTTP/HTML % encoding. 
+ + >>> _urlunquote('abc%20def') + 'abc def' + """ res = s.split('%') # fastpath if len(res) == 1:
--- a/mercurial/win32.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/win32.py Sun Sep 09 12:35:06 2012 +0200 @@ -328,7 +328,7 @@ env += '\0' args = subprocess.list2cmdline(args) - # Not running the command in shell mode makes python26 hang when + # Not running the command in shell mode makes Python 2.6 hang when # writing to hgweb output socket. comspec = os.environ.get("COMSPEC", "cmd.exe") args = comspec + " /c " + args
--- a/mercurial/windows.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/windows.py Sun Sep 09 12:35:06 2012 +0200 @@ -152,7 +152,7 @@ # backslash # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx ) # So, to quote a string, we must surround it in double quotes, double -# the number of backslashes that preceed double quotes and add another +# the number of backslashes that precede double quotes and add another # backslash before every double quote (being careful with the double # quote we've appended to the end) _quotere = None
--- a/mercurial/wireproto.py Mon Sep 03 17:25:50 2012 +0100 +++ b/mercurial/wireproto.py Sun Sep 09 12:35:06 2012 +0200 @@ -516,7 +516,7 @@ it is serving. Client checks to see if it understands the format. The format is simple: the server writes out a line with the amount - of files, then the total amount of bytes to be transfered (separated + of files, then the total amount of bytes to be transferred (separated by a space). Then, for each file, the server first writes the filename and filesize (separated by the null character), then the file contents. '''
--- a/tests/hghave.py Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/hghave.py Sun Sep 09 12:35:06 2012 +0200 @@ -118,6 +118,9 @@ except OSError: return False +def has_killdaemons(): + return True + def has_cacheable_fs(): from mercurial import util @@ -286,6 +289,7 @@ "hardlink": (has_hardlink, "hardlinks"), "icasefs": (has_icasefs, "case insensitive file system"), "inotify": (has_inotify, "inotify extension support"), + "killdaemons": (has_killdaemons, 'killdaemons.py support'), "lsprof": (has_lsprof, "python lsprof module"), "mtn": (has_mtn, "monotone client (>= 1.0)"), "outer-repo": (has_outer_repo, "outer repo"),
--- a/tests/killdaemons.py Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/killdaemons.py Sun Sep 09 12:35:06 2012 +0200 @@ -1,25 +1,54 @@ #!/usr/bin/env python -import os, time, errno, signal +import os, sys, time, errno, signal -# Kill off any leftover daemon processes -try: - fp = open(os.environ['DAEMON_PIDS']) - for line in fp: - try: - pid = int(line) - except ValueError: - continue +if os.name =='nt': + import ctypes + def kill(pid, logfn, tryhard=True): + logfn('# Killing daemon process %d' % pid) + PROCESS_TERMINATE = 1 + handle = ctypes.windll.kernel32.OpenProcess( + PROCESS_TERMINATE, False, pid) + ctypes.windll.kernel32.TerminateProcess(handle, -1) + ctypes.windll.kernel32.CloseHandle(handle) +else: + def kill(pid, logfn, tryhard=True): try: os.kill(pid, 0) + logfn('# Killing daemon process %d' % pid) os.kill(pid, signal.SIGTERM) - for i in range(10): - time.sleep(0.05) + if tryhard: + for i in range(10): + time.sleep(0.05) + os.kill(pid, 0) + else: + time.sleep(0.1) os.kill(pid, 0) + logfn('# Daemon process %d is stuck - really killing it' % pid) os.kill(pid, signal.SIGKILL) except OSError, err: if err.errno != errno.ESRCH: raise - fp.close() -except IOError: - pass + +def killdaemons(pidfile, tryhard=True, remove=False, logfn=None): + if not logfn: + logfn = lambda s: s + # Kill off any leftover daemon processes + try: + fp = open(pidfile) + for line in fp: + try: + pid = int(line) + except ValueError: + continue + kill(pid, logfn, tryhard) + fp.close() + if remove: + os.unlink(pidfile) + except IOError: + pass + +if __name__ == '__main__': + path, = sys.argv[1:] + killdaemons(path) +
--- a/tests/run-tests.py Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/run-tests.py Sun Sep 09 12:35:06 2012 +0200 @@ -54,6 +54,7 @@ import time import re import threading +import killdaemons as killmod processlock = threading.Lock() @@ -348,29 +349,8 @@ pass def killdaemons(): - # Kill off any leftover daemon processes - try: - fp = open(DAEMON_PIDS) - for line in fp: - try: - pid = int(line) - except ValueError: - continue - try: - os.kill(pid, 0) - vlog('# Killing daemon process %d' % pid) - os.kill(pid, signal.SIGTERM) - time.sleep(0.1) - os.kill(pid, 0) - vlog('# Daemon process %d is stuck - really killing it' % pid) - os.kill(pid, signal.SIGKILL) - except OSError, err: - if err.errno != errno.ESRCH: - raise - fp.close() - os.unlink(DAEMON_PIDS) - except IOError: - pass + return killmod.killdaemons(DAEMON_PIDS, tryhard=False, remove=True, + logfn=vlog) def cleanup(options): if not options.keep_tmpdir:
--- a/tests/test-annotate.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-annotate.t Sun Sep 09 12:35:06 2012 +0200 @@ -279,10 +279,10 @@ > EOF $ hg ci -Am "adda" adding a - $ cat > a <<EOF + $ sed 's/EOL$//g' > a <<EOF > a a > - > + > EOL > b b > EOF $ hg ci -m "changea"
--- a/tests/test-archive.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-archive.t Sun Sep 09 12:35:06 2012 +0200 @@ -24,7 +24,7 @@ > echo % $3 and $4 disallowed should both give 403 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$3" | head -n 1 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$4" | head -n 1 - > "$TESTDIR/killdaemons.py" + > "$TESTDIR/killdaemons.py" $DAEMON_PIDS > cat errors.log > cp .hg/hgrc-base .hg/hgrc > } @@ -93,7 +93,7 @@ testing: test-archive-2c0277f05ed4/foo OK No errors detected in compressed data of archive.zip. - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ hg archive -t tar test.tar $ tar tf test.tar
--- a/tests/test-basic.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-basic.t Sun Sep 09 12:35:06 2012 +0200 @@ -33,7 +33,7 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg identify -n 0 - + Poke around at hashes:
--- a/tests/test-bookmarks-pushpull.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-bookmarks-pushpull.t Sun Sep 09 12:35:06 2012 +0200 @@ -210,7 +210,7 @@ $ cat ../hg.pid >> $DAEMON_PIDS $ cd ../a - $ hg debugpushkey http://localhost:$HGPORT/ namespaces + $ hg debugpushkey http://localhost:$HGPORT/ namespaces bookmarks phases namespaces @@ -260,7 +260,7 @@ Z 2:0d2164f0ce0d foo -1:000000000000 foobar 1:9b140be10808 - + $ cd .. Pushing a bookmark should only push the changes required by that
--- a/tests/test-check-code.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-check-code.t Sun Sep 09 12:35:06 2012 +0200 @@ -83,7 +83,7 @@ any/all/format not available in Python 2.4 ./non-py24.py:11: > try: - no try/except/finally in Py2.4 + no try/except/finally in Python 2.4 ./classstyle.py:4: > class oldstyle_class: old-style class, use class foo(object)
--- a/tests/test-clone.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-clone.t Sun Sep 09 12:35:06 2012 +0200 @@ -533,7 +533,7 @@ destination directory not empty - $ mkdir a + $ mkdir a $ echo stuff > a/a $ hg clone q a abort: destination 'a' is not empty @@ -558,7 +558,7 @@ $ test -d d/.hg [1] -reenable perm to allow deletion +re-enable perm to allow deletion $ chmod +rx c/.hg/store/data
--- a/tests/test-command-template.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-command-template.t Sun Sep 09 12:35:06 2012 +0200 @@ -592,7 +592,8 @@ $ for key in author branch branches date desc file_adds file_dels file_mods \ > file_copies file_copies_switch files \ - > manifest node parents rev tags diffstat extras; do + > manifest node parents rev tags diffstat extras \ + > p1rev p2rev p1node p2node; do > for mode in '' --verbose --debug; do > hg log $mode --template "$key$mode: {$key}\n" > done @@ -1095,7 +1096,114 @@ extras--debug: branch=default extras--debug: branch=default extras--debug: branch=default - + p1rev: 7 + p1rev: -1 + p1rev: 5 + p1rev: 3 + p1rev: 3 + p1rev: 2 + p1rev: 1 + p1rev: 0 + p1rev: -1 + p1rev--verbose: 7 + p1rev--verbose: -1 + p1rev--verbose: 5 + p1rev--verbose: 3 + p1rev--verbose: 3 + p1rev--verbose: 2 + p1rev--verbose: 1 + p1rev--verbose: 0 + p1rev--verbose: -1 + p1rev--debug: 7 + p1rev--debug: -1 + p1rev--debug: 5 + p1rev--debug: 3 + p1rev--debug: 3 + p1rev--debug: 2 + p1rev--debug: 1 + p1rev--debug: 0 + p1rev--debug: -1 + p2rev: -1 + p2rev: -1 + p2rev: 4 + p2rev: -1 + p2rev: -1 + p2rev: -1 + p2rev: -1 + p2rev: -1 + p2rev: -1 + p2rev--verbose: -1 + p2rev--verbose: -1 + p2rev--verbose: 4 + p2rev--verbose: -1 + p2rev--verbose: -1 + p2rev--verbose: -1 + p2rev--verbose: -1 + p2rev--verbose: -1 + p2rev--verbose: -1 + p2rev--debug: -1 + p2rev--debug: -1 + p2rev--debug: 4 + p2rev--debug: -1 + p2rev--debug: -1 + p2rev--debug: -1 + p2rev--debug: -1 + p2rev--debug: -1 + p2rev--debug: -1 + p1node: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 + p1node: 0000000000000000000000000000000000000000 + p1node: 13207e5a10d9fd28ec424934298e176197f2c67f + p1node: 10e46f2dcbf4823578cf180f33ecf0b957964c47 + p1node: 10e46f2dcbf4823578cf180f33ecf0b957964c47 + p1node: 97054abb4ab824450e9164180baf491ae0078465 + p1node: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 + p1node: 1e4e1b8f71e05681d422154f5421e385fec3454f + p1node: 0000000000000000000000000000000000000000 + 
p1node--verbose: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 + p1node--verbose: 0000000000000000000000000000000000000000 + p1node--verbose: 13207e5a10d9fd28ec424934298e176197f2c67f + p1node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47 + p1node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47 + p1node--verbose: 97054abb4ab824450e9164180baf491ae0078465 + p1node--verbose: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 + p1node--verbose: 1e4e1b8f71e05681d422154f5421e385fec3454f + p1node--verbose: 0000000000000000000000000000000000000000 + p1node--debug: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 + p1node--debug: 0000000000000000000000000000000000000000 + p1node--debug: 13207e5a10d9fd28ec424934298e176197f2c67f + p1node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47 + p1node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47 + p1node--debug: 97054abb4ab824450e9164180baf491ae0078465 + p1node--debug: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 + p1node--debug: 1e4e1b8f71e05681d422154f5421e385fec3454f + p1node--debug: 0000000000000000000000000000000000000000 + p2node: 0000000000000000000000000000000000000000 + p2node: 0000000000000000000000000000000000000000 + p2node: bbe44766e73d5f11ed2177f1838de10c53ef3e74 + p2node: 0000000000000000000000000000000000000000 + p2node: 0000000000000000000000000000000000000000 + p2node: 0000000000000000000000000000000000000000 + p2node: 0000000000000000000000000000000000000000 + p2node: 0000000000000000000000000000000000000000 + p2node: 0000000000000000000000000000000000000000 + p2node--verbose: 0000000000000000000000000000000000000000 + p2node--verbose: 0000000000000000000000000000000000000000 + p2node--verbose: bbe44766e73d5f11ed2177f1838de10c53ef3e74 + p2node--verbose: 0000000000000000000000000000000000000000 + p2node--verbose: 0000000000000000000000000000000000000000 + p2node--verbose: 0000000000000000000000000000000000000000 + p2node--verbose: 0000000000000000000000000000000000000000 + p2node--verbose: 0000000000000000000000000000000000000000 
+ p2node--verbose: 0000000000000000000000000000000000000000 + p2node--debug: 0000000000000000000000000000000000000000 + p2node--debug: 0000000000000000000000000000000000000000 + p2node--debug: bbe44766e73d5f11ed2177f1838de10c53ef3e74 + p2node--debug: 0000000000000000000000000000000000000000 + p2node--debug: 0000000000000000000000000000000000000000 + p2node--debug: 0000000000000000000000000000000000000000 + p2node--debug: 0000000000000000000000000000000000000000 + p2node--debug: 0000000000000000000000000000000000000000 + p2node--debug: 0000000000000000000000000000000000000000 Filters work: @@ -1245,7 +1353,7 @@ $ hg add a $ hg commit -m future -d "`cat a`" - $ hg log -l1 --template '{date|age}\n' + $ hg log -l1 --template '{date|age}\n' 7 years from now Error on syntax: @@ -1255,6 +1363,30 @@ abort: t:3: unmatched quotes [255] +Behind the scenes, this will throw TypeError + + $ hg log -l 3 --template '{date|obfuscate}\n' + abort: template filter 'obfuscate' is not compatible with keyword 'date' + [255] + +Behind the scenes, this will throw a ValueError + + $ hg log -l 3 --template 'line: {desc|shortdate}\n' + abort: template filter 'shortdate' is not compatible with keyword 'desc' + [255] + +Behind the scenes, this will throw AttributeError + + $ hg log -l 3 --template 'line: {date|escape}\n' + abort: template filter 'escape' is not compatible with keyword 'date' + [255] + +Behind the scenes, this will throw ValueError + + $ hg tip --template '{author|email|date}\n' + abort: template filter 'datefilter' is not compatible with keyword 'author' + [255] + $ cd ..
--- a/tests/test-convert-cvsnt-mergepoints.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-cvsnt-mergepoints.t Sun Sep 09 12:35:06 2012 +0200 @@ -43,7 +43,7 @@ $ cvscall -Q add foo $ cd foo $ echo foo > foo.txt - $ cvscall -Q add foo.txt + $ cvscall -Q add foo.txt $ cvsci -m "add foo.txt" foo.txt $ cd ../.. $ rm -rf cvsworktmp
--- a/tests/test-convert-darcs.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-darcs.t Sun Sep 09 12:35:06 2012 +0200 @@ -5,25 +5,6 @@ $ echo 'graphlog =' >> $HGRCPATH $ DARCS_EMAIL='test@example.org'; export DARCS_EMAIL -skip if we can't import elementtree - - $ mkdir dummy - $ mkdir dummy/_darcs - $ if hg convert dummy 2>&1 | grep ElementTree > /dev/null; then - > echo 'skipped: missing feature: elementtree module' - > exit 80 - > fi - -#if no-outer-repo - -try converting darcs1 repository - - $ hg clone -q "$TESTDIR/bundles/darcs1.hg" darcs - $ hg convert -s darcs darcs/darcs1 2>&1 | grep darcs-1.0 - darcs-1.0 repository format is unsupported, please upgrade - -#endif - initialize darcs repo $ mkdir darcs-repo @@ -44,6 +25,13 @@ Finished recording patch 'p1.1' $ cd .. +skip if we can't import elementtree + + $ if hg convert darcs-repo darcs-dummy 2>&1 | grep ElementTree > /dev/null; then + > echo 'skipped: missing feature: elementtree module' + > exit 80 + > fi + update source $ cd darcs-repo @@ -108,3 +96,13 @@ 1e88685f5ddec574a34c70af492f95b6debc8741 644 b 37406831adc447ec2385014019599dfec953c806 644 dir2/d b783a337463792a5c7d548ad85a7d3253c16ba8c 644 ff + +#if no-outer-repo + +try converting darcs1 repository + + $ hg clone -q "$TESTDIR/bundles/darcs1.hg" darcs + $ hg convert -s darcs darcs/darcs1 2>&1 | grep darcs-1.0 + darcs-1.0 repository format is unsupported, please upgrade + +#endif
--- a/tests/test-convert-mtn-rename-directory.out Mon Sep 03 17:25:50 2012 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ -% tedious monotone keys configuration -% create monotone repository -mtn: adding dir1 to workspace manifest -mtn: adding dir1/subdir1 to workspace manifest -mtn: adding dir1/subdir1/file1 to workspace manifest -mtn: beginning commit on branch 'com.selenic.test' -mtn: committed revision 5ed13ff5582d8d1e319f079b694a37d2b45edfc8 -% rename directory -mtn: skipping dir1, already accounted for in workspace -mtn: renaming dir1/subdir1 to dir1/subdir2 in workspace manifest -mtn: beginning commit on branch 'com.selenic.test' -mtn: committed revision 985204142a822b22ee86b509d61f3c5ab6857d2b -% convert -assuming destination repo.mtn-hg -initializing destination repo.mtn-hg repository -scanning source... -sorting... -converting... -1 initialize -0 rename -1 files updated, 0 files merged, 0 files removed, 0 files unresolved -% manifest -dir1/subdir2/file1
--- a/tests/test-convert-mtn.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-mtn.t Sun Sep 09 12:35:06 2012 +0200 @@ -2,9 +2,18 @@ $ "$TESTDIR/hghave" mtn || exit 80 Monotone directory is called .monotone on *nix and monotone -on Windows. Having a variable here ease test patching. +on Windows. + +#if windows + + $ mtndir=monotone + +#else $ mtndir=.monotone + +#endif + $ echo "[extensions]" >> $HGRCPATH $ echo "convert=" >> $HGRCPATH $ echo 'graphlog =' >> $HGRCPATH @@ -210,7 +219,9 @@ test large file support (> 32kB) - $ python -c 'for x in range(10000): print x' > large-file + >>> fp = file('large-file', 'wb') + >>> for x in xrange(10000): fp.write('%d\n' % x) + >>> fp.close() $ $TESTDIR/md5sum.py large-file 5d6de8a95c3b6bf9e0ffb808ba5299c1 large-file $ mtn add large-file
--- a/tests/test-convert-svn-branches.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-svn-branches.t Sun Sep 09 12:35:06 2012 +0200 @@ -3,7 +3,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > convert = + > convert = > graphlog = > EOF @@ -14,7 +14,7 @@ $ cat > branchmap <<EOF > old3 newbranch - > + > > > EOF $ hg convert --branchmap=branchmap --datesort -r 10 svn-repo A-hg @@ -95,9 +95,5 @@ Test hg failing to call itself - $ HG=foobar hg convert svn-repo B-hg - * (glob) - initializing destination B-hg repository + $ HG=foobar hg convert svn-repo B-hg 2>&1 | grep abort abort: Mercurial failed to run itself, check hg executable is in PATH - [255] -
--- a/tests/test-convert-svn-encoding.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-svn-encoding.t Sun Sep 09 12:35:06 2012 +0200 @@ -3,7 +3,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > convert = + > convert = > graphlog = > EOF
--- a/tests/test-convert-svn-move.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-svn-move.t Sun Sep 09 12:35:06 2012 +0200 @@ -3,7 +3,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > convert = + > convert = > graphlog = > EOF @@ -155,7 +155,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > progress = + > progress = > [progress] > assume-tty = 1 > delay = 0
--- a/tests/test-convert-svn-sink.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-svn-sink.t Sun Sep 09 12:35:06 2012 +0200 @@ -16,7 +16,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > convert = + > convert = > graphlog = > EOF
--- a/tests/test-convert-svn-source.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-svn-source.t Sun Sep 09 12:35:06 2012 +0200 @@ -3,7 +3,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > convert = + > convert = > graphlog = > [convert] > svn.trunk = mytrunk
--- a/tests/test-convert-svn-startrev.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-svn-startrev.t Sun Sep 09 12:35:06 2012 +0200 @@ -3,7 +3,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > convert = + > convert = > graphlog = > EOF $ convert()
--- a/tests/test-convert-svn-tags.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert-svn-tags.t Sun Sep 09 12:35:06 2012 +0200 @@ -3,7 +3,7 @@ $ cat >> $HGRCPATH <<EOF > [extensions] - > convert = + > convert = > graphlog = > EOF
--- a/tests/test-convert.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-convert.t Sun Sep 09 12:35:06 2012 +0200 @@ -60,8 +60,8 @@ The authormap is a simple text file that maps each source commit author to a destination commit author. It is handy for source SCMs that use unix - logins to identify authors (eg: CVS). One line per author mapping and the - line format is: + logins to identify authors (e.g.: CVS). One line per author mapping and + the line format is: source author = destination author @@ -399,7 +399,7 @@ test revset converted() lookup - $ hg --config convert.hg.saverev=True convert a c + $ hg --config convert.hg.saverev=True convert a c initializing destination c repository scanning source... sorting...
--- a/tests/test-diffstat.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-diffstat.t Sun Sep 09 12:35:06 2012 +0200 @@ -68,5 +68,5 @@ $ hg diff --stat --git file with spaces | Bin 1 files changed, 0 insertions(+), 0 deletions(-) - + $ cd ..
--- a/tests/test-encoding.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-encoding.t Sun Sep 09 12:35:06 2012 +0200 @@ -252,5 +252,5 @@ $ HGENCODING=latin-1 hg up `cat latin-1-tag` 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - + $ cd ..
--- a/tests/test-export.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-export.t Sun Sep 09 12:35:06 2012 +0200 @@ -124,7 +124,7 @@ Checking if only alphanumeric characters are used in the file name (%m option): $ echo "line" >> foo - $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~" + $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~" $ hg export -v -o %m.patch tip exporting patch: ____________0123456789_______ABCDEFGHIJKLMNOPQRSTUVWXYZ______abcdefghijklmnopqrstuvwxyz____.patch @@ -144,4 +144,28 @@ abort: export requires at least one changeset [255] +Check for color output + $ echo "[color]" >> $HGRCPATH + $ echo "mode = ansi" >> $HGRCPATH + $ echo "[extensions]" >> $HGRCPATH + $ echo "color=" >> $HGRCPATH + + $ hg export --color always --nodates tip + # HG changeset patch + # User test + # Date 0 0 + # Node ID * (glob) + # Parent * (glob) + !"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ + + \x1b[0;1mdiff -r f3acbafac161 -r 197ecd81a57f foo\x1b[0m (esc) + \x1b[0;31;1m--- a/foo\x1b[0m (esc) + \x1b[0;32;1m+++ b/foo\x1b[0m (esc) + \x1b[0;35m@@ -10,3 +10,4 @@\x1b[0m (esc) + foo-9 + foo-10 + foo-11 + \x1b[0;32m+line\x1b[0m (esc) + + $ cd ..
--- a/tests/test-glog.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-glog.t Sun Sep 09 12:35:06 2012 +0200 @@ -83,7 +83,7 @@ $ cat > printrevset.py <<EOF > from mercurial import extensions, revset, commands, cmdutil - > + > > def uisetup(ui): > def printrevset(orig, ui, repo, *pats, **opts): > if opts.get('print_revset'): @@ -2044,20 +2044,40 @@ Test --hidden $ cat > $HGTMP/testhidden.py << EOF + > from mercurial import util > def reposetup(ui, repo): > for line in repo.opener('hidden'): > ctx = repo[line.strip()] > repo.hiddenrevs.add(ctx.rev()) + > if repo.revs('children(%ld) - %ld', repo.hiddenrevs, repo.hiddenrevs): + > raise util.Abort('hidden revision with children!') > EOF $ echo '[extensions]' >> .hg/hgrc $ echo "hidden=$HGTMP/testhidden.py" >> .hg/hgrc - $ hg id --debug -i -r 0 > .hg/hidden + $ hg id --debug -i -r 8 > .hg/hidden $ testlog [] [] $ testlog --hidden [] [] + $ hg glog --template '{rev} {desc}\n' + o 7 Added tag foo-bar for changeset fc281d8ff18d + | + o 6 merge 5 and 4 + |\ + | o 5 add another e + | | + o | 4 mv dir/b e + |/ + @ 3 mv a b; add d + | + o 2 mv b dir/b + | + o 1 copy a b + | + o 0 add a + A template without trailing newline should do something sane @@ -2066,6 +2086,8 @@ | o 1 copy a b | + o 0 add a + Extra newlines must be preserved @@ -2076,6 +2098,9 @@ o | 1 copy a b | + o + 0 add a + The almost-empty template should do something sane too ... @@ -2084,5 +2109,7 @@ | o | + o + $ cd ..
--- a/tests/test-hgweb-commands.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-hgweb-commands.t Sun Sep 09 12:35:06 2012 +0200 @@ -1260,7 +1260,7 @@ Stop and restart with HGENCODING=cp932 and preferuncompressed - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ HGENCODING=cp932 hg serve --config server.preferuncompressed=True -n test \ > -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS @@ -1288,7 +1288,7 @@ ERRORS ENCOUNTERED $ cat errors.log - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cd ..
--- a/tests/test-hgweb-diffs.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-hgweb-diffs.t Sun Sep 09 12:35:06 2012 +0200 @@ -291,7 +291,7 @@ set up hgweb with git diffs - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ hg serve --config 'diff.git=1' -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS @@ -936,7 +936,7 @@ raw revision with diff block numbers - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cat <<EOF > .hg/hgrc > [web] > templates = rawdiff @@ -973,7 +973,7 @@ @@ -0,0 +1,1 @@ +b - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ rm .hg/hgrc rawdiff/map $ rmdir rawdiff $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
--- a/tests/test-hgweb.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-hgweb.t Sun Sep 09 12:35:06 2012 +0200 @@ -299,7 +299,7 @@ stop and restart - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log $ cat hg.pid >> $DAEMON_PIDS
--- a/tests/test-hgwebdir.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-hgwebdir.t Sun Sep 09 12:35:06 2012 +0200 @@ -657,7 +657,7 @@ Test collapse = True - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cat >> paths.conf <<EOF > [web] > collapse=true @@ -723,7 +723,7 @@ Test descend = False - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cat >> paths.conf <<EOF > descend=false > EOF @@ -784,7 +784,7 @@ $ hg id http://localhost:$HGPORT1/astar 8580ff50825a - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cat > paths.conf <<EOF > [paths] > t/a = $root/a @@ -812,7 +812,7 @@ Test collapse = True - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cat >> paths.conf <<EOF > [web] > collapse=true @@ -837,7 +837,7 @@ test descend = False - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cat >> paths.conf <<EOF > descend=false > EOF @@ -857,7 +857,7 @@ /t/a/ /t/b/ - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cat > paths.conf <<EOF > [paths] > nostore = $root/nostore @@ -956,7 +956,7 @@ $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 'a/rss-log' | grep '<guid' <guid isPermaLink="true">http://hg.example.com:8080/a/rev/8580ff50825a</guid> - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ hg serve --config web.baseurl=http://hg.example.com:8080/foo/ -p $HGPORT2 -d \ > --pid-file=hg.pid --webdir-conf collections.conf \ > -A access-collections-2.log -E error-collections-2.log
--- a/tests/test-highlight.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-highlight.t Sun Sep 09 12:35:06 2012 +0200 @@ -545,7 +545,7 @@ errors encountered $ cat errors.log - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS Change the pygments style @@ -579,7 +579,7 @@ $ hg ci -Ama adding eucjp.txt $ hgserveget () { - > "$TESTDIR/killdaemons.py" + > "$TESTDIR/killdaemons.py" $DAEMON_PIDS > echo % HGENCODING="$1" hg serve > HGENCODING="$1" hg serve -p $HGPORT -d -n test --pid-file=hg.pid -E errors.log > cat hg.pid >> $DAEMON_PIDS
--- a/tests/test-http-branchmap.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-http-branchmap.t Sun Sep 09 12:35:06 2012 +0200 @@ -1,7 +1,10 @@ - $ "$TESTDIR/hghave" serve || exit 80 + $ "$TESTDIR/hghave" killdaemons || exit 80 $ hgserve() { - > hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -E errors.log -v $@ + > hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid \ + > -E errors.log -v $@ > startup.log + > # Grepping hg serve stdout would hang on Windows + > grep -v 'listening at' startup.log > cat hg.pid >> "$DAEMON_PIDS" > } $ hg init a @@ -12,7 +15,6 @@ $ hg -R a ci -Am foo adding foo $ hgserve -R a --config web.push_ssl=False --config web.allow_push=* --encoding latin1 - listening at http://*:$HGPORT1/ (bound to 127.0.0.1:$HGPORT1) (glob) $ hg --encoding utf-8 clone http://localhost:$HGPORT1 b requesting all changes adding changesets @@ -52,7 +54,7 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: foo - $ kill `cat hg.pid` + $ "$TESTDIR/killdaemons.py" hg.pid verify 7e7d56fe4833 (encoding fallback in branchmap to maintain compatibility with 1.3.x)
--- a/tests/test-hybridencode.py Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-hybridencode.py Sun Sep 09 12:35:06 2012 +0200 @@ -6,10 +6,51 @@ enc = hybridencode # used for 'dotencode' repo format def show(s): - print "A = '%s'" % s - print "B = '%s'" % enc(s) + print "A = '%s'" % s.encode("string_escape") + print "B = '%s'" % enc(s).encode("string_escape") print +show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=[]^`{}") + +print "uppercase char X is encoded as _x" +show("data/ABCDEFGHIJKLMNOPQRSTUVWXYZ") + +print "underbar is doubled" +show("data/_") + +print "tilde is character-encoded" +show("data/~") + +print "characters in ASCII code range 1..31" +show('data/\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f' + '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f') + +print "characters in ASCII code range 126..255" +show('data/\x7e\x7f' + '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f' + '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f') +show('data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf' + '\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf') +show('data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf' + '\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf') +show('data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef' + '\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff') + +print "Windows reserved characters" +show('data/less <, greater >, colon :, double-quote ", backslash \\' + ', pipe |, question-mark ?, asterisk *') + +print "encoding directories ending in .hg, .i or .d with '.hg' suffix" +show('data/x.hg/x.i/x.d/foo') + +print "but these are not encoded on *filenames*" +show('data/foo/x.hg') +show('data/foo/x.i') +show('data/foo/x.d') + +print "plain .hg, .i and .d directories have the leading dot encoded" +show('data/.hg/.i/.d/foo') + 
show('data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i') show('data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/' @@ -25,3 +66,362 @@ 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt') show('data/foo.../foo / /a./_. /__/.x../ bla/.FOO/something.i') +show('data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9') +show('data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9') +show('data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x' + '/com6.x/com7.x/com8.x/com9.x') +show('data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5' + '/x.com6/x.com7/x.com8/x.com9') +show('data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x' + '/com6x/com7x/com8x/com9x') +show('data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5' + '/xcom6/xcom7/xcom8/xcom9') + +show('data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9') +show('data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9') +show('data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x' + '/lpt6.x/lpt7.x/lpt8.x/lpt9.x') +show('data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5' + '/x.lpt6/x.lpt7/x.lpt8/x.lpt9') +show('data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x' + '/lpt6x/lpt7x/lpt8x/lpt9x') +show('data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5' + '/xlpt6/xlpt7/xlpt8/xlpt9') + +show('data/con/p/pr/prn/a/au/aux/n/nu/nul') +show('data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL') +show('data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x') +show('data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul') +show('data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx') +show('data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul') + +print "largest unhashed path" +show('data/123456789-123456789-123456789-123456789-123456789-' + 'unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "shortest hashed path" +show('data/123456789-123456789-123456789-123456789-123456789-' + 
'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "changing one char in part that's hashed away produces a different hash" +show('data/123456789-123456789-123456789-123456789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-' + '123456789-123456') + +print "uppercase hitting length limit due to encoding" +show('data/A23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/Z23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "compare with lowercase not hitting limit" +show('data/a23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/z23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "not hitting limit with any of these" +show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=" + "[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-" + "123456789-12345") + +print "underbar hitting length limit due to encoding" +show('data/_23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "tilde hitting length limit due to encoding" +show('data/~23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "Windows reserved characters hitting length limit" +show('data/<23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/>23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/:23456789-123456789-123456789-123456789-123456789-' + 
'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/"23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/\\23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/|23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/?23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/*23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "initial space hitting length limit" +show('data/ 23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "initial dot hitting length limit" +show('data/.23456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "trailing space in filename hitting length limit" +show('data/123456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-1234 ') + +print "trailing dot in filename hitting length limit" +show('data/123456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-1234.') + +print "initial space in directory hitting length limit" +show('data/ x/456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "initial dot in directory hitting length limit" +show('data/.x/456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "trailing space in directory hitting length limit" 
+show('data/x /456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "trailing dot in directory hitting length limit" +show('data/x./456789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "with directories that need direncoding, hitting length limit" +show('data/x.i/56789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/x.d/56789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/x.hg/5789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "Windows reserved filenames, hitting length limit" +show('data/con/56789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/prn/56789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/aux/56789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/nul/56789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/com1/6789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/com9/6789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/lpt1/6789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') +show('data/lpt9/6789-123456789-123456789-123456789-123456789-' + 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + 
'123456789-12345') + +print "non-reserved names, just not hitting limit" +show('data/123456789-123456789-123456789-123456789-123456789-' + '/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12345') + +print "hashed path with largest untruncated 1st dir" +show('data/12345678/-123456789-123456789-123456789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with smallest truncated 1st dir" +show('data/123456789/123456789-123456789-123456789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest untruncated two dirs" +show('data/12345678/12345678/9-123456789-123456789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with smallest truncated two dirs" +show('data/123456789/123456789/123456789-123456789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest untruncated three dirs" +show('data/12345678/12345678/12345678/89-123456789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with smallest truncated three dirs" +show('data/123456789/123456789/123456789/123456789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest untruncated four dirs" +show('data/12345678/12345678/12345678/12345678/789-123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with smallest truncated four dirs" +show('data/123456789/123456789/123456789/123456789/123456789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest untruncated five dirs" +show('data/12345678/12345678/12345678/12345678/12345678/6789-' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + 
+print "hashed path with smallest truncated five dirs" +show('data/123456789/123456789/123456789/123456789/123456789/' + 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest untruncated six dirs" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with smallest truncated six dirs" +show('data/123456789/123456789/123456789/123456789/123456789/' + '123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest untruncated seven dirs" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with smallest truncated seven dirs" +show('data/123456789/123456789/123456789/123456789/123456789/' + '123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest untruncated eight dirs" +print "(directory 8 is dropped because it hits _maxshortdirslen)" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with smallest truncated eight dirs" +print "(directory 8 is dropped because it hits _maxshortdirslen)" +show('data/123456789/123456789/123456789/123456789/123456789/' + '123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with largest non-dropped directory 8" +print "(just not hitting the _maxshortdirslen boundary)" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "...adding one truncated char to dir 1..7 won't drop dir 8" +show('data/12345678x/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') 
+show('data/12345678/12345678x/12345678/12345678/12345678/12345' + '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') +show('data/12345678/12345678/12345678x/12345678/12345678/12345' + '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') +show('data/12345678/12345678/12345678/12345678x/12345678/12345' + '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') +show('data/12345678/12345678/12345678/12345678/12345678x/12345' + '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path with shortest dropped directory 8" +print "(just hitting the _maxshortdirslen boundary)" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "hashed path that drops dir 8 due to dot or space at end is" +print "encoded, and thus causing to hit _maxshortdirslen" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "... 
with dir 8 short enough for encoding" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-123456') + +print "extensions are replicated on hashed paths (unbounded!)" +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.345') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.34567') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.345678') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789-') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789-1') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789-12') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789-123') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789-1234') +show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789-12345') 
+show('data/12345678/12345678/12345678/12345678/12345678/12345' + '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' + '123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWX' + 'YZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTU' + 'VWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxx' + 'xxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwww' + 'wwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww') +
--- a/tests/test-hybridencode.py.out Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-hybridencode.py.out Sun Sep 09 12:35:06 2012 +0200 @@ -1,3 +1,57 @@ +A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}' +B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}' + +uppercase char X is encoded as _x +A = 'data/ABCDEFGHIJKLMNOPQRSTUVWXYZ' +B = 'data/_a_b_c_d_e_f_g_h_i_j_k_l_m_n_o_p_q_r_s_t_u_v_w_x_y_z' + +underbar is doubled +A = 'data/_' +B = 'data/__' + +tilde is character-encoded +A = 'data/~' +B = 'data/~7e' + +characters in ASCII code range 1..31 +A = 'data/\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f' +B = 'data/~01~02~03~04~05~06~07~08~09~0a~0b~0c~0d~0e~0f~10~11~12~13~14~15~16~17~18~19~1a~1b~1c~1d~1e~1f' + +characters in ASCII code range 126..255 +A = 'data/~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f' +B = 'data/~7e~7f~80~81~82~83~84~85~86~87~88~89~8a~8b~8c~8d~8e~8f~90~91~92~93~94~95~96~97~98~99~9a~9b~9c~9d~9e~9f' + +A = 'data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf' +B = 'data/~a0~a1~a2~a3~a4~a5~a6~a7~a8~a9~aa~ab~ac~ad~ae~af~b0~b1~b2~b3~b4~b5~b6~b7~b8~b9~ba~bb~bc~bd~be~bf' + +A = 'data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf' +B = 'data/~c0~c1~c2~c3~c4~c5~c6~c7~c8~c9~ca~cb~cc~cd~ce~cf~d0~d1~d2~d3~d4~d5~d6~d7~d8~d9~da~db~dc~dd~de~df' + +A = 'data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff' +B = 'data/~e0~e1~e2~e3~e4~e5~e6~e7~e8~e9~ea~eb~ec~ed~ee~ef~f0~f1~f2~f3~f4~f5~f6~f7~f8~f9~fa~fb~fc~fd~fe~ff' + +Windows reserved characters +A = 'data/less <, greater >, colon :, double-quote ", backslash \\, pipe |, 
question-mark ?, asterisk *' +B = 'data/less ~3c, greater ~3e, colon ~3a, double-quote ~22, backslash ~5c, pipe ~7c, question-mark ~3f, asterisk ~2a' + +encoding directories ending in .hg, .i or .d with '.hg' suffix +A = 'data/x.hg/x.i/x.d/foo' +B = 'data/x.hg.hg/x.i.hg/x.d.hg/foo' + +but these are not encoded on *filenames* +A = 'data/foo/x.hg' +B = 'data/foo/x.hg' + +A = 'data/foo/x.i' +B = 'data/foo/x.i' + +A = 'data/foo/x.d' +B = 'data/foo/x.d' + +plain .hg, .i and .d directories have the leading dot encoded +A = 'data/.hg/.i/.d/foo' +B = 'data/~2ehg.hg/~2ei.hg/~2ed.hg/foo' + A = 'data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i' B = 'data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i' @@ -19,3 +73,341 @@ A = 'data/foo.../foo / /a./_. /__/.x../ bla/.FOO/something.i' B = 'data/foo..~2e/foo ~20/~20/a~2e/__.~20/____/~2ex.~2e/~20 bla/~2e_f_o_o/something.i' +A = 'data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9' +B = 'data/c/co/com/com0/co~6d1/co~6d2/co~6d3/co~6d4/co~6d5/co~6d6/co~6d7/co~6d8/co~6d9' + +A = 'data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9' +B = 'data/_c/_c_o/_c_o_m/_c_o_m0/_c_o_m1/_c_o_m2/_c_o_m3/_c_o_m4/_c_o_m5/_c_o_m6/_c_o_m7/_c_o_m8/_c_o_m9' + +A = 'data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x/com6.x/com7.x/com8.x/com9.x' +B = 'data/c.x/co.x/com.x/com0.x/co~6d1.x/co~6d2.x/co~6d3.x/co~6d4.x/co~6d5.x/co~6d6.x/co~6d7.x/co~6d8.x/co~6d9.x' + +A = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9' +B = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9' + +A = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x' +B = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x' + +A = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9' +B = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9' + +A = 
'data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9' +B = 'data/l/lp/lpt/lpt0/lp~741/lp~742/lp~743/lp~744/lp~745/lp~746/lp~747/lp~748/lp~749' + +A = 'data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9' +B = 'data/_l/_l_p/_l_p_t/_l_p_t0/_l_p_t1/_l_p_t2/_l_p_t3/_l_p_t4/_l_p_t5/_l_p_t6/_l_p_t7/_l_p_t8/_l_p_t9' + +A = 'data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x/lpt6.x/lpt7.x/lpt8.x/lpt9.x' +B = 'data/l.x/lp.x/lpt.x/lpt0.x/lp~741.x/lp~742.x/lp~743.x/lp~744.x/lp~745.x/lp~746.x/lp~747.x/lp~748.x/lp~749.x' + +A = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9' +B = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9' + +A = 'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x' +B = 'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x' + +A = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9' +B = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9' + +A = 'data/con/p/pr/prn/a/au/aux/n/nu/nul' +B = 'data/co~6e/p/pr/pr~6e/a/au/au~78/n/nu/nu~6c' + +A = 'data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL' +B = 'data/_c_o_n/_p/_p_r/_p_r_n/_a/_a_u/_a_u_x/_n/_n_u/_n_u_l' + +A = 'data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x' +B = 'data/co~6e.x/p.x/pr.x/pr~6e.x/a.x/au.x/au~78.x/n.x/nu.x/nu~6c.x' + +A = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul' +B = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul' + +A = 'data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx' +B = 'data/conx/px/prx/prnx/ax/au~78/auxx/nx/nux/nulx' + +A = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul' +B = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul' + +largest unhashed path +A = 'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 
'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' + +shortest hashed path +A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxe9c55002b50bf5181e7a6fc1f60b126e2a6fcf71' + +changing one char in part that's hashed away produces a different hash +A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-123456789-123456' +B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxd24fa4455faf8a94350c18e5eace7c2bb17af706' + +uppercase hitting length limit due to encoding +A = 'data/A23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxcbbc657029b41b94ed510d05feb6716a5c03bc6b' + +A = 'data/Z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx938f32a725c89512833fb96b6602dd9ebff51ddd' + +compare with lowercase not hitting limit +A = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' + +A = 'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' + +not hitting limit with any of these +A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 
!#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' + +underbar hitting length limit due to encoding +A = 'data/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx9921a01af50feeabc060ce00eee4cba6efc31d2b' + +tilde hitting length limit due to encoding +A = 'data/~23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~7e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx9cec6f97d569c10995f785720044ea2e4227481b' + +Windows reserved characters hitting length limit +A = 'data/<23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~3c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxee67d8f275876ca1ef2500fc542e63c885c4e62d' + +A = 'data/>23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~3e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx387a85a5b1547cc9136310c974df716818458ddb' + +A = 'data/:23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~3a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2e4154fb571d13d22399c58cc4ef4858e4b75999' + +A = 'data/"23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~2223456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxfc7e3ec7b0687ee06ed8c32fef0eb0c1980259f5' + +A = 
'data/\\23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~5c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx944e1f2b7110687e116e0d151328ac648b06ab4a' + +A = 'data/|23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~7c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx28b23dd3fd0242946334126ab62bcd772aac32f4' + +A = 'data/?23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~3f23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa263022d3994d2143d98f94f431eef8b5e7e0f8a' + +A = 'data/*23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~2a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx0e7e6020e3c00ba7bb7893d84ca2966fbf53e140' + +initial space hitting length limit +A = 'data/ 23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~2023456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx92acbc78ef8c0b796111629a02601f07d8aec4ea' + +initial dot hitting length limit +A = 'data/.23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~2e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxdbe19cc6505b3515ab9228cebf877ad07075168f' + +trailing space in filename hitting length limit +A = 'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234 ' +B = 'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx0025dc73e04f97426db4893e3bf67d581dc6d066' + +trailing dot in filename hitting length limit +A = 
'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234.' +B = 'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx85a16cf03ee7feba8a5abc626f1ba9886d01e89d' + +initial space in directory hitting length limit +A = 'data/ x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~20x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx1b3a3b712b2ac00d6af14ae8b4c14fdbf904f516' + +initial dot in directory hitting length limit +A = 'data/.x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/~2ex/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx39dbc4c193a5643a8936fc69c3363cd7ac91ab14' + +trailing space in directory hitting length limit +A = 'data/x /456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/x~20/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2253c341df0b5290790ad312cd8499850f2273e5' + +trailing dot in directory hitting length limit +A = 'data/x./456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/x~2e/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxcc0324d696d34562b44b5138db08ee1594ccc583' + +with directories that need direncoding, hitting length limit +A = 'data/x.i/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/x.i.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxa4c4399bdf81c67dbbbb7060aa0124d8dea94f74' + +A = 'data/x.d/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 
'dh/x.d.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx1303fa90473b230615f5b3ea7b660e881ae5270a' + +A = 'data/x.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/x.hg.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx26d724a8af68e7a4e4455e6602ea9adbd0eb801f' + +Windows reserved filenames, hitting length limit +A = 'data/con/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/co~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc0794d4f4c605a2617900eb2563d7113cf6ea7d3' + +A = 'data/prn/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/pr~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx64db876e1a9730e27236cb9b167aff942240e932' + +A = 'data/aux/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/au~78/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx8a178558405ca6fb4bbd75446dfa186f06751a0d' + +A = 'data/nul/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/nu~6c/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc5e51b6fec1bd07bd243b053a0c3f7209855b886' + +A = 'data/com1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/co~6d1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx32f5f44ece3bb62b9327369ca84cc19c86259fcd' + +A = 'data/com9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/co~6d9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx734360b28c66a3230f55849fe8926206d229f990' + +A = 
'data/lpt1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/lp~741/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxe6f16ab4b6b0637676b2842b3345c9836df46ef7' + +A = 'data/lpt9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'dh/lp~749/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa475814c51acead3e44f2ff801f0c4903f986157' + +non-reserved names, just not hitting limit +A = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' +B = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' + +hashed path with largest untruncated 1st dir +A = 'data/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx4e9e9e384d00929a93b6835fbf976eb32321ff3c' + +hashed path with smallest truncated 1st dir +A = 'data/123456789/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxx1f4e4ec5f2be76e109bfaa8e31c062fe426d5490' + +hashed path with largest untruncated two dirs +A = 'data/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx3332d8329d969cf835542a9f2cbcfb385b6cf39d' + +hashed path with smallest truncated two dirs +A = 'data/123456789/123456789/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 
'dh/12345678/12345678/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx9699559798247dffa18717138859be5f8874840e' + +hashed path with largest untruncated three dirs +A = 'data/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxf0a2b053bb1369cce02f78c217d6a7aaea18c439' + +hashed path with smallest truncated three dirs +A = 'data/123456789/123456789/123456789/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-1c6f8284967384ec13985a046d3553179d9d03cd' + +hashed path with largest untruncated four dirs +A = 'data/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxx0d30c99049d8f0ff97b94d4ef302027e8d54c6fd' + +hashed path with smallest truncated four dirs +A = 'data/123456789/123456789/123456789/123456789/123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/123456789-hashed----xxxxxxxxx-xxxxxxxxx-x46162779e1a771810b37a737f82ae7ed33771402' + +hashed path with largest untruncated five dirs +A = 'data/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxbfe752ddc8b003c2790c66a9f2eb1ea75c114390' + +hashed path with smallest truncated five dirs +A = 'data/123456789/123456789/123456789/123456789/123456789/hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/hashed----xxxxxxxxx-xxxxxxxxx-xxb94c27b3532fa880cdd572b1c514785cab7b6ff2' + +hashed path with largest 
untruncated six dirs +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxcd8cc5483a0f3be409e0e5d4bf9e36e113c59235' + +hashed path with smallest truncated six dirs +A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxxxxxxx-xxx47dd6f616f833a142da00701b334cebbf640da06' + +hashed path with largest untruncated seven dirs +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxx1c8ed635229fc22efe51035feeadeb4c8a0ecb82' + +hashed path with smallest truncated seven dirs +A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx298ff7d33f8ce6db57930837ffea2fb2f48bb926' + +hashed path with largest untruncated eight dirs +(directory 8 is dropped because it hits _maxshortdirslen) +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxc8996ccd41b471f768057181a4d59d2febe7277d' + +hashed path with smallest truncated eight dirs +(directory 8 is dropped because it hits _maxshortdirslen) +A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx4fa04a839a6bda93e1c21c713f2edcbd16e8890d' + +hashed path with largest non-dropped directory 8 +(just not 
hitting the _maxshortdirslen boundary) +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxx4d43d1ccaa20efbfe99ec779dc063611536ff2c5' + +...adding one truncated char to dir 1..7 won't drop dir 8 +A = 'data/12345678x/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx0f9efce65189cc60fd90fe4ffd49d7b58bbe0f2e' + +A = 'data/12345678/12345678x/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx945ca395708cafdd54a94501859beabd3e243921' + +A = 'data/12345678/12345678/12345678x/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxac62bf6898c4fd0502146074547c11caa751a327' + +A = 'data/12345678/12345678/12345678/12345678x/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx2ae5a2baed7983fae8974d0ca06c6bf08b9aee92' + +A = 'data/12345678/12345678/12345678/12345678/12345678x/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx214aba07b6687532a43d1e9eaf6e88cfca96b68c' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxe7a022ae82f0f55cf4e0498e55ba59ea4ebb55bf' + +A = 
'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxb51ce61164996a80f36ce3cfe64b62d519aedae3' + +hashed path with shortest dropped directory 8 +(just hitting the _maxshortdirslen boundary) +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx11fa9873cc6c3215eae864528b5530a04efc6cfe' + +hashed path that drops dir 8 due to dot or space at end is +encoded, and thus causing to hit _maxshortdirslen +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxx602df9b45bec564e2e1f0645d5140dddcc76ed58' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxxd99ff212bc84b4d1f70cd6b0071e3ef69d4e12ce' + +... with dir 8 short enough for encoding +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~2e/xx-xxxxx7baeb5ed7f14a586ee1cacecdbcbff70032d1b3c' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~20/xx-xxxxxcf79ca9795f77d7f75745da36807e5d772bd5182' + +extensions are replicated on hashed paths (unbounded!) 
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxx50a65dfc4ab6bf5fb9ba949447ccaf456c1ebf30.345' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxe5f03f29a0a5876660a28aefc43f978add5a7659.3456' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.34567' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xc2609a92347e5a14c29211c6cd634732d1f0c968.34567' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345678' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-695e97d34c6cd8186afa3c74a56243bd645d50c3.345678' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/b067582449be78bc1266e9195785a5c6165380e6.3456789' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/d3442b4cfe1fdbde1c4c60ad421e946344111961.3456789-' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/40a78aaf2114e1b82f91ac3ff234bf358d99dbba.3456789-1' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12' +B = 
'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/1e04221e6fbff8cfbeaf555d11550dc573e3326d.3456789-12' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-123' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/5e3b3973b3c9297b4577a4cf1d58162545a6b454.3456789-123' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1234' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/6ee6ef477c7ba44ff19c7d97fc4ac7fa4a8e4665.3456789-1234' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/f08b927027344ec4a7f1994ecb0effe2b9cc1215.3456789-12345' + +A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTUVWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww' +B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/28de8651e30eeb95f4b97edb7d12b281d3fb3ce0.3456789-12345-abcdefghijklmnoprstuvwxyz-abcdefghjiklmnopqrstuvwxyz-abcdefghijklmnoprstuvwxyz-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww' +
--- a/tests/test-import-git.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-import-git.t Sun Sep 09 12:35:06 2012 +0200 @@ -322,12 +322,12 @@ Filenames with spaces: - $ hg import -d "1000000 0" -m spaces - <<EOF + $ sed 's,EOL$,,g' <<EOF | hg import -d "1000000 0" -m spaces - > diff --git a/foo bar b/foo bar > new file mode 100644 > index 0000000..257cc56 > --- /dev/null - > +++ b/foo bar + > +++ b/foo bar EOL > @@ -0,0 +1 @@ > +foo > EOF @@ -384,7 +384,7 @@ b \x00 (no-eol) (esc) - $ hg st --copies --change . + $ hg st --copies --change . A binary2 text2 R text2
--- a/tests/test-inotify-issue1371.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-inotify-issue1371.t Sun Sep 09 12:35:06 2012 +0200 @@ -1,6 +1,6 @@ $ "$TESTDIR/hghave" inotify || exit 80 - $ hg init + $ hg init $ touch a b c d e f $ echo "[extensions]" >> $HGRCPATH $ echo "inotify=" >> $HGRCPATH @@ -41,4 +41,4 @@ Are we able to kill the service? if not, the service died on some error - $ kill `cat hg.pid` + $ kill `cat hg.pid`
--- a/tests/test-keyword.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-keyword.t Sun Sep 09 12:35:06 2012 +0200 @@ -727,7 +727,7 @@ ignore $Id$ a -Write custom keyword and prepare multiline commit message +Write custom keyword and prepare multi-line commit message $ echo '$Xinfo$' >> a $ cat <<EOF >> log @@ -745,7 +745,7 @@ ? c ? log -Commit with multiline message and custom expansion +Commit with multi-line message and custom expansion $ hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>' a @@ -998,7 +998,7 @@ $ echo '$Id$' > m $ hg add m - $ hg commit -m 4kw + $ hg commit -m 4kw $ echo foo >> m $ hg commit -m 5foo
--- a/tests/test-largefiles.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-largefiles.t Sun Sep 09 12:35:06 2012 +0200 @@ -80,7 +80,7 @@ $ rm sub/unknown Remove both largefiles and normal files. - + $ hg remove normal1 large1 $ hg status large1 R large1 @@ -200,7 +200,7 @@ -rw-r--r-- 9 normal4 - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS #endif Test archiving the various revisions. These hit corner cases known with @@ -212,7 +212,7 @@ $ hg archive -r 3 ../archive3 $ hg archive -r 4 ../archive4 $ cd ../archive0 - $ cat normal1 + $ cat normal1 normal1 $ cat large1 large1 @@ -632,7 +632,7 @@ Old revisions of a clone have correct largefiles content (this also tests update). - $ hg update -r 1 + $ hg update -r 1 2 files updated, 0 files merged, 0 files removed, 0 files unresolved getting changed largefiles 1 largefiles updated, 0 removed @@ -785,7 +785,7 @@ Rollback on largefiles. - $ echo large4-modified-again > sub/large4 + $ echo large4-modified-again > sub/large4 $ hg commit -m "Modify large4 again" Invoking status precommit hook M sub/large4 @@ -815,7 +815,7 @@ "update --clean" leaves correct largefiles in working copy. - $ hg update --clean + $ hg update --clean 0 files updated, 0 files merged, 0 files removed, 0 files unresolved getting changed largefiles 1 largefiles updated, 0 removed @@ -1019,7 +1019,7 @@ getting changed largefiles 3 largefiles updated, 0 removed $ cd g - $ hg transplant -s ../d 598410d3eb9a + $ hg transplant -s ../d 598410d3eb9a searching for changes searching for changes adding changesets @@ -1166,7 +1166,7 @@ [255] used all HGPORTs, kill all daemons - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS #endif vanilla clients locked out from largefiles ssh repos @@ -1252,7 +1252,7 @@ $ rm -rf empty used all HGPORTs, kill all daemons - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS #endif
--- a/tests/test-mq-header-date.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-mq-header-date.t Sun Sep 09 12:35:06 2012 +0200 @@ -128,7 +128,7 @@ > catlogd 6 > > drop 6 - > + > > > echo ==== qnew -u > hg qnew -u jane 6.patch
--- a/tests/test-mq-qimport.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-mq-qimport.t Sun Sep 09 12:35:06 2012 +0200 @@ -169,7 +169,7 @@ $ cat > appendfoo.diff <<EOF > append foo - > + > > diff -r 07f494440405 -r 261500830e46 baz > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 > +++ b/baz Thu Jan 01 00:00:00 1970 +0000 @@ -179,7 +179,7 @@ $ cat > appendbar.diff <<EOF > append bar - > + > > diff -r 07f494440405 -r 261500830e46 baz > --- a/baz Thu Jan 01 00:00:00 1970 +0000 > +++ b/baz Thu Jan 01 00:00:00 1970 +0000
--- a/tests/test-mv-cp-st-diff.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-mv-cp-st-diff.t Sun Sep 09 12:35:06 2012 +0200 @@ -187,7 +187,7 @@ +y1 - $ tb "add a a1" "add a a2" "hg cp a b" "copy in working dir" + $ tb "add a a1" "add a a2" "hg cp a b" "copy in working dir" updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved created new head
--- a/tests/test-obsolete.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-obsolete.t Sun Sep 09 12:35:06 2012 +0200 @@ -68,7 +68,10 @@ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit new_c created new head + $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden $ hg debugobsolete `getid original_c` `getid new_c` -d '56 12' + $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden + 2:245bde4270cd add original_c $ hg debugobsolete 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
--- a/tests/test-phases-exchange.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-phases-exchange.t Sun Sep 09 12:35:06 2012 +0200 @@ -84,7 +84,7 @@ pull did not updated ../alpha state. -push from alpha to beta should update phase even if nothing is transfered +push from alpha to beta should update phase even if nothing is transferred $ cd ../alpha $ hgph # not updated by remote pull
--- a/tests/test-pull-branch.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-pull-branch.t Sun Sep 09 12:35:06 2012 +0200 @@ -142,7 +142,7 @@ $ hg branch branchC marked working directory as branch branchC (branches are permanent and global, did you want a bookmark?) - $ echo b1 > bar + $ echo b1 > bar $ hg ci -Am "commit on branchC on tt" adding bar @@ -151,7 +151,7 @@ $ cd ../t $ hg up -C default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ echo a1 > bar + $ echo a1 > bar $ hg ci -Am "commit on default on t" adding bar
--- a/tests/test-pull-http.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-pull-http.t Sun Sep 09 12:35:06 2012 +0200 @@ -28,7 +28,7 @@ $ cat test3/.hg/hgrc [paths] default = http://foo@localhost:$HGPORT/ - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS expect error, cloning not allowed @@ -40,7 +40,7 @@ requesting all changes abort: authorization failed [255] - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS serve errors
--- a/tests/test-push-http.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-push-http.t Sun Sep 09 12:35:06 2012 +0200 @@ -16,9 +16,11 @@ > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log > cat hg.pid >> $DAEMON_PIDS > hg --cwd ../test2 push http://localhost:$HGPORT/ - > "$TESTDIR/killdaemons.py" + > exitstatus=$? + > "$TESTDIR/killdaemons.py" $DAEMON_PIDS > echo % serve errors > cat errors.log + > return $exitstatus > } $ cd ../test @@ -27,10 +29,9 @@ $ req pushing to http://localhost:$HGPORT/ searching for changes - remote: ssl required - remote: ssl required - updating cb9a9f314b8b to public failed! + abort: HTTP Error 403: ssl required % serve errors + [255] expect authorization error @@ -41,6 +42,7 @@ searching for changes abort: authorization failed % serve errors + [255] expect authorization error: must have authorized user @@ -50,6 +52,7 @@ searching for changes abort: authorization failed % serve errors + [255] expect success @@ -110,6 +113,7 @@ searching for changes abort: authorization failed % serve errors + [255] expect authorization error: some users denied, users must be authenticated @@ -119,5 +123,6 @@ searching for changes abort: authorization failed % serve errors + [255] $ cd ..
--- a/tests/test-rebase-bookmarks.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-rebase-bookmarks.t Sun Sep 09 12:35:06 2012 +0200 @@ -23,7 +23,7 @@ adding b $ hg book 'X' $ hg book 'Y' - + $ echo c > c $ hg ci -Am C adding c @@ -38,7 +38,7 @@ $ hg book W - $ hg tglog + $ hg tglog @ 3: 'D' bookmarks: W | | o 2: 'C' bookmarks: Y Z @@ -47,7 +47,7 @@ |/ o 0: 'A' bookmarks: - + Move only rebased bookmarks $ cd .. @@ -59,7 +59,7 @@ $ hg rebase -s Y -d 3 saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob) - $ hg tglog + $ hg tglog @ 3: 'C' bookmarks: Y Z | o 2: 'D' bookmarks: W @@ -79,7 +79,7 @@ $ hg rebase -s 1 -d 3 saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob) - $ hg tglog + $ hg tglog @ 3: 'C' bookmarks: Y Z | o 2: 'B' bookmarks: X
--- a/tests/test-rebase-cache.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-rebase-cache.t Sun Sep 09 12:35:06 2012 +0200 @@ -73,7 +73,7 @@ $ hg clone -q -u . a a1 $ cd a1 - $ hg tglog + $ hg tglog @ 8: 'F' branch3 | o 7: 'branch3' branch3 @@ -120,7 +120,7 @@ 2: 'B' branch1 0: 'A' - $ hg tglog + $ hg tglog @ 8: 'E' branch3 | o 7: 'D' branch3 @@ -244,7 +244,7 @@ 2: 'B' branch1 0: 'A' - $ hg tglog + $ hg tglog @ 7: 'F' branch2 | o 6: 'E' branch2
--- a/tests/test-rebase-conflicts.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-rebase-conflicts.t Sun Sep 09 12:35:06 2012 +0200 @@ -69,7 +69,7 @@ Try to continue without solving the conflict: - $ hg rebase --continue + $ hg rebase --continue abort: unresolved merge conflicts (see hg help resolve) [255]
--- a/tests/test-rebase-mq-skip.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-rebase-mq-skip.t Sun Sep 09 12:35:06 2012 +0200 @@ -39,7 +39,7 @@ $ hg add p1 $ hg qref -m P1 - $ hg export qtip > p1.patch + $ hg export qtip > p1.patch $ hg up -q -C 1
--- a/tests/test-record.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-record.t Sun Sep 09 12:35:06 2012 +0200 @@ -246,10 +246,19 @@ +9 +10 +Modify end of plain file with username unset + + $ echo 11 >> plain + $ unset HGUSER + $ hg record --config ui.username= -d '8 0' -m end plain + abort: no username supplied (see "hg help config") + [255] + Modify end of plain file - $ echo 11 >> plain + $ HGUSER="test" + $ export HGUSER $ hg record -d '8 0' -m end plain <<EOF > y > y
--- a/tests/test-run-tests.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-run-tests.t Sun Sep 09 12:35:06 2012 +0200 @@ -95,5 +95,5 @@ Exit code: - $ (exit 1) + $ (exit 1) [1]
--- a/tests/test-status.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-status.t Sun Sep 09 12:35:06 2012 +0200 @@ -330,4 +330,9 @@ $ hg status -A --rev 1 1 R 1/2/3/4/5/b.txt +#if windows + $ hg --config ui.slash=false status -A --rev 1 1 + R 1\2\3\4\5\b.txt +#endif + $ cd ..
--- a/tests/test-subrepo-deep-nested-change.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-subrepo-deep-nested-change.t Sun Sep 09 12:35:06 2012 +0200 @@ -100,7 +100,7 @@ revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487 Check that deep archiving works - + $ cd cloned $ echo 'test' > sub1/sub2/test.txt $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
--- a/tests/test-subrepo-git.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-subrepo-git.t Sun Sep 09 12:35:06 2012 +0200 @@ -446,7 +446,7 @@ $ git rev-parse HEAD da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 $ cd .. - $ hg update --clean tip > /dev/null 2>&1 + $ hg update --clean tip > /dev/null 2>&1 Sticky subrepository, revision updates $ hg id -n
--- a/tests/test-subrepo-missing.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-subrepo-missing.t Sun Sep 09 12:35:06 2012 +0200 @@ -60,7 +60,7 @@ warning: subrepo spec file .hgsub not found 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm .hgsubstate - $ hg up 0 + $ hg up 0 remote changed .hgsubstate which local deleted use (c)hanged version or leave (d)eleted? c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-subrepo-relative-path.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-subrepo-relative-path.t Sun Sep 09 12:35:06 2012 +0200 @@ -70,7 +70,7 @@ source ../sub revision 863c1745b441bd97a8c4a096e87793073f4fb215 - $ "$TESTDIR/killdaemons.py" + $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS subrepo paths with ssh urls
--- a/tests/test-subrepo.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-subrepo.t Sun Sep 09 12:35:06 2012 +0200 @@ -730,7 +730,7 @@ 925c17564ef8 tip $ hg -R s id 12a213df6fa9 tip - $ hg -R t id + $ hg -R t id 52c0adc0515a tip $ hg update 11 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -738,7 +738,7 @@ 365661e5936a $ hg -R s id fc627a69481f - $ hg -R t id + $ hg -R t id e95bcfa18a35 Sticky subrepositorys, file changes @@ -750,7 +750,7 @@ 365661e5936a+ $ hg -R s id fc627a69481f+ - $ hg -R t id + $ hg -R t id e95bcfa18a35+ $ hg update tip subrepository sources for s differ @@ -764,7 +764,7 @@ 925c17564ef8+ tip $ hg -R s id fc627a69481f+ - $ hg -R t id + $ hg -R t id e95bcfa18a35+ $ hg update --clean tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -774,7 +774,7 @@ 925c17564ef8 tip $ hg -R s id 12a213df6fa9 tip - $ hg -R t id + $ hg -R t id 52c0adc0515a tip $ cd s $ hg update -r -2 @@ -792,7 +792,7 @@ e45c8b14af55+ $ hg -R s id 02dcf1d70411 - $ hg -R t id + $ hg -R t id 7af322bc1198 Sticky subrepository, file changes and revision updates @@ -804,7 +804,7 @@ e45c8b14af55+ $ hg -R s id 02dcf1d70411+ - $ hg -R t id + $ hg -R t id 7af322bc1198+ $ hg update tip subrepository sources for s differ @@ -818,7 +818,7 @@ 925c17564ef8+ tip $ hg -R s id 02dcf1d70411+ - $ hg -R t id + $ hg -R t id 7af322bc1198+ Sticky repository, update --clean @@ -828,7 +828,7 @@ 925c17564ef8 tip $ hg -R s id 12a213df6fa9 tip - $ hg -R t id + $ hg -R t id 52c0adc0515a tip Test subrepo already at intended revision: @@ -843,7 +843,7 @@ 11+ $ hg -R s id fc627a69481f - $ hg -R t id + $ hg -R t id e95bcfa18a35 Test that removing .hgsubstate doesn't break anything:
--- a/tests/test-tags.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-tags.t Sun Sep 09 12:35:06 2012 +0200 @@ -137,7 +137,7 @@ $ echo >> .hgtags $ echo "foo bar" >> .hgtags $ echo "a5a5 invalid" >> .hg/localtags - $ cat .hgtags + $ cat .hgtags acb14030fe0a21b60322c440ad2d20cf7685a376 first spam
--- a/tests/test-template-engine.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-template-engine.t Sun Sep 09 12:35:06 2012 +0200 @@ -36,4 +36,12 @@ $ hg log --style=./mymap 0 97e5f848f0936960273bbf75be6388cd0350a32b test + $ cat > changeset.txt << EOF + > {{p1rev}} {{p1node}} {{p2rev}} {{p2node}} + > EOF + $ hg ci -Ama + $ hg log --style=./mymap + 0 97e5f848f0936960273bbf75be6388cd0350a32b -1 0000000000000000000000000000000000000000 + -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 + $ cd ..
--- a/tests/test-treediscovery-legacy.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-treediscovery-legacy.t Sun Sep 09 12:35:06 2012 +0200 @@ -35,7 +35,7 @@ > cat hg.pid >> $DAEMON_PIDS > } $ tstop() { - > "$TESTDIR/killdaemons.py" + > "$TESTDIR/killdaemons.py" $DAEMON_PIDS > cp $HGRCPATH-withcap $HGRCPATH > } @@ -330,7 +330,7 @@ $ hg ci -Am A adding A $ cd .. - $ hg clone rlocal rremote + $ hg clone rlocal rremote updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd rlocal @@ -341,7 +341,7 @@ $ tstart rremote $ cd rlocal - $ hg incoming $remote + $ hg incoming $remote comparing with http://localhost:$HGPORT/ searching for changes no changes found
--- a/tests/test-treediscovery.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-treediscovery.t Sun Sep 09 12:35:06 2012 +0200 @@ -23,7 +23,7 @@ > cat hg.pid >> $DAEMON_PIDS > } $ tstop() { - > "$TESTDIR/killdaemons.py" + > "$TESTDIR/killdaemons.py" $DAEMON_PIDS > } Both are empty:
--- a/tests/test-update-branches.t Mon Sep 03 17:25:50 2012 +0100 +++ b/tests/test-update-branches.t Sun Sep 09 12:35:06 2012 +0200 @@ -61,7 +61,7 @@ > hg up $opt $targetrev > hg parent --template 'parent={rev}\n' > hg stat -S - > } + > } $ norevtest () { > msg=$1 @@ -74,7 +74,7 @@ > hg up $opt > hg parent --template 'parent={rev}\n' > hg stat -S - > } + > } Test cases are documented in a table in the update function of merge.py. Cases are run as shown in that table, row by row.