changeset 1930:70be74899338
merge with crew
author | Peter van Dijk <peter@dataloss.nl> |
---|---|
date | Mon, 06 Mar 2006 18:00:44 +0100 |
parents | 50e1c90b0fcf (current diff), 85daa4e03b4c |
children | 819a2508f2c6 |
diffstat | 51 files changed, 2658 insertions(+), 1067 deletions(-) |
--- a/.hgignore	Mon Mar 06 17:58:53 2006 +0100
+++ b/.hgignore	Mon Mar 06 18:00:44 2006 +0100
@@ -12,6 +12,7 @@
 build
 dist
 doc/*.[0-9]
+doc/*.[0-9].gendoc.txt
 doc/*.[0-9].{x,ht}ml
 MANIFEST
 patches
--- a/contrib/bash_completion Mon Mar 06 17:58:53 2006 +0100 +++ b/contrib/bash_completion Mon Mar 06 18:00:44 2006 +0100 @@ -3,23 +3,26 @@ _hg_command_list() { "$hg" --debug help 2>/dev/null | \ - awk 'function command_line(line) { - gsub(/,/, "", line) - gsub(/:.*/, "", line) - split(line, aliases) + awk -F', ' '/^list of commands:/ {commands=1} + commands==1 && /^ [^ ]/ { + line = substr($0, 2) + colon = index(line, ":") + if (colon > 0) + line = substr(line, 1, colon-1) + n = split(line, aliases) command = aliases[1] - delete aliases[1] + if (index(command, "debug") == 1) { + for (i=1; i<=n; i++) + debug[j++] = aliases[i] + next + } print command - for (i in aliases) + for (i=2; i<=n; i++) if (index(command, aliases[i]) != 1) print aliases[i] } - /^list of commands:/ {commands=1} - commands && /^ debug/ {a[i++] = $0; next;} - commands && /^ [^ ]/ {command_line($0)} /^global options:/ {exit 0} - END {for (i in a) command_line(a[i])}' - + END {for (i in debug) print debug[i]}' } _hg_option_list()
--- a/contrib/hbisect.py	Mon Mar 06 17:58:53 2006 +0100
+++ b/contrib/hbisect.py	Mon Mar 06 18:00:44 2006 +0100
@@ -187,7 +187,7 @@
         check_clean(self.ui, self.repo)
         rev = self.next()
         self.ui.write("Now testing %s\n" % hg.hex(rev))
-        return self.repo.update(rev, allow=True, force=True)
+        return self.repo.update(rev, force=True)
 
     def good(self, rev):
         self.goodrevs.append(rev)
@@ -232,7 +232,7 @@
             b.good(new_rev)
             ui.write("it is good\n")
         anc = b.ancestors()
-        repo.update(new_rev, allow=True, force=True)
+        repo.update(new_rev, force=True)
         for v in anc:
             if v != rev:
                 ui.warn("fail to found cset! :(\n")
--- a/doc/Makefile	Mon Mar 06 17:58:53 2006 +0100
+++ b/doc/Makefile	Mon Mar 06 18:00:44 2006 +0100
@@ -8,6 +8,12 @@
 
 html: $(HTML)
 
+hg.1.txt: hg.1.gendoc.txt
+	touch hg.1.txt
+
+hg.1.gendoc.txt: ../mercurial/commands.py
+	python gendoc.py > $@
+
 %: %.xml
 	xmlto man $*.xml
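Together with gendoc.py (added below) and the include directive that replaces the hand-written command list in hg.1.txt, these rules fold the command reference into the documentation build: hg.1.gendoc.txt is regenerated from mercurial/commands.py, and the man page source simply includes it. A minimal sketch of the resulting flow, assuming the pre-existing asciidoc/xmlto rules in this Makefile finish the man page:

    $ cd doc
    $ make hg.1.gendoc.txt   # runs: python gendoc.py > hg.1.gendoc.txt
    $ make hg.1              # hg.1.txt (which now includes hg.1.gendoc.txt) -> DocBook -> man page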
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/gendoc.py Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,92 @@ +import sys, textwrap +# import from the live mercurial repo +sys.path.insert(0, "..") +from mercurial.commands import table, globalopts +from mercurial.i18n import gettext as _ + +def get_desc(docstr): + if not docstr: + return "", "" + # sanitize + docstr = docstr.strip("\n") + docstr = docstr.rstrip() + shortdesc = docstr.splitlines()[0].strip() + + i = docstr.find("\n") + if i != -1: + desc = docstr[i+2:] + else: + desc = " %s" % shortdesc + return (shortdesc, desc) + +def get_opts(opts): + for shortopt, longopt, default, desc in opts: + allopts = [] + if shortopt: + allopts.append("-%s" % shortopt) + if longopt: + allopts.append("--%s" % longopt) + desc += default and _(" (default: %s)") % default or "" + yield(", ".join(allopts), desc) + +def get_cmd(cmd): + d = {} + attr = table[cmd] + cmds = cmd.lstrip("^").split("|") + + d['synopsis'] = attr[2] + d['cmd'] = cmds[0] + d['aliases'] = cmd.split("|")[1:] + d['desc'] = get_desc(attr[0].__doc__) + d['opts'] = list(get_opts(attr[1])) + return d + + +def show_doc(ui): + def bold(s, text=""): + ui.write("%s\n%s\n%s\n" % (s, "="*len(s), text)) + def underlined(s, text=""): + ui.write("%s\n%s\n%s\n" % (s, "-"*len(s), text)) + + # print options + underlined(_("OPTIONS")) + for optstr, desc in get_opts(globalopts): + ui.write("%s::\n %s\n\n" % (optstr, desc)) + + # print cmds + underlined(_("COMMANDS")) + h = {} + for c, attr in table.items(): + f = c.split("|")[0] + f = f.lstrip("^") + h[f] = c + cmds = h.keys() + cmds.sort() + + for f in cmds: + if f.startswith("debug"): continue + d = get_cmd(h[f]) + # synopsis + ui.write("%s::\n" % d['synopsis'].replace("hg ","", 1)) + # description + ui.write("%s\n\n" % d['desc'][1]) + # options + opt_output = list(d['opts']) + if opt_output: + opts_len = max([len(line[0]) for line in opt_output]) + ui.write(_(" options:\n")) + for optstr, desc in opt_output: + if desc: + s = "%-*s %s" % (opts_len, optstr, desc) + else: + s = optstr + s = textwrap.fill(s, initial_indent=4 * " ", + subsequent_indent=(6 + opts_len) * " ") + ui.write("%s\n" % s) + ui.write("\n") + # aliases + if d['aliases']: + ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases'])) + +if __name__ == "__main__": + show_doc(sys.stdout)
--- a/doc/hg.1.txt Mon Mar 06 17:58:53 2006 +0100 +++ b/doc/hg.1.txt Mon Mar 06 18:00:44 2006 +0100 @@ -14,42 +14,6 @@ ----------- The hg(1) command provides a command line interface to the Mercurial system. -OPTIONS -------- - --R, --repository:: - repository root directory - ---cwd:: - change working directory - --y, --noninteractive:: - do not prompt, assume 'yes' for any required answers - --q, --quiet:: - suppress output - --v, --verbose:: - enable additional output - ---debug:: - enable debugging output - ---traceback:: - print traceback on exception - ---time:: - time how long the command takes - ---profile:: - print command execution profile - ---version:: - output version information and exit - --h, --help:: - display help and exit - COMMAND ELEMENTS ---------------- @@ -70,586 +34,8 @@ fast and the old-http:// protocol which is much slower but does not require a special server on the web host. -COMMANDS --------- -add [options] [files ...]:: - Schedule files to be version controlled and added to the repository. - - The files will be added to the repository at the next commit. - - If no names are given, add all files in the current directory and - its subdirectories. - -addremove [options] [files ...]:: - Add all new files and remove all missing files from the repository. - - New files are ignored if they match any of the patterns in .hgignore. As - with add, these changes take effect at the next commit. - -annotate [-r <rev> -u -n -c -d] [files ...]:: - List changes in files, showing the revision id responsible for each line - - This command is useful to discover who did a change or when a change took - place. - - Without the -a option, annotate will avoid processing files it - detects as binary. With -a, annotate will generate an annotation - anyway, probably with undesirable results. - - options: - -a, --text treat all files as text - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - -r, --revision <rev> annotate the specified revision - -u, --user list the author - -d, --date list the commit date - -c, --changeset list the changeset - -n, --number list the revision number (default) - -bundle <file> <other>:: - (EXPERIMENTAL) - - Generate a compressed changegroup file collecting all changesets - not found in the other repository. - - This file can then be transferred using conventional means and - applied to another repository with the unbundle command. This is - useful when native push and pull are not available or when - exporting an entire repository is undesirable. The standard file - extension is ".hg". - - Unlike import/export, this exactly preserves all changeset - contents including permissions, rename data, and revision history. - -cat [options] <file ...>:: - Print the specified files as they were at the given revision. - If no revision is given then the tip is used. - - Output may be to a file, in which case the name of the file is - given using a format string. The formatting rules are the same as - for the export command, with the following additions: - - %s basename of file being printed - %d dirname of file being printed, or '.' 
if in repo root - %p root-relative path name of file being printed - - options: - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - -o, --output <filespec> print output to file with formatted name - -r, --rev <rev> print the given revision - -clone [options] <source> [dest]:: - Create a copy of an existing repository in a new directory. - - If no destination directory name is specified, it defaults to the - basename of the source. - - The location of the source is added to the new repository's - .hg/hgrc file, as the default to be used for future pulls. - - For efficiency, hardlinks are used for cloning whenever the source - and destination are on the same filesystem. Some filesystems, - such as AFS, implement hardlinking incorrectly, but do not report - errors. In these cases, use the --pull option to avoid - hardlinking. - - See pull for valid source format details. - - options: - -U, --noupdate do not update the new working directory - --pull use pull protocol to copy metadata - -e, --ssh specify ssh command to use - --remotecmd specify hg command to run on the remote side - -commit [options] [files...]:: - Commit changes to the given files into the repository. - - If a list of files is omitted, all changes reported by "hg status" - from the root of the repository will be commited. - - The HGEDITOR or EDITOR environment variables are used to start an - editor to add a commit comment. - - Options: - - -A, --addremove run addremove during commit - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - -m, --message <text> use <text> as commit message - -l, --logfile <file> read the commit message from <file> - -d, --date <datecode> record datecode as commit date - -u, --user <user> record user as commiter - - aliases: ci - -copy <source ...> <dest>:: - Mark dest as having copies of source files. If dest is a - directory, copies are put in that directory. If dest is a file, - there can only be one source. - - By default, this command copies the contents of files as they - stand in the working directory. If invoked with --after, the - operation is recorded, but no copying is performed. - - This command takes effect in the next commit. - - NOTE: This command should be treated as experimental. While it - should properly record copied files, this information is not yet - fully used by merge, nor fully reported by log. - - Options: - -A, --after record a copy that has already occurred - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - -f, --force forcibly copy over an existing managed file - - aliases: cp - -diff [-a] [-r revision] [-r revision] [files ...]:: - Show differences between revisions for the specified files. - - Differences between files are shown using the unified diff format. - - When two revision arguments are given, then changes are shown - between those revisions. If only one revision is specified then - that revision is compared to the working directory, and, when no - revisions are specified, the working directory files are compared - to its parent. - - Without the -a option, diff will avoid generating diffs of files - it detects as binary. With -a, diff will generate a diff anyway, - probably with undesirable results. 
- - options: - -a, --text treat all files as text - -I, --include <pat> include names matching the given patterns - -p, --show-function show which function each change is in - -X, --exclude <pat> exclude names matching the given patterns - -w, --ignore-all-space ignore white space when comparing lines - -export [-o filespec] [revision] ...:: - Print the changeset header and diffs for one or more revisions. - - The information shown in the changeset header is: author, - changeset hash, parent and commit comment. - - Output may be to a file, in which case the name of the file is - given using a format string. The formatting rules are as follows: - - %% literal "%" character - %H changeset hash (40 bytes of hexadecimal) - %N number of patches being generated - %R changeset revision number - %b basename of the exporting repository - %h short-form changeset hash (12 bytes of hexadecimal) - %n zero-padded sequence number, starting at 1 - %r zero-padded changeset revision number - - Without the -a option, export will avoid generating diffs of files - it detects as binary. With -a, export will generate a diff anyway, - probably with undesirable results. - - options: - -a, --text treat all files as text - -o, --output <filespec> print output to file with formatted name - -forget [options] [files]:: - Undo an 'hg add' scheduled for the next commit. - - options: - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - -grep [options] pattern [files]:: - Search revisions of files for a regular expression. - - This command behaves differently than Unix grep. It only accepts - Python/Perl regexps. It searches repository history, not the - working directory. It always prints the revision number in which - a match appears. - - By default, grep only prints output for the first revision of a - file in which it finds a match. To get it to print every revision - that contains a change in match status ("-" for a match that - becomes a non-match, or "+" for a non-match that becomes a match), - use the --all flag. - - options: - -0, --print0 end fields with NUL - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - --all print all revisions that match - -i, --ignore-case ignore case when matching - -l, --files-with-matches print only filenames and revs that match - -n, --line-number print matching line numbers - -r <rev>, --rev <rev> search in given revision range - -u, --user print user who committed change - -heads:: - Show all repository head changesets. - - Repository "heads" are changesets that don't have children - changesets. They are where development generally takes place and - are the usual targets for update and merge operations. - -identify:: - Print a short summary of the current state of the repo. - - This summary identifies the repository state using one or two parent - hash identifiers, followed by a "+" if there are uncommitted changes - in the working directory, followed by a list of tags for this revision. - - aliases: id - -import [-p <n> -b <base> -f] <patches>:: - Import a list of patches and commit them individually. - - If there are outstanding changes in the working directory, import - will abort unless given the -f flag. - - If a patch looks like a mail message (its first line starts with - "From " or looks like an RFC822 header), it will not be applied - unless the -f option is used. 
The importer neither parses nor - discards mail headers, so use -f only to override the "mailness" - safety check, not to import a real mail message. - - options: - -p, --strip <n> directory strip option for patch. This has the same - meaning as the corresponding patch option - -b <path> base directory to read patches from - -f, --force skip check for outstanding uncommitted changes - - aliases: patch - -incoming [-p] [source]:: - Show new changesets found in the specified repo or the default - pull repo. These are the changesets that would be pulled if a pull - was requested. - - Currently only local repositories are supported. - - options: - -p, --patch show patch - - aliases: in - -init [dest]:: - Initialize a new repository in the given directory. If the given - directory does not exist, it is created. - - If no directory is given, the current directory is used. - -locate [options] [files]:: - Print all files under Mercurial control whose names match the - given patterns. - - This command searches the current directory and its - subdirectories. To search an entire repository, move to the root - of the repository. - - If no patterns are given to match, this command prints all file - names. - - If you want to feed the output of this command into the "xargs" - command, use the "-0" option to both this command and "xargs". - This will avoid the problem of "xargs" treating single filenames - that contain white space as multiple filenames. - - options: - - -0, --print0 end filenames with NUL, for use with xargs - -f, --fullpath print complete paths from the filesystem root - -I, --include <pat> include names matching the given patterns - -r, --rev <rev> search the repository as it stood at rev - -X, --exclude <pat> exclude names matching the given patterns - -log [-r revision ...] [-p] [files]:: - Print the revision history of the specified files or the entire project. - - By default this command outputs: changeset id and hash, tags, - parents, user, date and time, and a summary for each commit. The - -v switch adds some more detail, such as changed files, manifest - hashes or message signatures. - - options: - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - -r, --rev <A> show the specified revision or range - -p, --patch show patch - - aliases: history - -manifest [revision]:: - Print a list of version controlled files for the given revision. - - The manifest is the list of files being version controlled. If no revision - is given then the tip is used. - -outgoing [-p] [dest]:: - Show changesets not found in the specified destination repo or the - default push repo. These are the changesets that would be pushed - if a push was requested. - - See pull for valid source format details. - - options: - -p, --patch show patch - - aliases: out - -parents:: - Print the working directory's parent revisions. - -paths [NAME]:: - Show definition of symbolic path name NAME. If no name is given, show - definition of available names. - - Path names are defined in the [paths] section of /etc/mercurial/hgrc - and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too. - -pull <repository path>:: - Pull changes from a remote repository to a local one. - - This finds all changes from the repository at the specified path - or URL and adds them to the local repository. By default, this - does not update the copy of the project in the working directory. 
- - Valid URLs are of the form: - - local/filesystem/path - http://[user@]host[:port][/path] - https://[user@]host[:port][/path] - ssh://[user@]host[:port][/path] - - SSH requires an accessible shell account on the destination machine - and a copy of hg in the remote path. With SSH, paths are relative - to the remote user's home directory by default; use two slashes at - the start of a path to specify it as relative to the filesystem root. - - options: - -u, --update update the working directory to tip after pull - -e, --ssh specify ssh command to use - --remotecmd specify hg command to run on the remote side - -push <destination>:: - Push changes from the local repository to the given destination. - - This is the symmetrical operation for pull. It helps to move - changes from the current repository to a different one. If the - destination is local this is identical to a pull in that directory - from the current one. - - By default, push will refuse to run if it detects the result would - increase the number of remote heads. This generally indicates the - the client has forgotten to sync and merge before pushing. - - Valid URLs are of the form: - - local/filesystem/path - ssh://[user@]host[:port][/path] - - SSH requires an accessible shell account on the destination - machine and a copy of hg in the remote path. - - options: - - -f, --force force update - -e, --ssh specify ssh command to use - --remotecmd specify hg command to run on the remote side - -rawcommit [-p -d -u -F -m -l]:: - Lowlevel commit, for use in helper scripts. (DEPRECATED) - - This command is not intended to be used by normal users, as it is - primarily useful for importing from other SCMs. - - This command is now deprecated and will be removed in a future - release, please use debugsetparents and commit instead. - -recover:: - Recover from an interrupted commit or pull. - - This command tries to fix the repository status after an interrupted - operation. It should only be necessary when Mercurial suggests it. - -remove [options] [files ...]:: - Schedule the indicated files for removal from the repository. - - This command schedules the files to be removed at the next commit. - This only removes files from the current branch, not from the - entire project history. If the files still exist in the working - directory, they will be deleted from it. - - aliases: rm - -rename <source ...> <dest>:: - Mark dest as copies of sources; mark sources for deletion. If - dest is a directory, copies are put in that directory. If dest is - a file, there can only be one source. - - By default, this command copies the contents of files as they - stand in the working directory. If invoked with --after, the - operation is recorded, but no copying is performed. - - This command takes effect in the next commit. - - NOTE: This command should be treated as experimental. While it - should properly record rename files, this information is not yet - fully used by merge, nor fully reported by log. - - Options: - -A, --after record a rename that has already occurred - -f, --force forcibly copy over an existing managed file - - aliases: mv - -revert [names ...]:: - The revert command has two modes of operation. - - In its default mode, it reverts any uncommitted modifications made - to the named files or directories. This restores the contents of - the affected files to an unmodified state. - - Using the -r option, it reverts the given files or directories to - their state as of an earlier revision. 
This can be helpful to "roll - back" some or all of a change that should not have been committed. - - Revert modifies the working directory. It does not commit any - changes, or change the parent of the current working directory. - - If a file has been deleted, it is recreated. If the executable - mode of a file was changed, it is reset. - - If a directory is given, all files in that directory and its - subdirectories are reverted. - - If no arguments are given, all files in the current directory and - its subdirectories are reverted. - - options: - -r, --rev <rev> revision to revert to - -n, --nonrecursive do not recurse into subdirectories - -root:: - Print the root directory of the current repository. - -serve [options]:: - Start a local HTTP repository browser and pull server. - - By default, the server logs accesses to stdout and errors to - stderr. Use the "-A" and "-E" options to log to files. - - options: - -A, --accesslog <file> name of access log file to write to - -E, --errorlog <file> name of error log file to write to - -a, --address <addr> address to use - -p, --port <n> port to use (default: 8000) - -n, --name <name> name to show in web pages (default: working dir) - -t, --templatedir <path> web templates to use - -6, --ipv6 use IPv6 in addition to IPv4 - -status [options] [files]:: - Show changed files in the working directory. If no names are - given, all files are shown. Otherwise, only files matching the - given names are shown. - - The codes used to show the status of files are: - - M = changed - A = added - R = removed - ? = not tracked - - options: - - -m, --modified show only modified files - -a, --added show only added files - -r, --removed show only removed files - -u, --unknown show only unknown (not tracked) files - -n, --no-status hide status prefix - -0, --print0 end filenames with NUL, for use with xargs - -I, --include <pat> include names matching the given patterns - -X, --exclude <pat> exclude names matching the given patterns - -tag [-l -m <text> -d <datecode> -u <user>] <name> [revision]:: - Name a particular revision using <name>. - - Tags are used to name particular revisions of the repository and are - very useful to compare different revision, to go back to significant - earlier versions or to mark branch points as releases, etc. - - If no revision is given, the tip is used. - - To facilitate version control, distribution, and merging of tags, - they are stored as a file named ".hgtags" which is managed - similarly to other project files and can be hand-edited if - necessary. - - options: - -l, --local make the tag local - -m, --message <text> message for tag commit log entry - -d, --date <datecode> datecode for commit - -u, --user <user> user for commit - - Note: Local tags are not version-controlled or distributed and are - stored in the .hg/localtags file. If there exists a local tag and - a public tag with the same name, local tag is used. - -tags:: - List the repository tags. - - This lists both regular and local tags. - -tip [-p]:: - Show the tip revision. - - options: - -p, --patch show patch - -unbundle <file>:: - (EXPERIMENTAL) - - Apply a compressed changegroup file generated by the bundle - command. - -undo:: - Undo the last commit or pull transaction. - - Roll back the last pull or commit transaction on the - repository, restoring the project to its earlier state. - - This command should be used with care. There is only one level of - undo and there is no redo. - - This command is not intended for use on public repositories. 
Once - a change is visible for pull by other users, undoing it locally is - ineffective. - -update [-m -C] [revision]:: - Update the working directory to the specified revision. - - By default, update will refuse to run if doing so would require - merging or discarding local changes. - - With the -m option, a merge will be performed. - - With the -C option, local changes will be lost. - - options: - -m, --merge allow merging of branches - -C, --clean overwrite locally modified files - - aliases: up checkout co - -verify:: - Verify the integrity of the current repository. - - This will perform an extensive check of the repository's - integrity, validating the hashes and checksums of each entry in - the changelog, manifest, and tracked files, as well as the - integrity of their crosslinks and indices. +include::hg.1.gendoc.txt[] FILE NAME PATTERNS ------------------
--- a/doc/hgrc.5.txt	Mon Mar 06 17:58:53 2006 +0100
+++ b/doc/hgrc.5.txt	Mon Mar 06 18:00:44 2006 +0100
@@ -247,6 +247,9 @@
     remote command to use for clone/push/pull operations. Default is 'hg'.
   ssh;;
     command to use for SSH connections. Default is 'ssh'.
+  timeout;;
+    The timeout used when a lock is held (in seconds), a negative value
+    means no timeout. Default is 600.
   username;;
     The committer of a changeset created when running "commit".
     Typically a person's name and email address, e.g. "Fred Widget
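The new timeout entry sits alongside ssh, remotecmd and username, which hgrc(5) documents under the [ui] section. A minimal hgrc sketch, assuming that placement (the values are illustrative):

    [ui]
    username = Fred Widget <fred@example.com>
    # wait up to 10 minutes for another process to release the repository lock;
    # a negative value means wait forever
    timeout = 600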
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/mq.py Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,1308 @@ +#!/usr/bin/env python +# queue.py - patch queues for mercurial +# +# Copyright 2005 Chris Mason <mason@suse.com> +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. + +from mercurial.demandload import * +demandload(globals(), "os sys re struct traceback errno bz2") +from mercurial.i18n import gettext as _ +from mercurial import ui, hg, revlog, commands, util + +versionstr = "0.45" + +repomap = {} + +class queue: + def __init__(self, ui, path, patchdir=None): + self.opener = util.opener(path) + self.basepath = path + if patchdir: + self.path = patchdir + else: + self.path = os.path.join(path, "patches") + self.ui = ui + self.applied = [] + self.full_series = [] + self.applied_dirty = 0 + self.series_dirty = 0 + self.series_path = os.path.join(self.path, "series") + self.status_path = os.path.join(self.path, "status") + + s = self.series_path + if os.path.exists(s): + self.full_series = self.opener(s).read().splitlines() + self.read_series(self.full_series) + + s = self.status_path + if os.path.exists(s): + self.applied = self.opener(s).read().splitlines() + + def find_series(self, patch): + pre = re.compile("(\s*)([^#]+)") + index = 0 + for l in self.full_series: + m = pre.match(l) + if m: + s = m.group(2) + s = s.rstrip() + if s == patch: + return index + index += 1 + return None + + def read_series(self, list): + def matcher(list): + pre = re.compile("(\s*)([^#]+)") + for l in list: + m = pre.match(l) + if m: + s = m.group(2) + s = s.rstrip() + if len(s) > 0: + yield s + self.series = [] + self.series = [ x for x in matcher(list) ] + + def save_dirty(self): + if self.applied_dirty: + if len(self.applied) > 0: + nl = "\n" + else: + nl = "" + f = self.opener(self.status_path, "w") + f.write("\n".join(self.applied) + nl) + if self.series_dirty: + if len(self.full_series) > 0: + nl = "\n" + else: + nl = "" + f = self.opener(self.series_path, "w") + f.write("\n".join(self.full_series) + nl) + + def readheaders(self, patch): + def eatdiff(lines): + while lines: + l = lines[-1] + if (l.startswith("diff -") or + l.startswith("Index:") or + l.startswith("===========")): + del lines[-1] + else: + break + def eatempty(lines): + while lines: + l = lines[-1] + if re.match('\s*$', l): + del lines[-1] + else: + break + + pf = os.path.join(self.path, patch) + message = [] + comments = [] + user = None + format = None + subject = None + diffstart = 0 + + for line in file(pf): + line = line.rstrip() + if diffstart: + if line.startswith('+++ '): + diffstart = 2 + break + if line.startswith("--- "): + diffstart = 1 + continue + elif format == "hgpatch": + # parse values when importing the result of an hg export + if line.startswith("# User "): + user = line[7:] + elif not line.startswith("# ") and line: + message.append(line) + format = None + elif line == '# HG changeset patch': + format = "hgpatch" + elif (format != "tagdone" and (line.startswith("Subject: ") or + line.startswith("subject: "))): + subject = line[9:] + format = "tag" + elif (format != "tagdone" and (line.startswith("From: ") or + line.startswith("from: "))): + user = line[6:] + format = "tag" + elif format == "tag" and line == "": + # when looking for tags (subject: from: etc) they + # end once you find a blank line in the source + format = "tagdone" + else: + message.append(line) + comments.append(line) + + eatdiff(message) + 
eatdiff(comments) + eatempty(message) + eatempty(comments) + + # make sure message isn't empty + if format and format.startswith("tag") and subject: + message.insert(0, "") + message.insert(0, subject) + return (message, comments, user, diffstart > 1) + + def mergeone(self, repo, mergeq, head, patch, rev, wlock): + # first try just applying the patch + (err, n) = self.apply(repo, [ patch ], update_status=False, + strict=True, merge=rev, wlock=wlock) + + if err == 0: + return (err, n) + + if n is None: + self.ui.warn("apply failed for patch %s\n" % patch) + sys.exit(1) + + self.ui.warn("patch didn't work out, merging %s\n" % patch) + + # apply failed, strip away that rev and merge. + repo.update(head, allow=False, force=True, wlock=wlock) + self.strip(repo, n, update=False, backup='strip', wlock=wlock) + + c = repo.changelog.read(rev) + ret = repo.update(rev, allow=True, wlock=wlock) + if ret: + self.ui.warn("update returned %d\n" % ret) + sys.exit(1) + n = repo.commit(None, c[4], c[1], force=1, wlock=wlock) + if n == None: + self.ui.warn("repo commit failed\n") + sys.exit(1) + try: + message, comments, user, patchfound = mergeq.readheaders(patch) + except: + self.ui.warn("Unable to read %s\n" % patch) + sys.exit(1) + + patchf = self.opener(os.path.join(self.path, patch), "w") + if comments: + comments = "\n".join(comments) + '\n\n' + patchf.write(comments) + commands.dodiff(patchf, self.ui, repo, head, n) + patchf.close() + return (0, n) + + def qparents(self, repo, rev=None): + if rev is None: + (p1, p2) = repo.dirstate.parents() + if p2 == revlog.nullid: + return p1 + if len(self.applied) == 0: + return None + (top, patch) = self.applied[-1].split(':') + top = revlog.bin(top) + return top + pp = repo.changelog.parents(rev) + if pp[1] != revlog.nullid: + arevs = [ x.split(':')[0] for x in self.applied ] + p0 = revlog.hex(pp[0]) + p1 = revlog.hex(pp[1]) + if p0 in arevs: + return pp[0] + if p1 in arevs: + return pp[1] + return None + return pp[0] + + def mergepatch(self, repo, mergeq, series, wlock): + if len(self.applied) == 0: + # each of the patches merged in will have two parents. This + # can confuse the qrefresh, qdiff, and strip code because it + # needs to know which parent is actually in the patch queue. + # so, we insert a merge marker with only one parent. 
This way + # the first patch in the queue is never a merge patch + # + pname = ".hg.patches.merge.marker" + n = repo.commit(None, '[mq]: merge marker', user=None, force=1, + wlock=wlock) + self.applied.append(revlog.hex(n) + ":" + pname) + self.applied_dirty = 1 + + head = self.qparents(repo) + + for patch in series: + patch = mergeq.lookup(patch) + if not patch: + self.ui.warn("patch %s does not exist\n" % patch) + return (1, None) + + info = mergeq.isapplied(patch) + if not info: + self.ui.warn("patch %s is not applied\n" % patch) + return (1, None) + rev = revlog.bin(info[1]) + (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock) + if head: + self.applied.append(revlog.hex(head) + ":" + patch) + self.applied_dirty = 1 + if err: + return (err, head) + return (0, head) + + def apply(self, repo, series, list=False, update_status=True, + strict=False, patchdir=None, merge=None, wlock=None): + # TODO unify with commands.py + if not patchdir: + patchdir = self.path + pwd = os.getcwd() + os.chdir(repo.root) + err = 0 + if not wlock: + wlock = repo.wlock() + lock = repo.lock() + tr = repo.transaction() + n = None + for patch in series: + self.ui.warn("applying %s\n" % patch) + pf = os.path.join(patchdir, patch) + + try: + message, comments, user, patchfound = self.readheaders(patch) + except: + self.ui.warn("Unable to read %s\n" % pf) + err = 1 + break + + if not message: + message = "imported patch %s\n" % patch + else: + if list: + message.append("\nimported patch %s" % patch) + message = '\n'.join(message) + + try: + f = os.popen("patch -p1 --no-backup-if-mismatch < '%s'" % (pf)) + except: + self.ui.warn("patch failed, unable to continue (try -v)\n") + err = 1 + break + files = [] + fuzz = False + for l in f: + l = l.rstrip('\r\n'); + if self.ui.verbose: + self.ui.warn(l + "\n") + if l[:14] == 'patching file ': + pf = os.path.normpath(l[14:]) + # when patch finds a space in the file name, it puts + # single quotes around the filename. 
strip them off + if pf[0] == "'" and pf[-1] == "'": + pf = pf[1:-1] + if pf not in files: + files.append(pf) + printed_file = False + file_str = l + elif l.find('with fuzz') >= 0: + if not printed_file: + self.ui.warn(file_str + '\n') + printed_file = True + self.ui.warn(l + '\n') + fuzz = True + elif l.find('saving rejects to file') >= 0: + self.ui.warn(l + '\n') + elif l.find('FAILED') >= 0: + if not printed_file: + self.ui.warn(file_str + '\n') + printed_file = True + self.ui.warn(l + '\n') + patcherr = f.close() + + if merge and len(files) > 0: + # Mark as merged and update dirstate parent info + repo.dirstate.update(repo.dirstate.filterfiles(files), 'm') + p1, p2 = repo.dirstate.parents() + repo.dirstate.setparents(p1, merge) + if len(files) > 0: + commands.addremove_lock(self.ui, repo, files, + opts={}, wlock=wlock) + n = repo.commit(files, message, user, force=1, lock=lock, + wlock=wlock) + + if n == None: + self.ui.warn("repo commit failed\n") + sys.exit(1) + + if update_status: + self.applied.append(revlog.hex(n) + ":" + patch) + + if patcherr: + if not patchfound: + self.ui.warn("patch %s is empty\n" % patch) + err = 0 + else: + self.ui.warn("patch failed, rejects left in working dir\n") + err = 1 + break + + if fuzz and strict: + self.ui.warn("fuzz found when applying patch, stopping\n") + err = 1 + break + tr.close() + os.chdir(pwd) + return (err, n) + + def delete(self, repo, patch): + patch = self.lookup(patch) + info = self.isapplied(patch) + if info: + self.ui.warn("cannot delete applied patch %s\n" % patch) + sys.exit(1) + if patch not in self.series: + self.ui.warn("patch %s not in series file\n" % patch) + sys.exit(1) + i = self.find_series(patch) + del self.full_series[i] + self.read_series(self.full_series) + self.series_dirty = 1 + + def check_toppatch(self, repo): + if len(self.applied) > 0: + (top, patch) = self.applied[-1].split(':') + top = revlog.bin(top) + pp = repo.dirstate.parents() + if top not in pp: + self.ui.warn("queue top not at dirstate parents. 
top %s dirstate %s %s\n" %( revlog.short(top), revlog.short(pp[0]), revlog.short(pp[1]))) + sys.exit(1) + return top + return None + def check_localchanges(self, repo): + (c, a, r, d, u) = repo.changes(None, None) + if c or a or d or r: + self.ui.write("Local changes found, refresh first\n") + sys.exit(1) + def new(self, repo, patch, msg=None, force=None): + if not force: + self.check_localchanges(repo) + self.check_toppatch(repo) + wlock = repo.wlock() + insert = self.series_end() + if msg: + n = repo.commit([], "[mq]: %s" % msg, force=True, wlock=wlock) + else: + n = repo.commit([], + "New patch: %s" % patch, force=True, wlock=wlock) + if n == None: + self.ui.warn("repo commit failed\n") + sys.exit(1) + self.full_series[insert:insert] = [patch] + self.applied.append(revlog.hex(n) + ":" + patch) + self.read_series(self.full_series) + self.series_dirty = 1 + self.applied_dirty = 1 + p = self.opener(os.path.join(self.path, patch), "w") + if msg: + msg = msg + "\n" + p.write(msg) + p.close() + wlock = None + r = self.qrepo() + if r: r.add([patch]) + + def strip(self, repo, rev, update=True, backup="all", wlock=None): + def limitheads(chlog, stop): + """return the list of all nodes that have no children""" + p = {} + h = [] + stoprev = 0 + if stop in chlog.nodemap: + stoprev = chlog.rev(stop) + + for r in range(chlog.count() - 1, -1, -1): + n = chlog.node(r) + if n not in p: + h.append(n) + if n == stop: + break + if r < stoprev: + break + for pn in chlog.parents(n): + p[pn] = 1 + return h + + def bundle(cg): + backupdir = repo.join("strip-backup") + if not os.path.isdir(backupdir): + os.mkdir(backupdir) + name = os.path.join(backupdir, "%s" % revlog.short(rev)) + name = savename(name) + self.ui.warn("saving bundle to %s\n" % name) + # TODO, exclusive open + f = open(name, "wb") + try: + f.write("HG10") + z = bz2.BZ2Compressor(9) + while 1: + chunk = cg.read(4096) + if not chunk: + break + f.write(z.compress(chunk)) + f.write(z.flush()) + except: + os.unlink(name) + raise + f.close() + return name + + def stripall(rev, revnum): + cl = repo.changelog + c = cl.read(rev) + mm = repo.manifest.read(c[0]) + seen = {} + + for x in xrange(revnum, cl.count()): + c = cl.read(cl.node(x)) + for f in c[3]: + if f in seen: + continue + seen[f] = 1 + if f in mm: + filerev = mm[f] + else: + filerev = 0 + seen[f] = filerev + # we go in two steps here so the strip loop happens in a + # sensible order. When stripping many files, this helps keep + # our disk access patterns under control. + list = seen.keys() + list.sort() + for f in list: + ff = repo.file(f) + filerev = seen[f] + if filerev != 0: + if filerev in ff.nodemap: + filerev = ff.rev(filerev) + else: + filerev = 0 + ff.strip(filerev, revnum) + + if not wlock: + wlock = repo.wlock() + lock = repo.lock() + chlog = repo.changelog + # TODO delete the undo files, and handle undo of merge sets + pp = chlog.parents(rev) + revnum = chlog.rev(rev) + + if update: + urev = self.qparents(repo, rev) + repo.update(urev, allow=False, force=True, wlock=wlock) + repo.dirstate.write() + + # save is a list of all the branches we are truncating away + # that we actually want to keep. changegroup will be used + # to preserve them and add them back after the truncate + saveheads = [] + savebases = {} + + tip = chlog.tip() + heads = limitheads(chlog, rev) + seen = {} + + # search through all the heads, finding those where the revision + # we want to strip away is an ancestor. Also look for merges + # that might be turned into new heads by the strip. 
+ while heads: + h = heads.pop() + n = h + while True: + seen[n] = 1 + pp = chlog.parents(n) + if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum: + if pp[1] not in seen: + heads.append(pp[1]) + if pp[0] == revlog.nullid: + break + if chlog.rev(pp[0]) < revnum: + break + n = pp[0] + if n == rev: + break + r = chlog.reachable(h, rev) + if rev not in r: + saveheads.append(h) + for x in r: + if chlog.rev(x) > revnum: + savebases[x] = 1 + + # create a changegroup for all the branches we need to keep + if backup is "all": + backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip') + bundle(backupch) + if saveheads: + backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip') + chgrpfile = bundle(backupch) + + stripall(rev, revnum) + + change = chlog.read(rev) + repo.manifest.strip(repo.manifest.rev(change[0]), revnum) + chlog.strip(revnum, revnum) + if saveheads: + self.ui.status("adding branch\n") + commands.unbundle(self.ui, repo, chgrpfile, update=False) + if backup is not "strip": + os.unlink(chgrpfile) + + def isapplied(self, patch): + """returns (index, rev, patch)""" + for i in xrange(len(self.applied)): + p = self.applied[i] + a = p.split(':') + if a[1] == patch: + return (i, a[0], a[1]) + return None + + def lookup(self, patch): + if patch == None: + return None + if patch in self.series: + return patch + if not os.path.isfile(os.path.join(self.path, patch)): + try: + sno = int(patch) + except(ValueError, OverflowError): + self.ui.warn("patch %s not in series\n" % patch) + sys.exit(1) + if sno >= len(self.series): + self.ui.warn("patch number %d is out of range\n" % sno) + sys.exit(1) + patch = self.series[sno] + else: + self.ui.warn("patch %s not in series\n" % patch) + sys.exit(1) + return patch + + def push(self, repo, patch=None, force=False, list=False, + mergeq=None, wlock=None): + if not wlock: + wlock = repo.wlock() + patch = self.lookup(patch) + if patch and self.isapplied(patch): + self.ui.warn("patch %s is already applied\n" % patch) + sys.exit(1) + if self.series_end() == len(self.series): + self.ui.warn("File series fully applied\n") + sys.exit(1) + if not force: + self.check_localchanges(repo) + + self.applied_dirty = 1; + start = self.series_end() + if start > 0: + self.check_toppatch(repo) + if not patch: + patch = self.series[start] + end = start + 1 + else: + end = self.series.index(patch, start) + 1 + s = self.series[start:end] + if mergeq: + ret = self.mergepatch(repo, mergeq, s, wlock) + else: + ret = self.apply(repo, s, list, wlock=wlock) + top = self.applied[-1].split(':')[1] + if ret[0]: + self.ui.write("Errors during apply, please fix and refresh %s\n" % + top) + else: + self.ui.write("Now at: %s\n" % top) + return ret[0] + + def pop(self, repo, patch=None, force=False, update=True, wlock=None): + def getfile(f, rev): + t = repo.file(f).read(rev) + try: + repo.wfile(f, "w").write(t) + except IOError: + os.makedirs(os.path.dirname(repo.wjoin(f))) + repo.wfile(f, "w").write(t) + + if not wlock: + wlock = repo.wlock() + if patch: + # index, rev, patch + info = self.isapplied(patch) + if not info: + patch = self.lookup(patch) + info = self.isapplied(patch) + if not info: + self.ui.warn("patch %s is not applied\n" % patch) + sys.exit(1) + if len(self.applied) == 0: + self.ui.warn("No patches applied\n") + sys.exit(1) + + if not update: + parents = repo.dirstate.parents() + rr = [ revlog.bin(x.split(':')[0]) for x in self.applied ] + for p in parents: + if p in rr: + self.ui.warn("qpop: forcing dirstate update\n") + update = True + + if not 
force and update: + self.check_localchanges(repo) + + self.applied_dirty = 1; + end = len(self.applied) + if not patch: + info = [len(self.applied) - 1] + self.applied[-1].split(':') + start = info[0] + rev = revlog.bin(info[1]) + + # we know there are no local changes, so we can make a simplified + # form of hg.update. + if update: + top = self.check_toppatch(repo) + qp = self.qparents(repo, rev) + changes = repo.changelog.read(qp) + mf1 = repo.manifest.readflags(changes[0]) + mmap = repo.manifest.read(changes[0]) + (c, a, r, d, u) = repo.changes(qp, top) + if d: + raise util.Abort("deletions found between repo revs") + for f in c: + getfile(f, mmap[f]) + for f in r: + getfile(f, mmap[f]) + util.set_exec(repo.wjoin(f), mf1[f]) + repo.dirstate.update(c + r, 'n') + for f in a: + try: os.unlink(repo.wjoin(f)) + except: raise + try: os.removedirs(os.path.dirname(repo.wjoin(f))) + except: pass + if a: + repo.dirstate.forget(a) + repo.dirstate.setparents(qp, revlog.nullid) + self.strip(repo, rev, update=False, backup='strip', wlock=wlock) + del self.applied[start:end] + if len(self.applied): + self.ui.write("Now at: %s\n" % self.applied[-1].split(':')[1]) + else: + self.ui.write("Patch queue now empty\n") + + def diff(self, repo, files): + top = self.check_toppatch(repo) + if not top: + self.ui.write("No patches applied\n") + return + qp = self.qparents(repo, top) + commands.dodiff(sys.stdout, self.ui, repo, qp, None, files) + + def refresh(self, repo, short=False): + if len(self.applied) == 0: + self.ui.write("No patches applied\n") + return + wlock = repo.wlock() + self.check_toppatch(repo) + qp = self.qparents(repo) + (top, patch) = self.applied[-1].split(':') + top = revlog.bin(top) + cparents = repo.changelog.parents(top) + patchparent = self.qparents(repo, top) + message, comments, user, patchfound = self.readheaders(patch) + + patchf = self.opener(os.path.join(self.path, patch), "w") + if comments: + comments = "\n".join(comments) + '\n\n' + patchf.write(comments) + + tip = repo.changelog.tip() + if top == tip: + # if the top of our patch queue is also the tip, there is an + # optimization here. We update the dirstate in place and strip + # off the tip commit. Then just commit the current directory + # tree. We can also send repo.commit the list of files + # changed to speed up the diff + # + # in short mode, we only diff the files included in the + # patch already + # + # this should really read: + #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent) + # but we do it backwards to take advantage of manifest/chlog + # caching against the next repo.changes call + # + (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip) + if short: + filelist = cc + aa + dd + else: + filelist = None + (c, a, r, d, u) = repo.changes(None, None, filelist) + + # we might end up with files that were added between tip and + # the dirstate parent, but then changed in the local dirstate. + # in this case, we want them to only show up in the added section + for x in c: + if x not in aa: + cc.append(x) + # we might end up with files added by the local dirstate that + # were deleted by the patch. In this case, they should only + # show up in the changed section. 
+ for x in a: + if x in dd: + del dd[dd.index(x)] + cc.append(x) + else: + aa.append(x) + # make sure any files deleted in the local dirstate + # are not in the add or change column of the patch + forget = [] + for x in d + r: + if x in aa: + del aa[aa.index(x)] + forget.append(x) + continue + elif x in cc: + del cc[cc.index(x)] + dd.append(x) + + c = list(util.unique(cc)) + r = list(util.unique(dd)) + a = list(util.unique(aa)) + filelist = list(util.unique(c + r + a )) + commands.dodiff(patchf, self.ui, repo, patchparent, None, + filelist, changes=(c, a, r, [], u)) + patchf.close() + + changes = repo.changelog.read(tip) + repo.dirstate.setparents(*cparents) + repo.dirstate.update(a, 'a') + repo.dirstate.update(r, 'r') + repo.dirstate.update(c, 'n') + repo.dirstate.forget(forget) + + if not message: + message = "patch queue: %s\n" % patch + else: + message = "\n".join(message) + self.strip(repo, top, update=False, backup='strip', wlock=wlock) + n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock) + self.applied[-1] = revlog.hex(n) + ':' + patch + self.applied_dirty = 1 + else: + commands.dodiff(patchf, self.ui, repo, patchparent, None) + patchf.close() + self.pop(repo, force=True, wlock=wlock) + self.push(repo, force=True, wlock=wlock) + + def init(self, repo, create=False): + if os.path.isdir(self.path): + raise util.Abort("patch queue directory already exists") + os.mkdir(self.path) + if create: + return self.qrepo(create=True) + + def unapplied(self, repo, patch=None): + if patch and patch not in self.series: + self.ui.warn("%s not in the series file\n" % patch) + sys.exit(1) + if not patch: + start = self.series_end() + else: + start = self.series.index(patch) + 1 + for p in self.series[start:]: + self.ui.write("%s\n" % p) + + def qseries(self, repo, missing=None): + start = self.series_end() + if not missing: + for p in self.series[:start]: + if self.ui.verbose: + self.ui.write("%d A " % self.series.index(p)) + self.ui.write("%s\n" % p) + for p in self.series[start:]: + if self.ui.verbose: + self.ui.write("%d U " % self.series.index(p)) + self.ui.write("%s\n" % p) + else: + list = [] + for root, dirs, files in os.walk(self.path): + d = root[len(self.path) + 1:] + for f in files: + fl = os.path.join(d, f) + if (fl not in self.series and fl != "status" and + fl != "series" and not fl.startswith('.')): + list.append(fl) + list.sort() + if list: + for x in list: + if self.ui.verbose: + self.ui.write("D ") + self.ui.write("%s\n" % x) + + def issaveline(self, l): + name = l.split(':')[1] + if name == '.hg.patches.save.line': + return True + + def qrepo(self, create=False): + if create or os.path.isdir(os.path.join(self.path, ".hg")): + return hg.repository(ui=self.ui, path=self.path, create=create) + + def restore(self, repo, rev, delete=None, qupdate=None): + c = repo.changelog.read(rev) + desc = c[4].strip() + lines = desc.splitlines() + i = 0 + datastart = None + series = [] + applied = [] + qpp = None + for i in xrange(0, len(lines)): + if lines[i] == 'Patch Data:': + datastart = i + 1 + elif lines[i].startswith('Dirstate:'): + l = lines[i].rstrip() + l = l[10:].split(' ') + qpp = [ hg.bin(x) for x in l ] + elif datastart != None: + l = lines[i].rstrip() + index = l.index(':') + id = l[:index] + file = l[index + 1:] + if id: + applied.append(l) + series.append(file) + if datastart == None: + self.ui.warn("No saved patch data found\n") + return 1 + self.ui.warn("restoring status: %s\n" % lines[0]) + self.full_series = series + self.applied = applied + 
self.read_series(self.full_series) + self.series_dirty = 1 + self.applied_dirty = 1 + heads = repo.changelog.heads() + if delete: + if rev not in heads: + self.ui.warn("save entry has children, leaving it alone\n") + else: + self.ui.warn("removing save entry %s\n" % hg.short(rev)) + pp = repo.dirstate.parents() + if rev in pp: + update = True + else: + update = False + self.strip(repo, rev, update=update, backup='strip') + if qpp: + self.ui.warn("saved queue repository parents: %s %s\n" % + (hg.short(qpp[0]), hg.short(qpp[1]))) + if qupdate: + print "queue directory updating" + r = self.qrepo() + if not r: + self.ui.warn("Unable to load queue repository\n") + return 1 + r.update(qpp[0], allow=False, force=True) + + def save(self, repo, msg=None): + if len(self.applied) == 0: + self.ui.warn("save: no patches applied, exiting\n") + return 1 + if self.issaveline(self.applied[-1]): + self.ui.warn("status is already saved\n") + return 1 + + ar = [ ':' + x for x in self.full_series ] + if not msg: + msg = "hg patches saved state" + else: + msg = "hg patches: " + msg.rstrip('\r\n') + r = self.qrepo() + if r: + pp = r.dirstate.parents() + msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1])) + msg += "\n\nPatch Data:\n" + text = msg + "\n".join(self.applied) + '\n' + (ar and "\n".join(ar) + + '\n' or "") + n = repo.commit(None, text, user=None, force=1) + if not n: + self.ui.warn("repo commit failed\n") + return 1 + self.applied.append(revlog.hex(n) + ":" + '.hg.patches.save.line') + self.applied_dirty = 1 + + def series_end(self): + end = 0 + if len(self.applied) > 0: + (top, p) = self.applied[-1].split(':') + try: + end = self.series.index(p) + except ValueError: + return 0 + return end + 1 + return end + + def qapplied(self, repo, patch=None): + if patch and patch not in self.series: + self.ui.warn("%s not in the series file\n" % patch) + sys.exit(1) + if not patch: + end = len(self.applied) + else: + end = self.series.index(patch) + 1 + for x in xrange(end): + p = self.appliedname(x) + self.ui.write("%s\n" % p) + + def appliedname(self, index): + p = self.applied[index] + if not self.ui.verbose: + p = p.split(':')[1] + return p + + def top(self, repo): + if len(self.applied): + p = self.appliedname(-1) + self.ui.write(p + '\n') + else: + self.ui.write("No patches applied\n") + + def next(self, repo): + end = self.series_end() + if end == len(self.series): + self.ui.write("All patches applied\n") + else: + self.ui.write(self.series[end] + '\n') + + def prev(self, repo): + if len(self.applied) > 1: + p = self.appliedname(-2) + self.ui.write(p + '\n') + elif len(self.applied) == 1: + self.ui.write("Only one patch applied\n") + else: + self.ui.write("No patches applied\n") + + def qimport(self, repo, files, patch=None, existing=None, force=None): + if len(files) > 1 and patch: + self.ui.warn("-n option not valid when importing multiple files\n") + sys.exit(1) + i = 0 + for filename in files: + if existing: + if not patch: + patch = filename + if not os.path.isfile(os.path.join(self.path, patch)): + self.ui.warn("patch %s does not exist\n" % patch) + sys.exit(1) + else: + try: + text = file(filename).read() + except IOError: + self.ui.warn("Unable to read %s\n" % patch) + sys.exit(1) + if not patch: + patch = os.path.split(filename)[1] + if not force and os.path.isfile(os.path.join(self.path, patch)): + self.ui.warn("patch %s already exists\n" % patch) + sys.exit(1) + patchf = self.opener(os.path.join(self.path, patch), "w") + patchf.write(text) + if patch in self.series: + 
self.ui.warn("patch %s is already in the series file\n" % patch) + sys.exit(1) + index = self.series_end() + i + self.full_series[index:index] = [patch] + self.read_series(self.full_series) + self.ui.warn("adding %s to series file\n" % patch) + i += 1 + patch = None + self.series_dirty = 1 + +def delete(ui, repo, patch, **opts): + """remove a patch from the series file""" + q = repomap[repo] + q.delete(repo, patch) + q.save_dirty() + return 0 + +def applied(ui, repo, patch=None, **opts): + """print the patches already applied""" + repomap[repo].qapplied(repo, patch) + return 0 + +def unapplied(ui, repo, patch=None, **opts): + """print the patches not yet applied""" + repomap[repo].unapplied(repo, patch) + return 0 + +def qimport(ui, repo, *filename, **opts): + """import a patch""" + q = repomap[repo] + q.qimport(repo, filename, patch=opts['name'], + existing=opts['existing'], force=opts['force']) + q.save_dirty() + return 0 + +def init(ui, repo, **opts): + """init a new queue repository""" + q = repomap[repo] + r = q.init(repo, create=opts['create_repo']) + q.save_dirty() + if r: + fp = r.wopener('.hgignore', 'w') + print >> fp, 'syntax: glob' + print >> fp, 'status' + fp.close() + r.wopener('series', 'w').close() + r.add(['.hgignore', 'series']) + return 0 + +def commit(ui, repo, *pats, **opts): + q = repomap[repo] + r = q.qrepo() + if not r: raise util.Abort('no queue repository') + commands.commit(r.ui, r, *pats, **opts) + +def series(ui, repo, **opts): + """print the entire series file""" + repomap[repo].qseries(repo, missing=opts['missing']) + return 0 + +def top(ui, repo, **opts): + """print the name of the current patch""" + repomap[repo].top(repo) + return 0 + +def next(ui, repo, **opts): + """print the name of the next patch""" + repomap[repo].next(repo) + return 0 + +def prev(ui, repo, **opts): + """print the name of the previous patch""" + repomap[repo].prev(repo) + return 0 + +def new(ui, repo, patch, **opts): + """create a new patch""" + q = repomap[repo] + q.new(repo, patch, msg=opts['message'], force=opts['force']) + q.save_dirty() + return 0 + +def refresh(ui, repo, **opts): + """update the current patch""" + q = repomap[repo] + q.refresh(repo, short=opts['short']) + q.save_dirty() + return 0 + +def diff(ui, repo, *files, **opts): + """diff of the current patch""" + repomap[repo].diff(repo, files) + return 0 + +def lastsavename(path): + (dir, base) = os.path.split(path) + names = os.listdir(dir) + namere = re.compile("%s.([0-9]+)" % base) + max = None + maxname = None + for f in names: + m = namere.match(f) + if m: + index = int(m.group(1)) + if max == None or index > max: + max = index + maxname = f + if maxname: + return (os.path.join(dir, maxname), max) + return (None, None) + +def savename(path): + (last, index) = lastsavename(path) + if last is None: + index = 0 + newpath = path + ".%d" % (index + 1) + return newpath + +def push(ui, repo, patch=None, **opts): + """push the next patch onto the stack""" + q = repomap[repo] + mergeq = None + + if opts['all']: + patch = q.series[-1] + if opts['merge']: + if opts['name']: + newpath = opts['name'] + else: + newpath, i = lastsavename(q.path) + if not newpath: + ui.warn("no saved queues found, please use -n\n") + return 1 + mergeq = queue(ui, repo.join(""), newpath) + ui.warn("merging with queue at: %s\n" % mergeq.path) + ret = q.push(repo, patch, force=opts['force'], list=opts['list'], + mergeq=mergeq) + q.save_dirty() + return ret + +def pop(ui, repo, patch=None, **opts): + """pop the current patch off the stack""" + 
localupdate = True + if opts['name']: + q = queue(ui, repo.join(""), repo.join(opts['name'])) + ui.warn('using patch queue: %s\n' % q.path) + localupdate = False + else: + q = repomap[repo] + if opts['all'] and len(q.applied) > 0: + patch = q.applied[0].split(':')[1] + q.pop(repo, patch, force=opts['force'], update=localupdate) + q.save_dirty() + return 0 + +def restore(ui, repo, rev, **opts): + """restore the queue state saved by a rev""" + rev = repo.lookup(rev) + q = repomap[repo] + q.restore(repo, rev, delete=opts['delete'], + qupdate=opts['update']) + q.save_dirty() + return 0 + +def save(ui, repo, **opts): + """save current queue state""" + q = repomap[repo] + ret = q.save(repo, msg=opts['message']) + if ret: + return ret + q.save_dirty() + if opts['copy']: + path = q.path + if opts['name']: + newpath = os.path.join(q.basepath, opts['name']) + if os.path.exists(newpath): + if not os.path.isdir(newpath): + ui.warn("destination %s exists and is not a directory\n" % + newpath) + sys.exit(1) + if not opts['force']: + ui.warn("destination %s exists, use -f to force\n" % + newpath) + sys.exit(1) + else: + newpath = savename(path) + ui.warn("copy %s to %s\n" % (path, newpath)) + util.copyfiles(path, newpath) + if opts['empty']: + try: + os.unlink(q.status_path) + except: + pass + return 0 + +def strip(ui, repo, rev, **opts): + """strip a revision and all later revs on the same branch""" + rev = repo.lookup(rev) + backup = 'all' + if opts['backup']: + backup = 'strip' + elif opts['nobackup']: + backup = 'none' + repomap[repo].strip(repo, rev, backup=backup) + return 0 + +def version(ui, q=None): + """print the version number""" + ui.write("mq version %s\n" % versionstr) + return 0 + +def reposetup(ui, repo): + repomap[repo] = queue(ui, repo.join("")) + +cmdtable = { + "qapplied": (applied, [], 'hg qapplied [patch]'), + "qcommit|qci": + (commit, + [('A', 'addremove', None, _('run addremove during commit')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns')), + ('m', 'message', '', _('use <text> as commit message')), + ('l', 'logfile', '', _('read the commit message from <file>')), + ('d', 'date', '', _('record datecode as commit date')), + ('u', 'user', '', _('record user as commiter'))], + 'hg qcommit [options] [files]'), + "^qdiff": (diff, [], 'hg qdiff [files]'), + "qdelete": (delete, [], 'hg qdelete [patch]'), + "^qimport": + (qimport, + [('e', 'existing', None, 'import file in patch dir'), + ('n', 'name', '', 'patch file name'), + ('f', 'force', None, 'overwrite existing files')], + 'hg qimport'), + "^qinit": + (init, + [('c', 'create-repo', None, 'create patch repository')], + 'hg [-c] qinit'), + "qnew": + (new, + [('m', 'message', '', 'commit message'), + ('f', 'force', None, 'force')], + 'hg qnew [-m message ] patch'), + "qnext": (next, [], 'hg qnext'), + "qprev": (prev, [], 'hg qprev'), + "^qpop": + (pop, + [('a', 'all', None, 'pop all patches'), + ('n', 'name', '', 'queue name to pop'), + ('f', 'force', None, 'forget any local changes')], + 'hg qpop [options] [patch/index]'), + "^qpush": + (push, + [('f', 'force', None, 'apply if the patch has rejects'), + ('l', 'list', None, 'list patch name in commit text'), + ('a', 'all', None, 'apply all patches'), + ('m', 'merge', None, 'merge from another queue'), + ('n', 'name', '', 'merge queue name')], + 'hg qpush [options] [patch/index]'), + "^qrefresh": + (refresh, + [('s', 'short', None, 'short refresh')], + 'hg qrefresh'), + "qrestore": + 
(restore, + [('d', 'delete', None, 'delete save entry'), + ('u', 'update', None, 'update queue working dir')], + 'hg qrestore rev'), + "qsave": + (save, + [('m', 'message', '', 'commit message'), + ('c', 'copy', None, 'copy patch directory'), + ('n', 'name', '', 'copy directory name'), + ('e', 'empty', None, 'clear queue status file'), + ('f', 'force', None, 'force copy')], + 'hg qsave'), + "qseries": + (series, + [('m', 'missing', None, 'print patches not in series')], + 'hg qseries'), + "^strip": + (strip, + [('f', 'force', None, 'force multi-head removal'), + ('b', 'backup', None, 'bundle unrelated changesets'), + ('n', 'nobackup', None, 'no backups')], + 'hg strip rev'), + "qtop": (top, [], 'hg qtop'), + "qunapplied": (unapplied, [], 'hg qunapplied [patch]'), + "qversion": (version, [], 'hg qversion') +} +
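The qsave/qpush --merge machinery in the mq diff above numbers saved patch queues by appending .1, .2, ... to the queue path, and lastsavename()/savename() pick the highest existing suffix. A minimal standalone sketch of that numbering scheme follows; it escapes and anchors the regular expression, which the original pattern does not, and the directory names are only examples:

    import os, re

    def lastsavename(path):
        # scan the parent directory for entries named "<base>.<number>"
        # and return the highest-numbered one together with its index
        directory, base = os.path.split(path)
        pattern = re.compile(r"%s\.(\d+)$" % re.escape(base))
        best, bestname = None, None
        for name in os.listdir(directory or os.curdir):
            m = pattern.match(name)
            if m:
                index = int(m.group(1))
                if best is None or index > best:
                    best, bestname = index, name
        if bestname is not None:
            return os.path.join(directory, bestname), best
        return None, None

    def savename(path):
        # next free name in the sequence, e.g. ".hg/patches.3" after ".hg/patches.2"
        last, index = lastsavename(path)
        if last is None:
            index = 0
        return "%s.%d" % (path, index + 1)
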
--- a/hgext/patchbomb.py Mon Mar 06 17:58:53 2006 +0100 +++ b/hgext/patchbomb.py Mon Mar 06 18:00:44 2006 +0100 @@ -49,20 +49,11 @@ # to = recipient1, recipient2, ... # cc = cc1, cc2, ... -from email.MIMEMultipart import MIMEMultipart -from email.MIMEText import MIMEText -from email.Utils import parseaddr -from mercurial import commands -from mercurial import hg -from mercurial import ui +from mercurial.demandload import * +demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils + mercurial:commands,hg,ui + os popen2 smtplib socket sys tempfile time''') from mercurial.i18n import gettext as _ -import os -import popen2 -import smtplib -import socket -import sys -import tempfile -import time try: # readline gives raw_input editing capabilities, but is not @@ -149,7 +140,7 @@ if opts['diffstat']: body += cdiffstat('\n'.join(desc), patch) + '\n\n' body += '\n'.join(patch) - msg = MIMEText(body) + msg = email.MIMEText.MIMEText(body) subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip()) if subj.endswith('.'): subj = subj[:-1] msg['Subject'] = subj @@ -194,7 +185,7 @@ sender = (opts['from'] or ui.config('patchbomb', 'from') or prompt('From', ui.username())) - msg = MIMEMultipart() + msg = email.MIMEMultipart.MIMEMultipart() msg['Subject'] = '[PATCH 0 of %d] %s' % ( len(patches), opts['subject'] or @@ -217,13 +208,13 @@ if l == '.': break body.append(l) - msg.attach(MIMEText('\n'.join(body) + '\n')) + msg.attach(email.MIMEText.MIMEText('\n'.join(body) + '\n')) ui.write('\n') if opts['diffstat']: d = cdiffstat(_('Final summary:\n'), jumbo) - if d: msg.attach(MIMEText(d)) + if d: msg.attach(email.MIMEText.MIMEText(d)) msgs.insert(0, msg) @@ -241,7 +232,7 @@ s.login(username, password) parent = None tz = time.strftime('%z') - sender_addr = parseaddr(sender)[1] + sender_addr = email.Utils.parseaddr(sender)[1] for m in msgs: try: m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
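One practical consequence of the demandload conversion above: the email classes are no longer bound as bare names, so every use goes through the full dotted path (email.MIMEText.MIMEText, email.Utils.parseaddr), exactly as the updated hunks show. A small hedged sketch of that access pattern, assuming the in-tree mercurial.demandload module is importable and Python 2's email package layout:

    from mercurial.demandload import demandload

    # placeholders only; the real email modules load on first attribute access
    demandload(globals(), 'email.MIMEText email.Utils')

    def make_message(body, sender):
        msg = email.MIMEText.MIMEText(body)           # triggers the deferred import
        msg['From'] = sender
        return msg, email.Utils.parseaddr(sender)[1]  # bare address, as patchbomb uses
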
--- a/hgmerge Mon Mar 06 17:58:53 2006 +0100 +++ b/hgmerge Mon Mar 06 18:00:44 2006 +0100 @@ -17,28 +17,32 @@ # find decent versions of our utilities, insisting on the GNU versions where we # need to -MERGE=merge -DIFF3=gdiff3 -DIFF=gdiff -PATCH=gpatch +MERGE="merge" +DIFF3="gdiff3" +DIFF="gdiff" +PATCH="gpatch" -type $MERGE >/dev/null 2>&1 || MERGE= -type $DIFF3 >/dev/null 2>&1 || DIFF3=diff3 -type $DIFF >/dev/null 2>&1 || DIFF=diff -type $PATCH >/dev/null 2>&1 || PATCH=patch +type "$MERGE" >/dev/null 2>&1 || MERGE= +type "$DIFF3" >/dev/null 2>&1 || DIFF3="diff3" $DIFF3 --version >/dev/null 2>&1 || DIFF3= +type "$DIFF" >/dev/null 2>&1 || DIFF="diff" +type "$DIFF" >/dev/null 2>&1 || DIFF= +type "$PATCH" >/dev/null 2>&1 || PATCH="patch" +type "$PATCH" >/dev/null 2>&1 || PATCH= # find optional visual utilities -FILEMERGE='/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge' -KDIFF3=kdiff3 -TKDIFF=tkdiff +FILEMERGE="/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge" +KDIFF3="kdiff3" +TKDIFF="tkdiff" +MELD="meld" -type $FILEMERGE >/dev/null 2>&1 || FILEMERGE= -type $KDIFF3 >/dev/null 2>&1 || KDIFF3= -type $TKDIFF >/dev/null 2>&1 || TKDIFF= +type "$FILEMERGE" >/dev/null 2>&1 || FILEMERGE= +type "$KDIFF3" >/dev/null 2>&1 || KDIFF3= +type "$TKDIFF" >/dev/null 2>&1 || TKDIFF= +type "$MELD" >/dev/null 2>&1 || MELD= # random part of names -RAND="$RANDOM.$RANDOM.$RANDOM.$$" +RAND="$RANDOM$RANDOM" # temporary directory for diff+patch merge HGTMP="${TMPDIR-/tmp}/hgmerge.$RAND" @@ -68,6 +72,19 @@ exit 1 } +# Ask if the merge was successful +ask_if_merged() { + while true; do + echo "$LOCAL seems unchanged." + echo "Was the merge successful? [y/n]" + read answer + case "$answer" in + y*|Y*) success;; + n*|N*) failure;; + esac + done +} + # Clean up when interrupted trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM @@ -76,18 +93,16 @@ cp "$BACKUP" "$LOCAL" # Attempt to do a non-interactive merge -if [ -n "$MERGE" ]; then - $MERGE "$LOCAL" "$BASE" "$OTHER" 2> /dev/null && success - cp "$BACKUP" "$LOCAL" -elif [ -n "$DIFF3" ]; then - echo $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" - $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" && success - if [ $? -eq 2 ]; then - echo "$DIFF3 failed! Exiting." 1>&2 - cp "$BACKUP" "$LOCAL" +if [ -n "$MERGE" -o -n "$DIFF3" ]; then + if [ -n "$MERGE" ]; then + $MERGE "$LOCAL" "$BASE" "$OTHER" 2> /dev/null && success + elif [ -n "$DIFF3" ]; then + $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" && success + fi + if [ $? -gt 1 ]; then + echo "automatic merge failed! Exiting." 1>&2 failure fi - cp "$BACKUP" "$LOCAL" fi # on MacOS X try FileMerge.app, shipped with Apple's developer tools @@ -97,71 +112,66 @@ # filemerge prefers the right by default $FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL" [ $? -ne 0 ] && echo "FileMerge failed to launch" && failure - if test "$LOCAL" -nt "$CHGTEST" - then - success - else - echo "$LOCAL seems unchanged. Was the merge successful?" 
- select answer in yes no - do - test "$answer" == "yes" && success || failure - done - fi - failure + test "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged fi if [ -n "$DISPLAY" ]; then # try using kdiff3, which is fairly nice if [ -n "$KDIFF3" ]; then - $KDIFF3 --auto "$BASE" "$LOCAL" "$OTHER" -o "$LOCAL" || failure - success + $KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure + success fi # try using tkdiff, which is a bit less sophisticated if [ -n "$TKDIFF" ]; then - $TKDIFF "$LOCAL" "$OTHER" -a "$BASE" -o "$LOCAL" || failure - success + $TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure + success + fi + + if [ -n "$MELD" ]; then + cp "$BACKUP" "$CHGTEST" + # protect our feet - meld allows us to save to the left file + cp "$BACKUP" "$LOCAL.tmp.$RAND" + # Meld doesn't have automatic merging, so to reduce intervention + # use the file with conflicts + $MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure + # Also it doesn't return good error code + test "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged fi fi # Attempt to do a merge with $EDITOR -if [ -n "$MERGE" ]; then - echo "conflicts detected in $LOCAL" - $MERGE "$LOCAL" "$BASE" "$OTHER" 2>/dev/null || $EDITOR "$LOCAL" - success -fi - -if [ -n "$DIFF3" ]; then +if [ -n "$MERGE" -o -n "$DIFF3" ]; then echo "conflicts detected in $LOCAL" - $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" || { - case $? in - 1) - $EDITOR "$LOCAL" ;; - 2) echo "$DIFF3 failed! Exiting." 1>&2 - cp "$BACKUP" "$LOCAL" - failure ;; - esac - success - } + cp "$BACKUP" "$CHGTEST" + $EDITOR "$LOCAL" || failure + # Some editors do not return meaningful error codes + # Do not take any chances + test "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged fi # attempt to manually merge with diff and patch if [ -n "$DIFF" -a -n "$PATCH" ]; then (umask 077 && mkdir "$HGTMP") || { - echo "Could not create temporary directory $HGTMP" 1>&2 - failure + echo "Could not create temporary directory $HGTMP" 1>&2 + failure } $DIFF -u "$BASE" "$OTHER" > "$HGTMP/diff" || : if $PATCH "$LOCAL" < "$HGTMP/diff"; then - success + success else - # If rejects are empty after using the editor, merge was ok - $EDITOR "$LOCAL" "$LOCAL.rej" && test -s "$LOCAL.rej" || success + # If rejects are empty after using the editor, merge was ok + $EDITOR "$LOCAL" "$LOCAL.rej" || failure + test -s "$LOCAL.rej" || success fi failure fi -echo "hgmerge: unable to find merge, tkdiff, kdiff3, or diff+patch!" +echo +echo "hgmerge: unable to find any merge utility!" +echo "supported programs:" +echo "merge, FileMerge, tkdiff, kdiff3, meld, diff+patch" +echo failure
--- a/hgwebdir.cgi Mon Mar 06 17:58:53 2006 +0100 +++ b/hgwebdir.cgi Mon Mar 06 18:00:44 2006 +0100 @@ -8,10 +8,21 @@ # sys.path.insert(0, "/path/to/python/lib") # if not a system-wide install from mercurial import hgweb -# The config file looks like this: +# The config file looks like this. You can have paths to individual +# repos, collections of repos in a directory tree, or both. +# # [paths] # virtual/path = /real/path # virtual/path = /real/path +# +# [collections] +# /prefix/to/strip/off = /root/of/tree/full/of/repos +# +# collections example: say directory tree /foo contains repos /foo/bar, +# /foo/quux/baz. Give this config section: +# [collections] +# /foo = /foo +# Then repos will list as bar and quux/baz. # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples # or use a dictionary with entries like 'virtual/path': '/real/path'
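As the expanded comment block above says, the hgwebdir class behind this script accepts either a config file with [paths]/[collections] sections, a list of (virtual, real) tuples, or a dict. A hedged sketch of a wrapper script using the inline forms; the repository paths shown are placeholders:

    from mercurial import hgweb

    # three equivalent ways to tell hgwebdir what to publish
    h = hgweb.hgwebdir([('projects/hello', '/home/hg/hello'),
                        ('projects/world', '/home/hg/world')])
    # h = hgweb.hgwebdir({'projects/hello': '/home/hg/hello'})
    # h = hgweb.hgwebdir('/etc/mercurial/hgweb.config')
    h.run()
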
--- a/mercurial/bdiff.c Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/bdiff.c Mon Mar 06 18:00:44 2006 +0100 @@ -17,6 +17,10 @@ #define inline #endif +#ifdef __SUNPRO_C +# define inline +#endif + #ifdef _WIN32 #ifdef _MSC_VER #define inline __inline
--- a/mercurial/commands.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/commands.py Mon Mar 06 18:00:44 2006 +0100 @@ -82,6 +82,21 @@ "iter", rev, None: in-order traversal of the revs earlier iterated over with "add" - use to display data''' + def increasing_windows(start, end, windowsize=8, sizelimit=512): + if start < end: + while start < end: + yield start, min(windowsize, end-start) + start += windowsize + if windowsize < sizelimit: + windowsize *= 2 + else: + while start > end: + yield start, min(windowsize, start-end-1) + start -= windowsize + if windowsize < sizelimit: + windowsize *= 2 + + files, matchfn, anypats = matchpats(repo, pats, opts) if repo.changelog.count() == 0: @@ -90,7 +105,6 @@ revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0'])) wanted = {} slowpath = anypats - window = 300 fncache = {} chcache = {} @@ -106,17 +120,17 @@ if not slowpath: # Only files, no patterns. Check the history of each file. def filerevgen(filelog): - for i in xrange(filelog.count() - 1, -1, -window): + for i, window in increasing_windows(filelog.count()-1, -1): revs = [] - for j in xrange(max(0, i - window), i + 1): + for j in xrange(i - window, i + 1): revs.append(filelog.linkrev(filelog.node(j))) revs.reverse() for rev in revs: yield rev minrev, maxrev = min(revs), max(revs) - for file in files: - filelog = repo.file(file) + for file_ in files: + filelog = repo.file(file_) # A zero count may be a directory or deleted file, so # try to find matching entries on the slow path. if filelog.count() == 0: @@ -127,13 +141,13 @@ if rev < minrev: break fncache.setdefault(rev, []) - fncache[rev].append(file) + fncache[rev].append(file_) wanted[rev] = 1 if slowpath: # The slow path checks files modified in every changeset. def changerevgen(): - for i in xrange(repo.changelog.count() - 1, -1, -window): - for j in xrange(max(0, i - window), i + 1): + for i, window in increasing_windows(repo.changelog.count()-1, -1): + for j in xrange(i - window, i + 1): yield j, getchange(j)[3] for rev, changefiles in changerevgen(): @@ -143,9 +157,9 @@ wanted[rev] = 1 def iterate(): - for i in xrange(0, len(revs), window): + for i, window in increasing_windows(0, len(revs)): yield 'window', revs[0] < revs[-1], revs[-1] - nrevs = [rev for rev in revs[i:min(i+window, len(revs))] + nrevs = [rev for rev in revs[i:i+window] if rev in wanted] srevs = list(nrevs) srevs.sort() @@ -262,6 +276,14 @@ def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always, changes=None, text=False, opts={}): + if not node1: + node1 = repo.dirstate.parents()[0] + # reading the data for node1 early allows it to play nicely + # with repo.changes and the revlog cache. 
+ change = repo.changelog.read(node1) + mmap = repo.manifest.read(change[0]) + date1 = util.datestr(change[2]) + if not changes: changes = repo.changes(node1, node2, files, match=match) modified, added, removed, deleted, unknown = changes @@ -280,8 +302,6 @@ return repo.file(f).read(mmap2[f]) else: date2 = util.datestr() - if not node1: - node1 = repo.dirstate.parents()[0] def read(f): return repo.wread(f) @@ -291,10 +311,6 @@ hexfunc = ui.verbose and hex or short r = [hexfunc(node) for node in [node1, node2] if node] - change = repo.changelog.read(node1) - mmap = repo.manifest.read(change[0]) - date1 = util.datestr(change[2]) - diffopts = ui.diffopts() showfunc = opts.get('show_function') or diffopts['showfunc'] ignorews = opts.get('ignore_all_space') or diffopts['ignorews'] @@ -447,7 +463,6 @@ f = f.lstrip("^") if not ui.debugflag and f.startswith("debug"): continue - d = "" doc = e[0].__doc__ if not doc: doc = _("(No help text available)") @@ -681,6 +696,8 @@ such as AFS, implement hardlinking incorrectly, but do not report errors. In these cases, use the --pull option to avoid hardlinking. + + See pull for valid source format details. """ if dest is None: dest = os.path.basename(os.path.normpath(source)) @@ -725,8 +742,8 @@ # can end up with extra data in the cloned revlogs that's # not pointed to by changesets, thus causing verify to # fail - l1 = lock.lock(os.path.join(source, ".hg", "lock")) - except OSError: + l1 = other.lock() + except lock.LockException: copy = False if copy: @@ -808,7 +825,8 @@ except ValueError, inst: raise util.Abort(str(inst)) -def docopy(ui, repo, pats, opts): +def docopy(ui, repo, pats, opts, wlock): + # called with the repo lock held cwd = repo.getcwd() errors = 0 copied = [] @@ -818,14 +836,19 @@ reasons = {'?': _('is not managed'), 'a': _('has been marked for add'), 'r': _('has been marked for remove')} - reason = reasons.get(repo.dirstate.state(abs)) + state = repo.dirstate.state(abs) + reason = reasons.get(state) if reason: + if state == 'a': + origsrc = repo.dirstate.copied(abs) + if origsrc is not None: + return origsrc if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason)) else: - return True - - def copy(abssrc, relsrc, target, exact): + return abs + + def copy(origsrc, abssrc, relsrc, target, exact): abstarget = util.canonpath(repo.root, cwd, target) reltarget = util.pathto(cwd, abstarget) prevsrc = targets.get(abstarget) @@ -849,8 +872,16 @@ if not os.path.isdir(targetdir): os.makedirs(targetdir) try: - shutil.copyfile(relsrc, reltarget) - shutil.copymode(relsrc, reltarget) + restore = repo.dirstate.state(abstarget) == 'r' + if restore: + repo.undelete([abstarget], wlock) + try: + shutil.copyfile(relsrc, reltarget) + shutil.copymode(relsrc, reltarget) + restore = False + finally: + if restore: + repo.remove([abstarget], wlock) except shutil.Error, inst: raise util.Abort(str(inst)) except IOError, inst: @@ -864,7 +895,8 @@ if ui.verbose or not exact: ui.status(_('copying %s to %s\n') % (relsrc, reltarget)) targets[abstarget] = abssrc - repo.copy(abssrc, abstarget) + if abstarget != origsrc: + repo.copy(origsrc, abstarget, wlock) copied.append((abssrc, relsrc, exact)) def targetpathfn(pat, dest, srcs): @@ -938,8 +970,9 @@ for pat in pats: srcs = [] for tag, abssrc, relsrc, exact in walk(repo, [pat], opts): - if okaytocopy(abssrc, relsrc, exact): - srcs.append((abssrc, relsrc, exact)) + origsrc = okaytocopy(abssrc, relsrc, exact) + if origsrc: + srcs.append((origsrc, abssrc, relsrc, exact)) if not srcs: continue 
copylist.append((tfn(pat, dest, srcs), srcs)) @@ -947,8 +980,8 @@ raise util.Abort(_('no files to copy')) for targetpath, srcs in copylist: - for abssrc, relsrc, exact in srcs: - copy(abssrc, relsrc, targetpath(abssrc), exact) + for origsrc, abssrc, relsrc, exact in srcs: + copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact) if errors: ui.warn(_('(consider using --after)\n')) @@ -971,15 +1004,32 @@ should properly record copied files, this information is not yet fully used by merge, nor fully reported by log. """ - errs, copied = docopy(ui, repo, pats, opts) + try: + wlock = repo.wlock(0) + errs, copied = docopy(ui, repo, pats, opts, wlock) + except lock.LockHeld, inst: + ui.warn(_("repository lock held by %s\n") % inst.args[0]) + errs = 1 return errs def debugancestor(ui, index, rev1, rev2): """find the ancestor revision of two revisions in a given index""" - r = revlog.revlog(util.opener(os.getcwd()), index, "") + r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "") a = r.ancestor(r.lookup(rev1), r.lookup(rev2)) ui.write("%d:%s\n" % (r.rev(a), hex(a))) +def debugrebuildstate(ui, repo, rev=None): + """rebuild the dirstate as it would look like for the given revision""" + if not rev: + rev = repo.changelog.tip() + else: + rev = repo.lookup(rev) + change = repo.changelog.read(rev) + n = change[0] + files = repo.manifest.readflags(n) + wlock = repo.wlock() + repo.dirstate.rebuild(rev, files.iteritems()) + def debugcheckstate(ui, repo): """validate the correctness of the current dirstate""" parent1, parent2 = repo.dirstate.parents() @@ -1050,7 +1100,8 @@ def debugdata(ui, file_, rev): """dump the contents of an data file revision""" - r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_) + r = revlog.revlog(util.opener(os.getcwd(), audit=False), + file_[:-2] + ".i", file_) try: ui.write(r.revision(r.lookup(rev))) except KeyError: @@ -1058,7 +1109,7 @@ def debugindex(ui, file_): """dump the contents of an index file""" - r = revlog.revlog(util.opener(os.getcwd()), file_, "") + r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "") ui.write(" rev offset length base linkrev" + " nodeid p1 p2\n") for i in range(r.count()): @@ -1069,7 +1120,7 @@ def debugindexdot(ui, file_): """dump an index DAG as a .dot file""" - r = revlog.revlog(util.opener(os.getcwd()), file_, "") + r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "") ui.write("digraph G {\n") for i in range(r.count()): e = r.index[i] @@ -1284,6 +1335,7 @@ s = linestate(line, lnum, cstart, cend) m[s] = s + # FIXME: prev isn't used, why ? 
prev = {} ucache = {} def display(fn, rev, states, prevstates): @@ -1593,7 +1645,19 @@ self.write(*args) def __getattr__(self, key): return getattr(self.ui, key) + changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts) + + if opts['limit']: + try: + limit = int(opts['limit']) + except ValueError: + raise util.Abort(_('limit must be a positive integer')) + if limit <= 0: raise util.Abort(_('limit must be positive')) + else: + limit = sys.maxint + count = 0 + for st, rev, fns in changeiter: if st == 'window': du = dui(ui) @@ -1607,7 +1671,6 @@ if opts['only_merges'] and len(parents) != 2: continue - br = None if opts['keyword']: changes = getchange(rev) miss = 0 @@ -1620,7 +1683,8 @@ if miss: continue - if opts['branch']: + br = None + if opts['branches']: br = repo.branchlookup([repo.changelog.node(rev)]) show_changeset(du, repo, rev, brinfo=br) @@ -1629,8 +1693,11 @@ dodiff(du, du, repo, prev, changenode, match=matchfn) du.write("\n\n") elif st == 'iter': - for args in du.hunk[rev]: - ui.write(*args) + if count == limit: break + if du.hunk[rev]: + count += 1 + for args in du.hunk[rev]: + ui.write(*args) def manifest(ui, repo, rev=None): """output the latest or given revision of the project manifest @@ -1664,6 +1731,8 @@ Show changesets not found in the specified destination repo or the default push repo. These are the changesets that would be pushed if a push was requested. + + See pull for valid source format details. """ dest = ui.expandpath(dest, repo.root) other = hg.repository(ui, dest) @@ -1681,7 +1750,7 @@ dodiff(ui, ui, repo, prev, n) ui.write("\n") -def parents(ui, repo, rev=None, branch=None): +def parents(ui, repo, rev=None, branches=None): """show the parents of the working dir or revision Print the working directory's parent revisions. @@ -1692,7 +1761,7 @@ p = repo.dirstate.parents() br = None - if branch is not None: + if branches is not None: br = repo.branchlookup(p) for n in p: if n != nullid: @@ -1767,7 +1836,7 @@ return r -def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None): +def push(ui, repo, dest="default-push", **opts): """push changes to the specified destination Push changes from the local repository to the given destination. @@ -1792,18 +1861,22 @@ dest = ui.expandpath(dest, repo.root) ui.status('pushing to %s\n' % (dest)) - if ssh: - ui.setconfig("ui", "ssh", ssh) - if remotecmd: - ui.setconfig("ui", "remotecmd", remotecmd) + if opts['ssh']: + ui.setconfig("ui", "ssh", opts['ssh']) + if opts['remotecmd']: + ui.setconfig("ui", "remotecmd", opts['remotecmd']) other = hg.repository(ui, dest) - r = repo.push(other, force) + revs = None + if opts['rev']: + revs = [repo.lookup(rev) for rev in opts['rev']] + r = repo.push(other, opts['force'], revs=revs) return r def rawcommit(ui, repo, *flist, **rc): """raw commit interface (DEPRECATED) + (DEPRECATED) Lowlevel commit, for use in helper scripts. This command is not intended to be used by normal users, as it is @@ -1896,21 +1969,33 @@ should properly record rename files, this information is not yet fully used by merge, nor fully reported by log. 
""" - errs, copied = docopy(ui, repo, pats, opts) - names = [] - for abs, rel, exact in copied: - if ui.verbose or not exact: - ui.status(_('removing %s\n') % rel) - names.append(abs) - repo.remove(names, unlink=True) + try: + wlock = repo.wlock(0) + errs, copied = docopy(ui, repo, pats, opts, wlock) + names = [] + for abs, rel, exact in copied: + if ui.verbose or not exact: + ui.status(_('removing %s\n') % rel) + names.append(abs) + repo.remove(names, True, wlock) + except lock.LockHeld, inst: + ui.warn(_("repository lock held by %s\n") % inst.args[0]) + errs = 1 return errs def revert(ui, repo, *pats, **opts): """revert modified files or dirs back to their unmodified states - Revert any uncommitted modifications made to the named files or - directories. This restores the contents of the affected files to - an unmodified state. + In its default mode, it reverts any uncommitted modifications made + to the named files or directories. This restores the contents of + the affected files to an unmodified state. + + Using the -r option, it reverts the given files or directories to + their state as of an earlier revision. This can be helpful to "roll + back" some or all of a change that should not have been committed. + + Revert modifies the working directory. It does not commit any + changes, or change the parent of the current working directory. If a file has been deleted, it is recreated. If the executable mode of a file was changed, it is reset. @@ -1925,7 +2010,7 @@ files, choose, anypats = matchpats(repo, pats, opts) modified, added, removed, deleted, unknown = repo.changes(match=choose) repo.forget(added) - repo.undelete(removed + deleted) + repo.undelete(removed) return repo.update(node, False, True, choose, False) @@ -2022,6 +2107,16 @@ if opts[o]: ui.setconfig("web", o, opts[o]) + if opts['daemon'] and not opts['daemon_pipefds']: + rfd, wfd = os.pipe() + args = sys.argv[:] + args.append('--daemon-pipefds=%d,%d' % (rfd, wfd)) + pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), + args[0], args) + os.close(wfd) + os.read(rfd, 1) + os._exit(0) + try: httpd = hgweb.create_server(repo) except socket.error, inst: @@ -2040,6 +2135,25 @@ ui.status(_('listening at http://%s:%d/\n') % (addr, port)) else: ui.status(_('listening at http://%s/\n') % addr) + + if opts['pid_file']: + fp = open(opts['pid_file'], 'w') + fp.write(str(os.getpid())) + fp.close() + + if opts['daemon_pipefds']: + rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')] + os.close(rfd) + os.write(wfd, 'y') + os.close(wfd) + sys.stdout.flush() + sys.stderr.flush() + fd = os.open(util.nulldev, os.O_RDWR) + if fd != 0: os.dup2(fd, 0) + if fd != 1: os.dup2(fd, 1) + if fd != 2: os.dup2(fd, 2) + if fd not in (0, 1, 2): os.close(fd) + httpd.serve_forever() def status(ui, repo, *pats, **opts): @@ -2164,7 +2278,10 @@ Show the tip revision. """ n = repo.changelog.tip() - show_changeset(ui, repo, changenode=n) + br = None + if opts['branches']: + br = repo.branchlookup([n]) + show_changeset(ui, repo, changenode=n, brinfo=br) if opts['patch']: dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n) @@ -2283,47 +2400,51 @@ ('c', 'changeset', None, _('list the changeset')), ('I', 'include', [], _('include names matching the given patterns')), ('X', 'exclude', [], _('exclude names matching the given patterns'))], - _('hg annotate [OPTION]... 
FILE...')), + _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')), "bundle": (bundle, [], _('hg bundle FILE DEST')), "cat": (cat, - [('I', 'include', [], _('include names matching the given patterns')), - ('X', 'exclude', [], _('exclude names matching the given patterns')), - ('o', 'output', '', _('print output to file with formatted name')), - ('r', 'rev', '', _('print the given revision'))], + [('o', 'output', '', _('print output to file with formatted name')), + ('r', 'rev', '', _('print the given revision')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], _('hg cat [OPTION]... FILE...')), "^clone": (clone, [('U', 'noupdate', None, _('do not update the new working directory')), - ('e', 'ssh', '', _('specify ssh command to use')), - ('', 'pull', None, _('use pull protocol to copy metadata')), ('r', 'rev', [], _('a changeset you would like to have after cloning')), + ('', 'pull', None, _('use pull protocol to copy metadata')), + ('e', 'ssh', '', _('specify ssh command to use')), ('', 'remotecmd', '', _('specify hg command to run on the remote side'))], _('hg clone [OPTION]... SOURCE [DEST]')), "^commit|ci": (commit, [('A', 'addremove', None, _('run addremove during commit')), - ('I', 'include', [], _('include names matching the given patterns')), - ('X', 'exclude', [], _('exclude names matching the given patterns')), ('m', 'message', '', _('use <text> as commit message')), ('l', 'logfile', '', _('read the commit message from <file>')), ('d', 'date', '', _('record datecode as commit date')), - ('u', 'user', '', _('record user as commiter'))], + ('u', 'user', '', _('record user as commiter')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], _('hg commit [OPTION]... [FILE]...')), "copy|cp": (copy, - [('I', 'include', [], _('include names matching the given patterns')), - ('X', 'exclude', [], _('exclude names matching the given patterns')), - ('A', 'after', None, _('record a copy that has already occurred')), + [('A', 'after', None, _('record a copy that has already occurred')), ('f', 'force', None, - _('forcibly copy over an existing managed file'))], + _('forcibly copy over an existing managed file')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], _('hg copy [OPTION]... [SOURCE]... 
DEST')), "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')), + "debugrebuildstate": + (debugrebuildstate, + [('r', 'rev', '', _('revision to rebuild to'))], + _('debugrebuildstate [-r REV] [REV]')), "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')), "debugconfig": (debugconfig, [], _('debugconfig')), "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')), @@ -2341,20 +2462,19 @@ (diff, [('r', 'rev', [], _('revision')), ('a', 'text', None, _('treat all files as text')), - ('I', 'include', [], _('include names matching the given patterns')), ('p', 'show-function', None, _('show which function each change is in')), ('w', 'ignore-all-space', None, _('ignore white space when comparing lines')), - ('X', 'exclude', [], - _('exclude names matching the given patterns'))], + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')), "^export": (export, [('o', 'output', '', _('print output to file with formatted name')), ('a', 'text', None, _('treat all files as text')), ('', 'switch-parent', None, _('diff against the second parent'))], - _('hg export [-a] [-o OUTFILE] REV...')), + _('hg export [-a] [-o OUTFILESPEC] REV...')), "forget": (forget, [('I', 'include', [], _('include names matching the given patterns')), @@ -2363,19 +2483,19 @@ "grep": (grep, [('0', 'print0', None, _('end fields with NUL')), - ('I', 'include', [], _('include names matching the given patterns')), - ('X', 'exclude', [], _('exclude names matching the given patterns')), ('', 'all', None, _('print all revisions that match')), ('i', 'ignore-case', None, _('ignore case when matching')), ('l', 'files-with-matches', None, _('print only filenames and revs that match')), ('n', 'line-number', None, _('print matching line numbers')), ('r', 'rev', [], _('search in given revision range')), - ('u', 'user', None, _('print user who committed change'))], + ('u', 'user', None, _('print user who committed change')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], _('hg grep [OPTION]... PATTERN [FILE]...')), "heads": (heads, - [('b', 'branches', None, _('find branch info')), + [('b', 'branches', None, _('show branches')), ('r', 'rev', '', _('show only heads which are descendants of rev'))], _('hg heads [-b] [-r <rev>]')), "help": (help_, [], _('hg help [COMMAND]')), @@ -2385,10 +2505,10 @@ [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') + _('meaning as the corresponding patch option')), + ('b', 'base', '', _('base path')), ('f', 'force', None, - _('skip check for outstanding uncommitted changes')), - ('b', 'base', '', _('base path'))], - _('hg import [-f] [-p NUM] [-b BASE] PATCH...')), + _('skip check for outstanding uncommitted changes'))], + _('hg import [-p NUM] [-b BASE] [-f] PATCH...')), "incoming|in": (incoming, [('M', 'no-merges', None, _('do not show merges')), ('p', 'patch', None, _('show patch')), @@ -2407,24 +2527,25 @@ _('hg locate [OPTION]... 
[PATTERN]...')), "^log|history": (log, - [('I', 'include', [], _('include names matching the given patterns')), - ('X', 'exclude', [], _('exclude names matching the given patterns')), - ('b', 'branch', None, _('show branches')), + [('b', 'branches', None, _('show branches')), ('k', 'keyword', [], _('search for a keyword')), + ('l', 'limit', '', _('limit number of changes displayed')), ('r', 'rev', [], _('show the specified revision or range')), ('M', 'no-merges', None, _('do not show merges')), ('m', 'only-merges', None, _('show only merges')), - ('p', 'patch', None, _('show patch'))], - _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')), + ('p', 'patch', None, _('show patch')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], + _('hg log [OPTION]... [FILE]')), "manifest": (manifest, [], _('hg manifest [REV]')), "outgoing|out": (outgoing, [('M', 'no-merges', None, _('do not show merges')), ('p', 'patch', None, _('show patch')), ('n', 'newest-first', None, _('show newest record first'))], - _('hg outgoing [-p] [-n] [-M] [DEST]')), + _('hg outgoing [-M] [-p] [-n] [DEST]')), "^parents": (parents, - [('b', 'branch', None, _('show branches'))], + [('b', 'branches', None, _('show branches'))], _('hg parents [-b] [REV]')), "paths": (paths, [], _('hg paths [NAME]')), "^pull": @@ -2435,15 +2556,16 @@ ('r', 'rev', [], _('a specific revision you would like to pull')), ('', 'remotecmd', '', _('specify hg command to run on the remote side'))], - _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')), + _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')), "^push": (push, [('f', 'force', None, _('force push')), ('e', 'ssh', '', _('specify ssh command to use')), + ('r', 'rev', [], _('a specific revision you would like to push')), ('', 'remotecmd', '', _('specify hg command to run on the remote side'))], - _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')), - "rawcommit": + _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')), + "debugrawcommit|rawcommit": (rawcommit, [('p', 'parent', [], _('parent')), ('d', 'date', '', _('date code')), @@ -2451,7 +2573,7 @@ ('F', 'files', '', _('file list')), ('m', 'message', '', _('commit message')), ('l', 'logfile', '', _('commit message file'))], - _('hg rawcommit [OPTION]... [FILE]...')), + _('hg debugrawcommit [OPTION]... [FILE]...')), "recover": (recover, [], _('hg recover')), "^remove|rm": (remove, @@ -2460,27 +2582,30 @@ _('hg remove [OPTION]... FILE...')), "rename|mv": (rename, - [('I', 'include', [], _('include names matching the given patterns')), - ('X', 'exclude', [], _('exclude names matching the given patterns')), - ('A', 'after', None, _('record a rename that has already occurred')), + [('A', 'after', None, _('record a rename that has already occurred')), ('f', 'force', None, - _('forcibly copy over an existing managed file'))], + _('forcibly copy over an existing managed file')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], _('hg rename [OPTION]... [SOURCE]... 
DEST')), "^revert": (revert, - [('I', 'include', [], _('include names matching the given patterns')), - ('X', 'exclude', [], _('exclude names matching the given patterns')), - ('r', 'rev', '', _('revision to revert to'))], - _('hg revert [-n] [-r REV] [NAME]...')), + [('r', 'rev', '', _('revision to revert to')), + ('I', 'include', [], _('include names matching the given patterns')), + ('X', 'exclude', [], _('exclude names matching the given patterns'))], + _('hg revert [-r REV] [NAME]...')), "root": (root, [], _('hg root')), "^serve": (serve, [('A', 'accesslog', '', _('name of access log file to write to')), + ('d', 'daemon', None, _('run server in background')), + ('', 'daemon-pipefds', '', _('used internally by daemon mode')), ('E', 'errorlog', '', _('name of error log file to write to')), ('p', 'port', 0, _('port to use (default: 8000)')), ('a', 'address', '', _('address to use')), ('n', 'name', '', _('name to show in web pages (default: working dir)')), + ('', 'pid-file', '', _('name of file to write process ID to')), ('', 'stdio', None, _('for remote clients')), ('t', 'templates', '', _('web templates to use')), ('', 'style', '', _('template style to use')), @@ -2506,9 +2631,13 @@ ('d', 'date', '', _('record datecode as commit date')), ('u', 'user', '', _('record user as commiter')), ('r', 'rev', '', _('revision to tag'))], - _('hg tag [-r REV] [OPTION]... NAME')), + _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')), "tags": (tags, [], _('hg tags')), - "tip": (tip, [('p', 'patch', None, _('show patch'))], _('hg tip')), + "tip": + (tip, + [('b', 'branches', None, _('show branches')), + ('p', 'patch', None, _('show patch'))], + _('hg tip [-b] [-p]')), "unbundle": (unbundle, [('u', 'update', None, @@ -2734,13 +2863,22 @@ if options['profile']: import hotshot, hotshot.stats prof = hotshot.Profile("hg.prof") - r = prof.runcall(d) - prof.close() - stats = hotshot.stats.load("hg.prof") - stats.strip_dirs() - stats.sort_stats('time', 'calls') - stats.print_stats(40) - return r + try: + try: + return prof.runcall(d) + except: + try: + u.warn(_('exception raised - generating profile ' + 'anyway\n')) + except: + pass + raise + finally: + prof.close() + stats = hotshot.stats.load("hg.prof") + stats.strip_dirs() + stats.sort_stats('time', 'calls') + stats.print_stats(40) else: return d() except:
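The walkchangerevs changes at the top of the commands.py diff replace the fixed 300-revision window with the new increasing_windows() generator: it yields (start, size) pairs whose size doubles from 8 up to a 512 cap, walking forward when start < end and backward otherwise. A hedged copy of that helper in isolation, with a sample run:

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        # same shape as the helper added to walkchangerevs: the window
        # doubles each step until it hits sizelimit
        if start < end:
            while start < end:
                yield start, min(windowsize, end - start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start - end - 1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    # walking a 100-revision filelog tip-first, as filerevgen now does:
    print(list(increasing_windows(99, -1)))
    # -> [(99, 8), (91, 16), (75, 32), (43, 43)]
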
--- a/mercurial/demandload.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/demandload.py Mon Mar 06 18:00:44 2006 +0100 @@ -1,15 +1,125 @@ -def demandload(scope, modules): - class d: - def __getattr__(self, name): - mod = self.__dict__["mod"] - scope = self.__dict__["scope"] - scope[mod] = __import__(mod, scope, scope, []) - return getattr(scope[mod], name) +'''Demand load modules when used, not when imported.''' + +__author__ = '''Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>. +This software may be used and distributed according to the terms +of the GNU General Public License, incorporated herein by reference.''' + +# this is based on matt's original demandload module. it is a +# complete rewrite. some time, we may need to support syntax of +# "import foo as bar". + +class _importer(object): + '''import a module. it is not imported until needed, and is + imported at most once per scope.''' + + def __init__(self, scope, modname, fromlist): + '''scope is context (globals() or locals()) in which import + should be made. modname is name of module to import. + fromlist is list of modules for "from foo import ..." + emulation.''' + + self.scope = scope + self.modname = modname + self.fromlist = fromlist + self.mod = None + + def module(self): + '''import the module if needed, and return.''' + if self.mod is None: + self.mod = __import__(self.modname, self.scope, self.scope, + self.fromlist) + del self.modname, self.fromlist + return self.mod + +class _replacer(object): + '''placeholder for a demand loaded module. demandload puts this in + a target scope. when an attribute of this object is looked up, + this object is replaced in the target scope with the actual + module. + + we use __getattribute__ to avoid namespace clashes between + placeholder object and real module.''' + + def __init__(self, importer, target): + self.importer = importer + self.target = target + # consider case where we do this: + # demandload(globals(), 'foo.bar foo.quux') + # foo will already exist in target scope when we get to + # foo.quux. so we remember that we will need to demandload + # quux into foo's scope when we really load it. + self.later = [] + + def module(self): + return object.__getattribute__(self, 'importer').module() + + def __getattribute__(self, key): + '''look up an attribute in a module and return it. replace the + name of the module in the caller\'s dict with the actual + module.''' - for m in modules.split(): - dl = d() - dl.mod = m - dl.scope = scope - scope[m] = dl + module = object.__getattribute__(self, 'module')() + target = object.__getattribute__(self, 'target') + importer = object.__getattribute__(self, 'importer') + later = object.__getattribute__(self, 'later') + + if later: + demandload(module.__dict__, ' '.join(later)) + + importer.scope[target] = module + + return getattr(module, key) + +class _replacer_from(_replacer): + '''placeholder for a demand loaded module. used for "from foo + import ..." emulation. semantics of this are different than + regular import, so different implementation needed.''' + + def module(self): + importer = object.__getattribute__(self, 'importer') + target = object.__getattribute__(self, 'target') + + return getattr(importer.module(), target) + +def demandload(scope, modules): + '''import modules into scope when each is first used. + + scope should be the value of globals() in the module calling this + function, or locals() in the calling function. + + modules is a string listing module names, separated by white + space. 
names are handled like this: + foo import foo + foo bar import foo, bar + foo.bar import foo.bar + foo:bar from foo import bar + foo:bar,quux from foo import bar, quux + foo.bar:quux from foo.bar import quux''' + for mod in modules.split(): + col = mod.find(':') + if col >= 0: + fromlist = mod[col+1:].split(',') + mod = mod[:col] + else: + fromlist = [] + importer = _importer(scope, mod, fromlist) + if fromlist: + for name in fromlist: + scope[name] = _replacer_from(importer, name) + else: + dot = mod.find('.') + if dot >= 0: + basemod = mod[:dot] + val = scope.get(basemod) + # if base module has already been demandload()ed, + # remember to load this submodule into its namespace + # when needed. + if isinstance(val, _replacer): + later = object.__getattribute__(val, 'later') + later.append(mod[dot+1:]) + continue + else: + basemod = mod + scope[basemod] = _replacer(importer, basemod)
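The docstring above spells out the name syntax the rewritten demandload accepts. A hedged usage sketch, with stdlib modules as stand-ins, showing what each form leaves in the calling scope; the real import only happens on the first attribute access, which also swaps the placeholder for the module:

    from mercurial.demandload import demandload

    demandload(globals(), 're xml.dom')   # "foo" and "foo.bar" forms
    demandload(globals(), 'os:path')      # "foo:bar" form, like "from os import path"

    def first_use():
        # each attribute access below triggers the corresponding deferred import
        return re.escape('.'), xml.dom.Node, path.join('a', 'b')
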
--- a/mercurial/dirstate.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/dirstate.py Mon Mar 06 18:00:44 2006 +0100 @@ -197,9 +197,24 @@ def clear(self): self.map = {} + self.copies = {} + self.markdirty() + + def rebuild(self, parent, files): + self.clear() + umask = os.umask(0) + os.umask(umask) + for f, mode in files: + if mode: + self.map[f] = ('n', ~umask, -1, 0) + else: + self.map[f] = ('n', ~umask & 0666, -1, 0) + self.pl = (parent, nullid) self.markdirty() def write(self): + if not self.dirty: + return st = self.opener("dirstate", "w", atomic=True) st.write("".join(self.pl)) for f, e in self.map.items(): @@ -270,11 +285,11 @@ elif not dc: dc = self.filterfiles(files) - def statmatch(file, stat): - file = util.pconvert(file) - if file not in dc and self.ignore(file): + def statmatch(file_, stat): + file_ = util.pconvert(file_) + if file_ not in dc and self.ignore(file_): return False - return match(file) + return match(file_) return self.walkhelper(files=files, statmatch=statmatch, dc=dc) @@ -350,9 +365,9 @@ continue if stat.S_ISDIR(st.st_mode): cmp1 = (lambda x, y: cmp(x[1], y[1])) - sorted = [ x for x in findfiles(f) ] - sorted.sort(cmp1) - for e in sorted: + sorted_ = [ x for x in findfiles(f) ] + sorted_.sort(cmp1) + for e in sorted_: yield e else: ff = util.normpath(ff) @@ -380,7 +395,7 @@ for src, fn, st in self.statwalk(files, match): try: - type, mode, size, time = self[fn] + type_, mode, size, time = self[fn] except KeyError: unknown.append(fn) continue @@ -399,22 +414,23 @@ nonexistent = False # XXX: what to do with file no longer present in the fs # who are not removed in the dirstate ? - if nonexistent and type in "nm": + if nonexistent and type_ in "nm": deleted.append(fn) continue # check the common case first - if type == 'n': + if type_ == 'n': if not st: st = os.stat(fn) - if size != st.st_size or (mode ^ st.st_mode) & 0100: + if size >= 0 and (size != st.st_size + or (mode ^ st.st_mode) & 0100): modified.append(fn) elif time != st.st_mtime: lookup.append(fn) - elif type == 'm': + elif type_ == 'm': modified.append(fn) - elif type == 'a': + elif type_ == 'a': added.append(fn) - elif type == 'r': + elif type_ == 'r': removed.append(fn) return (lookup, modified, added, removed, deleted, unknown)
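The new dirstate.rebuild() above records every manifest file as state 'n' with size -1 and mtime 0, and changes() was adjusted so the cheap size/mode test only fires when the recorded size is non-negative; rebuilt entries therefore fall through to the lookup list and get verified by content. A hedged standalone sketch of that classification, where the (state, mode, size, mtime) tuple mirrors the dirstate entry layout:

    import os, stat

    def classify(entry, st):
        """What dirstate.changes() does with a 'n' (normal) entry after
        the rebuild change; st is the os.stat() result for the file."""
        state, mode, size, mtime = entry
        if size >= 0 and (size != st.st_size or
                          (mode ^ st.st_mode) & stat.S_IEXEC):
            return 'modified'        # recorded size/mode disagree with the fs
        elif mtime != st.st_mtime:
            return 'lookup'          # stat data is inconclusive, compare contents
        return 'clean'

    # a rebuilt entry ('n', mode, -1, 0) can never be flagged modified from
    # stat data alone; the mtime of 0 pushes it onto the lookup list instead
    st = os.stat(__file__)
    print(classify(('n', st.st_mode, -1, 0), st))   # -> lookup
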
--- a/mercurial/hgweb.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/hgweb.py Mon Mar 06 18:00:44 2006 +0100 @@ -7,6 +7,7 @@ # of the GNU General Public License, incorporated herein by reference. import os, cgi, sys, urllib +import mimetypes from demandload import demandload demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser") demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util") @@ -18,7 +19,11 @@ for f in "templates", "../templates": p = os.path.join(os.path.dirname(__file__), f) if os.path.isdir(p): - return p + return os.path.normpath(p) + else: + # executable version (py2exe) doesn't support __file__ + if hasattr(sys, 'frozen'): + return os.path.join(sys.prefix, "templates") def age(x): def plural(t, c): @@ -71,6 +76,30 @@ else: return os.stat(hg_path).st_mtime +def staticfile(directory, fname): + """return a file inside directory with guessed content-type header + + fname always uses '/' as directory separator and isn't allowed to + contain unusual path components. + Content-type is guessed using the mimetypes module. + Return an empty string if fname is illegal or file not found. + + """ + parts = fname.split('/') + path = directory + for part in parts: + if (part in ('', os.curdir, os.pardir) or + os.sep in part or os.altsep is not None and os.altsep in part): + return "" + path = os.path.join(path, part) + try: + os.stat(path) + ct = mimetypes.guess_type(path)[0] or "text/plain" + return "Content-type: %s\n\n%s" % (ct, file(path).read()) + except (TypeError, OSError): + # illegal fname or unreadable file + return "" + class hgrequest(object): def __init__(self, inp=None, out=None, env=None): self.inp = inp or sys.stdin @@ -660,9 +689,10 @@ i = self.repo.tagslist() i.reverse() - def entries(**map): + def entries(notip=False, **map): parity = 0 for k,n in i: + if notip and k == "tip": continue yield {"parity": parity, "tag": k, "tagmanifest": hex(cl.read(n)[0]), @@ -672,7 +702,8 @@ yield self.t("tags", manifest=hex(mf), - entries=entries) + entries=lambda **x: entries(False, **x), + entriesnotip=lambda **x: entries(True, **x)) def summary(self): cl = self.repo.changelog @@ -843,6 +874,7 @@ 'ca': [('cmd', ['archive']), ('node', None)], 'tags': [('cmd', ['tags'])], 'tip': [('cmd', ['changeset']), ('node', ['tip'])], + 'static': [('cmd', ['static']), ('file', None)] } for k in shortcuts.iterkeys(): @@ -858,6 +890,7 @@ expand_form(req.form) t = self.repo.ui.config("web", "templates", templatepath()) + static = self.repo.ui.config("web", "static", os.path.join(t,"static")) m = os.path.join(t, "map") style = self.repo.ui.config("web", "style", "") if req.form.has_key('style'): @@ -981,6 +1014,11 @@ req.write(self.t("error")) + elif req.form['cmd'][0] == 'static': + fname = req.form['file'][0] + req.write(staticfile(static, fname) + or self.t("error", error="%r not found" % fname)) + else: req.write(self.t("error")) @@ -1075,17 +1113,27 @@ class hgwebdir(object): def __init__(self, config): def cleannames(items): - return [(name.strip('/'), path) for name, path in items] + return [(name.strip(os.sep), path) for name, path in items] - if type(config) == type([]): + if isinstance(config, (list, tuple)): self.repos = cleannames(config) - elif type(config) == type({}): + elif isinstance(config, dict): self.repos = cleannames(config.items()) self.repos.sort() else: cp = ConfigParser.SafeConfigParser() cp.read(config) - self.repos = cleannames(cp.items("paths")) + self.repos = [] + if cp.has_section('paths'): + 
self.repos.extend(cleannames(cp.items('paths'))) + if cp.has_section('collections'): + for prefix, root in cp.items('collections'): + for path in util.walkrepos(root): + repo = os.path.normpath(path) + name = repo + if name.startswith(prefix): + name = name[len(prefix):] + self.repos.append((name.lstrip(os.sep), repo)) self.repos.sort() def run(self, req=hgrequest()): @@ -1142,4 +1190,10 @@ else: req.write(tmpl("notfound", repo=virtual)) else: - req.write(tmpl("index", entries=entries)) + if req.form.has_key('static'): + static = os.path.join(templatepath(), "static") + fname = req.form['static'][0] + req.write(staticfile(static, fname) + or tmpl("error", error="%r not found" % fname)) + else: + req.write(tmpl("index", entries=entries))
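staticfile() above backs the new 'static' command: it walks the '/'-separated name one component at a time, refusing anything that could escape the template directory, and guesses the Content-type with mimetypes (falling back to text/plain). A hedged sketch of just that sanitising walk, returning the joined path instead of the file contents:

    import os, mimetypes

    def safe_join(directory, fname):
        # fname always uses '/'; reject empty, '.', '..' and any component
        # carrying a native path separator, as hgweb.staticfile does
        path = directory
        for part in fname.split('/'):
            if (part in ('', os.curdir, os.pardir) or
                os.sep in part or
                (os.altsep is not None and os.altsep in part)):
                return None
            path = os.path.join(path, part)
        return path

    def content_type(path):
        return mimetypes.guess_type(path)[0] or "text/plain"

    # safe_join('/srv/templates/static', 'style.css') -> '/srv/templates/static/style.css'
    # safe_join('/srv/templates/static', '../hgrc')   -> None
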
--- a/mercurial/localrepo.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/localrepo.py Mon Mar 06 18:00:44 2006 +0100 @@ -13,6 +13,8 @@ demandload(globals(), "re lock transaction tempfile stat mdiff errno") class localrepository(object): + def __del__(self): + self.transhandle = None def __init__(self, ui, path=None, create=0): if not path: p = os.getcwd() @@ -37,6 +39,7 @@ self.nodetagscache = None self.encodepats = None self.decodepats = None + self.transhandle = None if create: os.mkdir(self.path) @@ -215,6 +218,10 @@ return self.wopener(filename, 'w').write(data) def transaction(self): + tr = self.transhandle + if tr != None and tr.running(): + return tr.nest() + # save dirstate for undo try: ds = self.opener("dirstate").read() @@ -222,21 +229,18 @@ ds = "" self.opener("journal.dirstate", "w").write(ds) - def after(): - util.rename(self.join("journal"), self.join("undo")) - util.rename(self.join("journal.dirstate"), - self.join("undo.dirstate")) - - return transaction.transaction(self.ui.warn, self.opener, - self.join("journal"), after) + tr = transaction.transaction(self.ui.warn, self.opener, + self.join("journal"), + aftertrans(self.path)) + self.transhandle = tr + return tr def recover(self): - lock = self.lock() + l = self.lock() if os.path.exists(self.join("journal")): self.ui.status(_("rolling back interrupted transaction\n")) transaction.rollback(self.opener, self.join("journal")) - self.manifest = manifest.manifest(self.opener) - self.changelog = changelog.changelog(self.opener) + self.reload() return True else: self.ui.warn(_("no interrupted transaction available\n")) @@ -245,34 +249,51 @@ def undo(self, wlock=None): if not wlock: wlock = self.wlock() - lock = self.lock() + l = self.lock() if os.path.exists(self.join("undo")): self.ui.status(_("rolling back last transaction\n")) transaction.rollback(self.opener, self.join("undo")) util.rename(self.join("undo.dirstate"), self.join("dirstate")) - self.dirstate.read() + self.reload() + self.wreload() else: self.ui.warn(_("no undo information available\n")) - def lock(self, wait=1): + def wreload(self): + self.dirstate.read() + + def reload(self): + self.changelog.load() + self.manifest.load() + self.tagscache = None + self.nodetagscache = None + + def do_lock(self, lockname, wait, releasefn=None, acquirefn=None): try: - return lock.lock(self.join("lock"), 0) - except lock.LockHeld, inst: - if wait: - self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0]) - return lock.lock(self.join("lock"), wait) - raise inst - - def wlock(self, wait=1): - try: - wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write) + l = lock.lock(self.join(lockname), 0, releasefn) except lock.LockHeld, inst: if not wait: raise inst self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0]) - wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write) - self.dirstate.read() - return wlock + try: + # default to 600 seconds timeout + l = lock.lock(self.join(lockname), + int(self.ui.config("ui", "timeout") or 600), + releasefn) + except lock.LockHeld, inst: + raise util.Abort(_("timeout while waiting for " + "lock held by %s") % inst.args[0]) + if acquirefn: + acquirefn() + return l + + def lock(self, wait=1): + return self.do_lock("lock", wait, acquirefn=self.reload) + + def wlock(self, wait=1): + return self.do_lock("wlock", wait, + self.dirstate.write, + self.wreload) def checkfilemerge(self, filename, text, filelog, manifest1, manifest2): "determine whether a new filenode is needed" @@ -311,7 +332,7 @@ if not wlock: wlock = 
self.wlock() - lock = self.lock() + l = self.lock() tr = self.transaction() mm = m1.copy() mfm = mf1.copy() @@ -350,7 +371,7 @@ self.dirstate.setparents(n, nullid) def commit(self, files=None, text="", user=None, date=None, - match=util.always, force=False, wlock=None): + match=util.always, force=False, lock=None, wlock=None): commit = [] remove = [] changed = [] @@ -388,7 +409,8 @@ if not wlock: wlock = self.wlock() - lock = self.lock() + if not lock: + lock = self.lock() tr = self.transaction() # check in files @@ -503,12 +525,18 @@ del mf[fn] return mf + if node1: + # read the manifest from node1 before the manifest from node2, + # so that we'll hit the manifest cache if we're going through + # all the revisions in parent->child order. + mf1 = mfmatches(node1) + # are we comparing the working directory? if not node2: if not wlock: try: wlock = self.wlock(wait=0) - except lock.LockHeld: + except lock.LockException: wlock = None lookup, modified, added, removed, deleted, unknown = ( self.dirstate.changes(files, match)) @@ -541,8 +569,6 @@ # flush lists from dirstate before comparing manifests modified, added = [], [] - mf1 = mfmatches(node1) - for fn in mf2: if mf1.has_key(fn): if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)): @@ -597,7 +623,6 @@ if os.path.exists(p): self.ui.warn(_("%s still exists!\n") % f) elif self.dirstate.state(f) == 'a': - self.ui.warn(_("%s never committed!\n") % f) self.dirstate.forget([f]) elif f not in self.dirstate: self.ui.warn(_("%s not tracked!\n") % f) @@ -932,7 +957,7 @@ return subset def pull(self, remote, heads=None): - lock = self.lock() + l = self.lock() # if we have an empty repo, fetch everything if self.changelog.tip() == nullid: @@ -951,7 +976,7 @@ cg = remote.changegroupsubset(fetch, heads, 'pull') return self.addchangegroup(cg) - def push(self, remote, force=False): + def push(self, remote, force=False, revs=None): lock = remote.lock() base = {} @@ -963,17 +988,25 @@ return 1 update = self.findoutgoing(remote, base) - if not update: + if revs is not None: + msng_cl, bases, heads = self.changelog.nodesbetween(update, revs) + else: + bases, heads = update, self.changelog.heads() + + if not bases: self.ui.status(_("no changes found\n")) return 1 elif not force: - if len(heads) < len(self.changelog.heads()): + if len(bases) < len(heads): self.ui.warn(_("abort: push creates new remote branches!\n")) self.ui.status(_("(did you forget to merge?" " use push -f to force)\n")) return 1 - cg = self.changegroup(update, 'push') + if revs is None: + cg = self.changegroup(update, 'push') + else: + cg = self.changegroupsubset(update, revs, 'push') return remote.addchangegroup(cg) def changegroupsubset(self, bases, heads, source): @@ -1646,6 +1679,7 @@ remove.sort() for f in remove: self.ui.note(_("removing %s\n") % f) + util.audit_path(f) try: util.unlink(self.wjoin(f)) except OSError, inst: @@ -1852,3 +1886,13 @@ if errors[0]: self.ui.warn(_("%d integrity errors encountered!\n") % errors[0]) return 1 + +# used to avoid circular references so destructors work +def aftertrans(base): + p = base + def a(): + util.rename(os.path.join(p, "journal"), os.path.join(p, "undo")) + util.rename(os.path.join(p, "journal.dirstate"), + os.path.join(p, "undo.dirstate")) + return a +
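The aftertrans() helper added at the end of the localrepo.py diff exists, per its comment, to avoid a reference cycle: the transaction's completion callback closes over the repository path rather than the repository object, so destructors still run. A hedged miniature of the same pattern, with os.rename standing in for util.rename (which additionally copes with platforms that cannot rename over an existing file):

    import os

    def aftertrans(base):
        # capture only the path string; the callback holds no reference
        # to the repository, so no cycle is created
        def a():
            os.rename(os.path.join(base, "journal"),
                      os.path.join(base, "undo"))
            os.rename(os.path.join(base, "journal.dirstate"),
                      os.path.join(base, "undo.dirstate"))
        return a

    # localrepo wires it up roughly as:
    #   transaction.transaction(self.ui.warn, self.opener,
    #                           self.join("journal"), aftertrans(self.path))
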
--- a/mercurial/lock.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/lock.py Mon Mar 06 18:00:44 2006 +0100 @@ -5,17 +5,21 @@ # This software may be used and distributed according to the terms # of the GNU General Public License, incorporated herein by reference. -import os, time -import util +from demandload import * +demandload(globals(), 'errno os time util') -class LockHeld(Exception): +class LockException(Exception): + pass +class LockHeld(LockException): + pass +class LockUnavailable(LockException): pass class lock(object): - def __init__(self, file, wait=1, releasefn=None): + def __init__(self, file, timeout=-1, releasefn=None): self.f = file self.held = 0 - self.wait = wait + self.timeout = timeout self.releasefn = releasefn self.lock() @@ -23,13 +27,16 @@ self.release() def lock(self): + timeout = self.timeout while 1: try: self.trylock() return 1 except LockHeld, inst: - if self.wait: + if timeout != 0: time.sleep(1) + if timeout > 0: + timeout -= 1 continue raise inst @@ -38,8 +45,11 @@ try: util.makelock(str(pid), self.f) self.held = 1 - except (OSError, IOError): - raise LockHeld(util.readlock(self.f)) + except (OSError, IOError), why: + if why.errno == errno.EEXIST: + raise LockHeld(util.readlock(self.f)) + else: + raise LockUnavailable(why) def release(self): if self.held:
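With the lock.py rewrite above, the old wait flag becomes a timeout in seconds: -1 retries forever, 0 fails immediately, and a positive value gives up with LockHeld after roughly that many one-second retries, while errors other than EEXIST when creating the lock file now surface as LockUnavailable instead of masquerading as a held lock. A hedged usage sketch based only on the interface shown in this diff (the lock path is illustrative):

from mercurial import lock as lockmod, util

lockfile = "/path/to/repo/.hg/lock"      # illustrative path
try:
    l = lockmod.lock(lockfile, timeout=5)
except lockmod.LockHeld, inst:
    # still held after ~5 seconds; the holder's id was read from the lock file
    raise util.Abort("timeout while waiting for lock held by %s" % inst.args[0])
except lockmod.LockUnavailable, inst:
    raise util.Abort("cannot create lock file: %s" % inst)
try:
    pass                                 # locked work goes here
finally:
    l.release()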
--- a/mercurial/mpatch.c Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/mpatch.c Mon Mar 06 18:00:44 2006 +0100 @@ -66,7 +66,7 @@ a = NULL; } else a->head = a->tail = a->base; - return a; + return a; } if (!PyErr_Occurred()) PyErr_NoMemory();
--- a/mercurial/revlog.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/revlog.py Mon Mar 06 18:00:44 2006 +0100 @@ -13,7 +13,7 @@ from node import * from i18n import gettext as _ from demandload import demandload -demandload(globals(), "binascii errno heapq mdiff sha struct zlib") +demandload(globals(), "binascii errno heapq mdiff os sha struct zlib") def hash(text, p1, p2): """generate a hash from the given text and its parent hashes @@ -187,15 +187,33 @@ self.indexfile = indexfile self.datafile = datafile self.opener = opener + + self.indexstat = None self.cache = None self.chunkcache = None + self.load() + def load(self): try: - i = self.opener(self.indexfile).read() + f = self.opener(self.indexfile) except IOError, inst: if inst.errno != errno.ENOENT: raise i = "" + else: + try: + st = os.fstat(f.fileno()) + except AttributeError, inst: + st = None + else: + oldst = self.indexstat + if (oldst and st.st_dev == oldst.st_dev + and st.st_ino == oldst.st_ino + and st.st_mtime == oldst.st_mtime + and st.st_ctime == oldst.st_ctime): + return + self.indexstat = st + i = f.read() if i and i[:4] != "\0\0\0\0": raise RevlogError(_("incompatible revlog signature on %s") % @@ -624,12 +642,10 @@ # we store negative distances because heap returns smallest member h = [(-dist[node], node)] seen = {} - earliest = self.count() while h: d, n = heapq.heappop(h) if n not in seen: seen[n] = 1 - r = self.rev(n) yield (-d, n) for p in self.parents(n): heapq.heappush(h, (-dist[p], p)) @@ -690,11 +706,6 @@ p = self.parents(self.node(revs[0]))[0] revs.insert(0, self.rev(p)) - # helper to reconstruct intermediate versions - def construct(text, base, rev): - bins = [self.chunk(r) for r in xrange(base + 1, rev + 1)] - return mdiff.patches(text, bins) - # build deltas for d in xrange(0, len(revs) - 1): a, b = revs[d], revs[d + 1] @@ -738,10 +749,10 @@ base = prev = -1 start = end = measure = 0 if r: - start = self.start(self.base(t)) + base = self.base(t) + start = self.start(base) end = self.end(t) - measure = self.length(self.base(t)) - base = self.base(t) + measure = self.length(base) prev = self.tip() transaction.add(self.datafile, end) @@ -793,14 +804,15 @@ raise RevlogError(_("consistency error adding group")) measure = len(text) else: - e = (end, len(cdelta), self.base(t), link, p1, p2, node) + e = (end, len(cdelta), base, link, p1, p2, node) self.index.append(e) self.nodemap[node] = r dfh.write(cdelta) ifh.write(struct.pack(indexformat, *e)) t, r, chain, prev = r, r + 1, node, node - start = self.start(self.base(t)) + base = self.base(t) + start = self.start(base) end = self.end(t) dfh.close()
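The new revlog.load() above avoids re-parsing the index on every call: it remembers the os.fstat of the index file and returns early when the device, inode and timestamps are unchanged, which is what makes localrepository.reload() cheap enough to call after every lock acquisition. A standalone sketch of just that check (the helper name is not part of Mercurial's API):

import os

def index_unchanged(f, oldst):
    """Return (unchanged, newstat) for an open index file, mimicking the
    stat comparison in revlog.load(); a sketch, not the real method."""
    try:
        st = os.fstat(f.fileno())
    except AttributeError:
        # opener returned something without a real file descriptor
        return False, None
    if (oldst and st.st_dev == oldst.st_dev
        and st.st_ino == oldst.st_ino
        and st.st_mtime == oldst.st_mtime
        and st.st_ctime == oldst.st_ctime):
        return True, oldst
    return False, st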
--- a/mercurial/statichttprepo.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/statichttprepo.py Mon Mar 06 18:00:44 2006 +0100 @@ -15,8 +15,10 @@ def read(self, size=None): try: return httprangereader.httprangereader.read(self, size) + except urllib2.HTTPError, inst: + raise IOError(None, inst) except urllib2.URLError, inst: - raise IOError(None, str(inst)) + raise IOError(None, inst.reason[1]) def opener(base): """return a function that opens files over http"""
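urllib2.HTTPError is a subclass of urllib2.URLError, so the new handler above has to come first; it also carries an HTTP status rather than the (errno, message) pair found in URLError.reason, which is why the two branches build their IOError differently. A small sketch of the same mapping outside the repository code (the function and URL are illustrative):

import urllib2

def fetch(url):
    # map urllib2 errors to IOError the way the static-http reader does
    try:
        return urllib2.urlopen(url).read()
    except urllib2.HTTPError, inst:       # subclass of URLError: catch first
        raise IOError(None, inst)
    except urllib2.URLError, inst:
        raise IOError(None, inst.reason[1])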
--- a/mercurial/transaction.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/transaction.py Mon Mar 06 18:00:44 2006 +0100 @@ -22,6 +22,7 @@ if os.path.exists(journal): raise AssertionError(_("journal already exists - run hg recover")) + self.count = 1 self.report = report self.opener = opener self.after = after @@ -46,7 +47,17 @@ self.file.write("%s\0%d\n" % (file, offset)) self.file.flush() + def nest(self): + self.count += 1 + return self + + def running(self): + return self.count > 0 + def close(self): + self.count -= 1 + if self.count != 0: + return self.file.close() self.entries = [] if self.after:
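The transaction changes above implement nesting by reference counting: the journal is created with count = 1, nest() bumps the count and hands back the same object, and close() only finishes the journal once the count falls back to zero, so code paths that already hold a transaction can safely call into helpers that open one of their own. A stripped-down sketch of the counting discipline (class and attribute names are illustrative):

class nestable(object):
    # sketch of the count/nest/running/close protocol used by transaction
    def __init__(self):
        self.count = 1
        self.committed = False
    def nest(self):
        self.count += 1
        return self
    def running(self):
        return self.count > 0
    def close(self):
        self.count -= 1
        if self.count != 0:
            return                 # an outer user still has the journal open
        self.committed = True      # the real code closes and renames the journal here

outer = nestable()
inner = outer.nest()               # nested caller shares the same object
inner.close()                      # no effect yet
outer.close()                      # outermost close really commits
assert outer.committed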
--- a/mercurial/util.py Mon Mar 06 17:58:53 2006 +0100 +++ b/mercurial/util.py Mon Mar 06 18:00:44 2006 +0100 @@ -179,7 +179,7 @@ if root == os.sep: rootsep = os.sep else: - rootsep = root + os.sep + rootsep = root + os.sep name = myname if not name.startswith(os.sep): name = os.path.join(root, cwd, name) @@ -363,7 +363,14 @@ else: shutil.copy(src, dst) -def opener(base): +def audit_path(path): + """Abort if path contains dangerous components""" + parts = os.path.normcase(path).split(os.sep) + if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '') + or os.pardir in parts): + raise Abort(_("path contains illegal component: %s\n") % path) + +def opener(base, audit=True): """ return a function that opens files relative to base @@ -371,6 +378,7 @@ remote file access from higher level code. """ p = base + audit_p = audit def mktempcopy(name): d, fn = os.path.split(name) @@ -401,6 +409,8 @@ self.close() def o(path, mode="r", text=False, atomic=False): + if audit_p: + audit_path(path) f = os.path.join(p, path) if not text: @@ -690,3 +700,16 @@ (time.strftime(format, time.gmtime(float(t) - tz)), -tz / 3600, ((-tz % 3600) / 60))) + +def walkrepos(path): + '''yield every hg repository under path, recursively.''' + def errhandler(err): + if err.filename == path: + raise err + + for root, dirs, files in os.walk(path, onerror=errhandler): + for d in dirs: + if d == '.hg': + yield root + dirs[:] = [] + break
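The new util.audit_path() above rejects working-directory names that could write outside the repository or into its metadata: a drive prefix, an empty or ".hg" first component (an absolute path splits to an empty first part), or a ".." component anywhere; the default opener now applies it to every path unless constructed with audit=False. A few hedged examples of the effect, assuming a POSIX os.sep:

from mercurial import util

for p in ("foo/bar.txt", "docs/README"):
    util.audit_path(p)                   # accepted: stays inside the repo

for p in (".hg/hgrc", "../escape", "/etc/passwd"):
    try:
        util.audit_path(p)
    except util.Abort:
        pass                             # rejected: illegal component
    else:
        raise AssertionError("expected %s to be rejected" % p)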
--- a/setup.py Mon Mar 06 17:58:53 2006 +0100 +++ b/setup.py Mon Mar 06 18:00:44 2006 +0100 @@ -89,7 +89,9 @@ data_files=[('mercurial/templates', ['templates/map'] + glob.glob('templates/map-*') + - glob.glob('templates/*.tmpl'))], + glob.glob('templates/*.tmpl')), + ('mercurial/templates/static', + glob.glob('templates/static/*'))], cmdclass=cmdclass, scripts=['hg', 'hgmerge'], options=dict(bdist_mpkg=dict(zipdist=True,
--- a/templates/error-gitweb.tmpl Mon Mar 06 17:58:53 2006 +0100 +++ b/templates/error-gitweb.tmpl Mon Mar 06 18:00:44 2006 +0100 @@ -1,11 +1,21 @@ #header# +<title>#repo|escape#: Error</title> +<link rel="alternate" type="application/rss+xml" + href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#"> +</head> +<body> + +<div class="page_header"> +<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / error +</div> + <div class="page_nav"> -<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">log</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/> +<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/> </div> <div> <br/> -<i>Error parsing query string</i><br/> +<i>An error occured while processing your request</i><br/> <br/> </div>
--- a/templates/header-gitweb.tmpl Mon Mar 06 17:58:53 2006 +0100 +++ b/templates/header-gitweb.tmpl Mon Mar 06 18:00:44 2006 +0100 @@ -4,56 +4,8 @@ <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US"> <head> +<link rel="icon" href="?static=hgicon.png" type="image/png"> <meta http-equiv="content-type" content="text/html; charset=utf-8"/> <meta name="robots" content="index, nofollow"/> -<style type="text/css"> -body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; } -a { color:#0000cc; } -a:hover, a:visited, a:active { color:#880000; } -div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } -div.page_header a:visited { color:#0000cc; } -div.page_header a:hover { color:#880000; } -div.page_nav { padding:8px; } -div.page_nav a:visited { color:#0000cc; } -div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} -div.page_footer { height:17px; padding:4px 8px; background-color: #d9d8d1; } -div.page_footer_text { float:left; color:#555555; font-style:italic; } -div.page_body { padding:8px; } -div.title, a.title { - display:block; padding:6px 8px; - font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; -} -a.title:hover { background-color: #d9d8d1; } -div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } -div.log_body { padding:8px 8px 8px 150px; } -span.age { position:relative; float:left; width:142px; font-style:italic; } -div.log_link { - padding:0px 8px; - font-size:10px; font-family:sans-serif; font-style:normal; - position:relative; float:left; width:136px; -} -div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } -a.list { text-decoration:none; color:#000000; } -a.list:hover { text-decoration:underline; color:#880000; } -table { padding:8px 4px; } -th { padding:2px 5px; font-size:12px; text-align:left; } -tr.light:hover, .parity0:hover { background-color:#edece6; } -tr.dark, .parity1 { background-color:#f6f6f0; } -tr.dark:hover, .parity1:hover { background-color:#edece6; } -td { padding:2px 5px; font-size:12px; vertical-align:top; } -td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } -div.pre { font-family:monospace; font-size:12px; white-space:pre; } -div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } -div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } -div.search { margin:4px 8px; position:absolute; top:56px; right:12px } -.linenr { color:#999999; text-decoration:none } -a.rss_logo { - float:right; padding:3px 0px; width:35px; line-height:10px; - border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; - color:#ffffff; background-color:#ff6600; - font-weight:bold; font-family:sans-serif; font-size:10px; - text-align:center; text-decoration:none; -} -a.rss_logo:hover { background-color:#ee5500; } -</style> +<style type="text/css">/*<![CDATA[*/ @import "?static=style-gitweb.css"; /*]]>*/</style>
--- a/templates/header.tmpl Mon Mar 06 17:58:53 2006 +0100 +++ b/templates/header.tmpl Mon Mar 06 18:00:44 2006 +0100 @@ -3,78 +3,6 @@ <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> <html> <head> +<link rel="icon" href="?static=hgicon.png" type="image/png"> <meta name="robots" content="index, nofollow" /> -<style type="text/css"> -<!-- -a { text-decoration:none; } -.parity0 { background-color: #dddddd; } -.parity1 { background-color: #eeeeee; } -.lineno { width: 60px; color: #aaaaaa; font-size: smaller; - text-align: right; padding-right:1em; } -.plusline { color: green; } -.minusline { color: red; } -.atline { color: purple; } -.annotate { font-size: smaller; text-align: right; padding-right: 1em; } -.buttons a { - background-color: #666666; - padding: 2pt; - color: white; - font-family: sans; - font-weight: bold; -} -.navigate a { - background-color: #ccc; - padding: 2pt; - font-family: sans; - color: black; -} - -.metatag { - background-color: #888888; - color: white; - text-align: right; -} - -/* Common */ -pre { margin: 0; } - -.logo { - background-color: #333; - padding: 4pt; - margin: 8pt 0 8pt 8pt; - font-family: sans; - font-size: 60%; - color: white; - float: right; - clear: right; - text-align: left; -} - -.logo a { - font-weight: bold; - font-size: 150%; - color: #999; -} - -/* Changelog entries */ -.changelogEntry { width: 100%; } -.changelogEntry th { font-weight: normal; text-align: right; vertical-align: top; } -.changelogEntry th.age, .changelogEntry th.firstline { font-weight: bold; } -.changelogEntry th.firstline { text-align: left; width: inherit; } - -/* Tag entries */ -#tagEntries { list-style: none; margin: 0; padding: 0; } -#tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; } -#tagEntries .tagEntry span.node { font-family: monospace; } - -/* Changeset entry */ -#changesetEntry { } -#changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } -#changesetEntry th.files, #changesetEntry th.description { vertical-align: top; } - -/* File diff view */ -#filediffEntry { } -#filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } - ---> -</style> +<style type="text/css">/*<![CDATA[*/ @import "?static=style.css"; /*]]>*/</style>
--- a/templates/map-rss Mon Mar 06 17:58:53 2006 +0100
+++ b/templates/map-rss Mon Mar 06 18:00:44 2006 +0100
@@ -4,3 +4,5 @@
 changelogentry = changelogentry-rss.tmpl
 filelog = filelog-rss.tmpl
 filelogentry = filelogentry-rss.tmpl
+tags = tags-rss.tmpl
+tagentry = tagentry-rss.tmpl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/templates/static/style-gitweb.css Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,48 @@ +body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; } +a { color:#0000cc; } +a:hover, a:visited, a:active { color:#880000; } +div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } +div.page_header a:visited { color:#0000cc; } +div.page_header a:hover { color:#880000; } +div.page_nav { padding:8px; } +div.page_nav a:visited { color:#0000cc; } +div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} +div.page_footer { height:17px; padding:4px 8px; background-color: #d9d8d1; } +div.page_footer_text { float:left; color:#555555; font-style:italic; } +div.page_body { padding:8px; } +div.title, a.title { + display:block; padding:6px 8px; + font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; +} +a.title:hover { background-color: #d9d8d1; } +div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } +div.log_body { padding:8px 8px 8px 150px; } +span.age { position:relative; float:left; width:142px; font-style:italic; } +div.log_link { + padding:0px 8px; + font-size:10px; font-family:sans-serif; font-style:normal; + position:relative; float:left; width:136px; +} +div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } +a.list { text-decoration:none; color:#000000; } +a.list:hover { text-decoration:underline; color:#880000; } +table { padding:8px 4px; } +th { padding:2px 5px; font-size:12px; text-align:left; } +tr.light:hover, .parity0:hover { background-color:#edece6; } +tr.dark, .parity1 { background-color:#f6f6f0; } +tr.dark:hover, .parity1:hover { background-color:#edece6; } +td { padding:2px 5px; font-size:12px; vertical-align:top; } +td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } +div.pre { font-family:monospace; font-size:12px; white-space:pre; } +div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } +div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } +div.search { margin:4px 8px; position:absolute; top:56px; right:12px } +.linenr { color:#999999; text-decoration:none } +a.rss_logo { + float:right; padding:3px 0px; width:35px; line-height:10px; + border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; + color:#ffffff; background-color:#ff6600; + font-weight:bold; font-family:sans-serif; font-size:10px; + text-align:center; text-decoration:none; +} +a.rss_logo:hover { background-color:#ee5500; }
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/templates/static/style.css Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,70 @@ +a { text-decoration:none; } +.parity0 { background-color: #dddddd; } +.parity1 { background-color: #eeeeee; } +.lineno { width: 60px; color: #aaaaaa; font-size: smaller; + text-align: right; padding-right:1em; } +.plusline { color: green; } +.minusline { color: red; } +.atline { color: purple; } +.annotate { font-size: smaller; text-align: right; padding-right: 1em; } +.buttons a { + background-color: #666666; + padding: 2pt; + color: white; + font-family: sans; + font-weight: bold; +} +.navigate a { + background-color: #ccc; + padding: 2pt; + font-family: sans; + color: black; +} + +.metatag { + background-color: #888888; + color: white; + text-align: right; +} + +/* Common */ +pre { margin: 0; } + +.logo { + background-color: #333; + padding: 4pt; + margin: 8pt 0 8pt 8pt; + font-family: sans; + font-size: 60%; + color: white; + float: right; + clear: right; + text-align: left; +} + +.logo a { + font-weight: bold; + font-size: 150%; + color: #999; +} + +/* Changelog entries */ +.changelogEntry { width: 100%; } +.changelogEntry th { font-weight: normal; text-align: right; vertical-align: top; } +.changelogEntry th.age, .changelogEntry th.firstline { font-weight: bold; } +.changelogEntry th.firstline { text-align: left; width: inherit; } + +/* Tag entries */ +#tagEntries { list-style: none; margin: 0; padding: 0; } +#tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; } +#tagEntries .tagEntry span.node { font-family: monospace; } + +/* Changeset entry */ +#changesetEntry { } +#changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } +#changesetEntry th.files, #changesetEntry th.description { vertical-align: top; } + +/* File diff view */ +#filediffEntry { } +#filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tagentry-rss.tmpl Mon Mar 06 18:00:44 2006 +0100
@@ -0,0 +1,6 @@
+<item>
+ <title>#tag|escape#</title>
+ <link>#url#?cs=#node|short#</link>
+ <description><![CDATA[#tag|strip|escape|addbreaks#]]></description>
+ <pubDate>#date|rfc822date#</pubDate>
+</item>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tags-rss.tmpl Mon Mar 06 18:00:44 2006 +0100
@@ -0,0 +1,6 @@
+#header#
+ <title>#repo|escape#: tags </title>
+ <description>#repo|escape# tag history</description>
+ #entriesnotip%tagentry#
+ </channel>
+</rss>
--- a/templates/tags.tmpl Mon Mar 06 17:58:53 2006 +0100 +++ b/templates/tags.tmpl Mon Mar 06 18:00:44 2006 +0100 @@ -1,11 +1,14 @@ #header# <title>#repo|escape#: tags</title> +<link rel="alternate" type="application/rss+xml" + href="?cmd=tags;style=rss" title="RSS feed for #repo|escape#: tags"> </head> <body> <div class="buttons"> <a href="?cl=tip">changelog</a> <a href="?mf=#manifest|short#;path=/">manifest</a> +<a type="application/rss+xml" href="?cmd=tags;style=rss">rss</a> </div> <h2>tags:</h2>
--- a/tests/test-archive Mon Mar 06 17:58:53 2006 +0100 +++ b/tests/test-archive Mon Mar 06 18:00:44 2006 +0100 @@ -18,8 +18,7 @@ echo "allowzip = true" >> .hg/hgrc echo "allowgz = true" >> .hg/hgrc echo "allowbz2 = true" >> .hg/hgrc -hg serve -p 20059 > /dev/null & -sleep 1 # wait for server to be started +hg serve -p 20059 -d --pid-file=hg.pid TIP=`hg id -v | cut -f1 -d' '` QTIP=`hg id -q` @@ -35,5 +34,5 @@ http_proxy= python getarchive.py "$TIP" zip > archive.zip unzip -t archive.zip | sed "s/$QTIP/TIP/" -kill $! +kill `cat hg.pid` sleep 1 # wait for server to scream and die
--- a/tests/test-archive.out Mon Mar 06 17:58:53 2006 +0100
+++ b/tests/test-archive.out Mon Mar 06 18:00:44 2006 +0100
@@ -12,4 +12,3 @@
 testing: test-archive-TIP/baz/bletch OK
 testing: test-archive-TIP/foo OK
 No errors detected in compressed data of archive.zip.
-killed!
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-clone-pull-corruption Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,32 @@ +#!/bin/sh +# +# Corrupt an hg repo with a pull started during an aborted commit +# + +# Create two repos, so that one of them can pull from the other one. +hg init source +cd source +touch foo +hg add foo +hg ci -m 'add foo' +hg clone . ../corrupted +echo >> foo +hg ci -m 'change foo' + +# Add a hook to wait 5 seconds and then abort the commit +cd ../corrupted +echo '[hooks]' >> .hg/hgrc +echo 'pretxncommit = sleep 5; exit 1' >> .hg/hgrc + +# start a commit... +touch bar +hg add bar +hg ci -m 'add bar' & + +# ... and start a pull while the commit is still running +sleep 1 +hg pull ../source 2>/dev/null + +# see what happened +wait +hg verify
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-clone-pull-corruption.out Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,15 @@ +pulling from ../source +abort: pretxncommit hook exited with status 1 +transaction abort! +rollback completed +searching for changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 1 changes to 1 files +(run 'hg update' to get a working copy) +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 2 changesets, 2 total revisions
--- a/tests/test-commit.out Mon Mar 06 17:58:53 2006 +0100 +++ b/tests/test-commit.out Mon Mar 06 18:00:44 2006 +0100 @@ -1,5 +1,3 @@ -transaction abort! -rollback completed abort: impossible time zone offset: 4444444 transaction abort! rollback completed @@ -13,4 +11,6 @@ transaction abort! rollback completed abort: date exceeds 32 bits: 111111111111 +transaction abort! +rollback completed abort: No such file or directory: .../test/bar
--- a/tests/test-help.out Mon Mar 06 17:58:53 2006 +0100 +++ b/tests/test-help.out Mon Mar 06 18:00:44 2006 +0100 @@ -64,7 +64,6 @@ paths show definition of symbolic path names pull pull changes from the specified source push push changes to the specified destination - rawcommit raw commit interface (DEPRECATED) recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove @@ -106,7 +105,6 @@ paths show definition of symbolic path names pull pull changes from the specified source push push changes to the specified destination - rawcommit raw commit interface (DEPRECATED) recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove @@ -173,9 +171,9 @@ -r --rev revision -a --text treat all files as text - -I --include include names matching the given patterns -p --show-function show which function each change is in -w --ignore-all-space ignore white space when comparing lines + -I --include include names matching the given patterns -X --exclude exclude names matching the given patterns hg status [OPTION]... [FILE]...
--- a/tests/test-merge3.out Mon Mar 06 17:58:53 2006 +0100
+++ b/tests/test-merge3.out Mon Mar 06 18:00:44 2006 +0100
@@ -1,3 +1,2 @@
 removing b
-b never committed!
 nothing changed
--- a/tests/test-pull Mon Mar 06 17:58:53 2006 +0100 +++ b/tests/test-pull Mon Mar 06 18:00:44 2006 +0100 @@ -7,8 +7,7 @@ hg addremove hg commit -m 1 hg verify -hg serve -p 20059 > /dev/null & -sleep 1 # wait for server to be started +hg serve -p 20059 -d --pid-file=hg.pid cd .. http_proxy= hg clone http://localhost:20059/ copy @@ -19,4 +18,4 @@ hg manifest hg pull -kill $! +kill `cat ../test/hg.pid`
--- a/tests/test-pull-permission Mon Mar 06 17:58:53 2006 +0100 +++ b/tests/test-pull-permission Mon Mar 06 18:00:44 2006 +0100 @@ -12,9 +12,8 @@ cd .. hg clone a b + +chmod +w a/.hg # let test clean up + cd b hg verify - -cd .. - -chmod +w a/.hg # let test clean up
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-pull-pull-corruption Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,41 @@ +#!/bin/sh +# +# Corrupt an hg repo with two pulls. +# + +# create one repo with a long history +hg init source1 +cd source1 +touch foo +hg add foo +for i in 1 2 3 4 5 6 7 8 9 10; do + echo $i >> foo + hg ci -m $i +done +cd .. + +# create one repo with a shorter history +hg clone -r 0 source1 source2 +cd source2 +echo a >> foo +hg ci -m a +cd .. + +# create a third repo to pull both other repos into it +hg init corrupted +cd corrupted +# use a hook to make the second pull start while the first one is still running +echo '[hooks]' >> .hg/hgrc +echo 'prechangegroup = sleep 5' >> .hg/hgrc + +# start a pull... +hg pull ../source1 & + +# ... and start another pull before the first one has finished +sleep 1 +hg pull ../source2 2>/dev/null + +# see the result +wait +hg verify +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-pull-pull-corruption.out Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,24 @@ +requesting all changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 1 changes to 1 files +pulling from ../source2 +pulling from ../source1 +requesting all changes +adding changesets +adding manifests +adding file changes +added 10 changesets with 10 changes to 1 files +(run 'hg update' to get a working copy) +searching for changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 1 changes to 1 files (+1 heads) +(run 'hg update' to get a working copy) +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 11 changesets, 11 total revisions
--- a/tests/test-pull.out Mon Mar 06 17:58:53 2006 +0100
+++ b/tests/test-pull.out Mon Mar 06 18:00:44 2006 +0100
@@ -19,4 +19,3 @@
 pulling from http://localhost:20059/
 searching for changes
 no changes found
-killed!
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-push-r Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,61 @@ +#!/bin/bash + +hg init test +cd test +cat >>afile <<EOF +0 +EOF +hg add afile +hg commit -m "0.0" +cat >>afile <<EOF +1 +EOF +hg commit -m "0.1" +cat >>afile <<EOF +2 +EOF +hg commit -m "0.2" +cat >>afile <<EOF +3 +EOF +hg commit -m "0.3" +hg update -C 0 +cat >>afile <<EOF +1 +EOF +hg commit -m "1.1" +cat >>afile <<EOF +2 +EOF +hg commit -m "1.2" +cat >fred <<EOF +a line +EOF +cat >>afile <<EOF +3 +EOF +hg add fred +hg commit -m "1.3" +hg mv afile adifferentfile +hg commit -m "1.3m" +hg update -C 3 +hg mv afile anotherfile +hg commit -m "0.3m" +hg debugindex .hg/data/afile.i +hg debugindex .hg/data/adifferentfile.i +hg debugindex .hg/data/anotherfile.i +hg debugindex .hg/data/fred.i +hg debugindex .hg/00manifest.i +hg verify +cd .. +for i in 0 1 2 3 4 5 6 7 8; do + mkdir test-"$i" + hg --cwd test-"$i" init + hg -R test push -r "$i" test-"$i" + cd test-"$i" + hg verify + cd .. +done +cd test-8 +hg pull ../test-7 +hg verify
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-push-r.out Mon Mar 06 18:00:44 2006 +0100 @@ -0,0 +1,135 @@ + rev offset length base linkrev nodeid p1 p2 + 0 0 3 0 0 362fef284ce2 000000000000 000000000000 + 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000 + 2 8 7 2 2 4c982badb186 125144f7e028 000000000000 + 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000 + rev offset length base linkrev nodeid p1 p2 + 0 0 75 0 7 905359268f77 000000000000 000000000000 + rev offset length base linkrev nodeid p1 p2 + 0 0 75 0 8 905359268f77 000000000000 000000000000 + rev offset length base linkrev nodeid p1 p2 + 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000 + rev offset length base linkrev nodeid p1 p2 + 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000 + 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000 + 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000 + 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000 + 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000 + 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000 + 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000 +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +4 files, 9 changesets, 7 total revisions +pushing to test-0 +searching for changes +adding changesets +adding manifests +adding file changes +added 1 changesets with 1 changes to 1 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 1 changesets, 1 total revisions +pushing to test-1 +searching for changes +adding changesets +adding manifests +adding file changes +added 2 changesets with 2 changes to 1 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 2 changesets, 2 total revisions +pushing to test-2 +searching for changes +adding changesets +adding manifests +adding file changes +added 3 changesets with 3 changes to 1 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 3 changesets, 3 total revisions +pushing to test-3 +searching for changes +adding changesets +adding manifests +adding file changes +added 4 changesets with 4 changes to 1 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 4 changesets, 4 total revisions +pushing to test-4 +searching for changes +adding changesets +adding manifests +adding file changes +added 2 changesets with 2 changes to 1 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 2 changesets, 2 total revisions +pushing to test-5 +searching for changes +adding changesets +adding manifests +adding file changes +added 3 changesets with 3 changes to 1 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +1 files, 3 changesets, 3 total revisions +pushing to test-6 +searching for changes +adding changesets +adding manifests +adding file changes +added 4 changesets with 5 changes to 2 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +2 files, 4 changesets, 5 total revisions +pushing to test-7 +searching for changes +adding changesets +adding manifests +adding file changes +added 5 changesets with 6 changes to 3 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +3 files, 
5 changesets, 6 total revisions +pushing to test-8 +searching for changes +adding changesets +adding manifests +adding file changes +added 5 changesets with 5 changes to 2 files +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +2 files, 5 changesets, 5 total revisions +pulling from ../test-7 +searching for changes +adding changesets +adding manifests +adding file changes +added 4 changesets with 2 changes to 3 files (+1 heads) +(run 'hg update' to get a working copy) +checking changesets +checking manifests +crosschecking files in changesets and manifests +checking files +4 files, 9 changesets, 7 total revisions
--- a/tests/test-rename Mon Mar 06 17:58:53 2006 +0100 +++ b/tests/test-rename Mon Mar 06 18:00:44 2006 +0100 @@ -158,3 +158,24 @@ hg rename d1 d3 hg status hg update -C + +echo "# transitive rename" +hg rename d1/b d1/bb +hg rename d1/bb d1/bc +hg status +hg update -C + +echo "# transitive rename --after" +hg rename d1/b d1/bb +mv d1/bb d1/bc +hg rename --after d1/bb d1/bc +hg status +hg update -C + +echo "# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)" +hg rename d1/b d1/bb +echo "some stuff added to d1/bb" >> d1/bb +hg rename d1/bb d1/b +hg status +hg debugstate | grep copy +hg update -C
--- a/tests/test-rename.out Mon Mar 06 17:58:53 2006 +0100 +++ b/tests/test-rename.out Mon Mar 06 18:00:44 2006 +0100 @@ -246,3 +246,11 @@ R d1/b R d1/ba R d1/d11/a1 +# transitive rename +A d1/bc +R d1/b +# transitive rename --after +A d1/bc +R d1/b +# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b) +M d1/b