merge with crew.
--- a/.hgignore Sun Mar 12 15:58:56 2006 -0800
+++ b/.hgignore Sun Mar 12 16:21:59 2006 -0800
@@ -12,6 +12,7 @@
build
dist
doc/*.[0-9]
+doc/*.[0-9].gendoc.txt
doc/*.[0-9].{x,ht}ml
MANIFEST
patches
--- a/PKG-INFO Sun Mar 12 15:58:56 2006 -0800
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-Metadata-Version: 1.0
-Name: mercurial
-Version: 0.7
-Summary: scalable distributed SCM
-Home-page: http://selenic.com/mercurial
-Author: Matt Mackall
-Author-email: mpm@selenic.com
-License: GNU GPL
-Description: UNKNOWN
-Platform: UNKNOWN
--- a/contrib/bash_completion Sun Mar 12 15:58:56 2006 -0800
+++ b/contrib/bash_completion Sun Mar 12 16:21:59 2006 -0800
@@ -1,27 +1,5 @@
shopt -s extglob
-_hg_command_list()
-{
- "$hg" --debug help 2>/dev/null | \
- awk 'function command_line(line) {
- gsub(/,/, "", line)
- gsub(/:.*/, "", line)
- split(line, aliases)
- command = aliases[1]
- delete aliases[1]
- print command
- for (i in aliases)
- if (index(command, aliases[i]) != 1)
- print aliases[i]
- }
- /^list of commands:/ {commands=1}
- commands && /^ debug/ {a[i++] = $0; next;}
- commands && /^ [^ ]/ {command_line($0)}
- /^global options:/ {exit 0}
- END {for (i in a) command_line(a[i])}'
-
-}
-
_hg_option_list()
{
"$hg" -v help $1 2>/dev/null | \
@@ -37,21 +15,9 @@
_hg_commands()
{
- local all commands result
-
- all=$(_hg_command_list)
- commands=${all%%$'\n'debug*}
- result=$(compgen -W '$commands' -- "$cur")
-
- # hide debug commands from users, but complete them if
- # there is no other possible command
- if [ "$result" = "" ]; then
- local debug
- debug=debug${all#*$'\n'debug}
- result=$(compgen -W '$debug' -- "$cur")
- fi
-
- COMPREPLY=(${COMPREPLY[@]:-} $result)
+ local commands
+ commands="$("$hg" debugcomplete "$cur" 2>/dev/null)" || commands=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$commands' -- "$cur"))
}
_hg_paths()
--- a/contrib/hbisect.py Sun Mar 12 15:58:56 2006 -0800
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,287 +0,0 @@
-#!/usr/bin/env python
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-from mercurial.demandload import demandload
-demandload(globals(), "os sys sets")
-from mercurial import hg
-
-versionstr = "0.0.3"
-
-def lookup_rev(ui, repo, rev=None):
- """returns rev or the checked-out revision if rev is None"""
- if not rev is None:
- return repo.lookup(rev)
- parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
- if len(parents) != 1:
- ui.warn("unexpected number of parents\n")
- ui.warn("please commit or revert\n")
- sys.exit(1)
- return parents.pop()
-
-def check_clean(ui, repo):
- modified, added, removed, deleted, unknown = repo.changes()
- if modified or added or removed:
- ui.warn("Repository is not clean, please commit or revert\n")
- sys.exit(1)
-
-class bisect(object):
- """dichotomic search in the DAG of changesets"""
- def __init__(self, ui, repo):
- self.repo = repo
- self.path = os.path.join(repo.join(""), "bisect")
- self.ui = ui
- self.goodrevs = []
- self.badrev = None
- self.good_dirty = 0
- self.bad_dirty = 0
- self.good_path = os.path.join(self.path, "good")
- self.bad_path = os.path.join(self.path, "bad")
-
- s = self.good_path
- if os.path.exists(s):
- self.goodrevs = self.repo.opener(s).read().splitlines()
- self.goodrevs = [hg.bin(x) for x in self.goodrevs]
- s = self.bad_path
- if os.path.exists(s):
- r = self.repo.opener(s).read().splitlines()
- if r:
- self.badrev = hg.bin(r.pop(0))
-
- def __del__(self):
- if not os.path.isdir(self.path):
- return
- f = self.repo.opener(self.good_path, "w")
- f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
- if len(self.goodrevs) > 0:
- f.write("\n")
- f = self.repo.opener(self.bad_path, "w")
- if self.badrev:
- f.write(hg.hex(self.badrev) + "\n")
-
- def init(self):
- """start a new bisection"""
- if os.path.isdir(self.path):
- self.ui.warn("bisect directory already exists\n")
- return 1
- os.mkdir(self.path)
- check_clean(self.ui, self.repo)
- return 0
-
- def reset(self):
- """finish a bisection"""
- if os.path.isdir(self.path):
- sl = [self.bad_path, self.good_path]
- for s in sl:
- if os.path.exists(s):
- os.unlink(s)
- os.rmdir(self.path)
- # Not sure about this
- #self.ui.write("Going back to tip\n")
- #self.repo.update(self.repo.changelog.tip())
- return 1
-
- def num_ancestors(self, head=None, stop=None):
- """
- returns a dict with the mapping:
- node -> number of ancestors (self included)
- for all nodes who are ancestor of head and
- not in stop.
- """
- if head is None:
- head = self.badrev
- return self.__ancestors_and_nb_ancestors(head, stop)[1]
-
- def ancestors(self, head=None, stop=None):
- """
- returns the set of the ancestors of head (self included)
- who are not in stop.
- """
- if head is None:
- head = self.badrev
- return self.__ancestors_and_nb_ancestors(head, stop)[0]
-
- def __ancestors_and_nb_ancestors(self, head, stop=None):
- """
- if stop is None then ancestors of goodrevs are used as
- lower limit.
-
- returns (anc, n_child) where anc is the set of the ancestors of head
- and n_child is a dictionary with the following mapping:
- node -> number of ancestors (self included)
- """
- cl = self.repo.changelog
- if not stop:
- stop = sets.Set([])
- for g in reversed(self.goodrevs):
- if g in stop:
- continue
- stop.update(cl.reachable(g))
- def num_children(a):
- """
- returns a dictionnary with the following mapping
- node -> [number of children, empty set]
- """
- d = {a: [0, sets.Set([])]}
- for i in xrange(cl.rev(a)+1):
- n = cl.node(i)
- if not d.has_key(n):
- d[n] = [0, sets.Set([])]
- parents = [p for p in cl.parents(n) if p != hg.nullid]
- for p in parents:
- d[p][0] += 1
- return d
-
- if head in stop:
- self.ui.warn("Unconsistent state, %s is good and bad\n"
- % hg.hex(head))
- sys.exit(1)
- n_child = num_children(head)
- for i in xrange(cl.rev(head)+1):
- n = cl.node(i)
- parents = [p for p in cl.parents(n) if p != hg.nullid]
- for p in parents:
- n_child[p][0] -= 1
- if not n in stop:
- n_child[n][1].union_update(n_child[p][1])
- if n_child[p][0] == 0:
- n_child[p] = len(n_child[p][1])
- if not n in stop:
- n_child[n][1].add(n)
- if n_child[n][0] == 0:
- if n == head:
- anc = n_child[n][1]
- n_child[n] = len(n_child[n][1])
- return anc, n_child
-
- def next(self):
- if not self.badrev:
- self.ui.warn("You should give at least one bad\n")
- sys.exit(1)
- if not self.goodrevs:
- self.ui.warn("No good revision given\n")
- self.ui.warn("Assuming the first revision is good\n")
- ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(self.badrev)
- tot = len(ancestors)
- if tot == 1:
- if ancestors.pop() != self.badrev:
- self.ui.warn("Could not find the first bad revision\n")
- sys.exit(1)
- self.ui.write(
- "The first bad revision is : %s\n" % hg.hex(self.badrev))
- sys.exit(0)
- self.ui.write("%d revisions left\n" % tot)
- best_rev = None
- best_len = -1
- for n in ancestors:
- l = num_ancestors[n]
- l = min(l, tot - l)
- if l > best_len:
- best_len = l
- best_rev = n
- return best_rev
-
- def autonext(self):
- """find and update to the next revision to test"""
- check_clean(self.ui, self.repo)
- rev = self.next()
- self.ui.write("Now testing %s\n" % hg.hex(rev))
- return self.repo.update(rev, force=True)
-
- def good(self, rev):
- self.goodrevs.append(rev)
-
- def autogood(self, rev=None):
- """mark revision as good and update to the next revision to test"""
- check_clean(self.ui, self.repo)
- rev = lookup_rev(self.ui, self.repo, rev)
- self.good(rev)
- if self.badrev:
- self.autonext()
-
- def bad(self, rev):
- self.badrev = rev
-
- def autobad(self, rev=None):
- """mark revision as bad and update to the next revision to test"""
- check_clean(self.ui, self.repo)
- rev = lookup_rev(self.ui, self.repo, rev)
- self.bad(rev)
- if self.goodrevs:
- self.autonext()
-
-# should we put it in the class ?
-def test(ui, repo, rev):
- """test the bisection code"""
- b = bisect(ui, repo)
- rev = repo.lookup(rev)
- ui.write("testing with rev %s\n" % hg.hex(rev))
- anc = b.ancestors()
- while len(anc) > 1:
- if not rev in anc:
- ui.warn("failure while bisecting\n")
- sys.exit(1)
- ui.write("it worked :)\n")
- new_rev = b.next()
- ui.write("choosing if good or bad\n")
- if rev in b.ancestors(head=new_rev):
- b.bad(new_rev)
- ui.write("it is bad\n")
- else:
- b.good(new_rev)
- ui.write("it is good\n")
- anc = b.ancestors()
- repo.update(new_rev, force=True)
- for v in anc:
- if v != rev:
- ui.warn("fail to found cset! :(\n")
- return 1
- ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
- ui.write("Everything is ok :)\n")
- return 0
-
-def bisect_run(ui, repo, cmd=None, *args):
- """bisect extension: dichotomic search in the DAG of changesets
-for subcommands see "hg bisect help\"
- """
- def help_(cmd=None, *args):
- """show help for a given bisect subcommand or all subcommands"""
- cmdtable = bisectcmdtable
- if cmd:
- doc = cmdtable[cmd][0].__doc__
- synopsis = cmdtable[cmd][2]
- ui.write(synopsis + "\n")
- ui.write("\n" + doc + "\n")
- return
- ui.write("list of subcommands for the bisect extension\n\n")
- cmds = cmdtable.keys()
- cmds.sort()
- m = max([len(c) for c in cmds])
- for cmd in cmds:
- doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
- ui.write(" %-*s %s\n" % (m, cmd, doc))
-
- b = bisect(ui, repo)
- bisectcmdtable = {
- "init": (b.init, 0, "hg bisect init"),
- "bad": (b.autobad, 1, "hg bisect bad [<rev>]"),
- "good": (b.autogood, 1, "hg bisect good [<rev>]"),
- "next": (b.autonext, 0, "hg bisect next"),
- "reset": (b.reset, 0, "hg bisect reset"),
- "help": (help_, 1, "hg bisect help [<subcommand>]"),
- }
-
- if not bisectcmdtable.has_key(cmd):
- ui.warn("bisect: Unknown sub-command\n")
- return help_()
- if len(args) > bisectcmdtable[cmd][1]:
- ui.warn("bisect: Too many arguments\n")
- return help_()
- return bisectcmdtable[cmd][0](*args)
-
-cmdtable = {
- "bisect": (bisect_run, [],
- "hg bisect [help|init|reset|next|good|bad]"),
- #"bisect-test": (test, [], "hg bisect-test rev"),
-}
--- a/contrib/mercurial.spec Sun Mar 12 15:58:56 2006 -0800
+++ b/contrib/mercurial.spec Sun Mar 12 16:21:59 2006 -0800
@@ -1,7 +1,7 @@
Summary: Mercurial -- a distributed SCM
Name: mercurial
-Version: 0.7
-Release: 1
+Version: 0.8
+Release: 0
License: GPL
Group: Development/Tools
Source: http://www.selenic.com/mercurial/release/%{name}-%{version}.tar.gz
@@ -10,6 +10,7 @@
%define pythonver %(python -c 'import sys;print ".".join(map(str, sys.version_info[:2]))')
%define pythonlib %{_libdir}/python%{pythonver}/site-packages/%{name}
+%define hgext %{_libdir}/python%{pythonver}/site-packages/hgext
%description
Mercurial is a fast, lightweight source control management system designed
@@ -30,10 +31,12 @@
%files
%defattr(-,root,root,-)
-%doc doc/* contrib/patchbomb *.cgi
+%doc doc/* *.cgi
%dir %{pythonlib}
+%dir %{hgext}
%{_bindir}/hgmerge
%{_bindir}/hg
%{pythonlib}/templates
%{pythonlib}/*.py*
%{pythonlib}/*.so
+%{hgext}/*.py*
--- a/contrib/win32/mercurial.iss Sun Mar 12 15:58:56 2006 -0800
+++ b/contrib/win32/mercurial.iss Sun Mar 12 16:21:59 2006 -0800
@@ -28,23 +28,22 @@
DefaultGroupName=Mercurial
[Files]
-Source: templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
+Source: ..\..\msys\1.0\bin\patch.exe; DestDir: {app}
Source: contrib\mercurial.el; DestDir: {app}/Contrib
-Source: contrib\patchbomb; DestDir: {app}/Contrib
-Source: dist\w9xpopen.exe; DestDir: {app}
+Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
+Source: contrib\win32\mercurial.ini; DestDir: {app}; DestName: Mercurial.ini; Flags: confirmoverwrite
+Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
Source: dist\hg.exe; DestDir: {app}
+Source: dist\library.zip; DestDir: {app}
+Source: dist\mfc71.dll; DestDir: {sys}; Flags: sharedfile uninsnosharedfileprompt
Source: dist\msvcr71.dll; DestDir: {sys}; Flags: sharedfile uninsnosharedfileprompt
-Source: dist\library.zip; DestDir: {app}
+Source: dist\w9xpopen.exe; DestDir: {app}
Source: doc\*.txt; DestDir: {app}\Docs
-Source: dist\mfc71.dll; DestDir: {sys}; Flags: sharedfile uninsnosharedfileprompt
+Source: templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
+Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
Source: COPYING; DestDir: {app}; DestName: Copying.txt
Source: comparison.txt; DestDir: {app}\Docs; DestName: Comparison.txt
Source: notes.txt; DestDir: {app}\Docs; DestName: DesignNotes.txt
-Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
-Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
-Source: ..\..\msys\1.0\bin\patch.exe; DestDir: {app}
-Source: contrib\win32\mercurial.ini; DestDir: {app}; DestName: Mercurial.ini; Flags: confirmoverwrite
-Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
[INI]
Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: http://www.selenic.com/mercurial/
--- a/doc/Makefile Sun Mar 12 15:58:56 2006 -0800
+++ b/doc/Makefile Sun Mar 12 16:21:59 2006 -0800
@@ -8,6 +8,12 @@
html: $(HTML)
+hg.1.txt: hg.1.gendoc.txt
+ touch hg.1.txt
+
+hg.1.gendoc.txt: ../mercurial/commands.py
+ python gendoc.py > $@
+
%: %.xml
xmlto man $*.xml
@@ -18,4 +24,4 @@
asciidoc -b html4 $*.txt || asciidoc -b html $*.txt
clean:
- $(RM) $(MAN) $(MAN:%=%.xml) $(MAN:%=%.html)
+ $(RM) $(MAN) $(MAN:%=%.xml) $(MAN:%=%.html) *.[0-9].gendoc.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/gendoc.py Sun Mar 12 16:21:59 2006 -0800
@@ -0,0 +1,92 @@
+import sys, textwrap
+# import from the live mercurial repo
+sys.path.insert(0, "..")
+from mercurial.commands import table, globalopts
+from mercurial.i18n import gettext as _
+
+def get_desc(docstr):
+ if not docstr:
+ return "", ""
+ # sanitize
+ docstr = docstr.strip("\n")
+ docstr = docstr.rstrip()
+ shortdesc = docstr.splitlines()[0].strip()
+
+ i = docstr.find("\n")
+ if i != -1:
+ desc = docstr[i+2:]
+ else:
+ desc = " %s" % shortdesc
+ return (shortdesc, desc)
+
+def get_opts(opts):
+ for shortopt, longopt, default, desc in opts:
+ allopts = []
+ if shortopt:
+ allopts.append("-%s" % shortopt)
+ if longopt:
+ allopts.append("--%s" % longopt)
+ desc += default and _(" (default: %s)") % default or ""
+ yield(", ".join(allopts), desc)
+
+def get_cmd(cmd):
+ d = {}
+ attr = table[cmd]
+ cmds = cmd.lstrip("^").split("|")
+
+ d['synopsis'] = attr[2]
+ d['cmd'] = cmds[0]
+ d['aliases'] = cmd.split("|")[1:]
+ d['desc'] = get_desc(attr[0].__doc__)
+ d['opts'] = list(get_opts(attr[1]))
+ return d
+
+
+def show_doc(ui):
+ def bold(s, text=""):
+ ui.write("%s\n%s\n%s\n" % (s, "="*len(s), text))
+ def underlined(s, text=""):
+ ui.write("%s\n%s\n%s\n" % (s, "-"*len(s), text))
+
+ # print options
+ underlined(_("OPTIONS"))
+ for optstr, desc in get_opts(globalopts):
+ ui.write("%s::\n %s\n\n" % (optstr, desc))
+
+ # print cmds
+ underlined(_("COMMANDS"))
+ h = {}
+ for c, attr in table.items():
+ f = c.split("|")[0]
+ f = f.lstrip("^")
+ h[f] = c
+ cmds = h.keys()
+ cmds.sort()
+
+ for f in cmds:
+ if f.startswith("debug"): continue
+ d = get_cmd(h[f])
+ # synopsis
+ ui.write("%s::\n" % d['synopsis'].replace("hg ","", 1))
+ # description
+ ui.write("%s\n\n" % d['desc'][1])
+ # options
+ opt_output = list(d['opts'])
+ if opt_output:
+ opts_len = max([len(line[0]) for line in opt_output])
+ ui.write(_(" options:\n"))
+ for optstr, desc in opt_output:
+ if desc:
+ s = "%-*s %s" % (opts_len, optstr, desc)
+ else:
+ s = optstr
+ s = textwrap.fill(s, initial_indent=4 * " ",
+ subsequent_indent=(6 + opts_len) * " ")
+ ui.write("%s\n" % s)
+ ui.write("\n")
+ # aliases
+ if d['aliases']:
+ ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases']))
+
+if __name__ == "__main__":
+ show_doc(sys.stdout)
--- a/doc/hg.1.txt Sun Mar 12 15:58:56 2006 -0800
+++ b/doc/hg.1.txt Sun Mar 12 16:21:59 2006 -0800
@@ -14,42 +14,6 @@
-----------
The hg(1) command provides a command line interface to the Mercurial system.
-OPTIONS
--------
-
--R, --repository::
- repository root directory
-
---cwd::
- change working directory
-
--y, --noninteractive::
- do not prompt, assume 'yes' for any required answers
-
--q, --quiet::
- suppress output
-
--v, --verbose::
- enable additional output
-
---debug::
- enable debugging output
-
---traceback::
- print traceback on exception
-
---time::
- time how long the command takes
-
---profile::
- print command execution profile
-
---version::
- output version information and exit
-
--h, --help::
- display help and exit
-
COMMAND ELEMENTS
----------------
@@ -70,617 +34,8 @@
fast and the old-http:// protocol which is much slower but does not
require a special server on the web host.
-COMMANDS
---------
-add [options] [files ...]::
- Schedule files to be version controlled and added to the repository.
-
- The files will be added to the repository at the next commit.
-
- If no names are given, add all files in the current directory and
- its subdirectories.
-
-addremove [options] [files ...]::
- Add all new files and remove all missing files from the repository.
-
- New files are ignored if they match any of the patterns in .hgignore. As
- with add, these changes take effect at the next commit.
-
-annotate [-r <rev> -u -n -c -d] [files ...]::
- List changes in files, showing the revision id responsible for each line
-
- This command is useful to discover who did a change or when a change took
- place.
-
- Without the -a option, annotate will avoid processing files it
- detects as binary. With -a, annotate will generate an annotation
- anyway, probably with undesirable results.
-
- options:
- -a, --text treat all files as text
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
- -r, --revision <rev> annotate the specified revision
- -u, --user list the author
- -d, --date list the commit date
- -c, --changeset list the changeset
- -n, --number list the revision number (default)
-
-bundle <file> <other>::
- (EXPERIMENTAL)
-
- Generate a compressed changegroup file collecting all changesets
- not found in the other repository.
-
- This file can then be transferred using conventional means and
- applied to another repository with the unbundle command. This is
- useful when native push and pull are not available or when
- exporting an entire repository is undesirable. The standard file
- extension is ".hg".
-
- Unlike import/export, this exactly preserves all changeset
- contents including permissions, rename data, and revision history.
-
-cat [options] <file ...>::
- Print the specified files as they were at the given revision.
- If no revision is given then the tip is used.
-
- Output may be to a file, in which case the name of the file is
- given using a format string. The formatting rules are the same as
- for the export command, with the following additions:
-
- %s basename of file being printed
- %d dirname of file being printed, or '.' if in repo root
- %p root-relative path name of file being printed
-
- options:
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
- -o, --output <filespec> print output to file with formatted name
- -r, --rev <rev> print the given revision
-
-clone [options] <source> [dest]::
- Create a copy of an existing repository in a new directory.
-
- If no destination directory name is specified, it defaults to the
- basename of the source.
-
- The location of the source is added to the new repository's
- .hg/hgrc file, as the default to be used for future pulls.
-
- For efficiency, hardlinks are used for cloning whenever the source
- and destination are on the same filesystem. Some filesystems,
- such as AFS, implement hardlinking incorrectly, but do not report
- errors. In these cases, use the --pull option to avoid
- hardlinking.
-
- See pull for valid source format details.
-
- options:
- -U, --noupdate do not update the new working directory
- --pull use pull protocol to copy metadata
- -e, --ssh specify ssh command to use
- --remotecmd specify hg command to run on the remote side
-
-commit [options] [files...]::
- Commit changes to the given files into the repository.
-
- If a list of files is omitted, all changes reported by "hg status"
- from the root of the repository will be commited.
-
- The HGEDITOR or EDITOR environment variables are used to start an
- editor to add a commit comment.
-
- Options:
-
- -A, --addremove run addremove during commit
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
- -m, --message <text> use <text> as commit message
- -l, --logfile <file> read the commit message from <file>
- -d, --date <datecode> record datecode as commit date
- -u, --user <user> record user as commiter
-
- aliases: ci
-
-copy <source ...> <dest>::
- Mark dest as having copies of source files. If dest is a
- directory, copies are put in that directory. If dest is a file,
- there can only be one source.
-
- By default, this command copies the contents of files as they
- stand in the working directory. If invoked with --after, the
- operation is recorded, but no copying is performed.
-
- This command takes effect in the next commit.
-
- NOTE: This command should be treated as experimental. While it
- should properly record copied files, this information is not yet
- fully used by merge, nor fully reported by log.
-
- Options:
- -A, --after record a copy that has already occurred
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
- -f, --force forcibly copy over an existing managed file
-
- aliases: cp
-
-diff [-a] [-r revision] [-r revision] [files ...]::
- Show differences between revisions for the specified files.
-
- Differences between files are shown using the unified diff format.
-
- When two revision arguments are given, then changes are shown
- between those revisions. If only one revision is specified then
- that revision is compared to the working directory, and, when no
- revisions are specified, the working directory files are compared
- to its parent.
-
- Without the -a option, diff will avoid generating diffs of files
- it detects as binary. With -a, diff will generate a diff anyway,
- probably with undesirable results.
-
- options:
- -a, --text treat all files as text
- -I, --include <pat> include names matching the given patterns
- -p, --show-function show which function each change is in
- -X, --exclude <pat> exclude names matching the given patterns
- -w, --ignore-all-space ignore white space when comparing lines
-
-export [-o filespec] [revision] ...::
- Print the changeset header and diffs for one or more revisions.
-
- The information shown in the changeset header is: author,
- changeset hash, parent and commit comment.
-
- Output may be to a file, in which case the name of the file is
- given using a format string. The formatting rules are as follows:
-
- %% literal "%" character
- %H changeset hash (40 bytes of hexadecimal)
- %N number of patches being generated
- %R changeset revision number
- %b basename of the exporting repository
- %h short-form changeset hash (12 bytes of hexadecimal)
- %n zero-padded sequence number, starting at 1
- %r zero-padded changeset revision number
-
- Without the -a option, export will avoid generating diffs of files
- it detects as binary. With -a, export will generate a diff anyway,
- probably with undesirable results.
-
- options:
- -a, --text treat all files as text
- -o, --output <filespec> print output to file with formatted name
-
-forget [options] [files]::
- Undo an 'hg add' scheduled for the next commit.
-
- options:
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
-
-grep [options] pattern [files]::
- Search revisions of files for a regular expression.
-
- This command behaves differently than Unix grep. It only accepts
- Python/Perl regexps. It searches repository history, not the
- working directory. It always prints the revision number in which
- a match appears.
-
- By default, grep only prints output for the first revision of a
- file in which it finds a match. To get it to print every revision
- that contains a change in match status ("-" for a match that
- becomes a non-match, or "+" for a non-match that becomes a match),
- use the --all flag.
-
- options:
- -0, --print0 end fields with NUL
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
- --all print all revisions that match
- -i, --ignore-case ignore case when matching
- -l, --files-with-matches print only filenames and revs that match
- -n, --line-number print matching line numbers
- -r <rev>, --rev <rev> search in given revision range
- -u, --user print user who committed change
-
-heads::
- Show all repository head changesets.
-
- Repository "heads" are changesets that don't have children
- changesets. They are where development generally takes place and
- are the usual targets for update and merge operations.
-
- options:
- -b, --branches show branches
- -r, --rev <rev> show only heads which are descendants of rev
- --style <style> display using style map file
- --template <tpl> display using template
-
-identify::
- Print a short summary of the current state of the repo.
-
- This summary identifies the repository state using one or two parent
- hash identifiers, followed by a "+" if there are uncommitted changes
- in the working directory, followed by a list of tags for this revision.
-
- aliases: id
-
-import [-p <n> -b <base> -f] <patches>::
- Import a list of patches and commit them individually.
-
- If there are outstanding changes in the working directory, import
- will abort unless given the -f flag.
-
- If a patch looks like a mail message (its first line starts with
- "From " or looks like an RFC822 header), it will not be applied
- unless the -f option is used. The importer neither parses nor
- discards mail headers, so use -f only to override the "mailness"
- safety check, not to import a real mail message.
-
- options:
- -p, --strip <n> directory strip option for patch. This has the same
- meaning as the corresponding patch option
- -b <path> base directory to read patches from
- -f, --force skip check for outstanding uncommitted changes
-
- aliases: patch
-
-incoming [-p] [source]::
- Show new changesets found in the specified repo or the default
- pull repo. These are the changesets that would be pulled if a pull
- was requested.
-
- Currently only local repositories are supported.
-
- options:
- -M, --no-merges do not show merges
- -n, --newest-first show newest records first
- -p, --patch show patch
- --style <style> display using style map file
- --template <tpl> display using template
-
- aliases: in
-
-init [dest]::
- Initialize a new repository in the given directory. If the given
- directory does not exist, it is created.
-
- If no directory is given, the current directory is used.
-
-locate [options] [files]::
- Print all files under Mercurial control whose names match the
- given patterns.
-
- This command searches the current directory and its
- subdirectories. To search an entire repository, move to the root
- of the repository.
-
- If no patterns are given to match, this command prints all file
- names.
-
- If you want to feed the output of this command into the "xargs"
- command, use the "-0" option to both this command and "xargs".
- This will avoid the problem of "xargs" treating single filenames
- that contain white space as multiple filenames.
-
- options:
-
- -0, --print0 end filenames with NUL, for use with xargs
- -f, --fullpath print complete paths from the filesystem root
- -I, --include <pat> include names matching the given patterns
- -r, --rev <rev> search the repository as it stood at rev
- -X, --exclude <pat> exclude names matching the given patterns
-
-log [-r revision ...] [-p] [files]::
- Print the revision history of the specified files or the entire project.
-
- By default this command outputs: changeset id and hash, tags,
- parents, user, date and time, and a summary for each commit. The
- -v switch adds some more detail, such as changed files, manifest
- hashes or message signatures.
-
- options:
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
- -b, --branch show branches
- -k, --keyword <str> search for keywords
- -l, --limit <num> print no more than this many changes
- -M, --no-merges do not show merges
- -m, --only-merges only show merges
- -r, --rev <A> show the specified revision or range
- -p, --patch show patch
- --style <style> display using style map file
- --template <tpl> display using template
-
- aliases: history
-
-manifest [revision]::
- Print a list of version controlled files for the given revision.
-
- The manifest is the list of files being version controlled. If no revision
- is given then the tip is used.
-
-outgoing [-p] [dest]::
- Show changesets not found in the specified destination repo or the
- default push repo. These are the changesets that would be pushed
- if a push was requested.
-
- See pull for valid source format details.
-
- options:
- -M, --no-merges do not show merges
- -p, --patch show patch
- -n, --newest-first show newest records first
- --style <style> display using style map file
- --template <tpl> display using template
-
- aliases: out
-
-parents::
- Print the working directory's parent revisions.
-
- options:
- -b, --branches show branches
- --style <style> display using style map file
- --template <tpl> display using template
-
-paths [NAME]::
- Show definition of symbolic path name NAME. If no name is given, show
- definition of available names.
-
- Path names are defined in the [paths] section of /etc/mercurial/hgrc
- and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
-
-pull <repository path>::
- Pull changes from a remote repository to a local one.
-
- This finds all changes from the repository at the specified path
- or URL and adds them to the local repository. By default, this
- does not update the copy of the project in the working directory.
-
- Valid URLs are of the form:
-
- local/filesystem/path
- http://[user@]host[:port][/path]
- https://[user@]host[:port][/path]
- ssh://[user@]host[:port][/path]
-
- SSH requires an accessible shell account on the destination machine
- and a copy of hg in the remote path. With SSH, paths are relative
- to the remote user's home directory by default; use two slashes at
- the start of a path to specify it as relative to the filesystem root.
-
- options:
- -u, --update update the working directory to tip after pull
- -e, --ssh specify ssh command to use
- --remotecmd specify hg command to run on the remote side
-
-push <destination>::
- Push changes from the local repository to the given destination.
-
- This is the symmetrical operation for pull. It helps to move
- changes from the current repository to a different one. If the
- destination is local this is identical to a pull in that directory
- from the current one.
-
- By default, push will refuse to run if it detects the result would
- increase the number of remote heads. This generally indicates the
- the client has forgotten to sync and merge before pushing.
-
- Valid URLs are of the form:
-
- local/filesystem/path
- ssh://[user@]host[:port][/path]
-
- SSH requires an accessible shell account on the destination
- machine and a copy of hg in the remote path.
-
- options:
-
- -f, --force force update
- -e, --ssh specify ssh command to use
- --remotecmd specify hg command to run on the remote side
-
-rawcommit [-p -d -u -F -m -l]::
- Lowlevel commit, for use in helper scripts. (DEPRECATED)
-
- This command is not intended to be used by normal users, as it is
- primarily useful for importing from other SCMs.
-
- This command is now deprecated and will be removed in a future
- release, please use debugsetparents and commit instead.
-
-recover::
- Recover from an interrupted commit or pull.
-
- This command tries to fix the repository status after an interrupted
- operation. It should only be necessary when Mercurial suggests it.
-
-remove [options] [files ...]::
- Schedule the indicated files for removal from the repository.
-
- This command schedules the files to be removed at the next commit.
- This only removes files from the current branch, not from the
- entire project history. If the files still exist in the working
- directory, they will be deleted from it.
-
- aliases: rm
-
-rename <source ...> <dest>::
- Mark dest as copies of sources; mark sources for deletion. If
- dest is a directory, copies are put in that directory. If dest is
- a file, there can only be one source.
-
- By default, this command copies the contents of files as they
- stand in the working directory. If invoked with --after, the
- operation is recorded, but no copying is performed.
-
- This command takes effect in the next commit.
-
- NOTE: This command should be treated as experimental. While it
- should properly record rename files, this information is not yet
- fully used by merge, nor fully reported by log.
-
- Options:
- -A, --after record a rename that has already occurred
- -f, --force forcibly copy over an existing managed file
-
- aliases: mv
-
-revert [names ...]::
- The revert command has two modes of operation.
-
- In its default mode, it reverts any uncommitted modifications made
- to the named files or directories. This restores the contents of
- the affected files to an unmodified state.
-
- Using the -r option, it reverts the given files or directories to
- their state as of an earlier revision. This can be helpful to "roll
- back" some or all of a change that should not have been committed.
-
- Revert modifies the working directory. It does not commit any
- changes, or change the parent of the current working directory.
-
- If a file has been deleted, it is recreated. If the executable
- mode of a file was changed, it is reset.
-
- If a directory is given, all files in that directory and its
- subdirectories are reverted.
-
- If no arguments are given, all files in the current directory and
- its subdirectories are reverted.
-
- options:
- -r, --rev <rev> revision to revert to
- -n, --nonrecursive do not recurse into subdirectories
-
-root::
- Print the root directory of the current repository.
-
-serve [options]::
- Start a local HTTP repository browser and pull server.
-
- By default, the server logs accesses to stdout and errors to
- stderr. Use the "-A" and "-E" options to log to files.
-
- options:
- -A, --accesslog <file> name of access log file to write to
- -d, --daemon run server in background, as a daemon
- -E, --errorlog <file> name of error log file to write to
- -a, --address <addr> address to use
- -p, --port <n> port to use (default: 8000)
- -n, --name <name> name to show in web pages (default: working dir)
- --pid-file <file> write server process ID to given file
- -t, --templatedir <path> web templates to use
- -6, --ipv6 use IPv6 in addition to IPv4
-
-status [options] [files]::
- Show changed files in the working directory. If no names are
- given, all files are shown. Otherwise, only files matching the
- given names are shown.
-
- The codes used to show the status of files are:
-
- M = changed
- A = added
- R = removed
- ? = not tracked
-
- options:
-
- -m, --modified show only modified files
- -a, --added show only added files
- -r, --removed show only removed files
- -u, --unknown show only unknown (not tracked) files
- -n, --no-status hide status prefix
- -0, --print0 end filenames with NUL, for use with xargs
- -I, --include <pat> include names matching the given patterns
- -X, --exclude <pat> exclude names matching the given patterns
-
-tag [-l -m <text> -d <datecode> -u <user>] <name> [revision]::
- Name a particular revision using <name>.
-
- Tags are used to name particular revisions of the repository and are
- very useful to compare different revision, to go back to significant
- earlier versions or to mark branch points as releases, etc.
-
- If no revision is given, the tip is used.
-
- To facilitate version control, distribution, and merging of tags,
- they are stored as a file named ".hgtags" which is managed
- similarly to other project files and can be hand-edited if
- necessary.
-
- options:
- -l, --local make the tag local
- -m, --message <text> message for tag commit log entry
- -d, --date <datecode> datecode for commit
- -u, --user <user> user for commit
-
- Note: Local tags are not version-controlled or distributed and are
- stored in the .hg/localtags file. If there exists a local tag and
- a public tag with the same name, local tag is used.
-
-tags::
- List the repository tags.
-
- This lists both regular and local tags.
-
-tip [-p]::
- Show the tip revision.
-
- options:
- -b, --branches show branches
- -p, --patch show patch
- --style <style> display using style map file
- --template <tpl> display using template
-
-unbundle <file>::
- (EXPERIMENTAL)
-
- Apply a compressed changegroup file generated by the bundle
- command.
-
-undo::
- Undo the last commit or pull transaction.
-
- Roll back the last pull or commit transaction on the
- repository, restoring the project to its earlier state.
-
- This command should be used with care. There is only one level of
- undo and there is no redo.
-
- This command is not intended for use on public repositories. Once
- a change is visible for pull by other users, undoing it locally is
- ineffective.
-
-update [-m -C] [revision]::
- Update the working directory to the specified revision.
-
- By default, update will refuse to run if doing so would require
- merging or discarding local changes.
-
- With the -m option, a merge will be performed.
-
- With the -C option, local changes will be lost.
-
- options:
- -m, --merge allow merging of branches
- -C, --clean overwrite locally modified files
-
- aliases: up checkout co
-
-verify::
- Verify the integrity of the current repository.
-
- This will perform an extensive check of the repository's
- integrity, validating the hashes and checksums of each entry in
- the changelog, manifest, and tracked files, as well as the
- integrity of their crosslinks and indices.
+include::hg.1.gendoc.txt[]
FILE NAME PATTERNS
------------------
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/hbisect.py Sun Mar 12 16:21:59 2006 -0800
@@ -0,0 +1,290 @@
+# bisect extension for mercurial
+#
+# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
+# Inspired by git bisect, extension skeleton taken from mq.py.
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial.demandload import demandload
+demandload(globals(), "os sys sets mercurial:hg,util")
+
+versionstr = "0.0.3"
+
+def lookup_rev(ui, repo, rev=None):
+ """returns rev or the checked-out revision if rev is None"""
+ if not rev is None:
+ return repo.lookup(rev)
+ parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
+ if len(parents) != 1:
+ ui.warn("unexpected number of parents\n")
+ ui.warn("please commit or revert\n")
+ sys.exit(1)
+ return parents.pop()
+
+def check_clean(ui, repo):
+ modified, added, removed, deleted, unknown = repo.changes()
+ if modified or added or removed:
+ ui.warn("Repository is not clean, please commit or revert\n")
+ sys.exit(1)
+
+class bisect(object):
+ """dichotomic search in the DAG of changesets"""
+ def __init__(self, ui, repo):
+ self.repo = repo
+ self.path = repo.join("bisect")
+ self.opener = util.opener(self.path)
+ self.ui = ui
+ self.goodrevs = []
+ self.badrev = None
+ self.good_dirty = 0
+ self.bad_dirty = 0
+ self.good_path = "good"
+ self.bad_path = "bad"
+
+ if os.path.exists(os.path.join(self.path, self.good_path)):
+ self.goodrevs = self.opener(self.good_path).read().splitlines()
+ self.goodrevs = [hg.bin(x) for x in self.goodrevs]
+ if os.path.exists(os.path.join(self.path, self.bad_path)):
+ r = self.opener(self.bad_path).read().splitlines()
+ if r:
+ self.badrev = hg.bin(r.pop(0))
+
+ def __del__(self):
+ if not os.path.isdir(self.path):
+ return
+ f = self.opener(self.good_path, "w")
+ f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
+ if len(self.goodrevs) > 0:
+ f.write("\n")
+ f = self.opener(self.bad_path, "w")
+ if self.badrev:
+ f.write(hg.hex(self.badrev) + "\n")
+
+ def init(self):
+ """start a new bisection"""
+ if os.path.isdir(self.path):
+ self.ui.warn("bisect directory already exists\n")
+ return 1
+ os.mkdir(self.path)
+ check_clean(self.ui, self.repo)
+ return 0
+
+ def reset(self):
+ """finish a bisection"""
+ if os.path.isdir(self.path):
+ sl = [os.path.join(self.path, p)
+ for p in [self.bad_path, self.good_path]]
+ for s in sl:
+ if os.path.exists(s):
+ os.unlink(s)
+ os.rmdir(self.path)
+ # Not sure about this
+ #self.ui.write("Going back to tip\n")
+ #self.repo.update(self.repo.changelog.tip())
+ return 1
+
+ def num_ancestors(self, head=None, stop=None):
+ """
+ returns a dict with the mapping:
+ node -> number of ancestors (self included)
+        for all nodes that are ancestors of head and
+ not in stop.
+ """
+ if head is None:
+ head = self.badrev
+ return self.__ancestors_and_nb_ancestors(head, stop)[1]
+
+ def ancestors(self, head=None, stop=None):
+ """
+ returns the set of the ancestors of head (self included)
+        that are not in stop.
+ """
+ if head is None:
+ head = self.badrev
+ return self.__ancestors_and_nb_ancestors(head, stop)[0]
+
+ def __ancestors_and_nb_ancestors(self, head, stop=None):
+ """
+ if stop is None then ancestors of goodrevs are used as
+ lower limit.
+
+ returns (anc, n_child) where anc is the set of the ancestors of head
+ and n_child is a dictionary with the following mapping:
+ node -> number of ancestors (self included)
+ """
+ cl = self.repo.changelog
+ if not stop:
+ stop = sets.Set([])
+ for i in xrange(len(self.goodrevs)-1, -1, -1):
+ g = self.goodrevs[i]
+ if g in stop:
+ continue
+ stop.update(cl.reachable(g))
+ def num_children(a):
+ """
+            returns a dictionary with the following mapping
+ node -> [number of children, empty set]
+ """
+ d = {a: [0, sets.Set([])]}
+ for i in xrange(cl.rev(a)+1):
+ n = cl.node(i)
+ if not d.has_key(n):
+ d[n] = [0, sets.Set([])]
+ parents = [p for p in cl.parents(n) if p != hg.nullid]
+ for p in parents:
+ d[p][0] += 1
+ return d
+
+ if head in stop:
+ self.ui.warn("Unconsistent state, %s is good and bad\n"
+ % hg.hex(head))
+ sys.exit(1)
+ n_child = num_children(head)
+ for i in xrange(cl.rev(head)+1):
+ n = cl.node(i)
+ parents = [p for p in cl.parents(n) if p != hg.nullid]
+ for p in parents:
+ n_child[p][0] -= 1
+ if not n in stop:
+ n_child[n][1].union_update(n_child[p][1])
+ if n_child[p][0] == 0:
+ n_child[p] = len(n_child[p][1])
+ if not n in stop:
+ n_child[n][1].add(n)
+ if n_child[n][0] == 0:
+ if n == head:
+ anc = n_child[n][1]
+ n_child[n] = len(n_child[n][1])
+ return anc, n_child
+
+ def next(self):
+ if not self.badrev:
+ self.ui.warn("You should give at least one bad\n")
+ sys.exit(1)
+ if not self.goodrevs:
+ self.ui.warn("No good revision given\n")
+ self.ui.warn("Assuming the first revision is good\n")
+ ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(
+ self.badrev)
+ tot = len(ancestors)
+ if tot == 1:
+ if ancestors.pop() != self.badrev:
+ self.ui.warn("Could not find the first bad revision\n")
+ sys.exit(1)
+ self.ui.write(
+ "The first bad revision is : %s\n" % hg.hex(self.badrev))
+ sys.exit(0)
+ self.ui.write("%d revisions left\n" % tot)
+ best_rev = None
+ best_len = -1
+ for n in ancestors:
+ l = num_ancestors[n]
+ l = min(l, tot - l)
+ if l > best_len:
+ best_len = l
+ best_rev = n
+ return best_rev
+
+ def autonext(self):
+ """find and update to the next revision to test"""
+ check_clean(self.ui, self.repo)
+ rev = self.next()
+ self.ui.write("Now testing %s\n" % hg.hex(rev))
+ return self.repo.update(rev, force=True)
+
+ def good(self, rev):
+ self.goodrevs.append(rev)
+
+ def autogood(self, rev=None):
+ """mark revision as good and update to the next revision to test"""
+ check_clean(self.ui, self.repo)
+ rev = lookup_rev(self.ui, self.repo, rev)
+ self.good(rev)
+ if self.badrev:
+ self.autonext()
+
+ def bad(self, rev):
+ self.badrev = rev
+
+ def autobad(self, rev=None):
+ """mark revision as bad and update to the next revision to test"""
+ check_clean(self.ui, self.repo)
+ rev = lookup_rev(self.ui, self.repo, rev)
+ self.bad(rev)
+ if self.goodrevs:
+ self.autonext()
+
+# should we put it in the class ?
+def test(ui, repo, rev):
+ """test the bisection code"""
+ b = bisect(ui, repo)
+ rev = repo.lookup(rev)
+ ui.write("testing with rev %s\n" % hg.hex(rev))
+ anc = b.ancestors()
+ while len(anc) > 1:
+ if not rev in anc:
+ ui.warn("failure while bisecting\n")
+ sys.exit(1)
+ ui.write("it worked :)\n")
+ new_rev = b.next()
+ ui.write("choosing if good or bad\n")
+ if rev in b.ancestors(head=new_rev):
+ b.bad(new_rev)
+ ui.write("it is bad\n")
+ else:
+ b.good(new_rev)
+ ui.write("it is good\n")
+ anc = b.ancestors()
+ repo.update(new_rev, force=True)
+ for v in anc:
+ if v != rev:
+ ui.warn("fail to found cset! :(\n")
+ return 1
+ ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
+ ui.write("Everything is ok :)\n")
+ return 0
+
+def bisect_run(ui, repo, cmd=None, *args):
+ """bisect extension: dichotomic search in the DAG of changesets
+for subcommands see "hg bisect help\"
+ """
+ def help_(cmd=None, *args):
+ """show help for a given bisect subcommand or all subcommands"""
+ cmdtable = bisectcmdtable
+ if cmd:
+ doc = cmdtable[cmd][0].__doc__
+ synopsis = cmdtable[cmd][2]
+ ui.write(synopsis + "\n")
+ ui.write("\n" + doc + "\n")
+ return
+ ui.write("list of subcommands for the bisect extension\n\n")
+ cmds = cmdtable.keys()
+ cmds.sort()
+ m = max([len(c) for c in cmds])
+ for cmd in cmds:
+ doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
+ ui.write(" %-*s %s\n" % (m, cmd, doc))
+
+ b = bisect(ui, repo)
+ bisectcmdtable = {
+ "init": (b.init, 0, "hg bisect init"),
+ "bad": (b.autobad, 1, "hg bisect bad [<rev>]"),
+ "good": (b.autogood, 1, "hg bisect good [<rev>]"),
+ "next": (b.autonext, 0, "hg bisect next"),
+ "reset": (b.reset, 0, "hg bisect reset"),
+ "help": (help_, 1, "hg bisect help [<subcommand>]"),
+ }
+
+ if not bisectcmdtable.has_key(cmd):
+ ui.warn("bisect: Unknown sub-command\n")
+ return help_()
+ if len(args) > bisectcmdtable[cmd][1]:
+ ui.warn("bisect: Too many arguments\n")
+ return help_()
+ return bisectcmdtable[cmd][0](*args)
+
+cmdtable = {
+ "bisect": (bisect_run, [], "hg bisect [help|init|reset|next|good|bad]"),
+ #"bisect-test": (test, [], "hg bisect-test rev"),
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/mq.py Sun Mar 12 16:21:59 2006 -0800
@@ -0,0 +1,1306 @@
+# mq.py - patch queues for mercurial
+#
+# Copyright 2005 Chris Mason <mason@suse.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial.demandload import *
+demandload(globals(), "os sys re struct traceback errno bz2")
+from mercurial.i18n import gettext as _
+from mercurial import ui, hg, revlog, commands, util
+
+versionstr = "0.45"
+
+repomap = {}
+
+class queue:
+ def __init__(self, ui, path, patchdir=None):
+ self.basepath = path
+ if patchdir:
+ self.path = patchdir
+ else:
+ self.path = os.path.join(path, "patches")
+ self.opener = util.opener(self.path)
+ self.ui = ui
+ self.applied = []
+ self.full_series = []
+ self.applied_dirty = 0
+ self.series_dirty = 0
+ self.series_path = "series"
+ self.status_path = "status"
+
+ if os.path.exists(os.path.join(self.path, self.series_path)):
+ self.full_series = self.opener(self.series_path).read().splitlines()
+ self.read_series(self.full_series)
+
+ if os.path.exists(os.path.join(self.path, self.status_path)):
+ self.applied = self.opener(self.status_path).read().splitlines()
+
+ def find_series(self, patch):
+ pre = re.compile("(\s*)([^#]+)")
+ index = 0
+ for l in self.full_series:
+ m = pre.match(l)
+ if m:
+ s = m.group(2)
+ s = s.rstrip()
+ if s == patch:
+ return index
+ index += 1
+ return None
+
+ def read_series(self, list):
+ def matcher(list):
+ pre = re.compile("(\s*)([^#]+)")
+ for l in list:
+ m = pre.match(l)
+ if m:
+ s = m.group(2)
+ s = s.rstrip()
+ if len(s) > 0:
+ yield s
+ self.series = []
+ self.series = [ x for x in matcher(list) ]
+
+ def save_dirty(self):
+ if self.applied_dirty:
+ if len(self.applied) > 0:
+ nl = "\n"
+ else:
+ nl = ""
+ f = self.opener(self.status_path, "w")
+ f.write("\n".join(self.applied) + nl)
+ if self.series_dirty:
+ if len(self.full_series) > 0:
+ nl = "\n"
+ else:
+ nl = ""
+ f = self.opener(self.series_path, "w")
+ f.write("\n".join(self.full_series) + nl)
+
+ def readheaders(self, patch):
+ def eatdiff(lines):
+ while lines:
+ l = lines[-1]
+ if (l.startswith("diff -") or
+ l.startswith("Index:") or
+ l.startswith("===========")):
+ del lines[-1]
+ else:
+ break
+ def eatempty(lines):
+ while lines:
+ l = lines[-1]
+ if re.match('\s*$', l):
+ del lines[-1]
+ else:
+ break
+
+ pf = os.path.join(self.path, patch)
+ message = []
+ comments = []
+ user = None
+ format = None
+ subject = None
+ diffstart = 0
+
+ for line in file(pf):
+ line = line.rstrip()
+ if diffstart:
+ if line.startswith('+++ '):
+ diffstart = 2
+ break
+ if line.startswith("--- "):
+ diffstart = 1
+ continue
+ elif format == "hgpatch":
+ # parse values when importing the result of an hg export
+ if line.startswith("# User "):
+ user = line[7:]
+ elif not line.startswith("# ") and line:
+ message.append(line)
+ format = None
+ elif line == '# HG changeset patch':
+ format = "hgpatch"
+ elif (format != "tagdone" and (line.startswith("Subject: ") or
+ line.startswith("subject: "))):
+ subject = line[9:]
+ format = "tag"
+ elif (format != "tagdone" and (line.startswith("From: ") or
+ line.startswith("from: "))):
+ user = line[6:]
+ format = "tag"
+ elif format == "tag" and line == "":
+ # when looking for tags (subject: from: etc) they
+ # end once you find a blank line in the source
+ format = "tagdone"
+ else:
+ message.append(line)
+ comments.append(line)
+
+ eatdiff(message)
+ eatdiff(comments)
+ eatempty(message)
+ eatempty(comments)
+
+ # make sure message isn't empty
+ if format and format.startswith("tag") and subject:
+ message.insert(0, "")
+ message.insert(0, subject)
+ return (message, comments, user, diffstart > 1)
+
+ def mergeone(self, repo, mergeq, head, patch, rev, wlock):
+ # first try just applying the patch
+ (err, n) = self.apply(repo, [ patch ], update_status=False,
+ strict=True, merge=rev, wlock=wlock)
+
+ if err == 0:
+ return (err, n)
+
+ if n is None:
+ self.ui.warn("apply failed for patch %s\n" % patch)
+ sys.exit(1)
+
+ self.ui.warn("patch didn't work out, merging %s\n" % patch)
+
+ # apply failed, strip away that rev and merge.
+ repo.update(head, allow=False, force=True, wlock=wlock)
+ self.strip(repo, n, update=False, backup='strip', wlock=wlock)
+
+ c = repo.changelog.read(rev)
+ ret = repo.update(rev, allow=True, wlock=wlock)
+ if ret:
+ self.ui.warn("update returned %d\n" % ret)
+ sys.exit(1)
+ n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
+ if n == None:
+ self.ui.warn("repo commit failed\n")
+ sys.exit(1)
+ try:
+ message, comments, user, patchfound = mergeq.readheaders(patch)
+ except:
+ self.ui.warn("Unable to read %s\n" % patch)
+ sys.exit(1)
+
+ patchf = self.opener(patch, "w")
+ if comments:
+ comments = "\n".join(comments) + '\n\n'
+ patchf.write(comments)
+ commands.dodiff(patchf, self.ui, repo, head, n)
+ patchf.close()
+ return (0, n)
+
+ def qparents(self, repo, rev=None):
+ if rev is None:
+ (p1, p2) = repo.dirstate.parents()
+ if p2 == revlog.nullid:
+ return p1
+ if len(self.applied) == 0:
+ return None
+ (top, patch) = self.applied[-1].split(':')
+ top = revlog.bin(top)
+ return top
+ pp = repo.changelog.parents(rev)
+ if pp[1] != revlog.nullid:
+ arevs = [ x.split(':')[0] for x in self.applied ]
+ p0 = revlog.hex(pp[0])
+ p1 = revlog.hex(pp[1])
+ if p0 in arevs:
+ return pp[0]
+ if p1 in arevs:
+ return pp[1]
+ return None
+ return pp[0]
+
+ def mergepatch(self, repo, mergeq, series, wlock):
+ if len(self.applied) == 0:
+ # each of the patches merged in will have two parents. This
+ # can confuse the qrefresh, qdiff, and strip code because it
+ # needs to know which parent is actually in the patch queue.
+ # so, we insert a merge marker with only one parent. This way
+ # the first patch in the queue is never a merge patch
+ #
+ pname = ".hg.patches.merge.marker"
+ n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
+ wlock=wlock)
+ self.applied.append(revlog.hex(n) + ":" + pname)
+ self.applied_dirty = 1
+
+ head = self.qparents(repo)
+
+ for patch in series:
+ patch = mergeq.lookup(patch)
+ if not patch:
+ self.ui.warn("patch %s does not exist\n" % patch)
+ return (1, None)
+
+ info = mergeq.isapplied(patch)
+ if not info:
+ self.ui.warn("patch %s is not applied\n" % patch)
+ return (1, None)
+ rev = revlog.bin(info[1])
+ (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
+ if head:
+ self.applied.append(revlog.hex(head) + ":" + patch)
+ self.applied_dirty = 1
+ if err:
+ return (err, head)
+ return (0, head)
+
+ def apply(self, repo, series, list=False, update_status=True,
+ strict=False, patchdir=None, merge=None, wlock=None):
+ # TODO unify with commands.py
+ if not patchdir:
+ patchdir = self.path
+ pwd = os.getcwd()
+ os.chdir(repo.root)
+ err = 0
+ if not wlock:
+ wlock = repo.wlock()
+ lock = repo.lock()
+ tr = repo.transaction()
+ n = None
+ for patch in series:
+ self.ui.warn("applying %s\n" % patch)
+ pf = os.path.join(patchdir, patch)
+
+ try:
+ message, comments, user, patchfound = self.readheaders(patch)
+ except:
+ self.ui.warn("Unable to read %s\n" % pf)
+ err = 1
+ break
+
+ if not message:
+ message = "imported patch %s\n" % patch
+ else:
+ if list:
+ message.append("\nimported patch %s" % patch)
+ message = '\n'.join(message)
+
+ try:
+ f = os.popen("patch -p1 --no-backup-if-mismatch < '%s'" % (pf))
+ except:
+ self.ui.warn("patch failed, unable to continue (try -v)\n")
+ err = 1
+ break
+ files = []
+ fuzz = False
+ for l in f:
+                l = l.rstrip('\r\n')
+ if self.ui.verbose:
+ self.ui.warn(l + "\n")
+ if l[:14] == 'patching file ':
+ pf = os.path.normpath(l[14:])
+ # when patch finds a space in the file name, it puts
+ # single quotes around the filename. strip them off
+ if pf[0] == "'" and pf[-1] == "'":
+ pf = pf[1:-1]
+ if pf not in files:
+ files.append(pf)
+ printed_file = False
+ file_str = l
+ elif l.find('with fuzz') >= 0:
+ if not printed_file:
+ self.ui.warn(file_str + '\n')
+ printed_file = True
+ self.ui.warn(l + '\n')
+ fuzz = True
+ elif l.find('saving rejects to file') >= 0:
+ self.ui.warn(l + '\n')
+ elif l.find('FAILED') >= 0:
+ if not printed_file:
+ self.ui.warn(file_str + '\n')
+ printed_file = True
+ self.ui.warn(l + '\n')
+ patcherr = f.close()
+
+ if merge and len(files) > 0:
+ # Mark as merged and update dirstate parent info
+ repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
+ p1, p2 = repo.dirstate.parents()
+ repo.dirstate.setparents(p1, merge)
+ if len(files) > 0:
+ commands.addremove_lock(self.ui, repo, files,
+ opts={}, wlock=wlock)
+ n = repo.commit(files, message, user, force=1, lock=lock,
+ wlock=wlock)
+
+ if n == None:
+ self.ui.warn("repo commit failed\n")
+ sys.exit(1)
+
+ if update_status:
+ self.applied.append(revlog.hex(n) + ":" + patch)
+
+ if patcherr:
+ if not patchfound:
+ self.ui.warn("patch %s is empty\n" % patch)
+ err = 0
+ else:
+ self.ui.warn("patch failed, rejects left in working dir\n")
+ err = 1
+ break
+
+ if fuzz and strict:
+ self.ui.warn("fuzz found when applying patch, stopping\n")
+ err = 1
+ break
+ tr.close()
+ os.chdir(pwd)
+ return (err, n)
+
+ def delete(self, repo, patch):
+ patch = self.lookup(patch)
+ info = self.isapplied(patch)
+ if info:
+ self.ui.warn("cannot delete applied patch %s\n" % patch)
+ sys.exit(1)
+ if patch not in self.series:
+ self.ui.warn("patch %s not in series file\n" % patch)
+ sys.exit(1)
+ i = self.find_series(patch)
+ del self.full_series[i]
+ self.read_series(self.full_series)
+ self.series_dirty = 1
+
+ def check_toppatch(self, repo):
+ if len(self.applied) > 0:
+ (top, patch) = self.applied[-1].split(':')
+ top = revlog.bin(top)
+ pp = repo.dirstate.parents()
+ if top not in pp:
+ self.ui.warn("queue top not at dirstate parents. top %s dirstate %s %s\n" %( revlog.short(top), revlog.short(pp[0]), revlog.short(pp[1])))
+ sys.exit(1)
+ return top
+ return None
+ def check_localchanges(self, repo):
+ (c, a, r, d, u) = repo.changes(None, None)
+ if c or a or d or r:
+ self.ui.write("Local changes found, refresh first\n")
+            sys.exit(1)
+
+    def new(self, repo, patch, msg=None, force=None):
+ if not force:
+ self.check_localchanges(repo)
+ self.check_toppatch(repo)
+ wlock = repo.wlock()
+ insert = self.series_end()
+ if msg:
+ n = repo.commit([], "[mq]: %s" % msg, force=True, wlock=wlock)
+ else:
+ n = repo.commit([],
+ "New patch: %s" % patch, force=True, wlock=wlock)
+        if n is None:
+ self.ui.warn("repo commit failed\n")
+ sys.exit(1)
+ self.full_series[insert:insert] = [patch]
+ self.applied.append(revlog.hex(n) + ":" + patch)
+ self.read_series(self.full_series)
+ self.series_dirty = 1
+ self.applied_dirty = 1
+ p = self.opener(patch, "w")
+ if msg:
+ msg = msg + "\n"
+ p.write(msg)
+ p.close()
+ wlock = None
+ r = self.qrepo()
+ if r: r.add([patch])
+
+ def strip(self, repo, rev, update=True, backup="all", wlock=None):
+ def limitheads(chlog, stop):
+ """return the list of all nodes that have no children"""
+ p = {}
+ h = []
+ stoprev = 0
+ if stop in chlog.nodemap:
+ stoprev = chlog.rev(stop)
+
+ for r in range(chlog.count() - 1, -1, -1):
+ n = chlog.node(r)
+ if n not in p:
+ h.append(n)
+ if n == stop:
+ break
+ if r < stoprev:
+ break
+ for pn in chlog.parents(n):
+ p[pn] = 1
+ return h
+
+ def bundle(cg):
+ backupdir = repo.join("strip-backup")
+ if not os.path.isdir(backupdir):
+ os.mkdir(backupdir)
+ name = os.path.join(backupdir, "%s" % revlog.short(rev))
+ name = savename(name)
+ self.ui.warn("saving bundle to %s\n" % name)
+ # TODO, exclusive open
+ f = open(name, "wb")
+ try:
+ f.write("HG10")
+ z = bz2.BZ2Compressor(9)
+ while 1:
+ chunk = cg.read(4096)
+ if not chunk:
+ break
+ f.write(z.compress(chunk))
+ f.write(z.flush())
+ except:
+ os.unlink(name)
+ raise
+ f.close()
+ return name
+
+ def stripall(rev, revnum):
+ cl = repo.changelog
+ c = cl.read(rev)
+ mm = repo.manifest.read(c[0])
+ seen = {}
+
+ for x in xrange(revnum, cl.count()):
+ c = cl.read(cl.node(x))
+ for f in c[3]:
+ if f in seen:
+ continue
+ seen[f] = 1
+ if f in mm:
+ filerev = mm[f]
+ else:
+ filerev = 0
+ seen[f] = filerev
+ # we go in two steps here so the strip loop happens in a
+ # sensible order. When stripping many files, this helps keep
+ # our disk access patterns under control.
+ list = seen.keys()
+ list.sort()
+ for f in list:
+ ff = repo.file(f)
+ filerev = seen[f]
+ if filerev != 0:
+ if filerev in ff.nodemap:
+ filerev = ff.rev(filerev)
+ else:
+ filerev = 0
+ ff.strip(filerev, revnum)
+
+ if not wlock:
+ wlock = repo.wlock()
+ lock = repo.lock()
+ chlog = repo.changelog
+ # TODO delete the undo files, and handle undo of merge sets
+ pp = chlog.parents(rev)
+ revnum = chlog.rev(rev)
+
+ if update:
+ urev = self.qparents(repo, rev)
+ repo.update(urev, allow=False, force=True, wlock=wlock)
+ repo.dirstate.write()
+
+ # save is a list of all the branches we are truncating away
+ # that we actually want to keep. changegroup will be used
+ # to preserve them and add them back after the truncate
+ saveheads = []
+ savebases = {}
+
+ tip = chlog.tip()
+ heads = limitheads(chlog, rev)
+ seen = {}
+
+ # search through all the heads, finding those where the revision
+ # we want to strip away is an ancestor. Also look for merges
+ # that might be turned into new heads by the strip.
+ while heads:
+ h = heads.pop()
+ n = h
+ while True:
+ seen[n] = 1
+ pp = chlog.parents(n)
+ if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
+ if pp[1] not in seen:
+ heads.append(pp[1])
+ if pp[0] == revlog.nullid:
+ break
+ if chlog.rev(pp[0]) < revnum:
+ break
+ n = pp[0]
+ if n == rev:
+ break
+ r = chlog.reachable(h, rev)
+ if rev not in r:
+ saveheads.append(h)
+ for x in r:
+ if chlog.rev(x) > revnum:
+ savebases[x] = 1
+
+ # create a changegroup for all the branches we need to keep
+ if backup is "all":
+ backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
+ bundle(backupch)
+ if saveheads:
+ backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
+ chgrpfile = bundle(backupch)
+
+ stripall(rev, revnum)
+
+ change = chlog.read(rev)
+ repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
+ chlog.strip(revnum, revnum)
+ if saveheads:
+ self.ui.status("adding branch\n")
+ commands.unbundle(self.ui, repo, chgrpfile, update=False)
+ if backup is not "strip":
+ os.unlink(chgrpfile)
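
    The limitheads helper near the top of strip() collects changesets that
    have no children by walking revisions from newest to oldest and marking
    every parent it sees. A tiny standalone sketch of the same idea over a
    made-up parent map (the stop-revision shortcut is omitted):

        # toy DAG, purely illustrative: node -> (p1, p2), -1 is the null parent
        null = -1
        parents = {0: (null, null), 1: (0, null), 2: (1, null), 3: (1, null)}

        seen = {}
        heads = []
        for n in sorted(parents, reverse=True):  # newest to oldest, like the revlog walk
            if n not in seen:                    # nothing marked it as a parent yet
                heads.append(n)
            for p in parents[n]:
                seen[p] = 1

        assert heads == [3, 2]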
+
+ def isapplied(self, patch):
+ """returns (index, rev, patch)"""
+ for i in xrange(len(self.applied)):
+ p = self.applied[i]
+ a = p.split(':')
+ if a[1] == patch:
+ return (i, a[0], a[1])
+ return None
+
+ def lookup(self, patch):
+        if patch is None:
+ return None
+ if patch in self.series:
+ return patch
+ if not os.path.isfile(os.path.join(self.path, patch)):
+ try:
+ sno = int(patch)
+            except (ValueError, OverflowError):
+ self.ui.warn("patch %s not in series\n" % patch)
+ sys.exit(1)
+ if sno >= len(self.series):
+ self.ui.warn("patch number %d is out of range\n" % sno)
+ sys.exit(1)
+ patch = self.series[sno]
+ else:
+ self.ui.warn("patch %s not in series\n" % patch)
+ sys.exit(1)
+ return patch
+
+ def push(self, repo, patch=None, force=False, list=False,
+ mergeq=None, wlock=None):
+ if not wlock:
+ wlock = repo.wlock()
+ patch = self.lookup(patch)
+ if patch and self.isapplied(patch):
+ self.ui.warn("patch %s is already applied\n" % patch)
+ sys.exit(1)
+ if self.series_end() == len(self.series):
+ self.ui.warn("File series fully applied\n")
+ sys.exit(1)
+ if not force:
+ self.check_localchanges(repo)
+
+        self.applied_dirty = 1
+ start = self.series_end()
+ if start > 0:
+ self.check_toppatch(repo)
+ if not patch:
+ patch = self.series[start]
+ end = start + 1
+ else:
+ end = self.series.index(patch, start) + 1
+ s = self.series[start:end]
+ if mergeq:
+ ret = self.mergepatch(repo, mergeq, s, wlock)
+ else:
+ ret = self.apply(repo, s, list, wlock=wlock)
+ top = self.applied[-1].split(':')[1]
+ if ret[0]:
+ self.ui.write("Errors during apply, please fix and refresh %s\n" %
+ top)
+ else:
+ self.ui.write("Now at: %s\n" % top)
+ return ret[0]
+
+ def pop(self, repo, patch=None, force=False, update=True, wlock=None):
+ def getfile(f, rev):
+ t = repo.file(f).read(rev)
+ try:
+ repo.wfile(f, "w").write(t)
+ except IOError:
+ os.makedirs(os.path.dirname(repo.wjoin(f)))
+ repo.wfile(f, "w").write(t)
+
+ if not wlock:
+ wlock = repo.wlock()
+ if patch:
+ # index, rev, patch
+ info = self.isapplied(patch)
+ if not info:
+ patch = self.lookup(patch)
+ info = self.isapplied(patch)
+ if not info:
+ self.ui.warn("patch %s is not applied\n" % patch)
+ sys.exit(1)
+ if len(self.applied) == 0:
+ self.ui.warn("No patches applied\n")
+ sys.exit(1)
+
+ if not update:
+ parents = repo.dirstate.parents()
+ rr = [ revlog.bin(x.split(':')[0]) for x in self.applied ]
+ for p in parents:
+ if p in rr:
+ self.ui.warn("qpop: forcing dirstate update\n")
+ update = True
+
+ if not force and update:
+ self.check_localchanges(repo)
+
+        self.applied_dirty = 1
+ end = len(self.applied)
+ if not patch:
+ info = [len(self.applied) - 1] + self.applied[-1].split(':')
+ start = info[0]
+ rev = revlog.bin(info[1])
+
+ # we know there are no local changes, so we can make a simplified
+ # form of hg.update.
+ if update:
+ top = self.check_toppatch(repo)
+ qp = self.qparents(repo, rev)
+ changes = repo.changelog.read(qp)
+ mf1 = repo.manifest.readflags(changes[0])
+ mmap = repo.manifest.read(changes[0])
+ (c, a, r, d, u) = repo.changes(qp, top)
+ if d:
+ raise util.Abort("deletions found between repo revs")
+ for f in c:
+ getfile(f, mmap[f])
+ for f in r:
+ getfile(f, mmap[f])
+ util.set_exec(repo.wjoin(f), mf1[f])
+ repo.dirstate.update(c + r, 'n')
+ for f in a:
+                os.unlink(repo.wjoin(f))
+ try: os.removedirs(os.path.dirname(repo.wjoin(f)))
+ except: pass
+ if a:
+ repo.dirstate.forget(a)
+ repo.dirstate.setparents(qp, revlog.nullid)
+ self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
+ del self.applied[start:end]
+ if len(self.applied):
+ self.ui.write("Now at: %s\n" % self.applied[-1].split(':')[1])
+ else:
+ self.ui.write("Patch queue now empty\n")
+
+ def diff(self, repo, files):
+ top = self.check_toppatch(repo)
+ if not top:
+ self.ui.write("No patches applied\n")
+ return
+ qp = self.qparents(repo, top)
+ commands.dodiff(sys.stdout, self.ui, repo, qp, None, files)
+
+ def refresh(self, repo, short=False):
+ if len(self.applied) == 0:
+ self.ui.write("No patches applied\n")
+ return
+ wlock = repo.wlock()
+ self.check_toppatch(repo)
+ qp = self.qparents(repo)
+ (top, patch) = self.applied[-1].split(':')
+ top = revlog.bin(top)
+ cparents = repo.changelog.parents(top)
+ patchparent = self.qparents(repo, top)
+ message, comments, user, patchfound = self.readheaders(patch)
+
+ patchf = self.opener(patch, "w")
+ if comments:
+ comments = "\n".join(comments) + '\n\n'
+ patchf.write(comments)
+
+ tip = repo.changelog.tip()
+ if top == tip:
+ # if the top of our patch queue is also the tip, there is an
+ # optimization here. We update the dirstate in place and strip
+ # off the tip commit. Then just commit the current directory
+ # tree. We can also send repo.commit the list of files
+ # changed to speed up the diff
+ #
+ # in short mode, we only diff the files included in the
+ # patch already
+ #
+ # this should really read:
+ #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
+ # but we do it backwards to take advantage of manifest/chlog
+ # caching against the next repo.changes call
+ #
+ (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
+ if short:
+ filelist = cc + aa + dd
+ else:
+ filelist = None
+ (c, a, r, d, u) = repo.changes(None, None, filelist)
+
+ # we might end up with files that were added between tip and
+ # the dirstate parent, but then changed in the local dirstate.
+ # in this case, we want them to only show up in the added section
+ for x in c:
+ if x not in aa:
+ cc.append(x)
+ # we might end up with files added by the local dirstate that
+ # were deleted by the patch. In this case, they should only
+ # show up in the changed section.
+ for x in a:
+ if x in dd:
+ del dd[dd.index(x)]
+ cc.append(x)
+ else:
+ aa.append(x)
+ # make sure any files deleted in the local dirstate
+ # are not in the add or change column of the patch
+ forget = []
+ for x in d + r:
+ if x in aa:
+ del aa[aa.index(x)]
+ forget.append(x)
+ continue
+ elif x in cc:
+ del cc[cc.index(x)]
+ dd.append(x)
+
+ c = list(util.unique(cc))
+ r = list(util.unique(dd))
+ a = list(util.unique(aa))
+ filelist = list(util.unique(c + r + a ))
+ commands.dodiff(patchf, self.ui, repo, patchparent, None,
+ filelist, changes=(c, a, r, [], u))
+ patchf.close()
+
+ changes = repo.changelog.read(tip)
+ repo.dirstate.setparents(*cparents)
+ repo.dirstate.update(a, 'a')
+ repo.dirstate.update(r, 'r')
+ repo.dirstate.update(c, 'n')
+ repo.dirstate.forget(forget)
+
+ if not message:
+ message = "patch queue: %s\n" % patch
+ else:
+ message = "\n".join(message)
+ self.strip(repo, top, update=False, backup='strip', wlock=wlock)
+ n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
+ self.applied[-1] = revlog.hex(n) + ':' + patch
+ self.applied_dirty = 1
+ else:
+ commands.dodiff(patchf, self.ui, repo, patchparent, None)
+ patchf.close()
+ self.pop(repo, force=True, wlock=wlock)
+ self.push(repo, force=True, wlock=wlock)
+
+ def init(self, repo, create=False):
+ if os.path.isdir(self.path):
+ raise util.Abort("patch queue directory already exists")
+ os.mkdir(self.path)
+ if create:
+ return self.qrepo(create=True)
+
+ def unapplied(self, repo, patch=None):
+ if patch and patch not in self.series:
+ self.ui.warn("%s not in the series file\n" % patch)
+ sys.exit(1)
+ if not patch:
+ start = self.series_end()
+ else:
+ start = self.series.index(patch) + 1
+ for p in self.series[start:]:
+ self.ui.write("%s\n" % p)
+
+ def qseries(self, repo, missing=None):
+ start = self.series_end()
+ if not missing:
+ for p in self.series[:start]:
+ if self.ui.verbose:
+ self.ui.write("%d A " % self.series.index(p))
+ self.ui.write("%s\n" % p)
+ for p in self.series[start:]:
+ if self.ui.verbose:
+ self.ui.write("%d U " % self.series.index(p))
+ self.ui.write("%s\n" % p)
+ else:
+ list = []
+ for root, dirs, files in os.walk(self.path):
+ d = root[len(self.path) + 1:]
+ for f in files:
+ fl = os.path.join(d, f)
+ if (fl not in self.series and
+ fl not in (self.status_path, self.series_path)
+ and not fl.startswith('.')):
+ list.append(fl)
+ list.sort()
+ if list:
+ for x in list:
+ if self.ui.verbose:
+ self.ui.write("D ")
+ self.ui.write("%s\n" % x)
+
+ def issaveline(self, l):
+ name = l.split(':')[1]
+ if name == '.hg.patches.save.line':
+ return True
+
+ def qrepo(self, create=False):
+ if create or os.path.isdir(os.path.join(self.path, ".hg")):
+ return hg.repository(self.ui, path=self.path, create=create)
+
+ def restore(self, repo, rev, delete=None, qupdate=None):
+ c = repo.changelog.read(rev)
+ desc = c[4].strip()
+ lines = desc.splitlines()
+ i = 0
+ datastart = None
+ series = []
+ applied = []
+ qpp = None
+ for i in xrange(0, len(lines)):
+ if lines[i] == 'Patch Data:':
+ datastart = i + 1
+ elif lines[i].startswith('Dirstate:'):
+ l = lines[i].rstrip()
+ l = l[10:].split(' ')
+ qpp = [ hg.bin(x) for x in l ]
+            elif datastart is not None:
+ l = lines[i].rstrip()
+ index = l.index(':')
+ id = l[:index]
+ file = l[index + 1:]
+ if id:
+ applied.append(l)
+ series.append(file)
+        if datastart is None:
+ self.ui.warn("No saved patch data found\n")
+ return 1
+ self.ui.warn("restoring status: %s\n" % lines[0])
+ self.full_series = series
+ self.applied = applied
+ self.read_series(self.full_series)
+ self.series_dirty = 1
+ self.applied_dirty = 1
+ heads = repo.changelog.heads()
+ if delete:
+ if rev not in heads:
+ self.ui.warn("save entry has children, leaving it alone\n")
+ else:
+ self.ui.warn("removing save entry %s\n" % hg.short(rev))
+ pp = repo.dirstate.parents()
+ if rev in pp:
+ update = True
+ else:
+ update = False
+ self.strip(repo, rev, update=update, backup='strip')
+ if qpp:
+ self.ui.warn("saved queue repository parents: %s %s\n" %
+ (hg.short(qpp[0]), hg.short(qpp[1])))
+ if qupdate:
+ print "queue directory updating"
+ r = self.qrepo()
+ if not r:
+ self.ui.warn("Unable to load queue repository\n")
+ return 1
+ r.update(qpp[0], allow=False, force=True)
+
+ def save(self, repo, msg=None):
+ if len(self.applied) == 0:
+ self.ui.warn("save: no patches applied, exiting\n")
+ return 1
+ if self.issaveline(self.applied[-1]):
+ self.ui.warn("status is already saved\n")
+ return 1
+
+ ar = [ ':' + x for x in self.full_series ]
+ if not msg:
+ msg = "hg patches saved state"
+ else:
+ msg = "hg patches: " + msg.rstrip('\r\n')
+ r = self.qrepo()
+ if r:
+ pp = r.dirstate.parents()
+ msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
+ msg += "\n\nPatch Data:\n"
+ text = msg + "\n".join(self.applied) + '\n' + (ar and "\n".join(ar)
+ + '\n' or "")
+ n = repo.commit(None, text, user=None, force=1)
+ if not n:
+ self.ui.warn("repo commit failed\n")
+ return 1
+ self.applied.append(revlog.hex(n) + ":" + '.hg.patches.save.line')
+ self.applied_dirty = 1
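
    save() and restore() above round-trip the queue state through a changeset
    description: an optional "Dirstate:" line, a "Patch Data:" marker, the
    applied patches as "hash:name" lines, and the full series as ":name"
    lines. A simplified sketch of building and re-parsing that text (the hash
    below is a made-up placeholder, and the Dirstate line is left out):

        applied = ['0123abcd:first.patch']          # placeholder hash, not a real node
        full_series = ['first.patch', 'second.patch']

        text = ("hg patches saved state\n\nPatch Data:\n"
                + "\n".join(applied) + "\n"
                + "\n".join([':' + p for p in full_series]) + "\n")

        restored_applied, restored_series = [], []
        indata = False
        for l in text.splitlines():
            if l == 'Patch Data:':
                indata = True
            elif indata and ':' in l:
                rev, name = l.split(':', 1)
                if rev:                      # "hash:name" -> an applied patch
                    restored_applied.append(l)
                else:                        # ":name" -> a series entry
                    restored_series.append(name)

        assert restored_applied == applied
        assert restored_series == full_series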
+
+ def series_end(self):
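+        """return the index of the first patch past the last applied one"""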
+ end = 0
+ if len(self.applied) > 0:
+ (top, p) = self.applied[-1].split(':')
+ try:
+ end = self.series.index(p)
+ except ValueError:
+ return 0
+ return end + 1
+ return end
+
+ def qapplied(self, repo, patch=None):
+ if patch and patch not in self.series:
+ self.ui.warn("%s not in the series file\n" % patch)
+ sys.exit(1)
+ if not patch:
+ end = len(self.applied)
+ else:
+ end = self.series.index(patch) + 1
+ for x in xrange(end):
+ p = self.appliedname(x)
+ self.ui.write("%s\n" % p)
+
+ def appliedname(self, index):
+ p = self.applied[index]
+ if not self.ui.verbose:
+ p = p.split(':')[1]
+ return p
+
+ def top(self, repo):
+ if len(self.applied):
+ p = self.appliedname(-1)
+ self.ui.write(p + '\n')
+ else:
+ self.ui.write("No patches applied\n")
+
+ def next(self, repo):
+ end = self.series_end()
+ if end == len(self.series):
+ self.ui.write("All patches applied\n")
+ else:
+ self.ui.write(self.series[end] + '\n')
+
+ def prev(self, repo):
+ if len(self.applied) > 1:
+ p = self.appliedname(-2)
+ self.ui.write(p + '\n')
+ elif len(self.applied) == 1:
+ self.ui.write("Only one patch applied\n")
+ else:
+ self.ui.write("No patches applied\n")
+
+ def qimport(self, repo, files, patch=None, existing=None, force=None):
+ if len(files) > 1 and patch:
+ self.ui.warn("-n option not valid when importing multiple files\n")
+ sys.exit(1)
+ i = 0
+ for filename in files:
+ if existing:
+ if not patch:
+ patch = filename
+ if not os.path.isfile(os.path.join(self.path, patch)):
+ self.ui.warn("patch %s does not exist\n" % patch)
+ sys.exit(1)
+ else:
+ try:
+ text = file(filename).read()
+ except IOError:
+ self.ui.warn("Unable to read %s\n" % patch)
+ sys.exit(1)
+ if not patch:
+ patch = os.path.split(filename)[1]
+ if not force and os.path.isfile(os.path.join(self.path, patch)):
+ self.ui.warn("patch %s already exists\n" % patch)
+ sys.exit(1)
+ patchf = self.opener(patch, "w")
+ patchf.write(text)
+ if patch in self.series:
+ self.ui.warn("patch %s is already in the series file\n" % patch)
+ sys.exit(1)
+ index = self.series_end() + i
+ self.full_series[index:index] = [patch]
+ self.read_series(self.full_series)
+ self.ui.warn("adding %s to series file\n" % patch)
+ i += 1
+ patch = None
+ self.series_dirty = 1
+
+def delete(ui, repo, patch, **opts):
+ """remove a patch from the series file"""
+ q = repomap[repo]
+ q.delete(repo, patch)
+ q.save_dirty()
+ return 0
+
+def applied(ui, repo, patch=None, **opts):
+ """print the patches already applied"""
+ repomap[repo].qapplied(repo, patch)
+ return 0
+
+def unapplied(ui, repo, patch=None, **opts):
+ """print the patches not yet applied"""
+ repomap[repo].unapplied(repo, patch)
+ return 0
+
+def qimport(ui, repo, *filename, **opts):
+ """import a patch"""
+ q = repomap[repo]
+ q.qimport(repo, filename, patch=opts['name'],
+ existing=opts['existing'], force=opts['force'])
+ q.save_dirty()
+ return 0
+
+def init(ui, repo, **opts):
+ """init a new queue repository"""
+ q = repomap[repo]
+ r = q.init(repo, create=opts['create_repo'])
+ q.save_dirty()
+ if r:
+ fp = r.wopener('.hgignore', 'w')
+ print >> fp, 'syntax: glob'
+ print >> fp, 'status'
+ fp.close()
+ r.wopener('series', 'w').close()
+ r.add(['.hgignore', 'series'])
+ return 0
+
+def commit(ui, repo, *pats, **opts):
+ q = repomap[repo]
+ r = q.qrepo()
+ if not r: raise util.Abort('no queue repository')
+ commands.commit(r.ui, r, *pats, **opts)
+
+def series(ui, repo, **opts):
+ """print the entire series file"""
+ repomap[repo].qseries(repo, missing=opts['missing'])
+ return 0
+
+def top(ui, repo, **opts):
+ """print the name of the current patch"""
+ repomap[repo].top(repo)
+ return 0
+
+def next(ui, repo, **opts):
+ """print the name of the next patch"""
+ repomap[repo].next(repo)
+ return 0
+
+def prev(ui, repo, **opts):
+ """print the name of the previous patch"""
+ repomap[repo].prev(repo)
+ return 0
+
+def new(ui, repo, patch, **opts):
+ """create a new patch"""
+ q = repomap[repo]
+ q.new(repo, patch, msg=opts['message'], force=opts['force'])
+ q.save_dirty()
+ return 0
+
+def refresh(ui, repo, **opts):
+ """update the current patch"""
+ q = repomap[repo]
+ q.refresh(repo, short=opts['short'])
+ q.save_dirty()
+ return 0
+
+def diff(ui, repo, *files, **opts):
+ """diff of the current patch"""
+ repomap[repo].diff(repo, files)
+ return 0
+
+def lastsavename(path):
+ (dir, base) = os.path.split(path)
+ names = os.listdir(dir)
+ namere = re.compile("%s.([0-9]+)" % base)
+ max = None
+ maxname = None
+ for f in names:
+ m = namere.match(f)
+ if m:
+ index = int(m.group(1))
+            if max is None or index > max:
+ max = index
+ maxname = f
+ if maxname:
+ return (os.path.join(dir, maxname), max)
+ return (None, None)
+
+def savename(path):
+ (last, index) = lastsavename(path)
+ if last is None:
+ index = 0
+ newpath = path + ".%d" % (index + 1)
+ return newpath
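
    lastsavename() and savename() implement a simple numbered-backup scheme:
    the next name is base.<N+1>, where N is the highest numeric suffix already
    present. A standalone sketch of the same scheme that works on a plain list
    of names instead of a directory listing (the regex here is anchored and
    escaped, which the code above does not bother with):

        import re

        def nextsavename(base, existing):
            # sketch of savename()/lastsavename(): pick the next free base.<N>
            namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
            indexes = [int(m.group(1)) for m in map(namere.match, existing) if m]
            return "%s.%d" % (base, (max(indexes) if indexes else 0) + 1)

        assert nextsavename("patches", []) == "patches.1"
        assert nextsavename("patches", ["patches.1", "patches.3", "junk"]) == "patches.4"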
+
+def push(ui, repo, patch=None, **opts):
+ """push the next patch onto the stack"""
+ q = repomap[repo]
+ mergeq = None
+
+ if opts['all']:
+ patch = q.series[-1]
+ if opts['merge']:
+ if opts['name']:
+ newpath = opts['name']
+ else:
+ newpath, i = lastsavename(q.path)
+ if not newpath:
+ ui.warn("no saved queues found, please use -n\n")
+ return 1
+ mergeq = queue(ui, repo.join(""), newpath)
+ ui.warn("merging with queue at: %s\n" % mergeq.path)
+ ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
+ mergeq=mergeq)
+ q.save_dirty()
+ return ret
+
+def pop(ui, repo, patch=None, **opts):
+ """pop the current patch off the stack"""
+ localupdate = True
+ if opts['name']:
+ q = queue(ui, repo.join(""), repo.join(opts['name']))
+ ui.warn('using patch queue: %s\n' % q.path)
+ localupdate = False
+ else:
+ q = repomap[repo]
+ if opts['all'] and len(q.applied) > 0:
+ patch = q.applied[0].split(':')[1]
+ q.pop(repo, patch, force=opts['force'], update=localupdate)
+ q.save_dirty()
+ return 0
+
+def restore(ui, repo, rev, **opts):
+ """restore the queue state saved by a rev"""
+ rev = repo.lookup(rev)
+ q = repomap[repo]
+ q.restore(repo, rev, delete=opts['delete'],
+ qupdate=opts['update'])
+ q.save_dirty()
+ return 0
+
+def save(ui, repo, **opts):
+ """save current queue state"""
+ q = repomap[repo]
+ ret = q.save(repo, msg=opts['message'])
+ if ret:
+ return ret
+ q.save_dirty()
+ if opts['copy']:
+ path = q.path
+ if opts['name']:
+ newpath = os.path.join(q.basepath, opts['name'])
+ if os.path.exists(newpath):
+ if not os.path.isdir(newpath):
+ ui.warn("destination %s exists and is not a directory\n" %
+ newpath)
+ sys.exit(1)
+ if not opts['force']:
+ ui.warn("destination %s exists, use -f to force\n" %
+ newpath)
+ sys.exit(1)
+ else:
+ newpath = savename(path)
+ ui.warn("copy %s to %s\n" % (path, newpath))
+ util.copyfiles(path, newpath)
+ if opts['empty']:
+ try:
+ os.unlink(os.path.join(q.path, q.status_path))
+ except:
+ pass
+ return 0
+
+def strip(ui, repo, rev, **opts):
+ """strip a revision and all later revs on the same branch"""
+ rev = repo.lookup(rev)
+ backup = 'all'
+ if opts['backup']:
+ backup = 'strip'
+ elif opts['nobackup']:
+ backup = 'none'
+ repomap[repo].strip(repo, rev, backup=backup)
+ return 0
+
+def version(ui, q=None):
+ """print the version number"""
+ ui.write("mq version %s\n" % versionstr)
+ return 0
+
+def reposetup(ui, repo):
+ repomap[repo] = queue(ui, repo.join(""))
+
+cmdtable = {
+ "qapplied": (applied, [], 'hg qapplied [patch]'),
+ "qcommit|qci":
+ (commit,
+ [('A', 'addremove', None, _('run addremove during commit')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns')),
+ ('m', 'message', '', _('use <text> as commit message')),
+ ('l', 'logfile', '', _('read the commit message from <file>')),
+ ('d', 'date', '', _('record datecode as commit date')),
+     ('u', 'user', '', _('record user as committer'))],
+ 'hg qcommit [options] [files]'),
+ "^qdiff": (diff, [], 'hg qdiff [files]'),
+ "qdelete": (delete, [], 'hg qdelete [patch]'),
+ "^qimport":
+ (qimport,
+ [('e', 'existing', None, 'import file in patch dir'),
+ ('n', 'name', '', 'patch file name'),
+ ('f', 'force', None, 'overwrite existing files')],
+     'hg qimport [-e] [-n name] [-f] file...'),
+ "^qinit":
+ (init,
+ [('c', 'create-repo', None, 'create patch repository')],
+     'hg qinit [-c]'),
+ "qnew":
+ (new,
+ [('m', 'message', '', 'commit message'),
+ ('f', 'force', None, 'force')],
+     'hg qnew [-m message] patch'),
+ "qnext": (next, [], 'hg qnext'),
+ "qprev": (prev, [], 'hg qprev'),
+ "^qpop":
+ (pop,
+ [('a', 'all', None, 'pop all patches'),
+ ('n', 'name', '', 'queue name to pop'),
+ ('f', 'force', None, 'forget any local changes')],
+ 'hg qpop [options] [patch/index]'),
+ "^qpush":
+ (push,
+ [('f', 'force', None, 'apply if the patch has rejects'),
+ ('l', 'list', None, 'list patch name in commit text'),
+ ('a', 'all', None, 'apply all patches'),
+ ('m', 'merge', None, 'merge from another queue'),
+ ('n', 'name', '', 'merge queue name')],
+ 'hg qpush [options] [patch/index]'),
+ "^qrefresh":
+ (refresh,
+ [('s', 'short', None, 'short refresh')],
+ 'hg qrefresh'),
+ "qrestore":
+ (restore,
+ [('d', 'delete', None, 'delete save entry'),
+ ('u', 'update', None, 'update queue working dir')],
+ 'hg qrestore rev'),
+ "qsave":
+ (save,
+ [('m', 'message', '', 'commit message'),
+ ('c', 'copy', None, 'copy patch directory'),
+ ('n', 'name', '', 'copy directory name'),
+ ('e', 'empty', None, 'clear queue status file'),
+ ('f', 'force', None, 'force copy')],
+ 'hg qsave'),
+ "qseries":
+ (series,
+ [('m', 'missing', None, 'print patches not in series')],
+ 'hg qseries'),
+ "^strip":
+ (strip,
+ [('f', 'force', None, 'force multi-head removal'),
+ ('b', 'backup', None, 'bundle unrelated changesets'),
+ ('n', 'nobackup', None, 'no backups')],
+ 'hg strip rev'),
+ "qtop": (top, [], 'hg qtop'),
+ "qunapplied": (unapplied, [], 'hg qunapplied [patch]'),
+ "qversion": (version, [], 'hg qversion')
+}
+
--- a/hgext/patchbomb.py Sun Mar 12 15:58:56 2006 -0800
+++ b/hgext/patchbomb.py Sun Mar 12 16:21:59 2006 -0800
@@ -49,20 +49,11 @@
# to = recipient1, recipient2, ...
# cc = cc1, cc2, ...
-from email.MIMEMultipart import MIMEMultipart
-from email.MIMEText import MIMEText
-from email.Utils import parseaddr
-from mercurial import commands
-from mercurial import hg
-from mercurial import ui
+from mercurial.demandload import *
+demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
+ mercurial:commands,hg,ui
+ os errno popen2 smtplib socket sys tempfile time''')
from mercurial.i18n import gettext as _
-import os
-import popen2
-import smtplib
-import socket
-import sys
-import tempfile
-import time
try:
# readline gives raw_input editing capabilities, but is not
@@ -149,8 +140,11 @@
if opts['diffstat']:
body += cdiffstat('\n'.join(desc), patch) + '\n\n'
body += '\n'.join(patch)
- msg = MIMEText(body)
- subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
+ msg = email.MIMEText.MIMEText(body)
+ if total == 1:
+ subj = '[PATCH] ' + desc[0].strip()
+ else:
+ subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
if subj.endswith('.'): subj = subj[:-1]
msg['Subject'] = subj
msg['X-Mercurial-Node'] = node
@@ -189,17 +183,9 @@
jumbo.extend(p)
msgs.append(makepatch(p, i + 1, len(patches)))
- ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
-
sender = (opts['from'] or ui.config('patchbomb', 'from') or
prompt('From', ui.username()))
- msg = MIMEMultipart()
- msg['Subject'] = '[PATCH 0 of %d] %s' % (
- len(patches),
- opts['subject'] or
- prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
-
def getaddrs(opt, prpt, default = None):
addrs = opts[opt] or (ui.config('patchbomb', opt) or
prompt(prpt, default = default)).split(',')
@@ -207,26 +193,35 @@
to = getaddrs('to', 'To')
cc = getaddrs('cc', 'Cc', '')
- ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
+ if len(patches) > 1:
+ ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
- body = []
+ msg = email.MIMEMultipart.MIMEMultipart()
+ msg['Subject'] = '[PATCH 0 of %d] %s' % (
+ len(patches),
+ opts['subject'] or
+ prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
+
+ ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
+
+ body = []
- while True:
- try: l = raw_input()
- except EOFError: break
- if l == '.': break
- body.append(l)
+ while True:
+ try: l = raw_input()
+ except EOFError: break
+ if l == '.': break
+ body.append(l)
- msg.attach(MIMEText('\n'.join(body) + '\n'))
+ msg.attach(email.MIMEText.MIMEText('\n'.join(body) + '\n'))
+
+ if opts['diffstat']:
+ d = cdiffstat(_('Final summary:\n'), jumbo)
+ if d: msg.attach(email.MIMEText.MIMEText(d))
+
+ msgs.insert(0, msg)
ui.write('\n')
- if opts['diffstat']:
- d = cdiffstat(_('Final summary:\n'), jumbo)
- if d: msg.attach(MIMEText(d))
-
- msgs.insert(0, msg)
-
if not opts['test'] and not opts['mbox']:
s = smtplib.SMTP()
s.connect(host = ui.config('smtp', 'host', 'mail'),
@@ -241,7 +236,7 @@
s.login(username, password)
parent = None
tz = time.strftime('%z')
- sender_addr = parseaddr(sender)[1]
+ sender_addr = email.Utils.parseaddr(sender)[1]
for m in msgs:
try:
m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
@@ -259,8 +254,12 @@
if opts['test']:
ui.status('Displaying ', m['Subject'], ' ...\n')
fp = os.popen(os.getenv('PAGER', 'more'), 'w')
- fp.write(m.as_string(0))
- fp.write('\n')
+ try:
+ fp.write(m.as_string(0))
+ fp.write('\n')
+ except IOError, inst:
+ if inst.errno != errno.EPIPE:
+ raise
fp.close()
elif opts['mbox']:
ui.status('Writing ', m['Subject'], ' ...\n')
--- a/hgmerge Sun Mar 12 15:58:56 2006 -0800
+++ b/hgmerge Sun Mar 12 16:21:59 2006 -0800
@@ -3,7 +3,13 @@
# hgmerge - default merge helper for Mercurial
#
# This tries to find a way to do three-way merge on the current system.
-# The result ought to end up in $1.
+# The result ought to end up in $1. The script is run in the root
+# directory of the repository.
+#
+# Environment variables set by Mercurial:
+# HG_FILE name of file within repo
+# HG_MY_NODE revision being merged
+# HG_OTHER_NODE revision being merged
set -e # bail out quickly on failure
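
    A merge helper configured in place of hgmerge (for example via the HGMERGE
    environment variable) can read the same variables documented above. A
    hypothetical minimal helper, shown in Python only for illustration; the
    file name and node values are whatever Mercurial supplies:

        # hypothetical helper, not part of the patch
        import os, sys

        target = sys.argv[1]                      # the merge result must end up here
        fname = os.environ.get('HG_FILE')         # name of the file within the repo
        mine = os.environ.get('HG_MY_NODE')       # local revision being merged
        other = os.environ.get('HG_OTHER_NODE')   # other revision being merged
        sys.stderr.write("merging %s (%s vs %s)\n" % (fname, mine, other))
        sys.exit(1)   # a real helper would write the merged result to target and exit 0
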
--- a/hgwebdir.cgi Sun Mar 12 15:58:56 2006 -0800
+++ b/hgwebdir.cgi Sun Mar 12 16:21:59 2006 -0800
@@ -8,10 +8,21 @@
# sys.path.insert(0, "/path/to/python/lib") # if not a system-wide install
from mercurial import hgweb
-# The config file looks like this:
+# The config file looks like this. You can have paths to individual
+# repos, collections of repos in a directory tree, or both.
+#
# [paths]
# virtual/path = /real/path
# virtual/path = /real/path
+#
+# [collections]
+# /prefix/to/strip/off = /root/of/tree/full/of/repos
+#
+# collections example: suppose the directory tree /foo contains the repos
+# /foo/bar and /foo/quux/baz. With this config section:
+# [collections]
+# /foo = /foo
+# the repos will be listed as bar and quux/baz.
# Alternatively you can pass a list of ('virtual/path', '/real/path') tuples
# or use a dictionary with entries like 'virtual/path': '/real/path'
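
    As a concrete illustration of the two programmatic forms mentioned in the
    last two lines (the virtual and real paths are invented; how the object is
    handed to hgweb depends on the rest of this script):

        # invented example paths
        paths_as_dict = {
            'projects/hello': '/home/hg/repos/hello',
            'projects/world': '/home/hg/repos/world',
        }
        paths_as_list = [
            ('projects/hello', '/home/hg/repos/hello'),
            ('projects/world', '/home/hg/repos/world'),
        ]
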
--- a/mercurial/commands.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/commands.py Sun Mar 12 16:21:59 2006 -0800
@@ -276,6 +276,14 @@
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
changes=None, text=False, opts={}):
+ if not node1:
+ node1 = repo.dirstate.parents()[0]
+ # reading the data for node1 early allows it to play nicely
+ # with repo.changes and the revlog cache.
+ change = repo.changelog.read(node1)
+ mmap = repo.manifest.read(change[0])
+ date1 = util.datestr(change[2])
+
if not changes:
changes = repo.changes(node1, node2, files, match=match)
modified, added, removed, deleted, unknown = changes
@@ -294,8 +302,6 @@
return repo.file(f).read(mmap2[f])
else:
date2 = util.datestr()
- if not node1:
- node1 = repo.dirstate.parents()[0]
def read(f):
return repo.wread(f)
@@ -305,10 +311,6 @@
hexfunc = ui.verbose and hex or short
r = [hexfunc(node) for node in [node1, node2] if node]
- change = repo.changelog.read(node1)
- mmap = repo.manifest.read(change[0])
- date1 = util.datestr(change[2])
-
diffopts = ui.diffopts()
showfunc = opts.get('show_function') or diffopts['showfunc']
ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
@@ -793,12 +795,7 @@
change = repo.changelog.read(node)
mmap = repo.manifest.read(change[0])
- for src, abs, rel, exact in walk(repo, pats, opts):
- if abs not in mmap:
- ui.warn(_("warning: %s is not in the repository!\n") %
- ((pats and rel) or abs))
- continue
-
+ for src, abs, rel, exact in walk(repo, pats, opts, node=node):
f = repo.file(abs)
if not opts['text'] and util.binary(f.read(mmap[abs])):
ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
@@ -834,7 +831,7 @@
contents including permissions, rename data, and revision history.
"""
f = open(fname, "wb")
- dest = ui.expandpath(dest, repo.root)
+ dest = ui.expandpath(dest)
other = hg.repository(ui, dest)
o = repo.findoutgoing(other)
cg = repo.changegroup(o, 'bundle')
@@ -896,6 +893,8 @@
such as AFS, implement hardlinking incorrectly, but do not report
errors. In these cases, use the --pull option to avoid
hardlinking.
+
+ See pull for valid source format details.
"""
if dest is None:
dest = os.path.basename(os.path.normpath(source))
@@ -921,8 +920,7 @@
if opts['remotecmd']:
ui.setconfig("ui", "remotecmd", opts['remotecmd'])
- if not os.path.exists(source):
- source = ui.expandpath(source)
+ source = ui.expandpath(source)
d = Dircleanup(dest)
abspath = source
@@ -978,7 +976,7 @@
f.close()
if not opts['noupdate']:
- update(ui, repo)
+ update(repo.ui, repo)
d.close()
@@ -1023,7 +1021,8 @@
except ValueError, inst:
raise util.Abort(str(inst))
-def docopy(ui, repo, pats, opts):
+def docopy(ui, repo, pats, opts, wlock):
+ # called with the repo lock held
cwd = repo.getcwd()
errors = 0
copied = []
@@ -1069,8 +1068,16 @@
if not os.path.isdir(targetdir):
os.makedirs(targetdir)
try:
- shutil.copyfile(relsrc, reltarget)
- shutil.copymode(relsrc, reltarget)
+ restore = repo.dirstate.state(abstarget) == 'r'
+ if restore:
+ repo.undelete([abstarget], wlock)
+ try:
+ shutil.copyfile(relsrc, reltarget)
+ shutil.copymode(relsrc, reltarget)
+ restore = False
+ finally:
+ if restore:
+ repo.remove([abstarget], wlock)
except shutil.Error, inst:
raise util.Abort(str(inst))
except IOError, inst:
@@ -1084,7 +1091,8 @@
if ui.verbose or not exact:
ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
targets[abstarget] = abssrc
- repo.copy(origsrc, abstarget)
+ if abstarget != origsrc:
+ repo.copy(origsrc, abstarget, wlock)
copied.append((abssrc, relsrc, exact))
def targetpathfn(pat, dest, srcs):
@@ -1192,15 +1200,26 @@
should properly record copied files, this information is not yet
fully used by merge, nor fully reported by log.
"""
- errs, copied = docopy(ui, repo, pats, opts)
+ try:
+ wlock = repo.wlock(0)
+ errs, copied = docopy(ui, repo, pats, opts, wlock)
+ except lock.LockHeld, inst:
+ ui.warn(_("repository lock held by %s\n") % inst.args[0])
+ errs = 1
return errs
def debugancestor(ui, index, rev1, rev2):
"""find the ancestor revision of two revisions in a given index"""
- r = revlog.revlog(util.opener(os.getcwd()), index, "")
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "")
a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
ui.write("%d:%s\n" % (r.rev(a), hex(a)))
+def debugcomplete(ui, cmd):
+ """returns the completion list associated with the given command"""
+ clist = findpossible(cmd).keys()
+ clist.sort()
+ ui.write("%s\n" % " ".join(clist))
+
def debugrebuildstate(ui, repo, rev=None):
"""rebuild the dirstate as it would look like for the given revision"""
if not rev:
@@ -1246,12 +1265,8 @@
error = _(".hg/dirstate inconsistent with current parent's manifest")
raise util.Abort(error)
-def debugconfig(ui):
+def debugconfig(ui, repo):
"""show combined config settings from all hgrc files"""
- try:
- repo = hg.repository(ui)
- except hg.RepoError:
- pass
for section, name, value in ui.walkconfig():
ui.write('%s.%s=%s\n' % (section, name, value))
@@ -1283,7 +1298,8 @@
def debugdata(ui, file_, rev):
"""dump the contents of an data file revision"""
- r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False),
+ file_[:-2] + ".i", file_)
try:
ui.write(r.revision(r.lookup(rev)))
except KeyError:
@@ -1291,7 +1307,7 @@
def debugindex(ui, file_):
"""dump the contents of an index file"""
- r = revlog.revlog(util.opener(os.getcwd()), file_, "")
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
ui.write(" rev offset length base linkrev" +
" nodeid p1 p2\n")
for i in range(r.count()):
@@ -1302,7 +1318,7 @@
def debugindexdot(ui, file_):
"""dump an index DAG as a .dot file"""
- r = revlog.revlog(util.opener(os.getcwd()), file_, "")
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
ui.write("digraph G {\n")
for i in range(r.count()):
e = r.index[i]
@@ -1730,7 +1746,7 @@
Currently only local repositories are supported.
"""
- source = ui.expandpath(source, repo.root)
+ source = ui.expandpath(source)
other = hg.repository(ui, source)
if not other.local():
raise util.Abort(_("incoming doesn't work for remote repositories yet"))
@@ -1917,8 +1933,10 @@
Show changesets not found in the specified destination repo or the
default push repo. These are the changesets that would be pushed
if a push was requested.
+
+ See pull for valid source format details.
"""
- dest = ui.expandpath(dest, repo.root)
+ dest = ui.expandpath(dest)
other = hg.repository(ui, dest)
o = repo.findoutgoing(other)
o = repo.changelog.nodesbetween(o)[0]
@@ -1953,7 +1971,7 @@
if n != nullid:
displayer.show(changenode=n, brinfo=br)
-def paths(ui, search=None):
+def paths(ui, repo, search=None):
"""show definition of symbolic path names
Show definition of symbolic path name NAME. If no name is given, show
@@ -1962,11 +1980,6 @@
Path names are defined in the [paths] section of /etc/mercurial/hgrc
and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
"""
- try:
- repo = hg.repository(ui=ui)
- except hg.RepoError:
- pass
-
if search:
for name, path in ui.configitems("paths"):
if name == search:
@@ -1999,7 +2012,7 @@
to the remote user's home directory by default; use two slashes at
the start of a path to specify it as relative to the filesystem root.
"""
- source = ui.expandpath(source, repo.root)
+ source = ui.expandpath(source)
ui.status(_('pulling from %s\n') % (source))
if opts['ssh']:
@@ -2044,7 +2057,7 @@
SSH requires an accessible shell account on the destination
machine and a copy of hg in the remote path.
"""
- dest = ui.expandpath(dest, repo.root)
+ dest = ui.expandpath(dest)
ui.status('pushing to %s\n' % (dest))
if opts['ssh']:
@@ -2062,6 +2075,7 @@
def rawcommit(ui, repo, *flist, **rc):
"""raw commit interface (DEPRECATED)
+ (DEPRECATED)
Lowlevel commit, for use in helper scripts.
This command is not intended to be used by normal users, as it is
@@ -2119,7 +2133,7 @@
def okaytoremove(abs, rel, exact):
modified, added, removed, deleted, unknown = repo.changes(files=[abs])
reason = None
- if modified:
+ if modified and not opts['force']:
reason = _('is modified')
elif added:
reason = _('has been marked for add')
@@ -2154,21 +2168,33 @@
should properly record rename files, this information is not yet
fully used by merge, nor fully reported by log.
"""
- errs, copied = docopy(ui, repo, pats, opts)
- names = []
- for abs, rel, exact in copied:
- if ui.verbose or not exact:
- ui.status(_('removing %s\n') % rel)
- names.append(abs)
- repo.remove(names, unlink=True)
+ try:
+ wlock = repo.wlock(0)
+ errs, copied = docopy(ui, repo, pats, opts, wlock)
+ names = []
+ for abs, rel, exact in copied:
+ if ui.verbose or not exact:
+ ui.status(_('removing %s\n') % rel)
+ names.append(abs)
+ repo.remove(names, True, wlock)
+ except lock.LockHeld, inst:
+ ui.warn(_("repository lock held by %s\n") % inst.args[0])
+ errs = 1
return errs
def revert(ui, repo, *pats, **opts):
"""revert modified files or dirs back to their unmodified states
- Revert any uncommitted modifications made to the named files or
- directories. This restores the contents of the affected files to
- an unmodified state.
+ In its default mode, it reverts any uncommitted modifications made
+ to the named files or directories. This restores the contents of
+ the affected files to an unmodified state.
+
+ Using the -r option, it reverts the given files or directories to
+ their state as of an earlier revision. This can be helpful to "roll
+ back" some or all of a change that should not have been committed.
+
+ Revert modifies the working directory. It does not commit any
+ changes, or change the parent of the current working directory.
If a file has been deleted, it is recreated. If the executable
mode of a file was changed, it is reset.
@@ -2183,7 +2209,7 @@
files, choose, anypats = matchpats(repo, pats, opts)
modified, added, removed, deleted, unknown = repo.changes(match=choose)
repo.forget(added)
- repo.undelete(removed + deleted)
+ repo.undelete(removed)
return repo.update(node, False, True, choose, False)
@@ -2573,50 +2599,51 @@
('c', 'changeset', None, _('list the changeset')),
('I', 'include', [], _('include names matching the given patterns')),
('X', 'exclude', [], _('exclude names matching the given patterns'))],
- _('hg annotate [OPTION]... FILE...')),
+ _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
"bundle":
(bundle,
[],
_('hg bundle FILE DEST')),
"cat":
(cat,
- [('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
- ('o', 'output', '', _('print output to file with formatted name')),
- ('r', 'rev', '', _('print the given revision'))],
+ [('o', 'output', '', _('print output to file with formatted name')),
+ ('r', 'rev', '', _('print the given revision')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
_('hg cat [OPTION]... FILE...')),
"^clone":
(clone,
[('U', 'noupdate', None, _('do not update the new working directory')),
- ('e', 'ssh', '', _('specify ssh command to use')),
- ('', 'pull', None, _('use pull protocol to copy metadata')),
('r', 'rev', [],
_('a changeset you would like to have after cloning')),
+ ('', 'pull', None, _('use pull protocol to copy metadata')),
+ ('e', 'ssh', '', _('specify ssh command to use')),
('', 'remotecmd', '',
_('specify hg command to run on the remote side'))],
_('hg clone [OPTION]... SOURCE [DEST]')),
"^commit|ci":
(commit,
[('A', 'addremove', None, _('run addremove during commit')),
- ('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
('m', 'message', '', _('use <text> as commit message')),
('l', 'logfile', '', _('read the commit message from <file>')),
('d', 'date', '', _('record datecode as commit date')),
- ('u', 'user', '', _('record user as commiter'))],
+ ('u', 'user', '', _('record user as commiter')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
_('hg commit [OPTION]... [FILE]...')),
"copy|cp":
(copy,
- [('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
- ('A', 'after', None, _('record a copy that has already occurred')),
+ [('A', 'after', None, _('record a copy that has already occurred')),
('f', 'force', None,
- _('forcibly copy over an existing managed file'))],
+ _('forcibly copy over an existing managed file')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
_('hg copy [OPTION]... [SOURCE]... DEST')),
"debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
+ "debugcomplete": (debugcomplete, [], _('debugcomplete CMD')),
"debugrebuildstate":
(debugrebuildstate,
- [('r', 'rev', "", _("revision to rebuild to"))],
+ [('r', 'rev', '', _('revision to rebuild to'))],
_('debugrebuildstate [-r REV] [REV]')),
"debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
"debugconfig": (debugconfig, [], _('debugconfig')),
@@ -2635,20 +2662,19 @@
(diff,
[('r', 'rev', [], _('revision')),
('a', 'text', None, _('treat all files as text')),
- ('I', 'include', [], _('include names matching the given patterns')),
('p', 'show-function', None,
_('show which function each change is in')),
('w', 'ignore-all-space', None,
_('ignore white space when comparing lines')),
- ('X', 'exclude', [],
- _('exclude names matching the given patterns'))],
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
_('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
"^export":
(export,
[('o', 'output', '', _('print output to file with formatted name')),
('a', 'text', None, _('treat all files as text')),
('', 'switch-parent', None, _('diff against the second parent'))],
- _('hg export [-a] [-o OUTFILE] REV...')),
+ _('hg export [-a] [-o OUTFILESPEC] REV...')),
"forget":
(forget,
[('I', 'include', [], _('include names matching the given patterns')),
@@ -2657,15 +2683,15 @@
"grep":
(grep,
[('0', 'print0', None, _('end fields with NUL')),
- ('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
('', 'all', None, _('print all revisions that match')),
('i', 'ignore-case', None, _('ignore case when matching')),
('l', 'files-with-matches', None,
_('print only filenames and revs that match')),
('n', 'line-number', None, _('print matching line numbers')),
('r', 'rev', [], _('search in given revision range')),
- ('u', 'user', None, _('print user who committed change'))],
+ ('u', 'user', None, _('print user who committed change')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
_('hg grep [OPTION]... PATTERN [FILE]...')),
"heads":
(heads,
@@ -2681,10 +2707,10 @@
[('p', 'strip', 1,
_('directory strip option for patch. This has the same\n') +
_('meaning as the corresponding patch option')),
+ ('b', 'base', '', _('base path')),
('f', 'force', None,
- _('skip check for outstanding uncommitted changes')),
- ('b', 'base', '', _('base path'))],
- _('hg import [-f] [-p NUM] [-b BASE] PATCH...')),
+ _('skip check for outstanding uncommitted changes'))],
+ _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
"incoming|in": (incoming,
[('M', 'no-merges', None, _('do not show merges')),
('', 'style', '', _('display using template map file')),
@@ -2705,9 +2731,7 @@
_('hg locate [OPTION]... [PATTERN]...')),
"^log|history":
(log,
- [('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
- ('b', 'branches', None, _('show branches')),
+ [('b', 'branches', None, _('show branches')),
('k', 'keyword', [], _('search for a keyword')),
('l', 'limit', '', _('limit number of changes displayed')),
('r', 'rev', [], _('show the specified revision or range')),
@@ -2715,8 +2739,10 @@
('', 'style', '', _('display using template map file')),
('m', 'only-merges', None, _('show only merges')),
('p', 'patch', None, _('show patch')),
- ('', 'template', '', _('display with template'))],
- _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
+ ('', 'template', '', _('display with template')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg log [OPTION]... [FILE]')),
"manifest": (manifest, [], _('hg manifest [REV]')),
"outgoing|out": (outgoing,
[('M', 'no-merges', None, _('do not show merges')),
@@ -2724,7 +2750,7 @@
('', 'style', '', _('display using template map file')),
('n', 'newest-first', None, _('show newest record first')),
('', 'template', '', _('display with template'))],
- _('hg outgoing [-p] [-n] [-M] [DEST]')),
+ _('hg outgoing [-M] [-p] [-n] [DEST]')),
"^parents":
(parents,
[('b', 'branches', None, _('show branches')),
@@ -2740,7 +2766,7 @@
('r', 'rev', [], _('a specific revision you would like to pull')),
('', 'remotecmd', '',
_('specify hg command to run on the remote side'))],
- _('hg pull [-u] [-e FILE] [-r rev]... [--remotecmd FILE] [SOURCE]')),
+ _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
"^push":
(push,
[('f', 'force', None, _('force push')),
@@ -2748,8 +2774,8 @@
('r', 'rev', [], _('a specific revision you would like to push')),
('', 'remotecmd', '',
_('specify hg command to run on the remote side'))],
- _('hg push [-f] [-e FILE] [-r rev]... [--remotecmd FILE] [DEST]')),
- "rawcommit":
+ _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
+ "debugrawcommit|rawcommit":
(rawcommit,
[('p', 'parent', [], _('parent')),
('d', 'date', '', _('date code')),
@@ -2757,27 +2783,28 @@
('F', 'files', '', _('file list')),
('m', 'message', '', _('commit message')),
('l', 'logfile', '', _('commit message file'))],
- _('hg rawcommit [OPTION]... [FILE]...')),
+ _('hg debugrawcommit [OPTION]... [FILE]...')),
"recover": (recover, [], _('hg recover')),
"^remove|rm":
(remove,
- [('I', 'include', [], _('include names matching the given patterns')),
+ [('f', 'force', None, _('remove file even if modified')),
+ ('I', 'include', [], _('include names matching the given patterns')),
('X', 'exclude', [], _('exclude names matching the given patterns'))],
_('hg remove [OPTION]... FILE...')),
"rename|mv":
(rename,
- [('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
- ('A', 'after', None, _('record a rename that has already occurred')),
+ [('A', 'after', None, _('record a rename that has already occurred')),
('f', 'force', None,
- _('forcibly copy over an existing managed file'))],
- _('hg rename [OPTION]... [SOURCE]... DEST')),
+ _('forcibly copy over an existing managed file')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg rename [OPTION]... SOURCE... DEST')),
"^revert":
(revert,
- [('I', 'include', [], _('include names matching the given patterns')),
- ('X', 'exclude', [], _('exclude names matching the given patterns')),
- ('r', 'rev', '', _('revision to revert to'))],
- _('hg revert [-n] [-r REV] [NAME]...')),
+ [('r', 'rev', '', _('revision to revert to')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg revert [-r REV] [NAME]...')),
"root": (root, [], _('hg root')),
"^serve":
(serve,
@@ -2791,7 +2818,7 @@
_('name to show in web pages (default: working dir)')),
('', 'pid-file', '', _('name of file to write process ID to')),
('', 'stdio', None, _('for remote clients')),
- ('', 'templates', '', _('web templates to use')),
+ ('t', 'templates', '', _('web templates to use')),
('', 'style', '', _('template style to use')),
('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
_('hg serve [OPTION]...')),
@@ -2815,7 +2842,7 @@
('d', 'date', '', _('record datecode as commit date')),
('u', 'user', '', _('record user as commiter')),
('r', 'rev', '', _('revision to tag'))],
- _('hg tag [-r REV] [OPTION]... NAME')),
+ _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
"tags": (tags, [], _('hg tags')),
"tip":
(tip,
@@ -2823,7 +2850,7 @@
('', 'style', '', _('display using template map file')),
('p', 'patch', None, _('show patch')),
('', 'template', '', _('display with template'))],
- _('hg [-b] [-p] tip')),
+ _('hg tip [-b] [-p]')),
"unbundle":
(unbundle,
[('u', 'update', None,
@@ -2844,7 +2871,8 @@
}
globalopts = [
- ('R', 'repository', '', _('repository root directory')),
+ ('R', 'repository', '',
+ _('repository root directory or symbolic path name')),
('', 'cwd', '', _('change working directory')),
('y', 'noninteractive', None,
_('do not prompt, assume \'yes\' for any required answers')),
@@ -2859,28 +2887,49 @@
('h', 'help', None, _('display help and exit')),
]
-norepo = ("clone init version help debugancestor debugconfig debugdata"
- " debugindex debugindexdot paths")
-
-def find(cmd):
- """Return (aliases, command table entry) for command string."""
- choice = None
- count = 0
+norepo = ("clone init version help debugancestor debugcomplete debugdata"
+ " debugindex debugindexdot")
+optionalrepo = ("paths debugconfig")
+
+def findpossible(cmd):
+ """
+ Return cmd -> (aliases, command table entry)
+ for each matching command
+ """
+ choice = {}
+ debugchoice = {}
for e in table.keys():
aliases = e.lstrip("^").split("|")
if cmd in aliases:
- return aliases, table[e]
+ choice[cmd] = (aliases, table[e])
+ continue
for a in aliases:
if a.startswith(cmd):
- count += 1
- choice = aliases, table[e]
+ if aliases[0].startswith("debug"):
+ debugchoice[a] = (aliases, table[e])
+ else:
+ choice[a] = (aliases, table[e])
break
- if count > 1:
- raise AmbiguousCommand(cmd)
+ if not choice and debugchoice:
+ choice = debugchoice
+
+ return choice
+
+def find(cmd):
+ """Return (aliases, command table entry) for command string."""
+ choice = findpossible(cmd)
+
+ if choice.has_key(cmd):
+ return choice[cmd]
+
+ if len(choice) > 1:
+ clist = choice.keys()
+ clist.sort()
+ raise AmbiguousCommand(cmd, clist)
if choice:
- return choice
+ return choice.values()[0]
raise UnknownCommand(cmd)
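
    The rewritten findpossible/find pair resolves a command by exact name
    first, then by unambiguous prefix, keeping debug commands as a fallback so
    they never shadow user commands. A self-contained sketch of the same
    matching over a toy command list (simplified: exact matches return
    immediately here, whereas the real find() checks the returned dict):

        # toy command list, not the real table
        commands = ['annotate', 'add', 'addremove', 'debugancestor', 'debugcomplete']

        def findpossible(cmd):
            choice, debugchoice = {}, {}
            for name in commands:
                if name == cmd:
                    return {name: name}
                if name.startswith(cmd):
                    if name.startswith('debug'):
                        debugchoice[name] = name
                    else:
                        choice[name] = name
            return choice or debugchoice

        assert sorted(findpossible('ad')) == ['add', 'addremove']   # ambiguous
        assert findpossible('an') == {'annotate': 'annotate'}
        assert findpossible('debugc') == {'debugcomplete': 'debugcomplete'}
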
@@ -2968,7 +3017,10 @@
mod = getattr(mod, comp)
return mod
try:
- mod = importh(x[0])
+ try:
+ mod = importh("hgext." + x[0])
+ except ImportError:
+ mod = importh(x[0])
except Exception, inst:
on_exception(Exception, inst)
continue
@@ -2983,6 +3035,93 @@
try:
cmd, func, args, options, cmdoptions = parse(u, args)
+ if options["time"]:
+ def get_times():
+ t = os.times()
+ if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
+ t = (t[0], t[1], t[2], t[3], time.clock())
+ return t
+ s = get_times()
+ def print_time():
+ t = get_times()
+ u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
+ (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
+ atexit.register(print_time)
+
+ u.updateopts(options["verbose"], options["debug"], options["quiet"],
+ not options["noninteractive"])
+
+ # enter the debugger before command execution
+ if options['debugger']:
+ pdb.set_trace()
+
+ try:
+ if options['cwd']:
+ try:
+ os.chdir(options['cwd'])
+ except OSError, inst:
+ raise util.Abort('%s: %s' %
+ (options['cwd'], inst.strerror))
+
+ path = u.expandpath(options["repository"]) or ""
+ repo = path and hg.repository(u, path=path) or None
+
+ if options['help']:
+ help_(u, cmd, options['version'])
+ sys.exit(0)
+ elif options['version']:
+ show_version(u)
+ sys.exit(0)
+ elif not cmd:
+ help_(u, 'shortlist')
+ sys.exit(0)
+
+ if cmd not in norepo.split():
+ try:
+ if not repo:
+ repo = hg.repository(u, path=path)
+ u = repo.ui
+ for x in external:
+ if hasattr(x, 'reposetup'):
+ x.reposetup(u, repo)
+ except hg.RepoError:
+ if cmd not in optionalrepo.split():
+ raise
+ d = lambda: func(u, repo, *args, **cmdoptions)
+ else:
+ d = lambda: func(u, *args, **cmdoptions)
+
+ try:
+ if options['profile']:
+ import hotshot, hotshot.stats
+ prof = hotshot.Profile("hg.prof")
+ try:
+ try:
+ return prof.runcall(d)
+ except:
+ try:
+ u.warn(_('exception raised - generating '
+ 'profile anyway\n'))
+ except:
+ pass
+ raise
+ finally:
+ prof.close()
+ stats = hotshot.stats.load("hg.prof")
+ stats.strip_dirs()
+ stats.sort_stats('time', 'calls')
+ stats.print_stats(40)
+ else:
+ return d()
+ finally:
+ u.flush()
+ except:
+ # enter the debugger when we hit an exception
+ if options['debugger']:
+ pdb.post_mortem(sys.exc_info()[2])
+ if options['traceback']:
+ traceback.print_exc()
+ raise
except ParseError, inst:
if inst.args[0]:
u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
@@ -2992,81 +3131,13 @@
help_(u, 'shortlist')
sys.exit(-1)
except AmbiguousCommand, inst:
- u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
+ u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
+ (inst.args[0], " ".join(inst.args[1])))
sys.exit(1)
except UnknownCommand, inst:
u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
help_(u, 'shortlist')
sys.exit(1)
-
- if options["time"]:
- def get_times():
- t = os.times()
- if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
- t = (t[0], t[1], t[2], t[3], time.clock())
- return t
- s = get_times()
- def print_time():
- t = get_times()
- u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
- (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
- atexit.register(print_time)
-
- u.updateopts(options["verbose"], options["debug"], options["quiet"],
- not options["noninteractive"])
-
- # enter the debugger before command execution
- if options['debugger']:
- pdb.set_trace()
-
- try:
- try:
- if options['help']:
- help_(u, cmd, options['version'])
- sys.exit(0)
- elif options['version']:
- show_version(u)
- sys.exit(0)
- elif not cmd:
- help_(u, 'shortlist')
- sys.exit(0)
-
- if options['cwd']:
- try:
- os.chdir(options['cwd'])
- except OSError, inst:
- raise util.Abort('%s: %s' %
- (options['cwd'], inst.strerror))
-
- if cmd not in norepo.split():
- path = options["repository"] or ""
- repo = hg.repository(ui=u, path=path)
- for x in external:
- if hasattr(x, 'reposetup'):
- x.reposetup(u, repo)
- d = lambda: func(u, repo, *args, **cmdoptions)
- else:
- d = lambda: func(u, *args, **cmdoptions)
-
- if options['profile']:
- import hotshot, hotshot.stats
- prof = hotshot.Profile("hg.prof")
- r = prof.runcall(d)
- prof.close()
- stats = hotshot.stats.load("hg.prof")
- stats.strip_dirs()
- stats.sort_stats('time', 'calls')
- stats.print_stats(40)
- return r
- else:
- return d()
- except:
- # enter the debugger when we hit an exception
- if options['debugger']:
- pdb.post_mortem(sys.exc_info()[2])
- if options['traceback']:
- traceback.print_exc()
- raise
except hg.RepoError, inst:
u.warn(_("abort: "), inst, "!\n")
except revlog.RevlogError, inst:
@@ -3113,12 +3184,6 @@
u.debug(inst, "\n")
u.warn(_("%s: invalid arguments\n") % cmd)
help_(u, cmd)
- except AmbiguousCommand, inst:
- u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
- help_(u, 'shortlist')
- except UnknownCommand, inst:
- u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
- help_(u, 'shortlist')
except SystemExit:
# don't catch this in the catch-all below
raise
--- a/mercurial/demandload.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/demandload.py Sun Mar 12 16:21:59 2006 -0800
@@ -1,15 +1,125 @@
-def demandload(scope, modules):
- class d:
- def __getattr__(self, name):
- mod = self.__dict__["mod"]
- scope = self.__dict__["scope"]
- scope[mod] = __import__(mod, scope, scope, [])
- return getattr(scope[mod], name)
+'''Demand load modules when used, not when imported.'''
+
+__author__ = '''Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>.
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.'''
+
+# this is based on matt's original demandload module. it is a
+# complete rewrite. some time, we may need to support syntax of
+# "import foo as bar".
+
+class _importer(object):
+ '''import a module. it is not imported until needed, and is
+ imported at most once per scope.'''
+
+ def __init__(self, scope, modname, fromlist):
+ '''scope is context (globals() or locals()) in which import
+ should be made. modname is name of module to import.
+ fromlist is list of modules for "from foo import ..."
+ emulation.'''
+
+ self.scope = scope
+ self.modname = modname
+ self.fromlist = fromlist
+ self.mod = None
+
+ def module(self):
+ '''import the module if needed, and return.'''
+ if self.mod is None:
+ self.mod = __import__(self.modname, self.scope, self.scope,
+ self.fromlist)
+ del self.modname, self.fromlist
+ return self.mod
+
+class _replacer(object):
+ '''placeholder for a demand loaded module. demandload puts this in
+ a target scope. when an attribute of this object is looked up,
+ this object is replaced in the target scope with the actual
+ module.
+
+ we use __getattribute__ to avoid namespace clashes between
+ placeholder object and real module.'''
+
+ def __init__(self, importer, target):
+ self.importer = importer
+ self.target = target
+ # consider case where we do this:
+ # demandload(globals(), 'foo.bar foo.quux')
+ # foo will already exist in target scope when we get to
+ # foo.quux. so we remember that we will need to demandload
+ # quux into foo's scope when we really load it.
+ self.later = []
+
+ def module(self):
+ return object.__getattribute__(self, 'importer').module()
+
+ def __getattribute__(self, key):
+ '''look up an attribute in a module and return it. replace the
+ name of the module in the caller\'s dict with the actual
+ module.'''
- for m in modules.split():
- dl = d()
- dl.mod = m
- dl.scope = scope
- scope[m] = dl
+ module = object.__getattribute__(self, 'module')()
+ target = object.__getattribute__(self, 'target')
+ importer = object.__getattribute__(self, 'importer')
+ later = object.__getattribute__(self, 'later')
+
+ if later:
+ demandload(module.__dict__, ' '.join(later))
+
+ importer.scope[target] = module
+
+ return getattr(module, key)
+
+class _replacer_from(_replacer):
+ '''placeholder for a demand loaded module. used for "from foo
+ import ..." emulation. semantics of this are different than
+ regular import, so different implementation needed.'''
+
+ def module(self):
+ importer = object.__getattribute__(self, 'importer')
+ target = object.__getattribute__(self, 'target')
+
+ return getattr(importer.module(), target)
+
+def demandload(scope, modules):
+ '''import modules into scope when each is first used.
+
+ scope should be the value of globals() in the module calling this
+ function, or locals() in the calling function.
+
+ modules is a string listing module names, separated by white
+ space. names are handled like this:
+ foo import foo
+ foo bar import foo, bar
+ foo.bar import foo.bar
+ foo:bar from foo import bar
+ foo:bar,quux from foo import bar, quux
+ foo.bar:quux from foo.bar import quux'''
+ for mod in modules.split():
+ col = mod.find(':')
+ if col >= 0:
+ fromlist = mod[col+1:].split(',')
+ mod = mod[:col]
+ else:
+ fromlist = []
+ importer = _importer(scope, mod, fromlist)
+ if fromlist:
+ for name in fromlist:
+ scope[name] = _replacer_from(importer, name)
+ else:
+ dot = mod.find('.')
+ if dot >= 0:
+ basemod = mod[:dot]
+ val = scope.get(basemod)
+ # if base module has already been demandload()ed,
+ # remember to load this submodule into its namespace
+ # when needed.
+ if isinstance(val, _replacer):
+ later = object.__getattribute__(val, 'later')
+ later.append(mod[dot+1:])
+ continue
+ else:
+ basemod = mod
+ scope[basemod] = _replacer(importer, basemod)
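
The docstring above defines the whole name syntax demandload() accepts. A
minimal usage sketch (the module choices are illustrative, not part of the patch):

    from mercurial.demandload import demandload

    # nothing is imported yet; each name becomes a placeholder object
    demandload(globals(), "re")        # import re
    demandload(globals(), "os:path")   # from os import path

    # the first attribute access triggers the real import and replaces
    # the placeholder in the scope with the actual module or object
    print re.escape("a+b")
    print path.join("usr", "bin")
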
--- a/mercurial/hgweb.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/hgweb.py Sun Mar 12 16:21:59 2006 -0800
@@ -34,31 +34,28 @@
return os.stat(hg_path).st_mtime
def staticfile(directory, fname):
- fname = os.path.realpath(os.path.join(directory, fname))
+ """return a file inside directory with guessed content-type header
+
+ fname always uses '/' as directory separator and isn't allowed to
+ contain unusual path components.
+ Content-type is guessed using the mimetypes module.
+ Return an empty string if fname is illegal or file not found.
+ """
+ parts = fname.split('/')
+ path = directory
+ for part in parts:
+ if (part in ('', os.curdir, os.pardir) or
+ os.sep in part or os.altsep is not None and os.altsep in part):
+ return ""
+ path = os.path.join(path, part)
try:
- # the static dir should be a substring in the real
- # file path, if it is not, we have something strange
- # going on => security breach attempt?
- #
- # This will either:
- # 1) find the `static' path at index 0 = success
- # 2) find the `static' path at other index = error
- # 3) not find the `static' path = ValueError generated
- if fname.index(directory) != 0:
- # generate ValueError manually
- raise ValueError()
-
- os.stat(fname)
-
- ct = mimetypes.guess_type(fname)[0] or "text/plain"
- return "Content-type: %s\n\n%s" % (ct, file(fname).read())
- except ValueError:
- # security breach attempt
+ os.stat(path)
+ ct = mimetypes.guess_type(path)[0] or "text/plain"
+ return "Content-type: %s\n\n%s" % (ct, file(path).read())
+ except (TypeError, OSError):
+ # illegal fname or unreadable file
return ""
- except OSError, e:
- if e.errno == errno.ENOENT:
- return ""
class hgrequest(object):
def __init__(self, inp=None, out=None, env=None):
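
The rewritten staticfile() validates each '/'-separated component of fname
instead of comparing realpath prefixes. A sketch of the same check in
isolation (the helper name is mine, not part of the patch):

    import os

    def is_safe_relpath(fname):
        # same component test staticfile() applies above: reject empty
        # parts, '.', '..', and anything containing a native separator
        for part in fname.split('/'):
            if part in ('', os.curdir, os.pardir):
                return False
            if os.sep in part or (os.altsep is not None and os.altsep in part):
                return False
        return True

    print is_safe_relpath("style.css")   # True
    print is_safe_relpath("../hgrc")     # False
    print is_safe_relpath("a//b")        # False (empty component)
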
@@ -739,7 +736,7 @@
def run(self, req=hgrequest()):
def clean(path):
- p = os.path.normpath(path)
+ p = util.normpath(path)
if p[:2] == "..":
raise "suspicious path"
return p
@@ -1001,17 +998,27 @@
class hgwebdir(object):
def __init__(self, config):
def cleannames(items):
- return [(name.strip('/'), path) for name, path in items]
+ return [(name.strip(os.sep), path) for name, path in items]
- if type(config) == type([]):
+ if isinstance(config, (list, tuple)):
self.repos = cleannames(config)
- elif type(config) == type({}):
+ elif isinstance(config, dict):
self.repos = cleannames(config.items())
self.repos.sort()
else:
cp = ConfigParser.SafeConfigParser()
cp.read(config)
- self.repos = cleannames(cp.items("paths"))
+ self.repos = []
+ if cp.has_section('paths'):
+ self.repos.extend(cleannames(cp.items('paths')))
+ if cp.has_section('collections'):
+ for prefix, root in cp.items('collections'):
+ for path in util.walkrepos(root):
+ repo = os.path.normpath(path)
+ name = repo
+ if name.startswith(prefix):
+ name = name[len(prefix):]
+ self.repos.append((name.lstrip(os.sep), repo))
self.repos.sort()
def run(self, req=hgrequest()):
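
hgwebdir now reads an optional [collections] section in addition to [paths]:
each collection entry maps a prefix to a root directory that is scanned with
util.walkrepos(), and the prefix is stripped from the published name. A
hypothetical config file (the paths are examples only):

    [paths]
    mercurial = /home/user/src/mercurial

    [collections]
    /home/user/repos = /home/user/repos

With this, every repository found under /home/user/repos is served under its
path relative to that directory.
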
--- a/mercurial/httprepo.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/httprepo.py Sun Mar 12 16:21:59 2006 -0800
@@ -67,6 +67,9 @@
def dev(self):
return -1
+ def lock(self):
+ raise util.Abort(_('operation not supported over http'))
+
def do_cmd(self, cmd, **args):
self.ui.debug(_("sending %s command\n") % cmd)
q = {"cmd": cmd}
--- a/mercurial/localrepo.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/localrepo.py Sun Mar 12 16:21:59 2006 -0800
@@ -10,10 +10,12 @@
from node import *
from i18n import gettext as _
from demandload import *
-demandload(globals(), "re lock transaction tempfile stat mdiff errno")
+demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
class localrepository(object):
- def __init__(self, ui, path=None, create=0):
+ def __del__(self):
+ self.transhandle = None
+ def __init__(self, parentui, path=None, create=0):
if not path:
p = os.getcwd()
while not os.path.isdir(os.path.join(p, ".hg")):
@@ -28,7 +30,7 @@
raise repo.RepoError(_("repository %s not found") % path)
self.root = os.path.abspath(path)
- self.ui = ui
+ self.ui = ui.ui(parentui=parentui)
self.opener = util.opener(self.path)
self.wopener = util.opener(self.root)
self.manifest = manifest.manifest(self.opener)
@@ -37,42 +39,23 @@
self.nodetagscache = None
self.encodepats = None
self.decodepats = None
+ self.transhandle = None
if create:
os.mkdir(self.path)
os.mkdir(self.join("data"))
- self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
+ self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
try:
- self.ui.readconfig(self.join("hgrc"))
+ self.ui.readconfig(self.join("hgrc"), self.root)
except IOError:
pass
def hook(self, name, throw=False, **args):
def runhook(name, cmd):
self.ui.note(_("running hook %s: %s\n") % (name, cmd))
- old = {}
- for k, v in args.items():
- k = k.upper()
- old['HG_' + k] = os.environ.get(k, None)
- old[k] = os.environ.get(k, None)
- os.environ['HG_' + k] = str(v)
- os.environ[k] = str(v)
-
- try:
- # Hooks run in the repository root
- olddir = os.getcwd()
- os.chdir(self.root)
- r = os.system(cmd)
- finally:
- for k, v in old.items():
- if v is not None:
- os.environ[k] = v
- else:
- del os.environ[k]
-
- os.chdir(olddir)
-
+ env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
+ r = util.system(cmd, environ=env, cwd=self.root)
if r:
desc, r = util.explain_exit(r)
if throw:
@@ -82,10 +65,11 @@
return True
r = True
- for hname, cmd in self.ui.configitems("hooks"):
- s = hname.split(".")
- if s[0] == name and cmd:
- r = runhook(hname, cmd) and r
+ hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
+ if hname.split(".", 1)[0] == name and cmd]
+ hooks.sort()
+ for hname, cmd in hooks:
+ r = runhook(hname, cmd) and r
return r
def tags(self):
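
Hooks now run through util.system() with the repository root as the working
directory and only HG_* variables added to the environment; hook names are
matched on the part before the first '.' and run in sorted order. A
hypothetical [hooks] section (HG_NODE is the variable exercised by
tests/test-hook below; the notify entry is made up):

    [hooks]
    commit = echo "commit hook: n=$HG_NODE"
    commit.notify = echo "notify hook for $HG_NODE"
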
@@ -215,6 +199,10 @@
return self.wopener(filename, 'w').write(data)
def transaction(self):
+ tr = self.transhandle
+ if tr != None and tr.running():
+ return tr.nest()
+
# save dirstate for undo
try:
ds = self.opener("dirstate").read()
@@ -222,13 +210,11 @@
ds = ""
self.opener("journal.dirstate", "w").write(ds)
- def after():
- util.rename(self.join("journal"), self.join("undo"))
- util.rename(self.join("journal.dirstate"),
- self.join("undo.dirstate"))
-
- return transaction.transaction(self.ui.warn, self.opener,
- self.join("journal"), after)
+ tr = transaction.transaction(self.ui.warn, self.opener,
+ self.join("journal"),
+ aftertrans(self.path))
+ self.transhandle = tr
+ return tr
def recover(self):
l = self.lock()
@@ -366,7 +352,7 @@
self.dirstate.setparents(n, nullid)
def commit(self, files=None, text="", user=None, date=None,
- match=util.always, force=False, wlock=None):
+ match=util.always, force=False, lock=None, wlock=None):
commit = []
remove = []
changed = []
@@ -404,7 +390,8 @@
if not wlock:
wlock = self.wlock()
- l = self.lock()
+ if not lock:
+ lock = self.lock()
tr = self.transaction()
# check in files
@@ -519,6 +506,12 @@
del mf[fn]
return mf
+ if node1:
+ # read the manifest from node1 before the manifest from node2,
+ # so that we'll hit the manifest cache if we're going through
+ # all the revisions in parent->child order.
+ mf1 = mfmatches(node1)
+
# are we comparing the working directory?
if not node2:
if not wlock:
@@ -557,8 +550,6 @@
# flush lists from dirstate before comparing manifests
modified, added = [], []
- mf1 = mfmatches(node1)
-
for fn in mf2:
if mf1.has_key(fn):
if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
@@ -818,7 +809,7 @@
base[h] = 1
if not unknown:
- return None
+ return []
rep = {}
reqcnt = 0
@@ -1645,10 +1636,12 @@
# merge the tricky bits
files = merge.keys()
files.sort()
+ xp1 = hex(p1)
+ xp2 = hex(p2)
for f in files:
self.ui.status(_("merging %s\n") % f)
my, other, flag = merge[f]
- ret = self.merge3(f, my, other)
+ ret = self.merge3(f, my, other, xp1, xp2)
if ret:
err = True
util.set_exec(self.wjoin(f), flag)
@@ -1669,6 +1662,7 @@
remove.sort()
for f in remove:
self.ui.note(_("removing %s\n") % f)
+ util.audit_path(f)
try:
util.unlink(self.wjoin(f))
except OSError, inst:
@@ -1685,7 +1679,7 @@
self.dirstate.setparents(p1, p2)
return err
- def merge3(self, fn, my, other):
+ def merge3(self, fn, my, other, p1, p2):
"""perform a 3-way merge in the working directory"""
def temp(prefix, node):
@@ -1708,7 +1702,13 @@
cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
or "hgmerge")
- r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
+ r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
+ environ={'HG_FILE': fn,
+ 'HG_MY_NODE': p1,
+ 'HG_OTHER_NODE': p2,
+ 'HG_FILE_MY_NODE': hex(my),
+ 'HG_FILE_OTHER_NODE': hex(other),
+ 'HG_FILE_BASE_NODE': hex(base)})
if r:
self.ui.warn(_("merging %s failed!\n") % fn)
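
merge3() now exports the file name and the relevant node ids to the merge
program through the environment. A hypothetical HGMERGE program that only
reports that context and then fails (so the warning above is printed):

    #!/usr/bin/env python
    # illustrative HGMERGE program; it does no merging, it just shows the
    # variables merge3() sets, then exits non-zero to signal failure
    import os, sys

    print >> sys.stderr, "asked to merge %s" % os.environ.get("HG_FILE")
    print >> sys.stderr, "  changesets %s / %s" % (
        os.environ.get("HG_MY_NODE"), os.environ.get("HG_OTHER_NODE"))
    print >> sys.stderr, "  file nodes %s / %s (base %s)" % (
        os.environ.get("HG_FILE_MY_NODE"),
        os.environ.get("HG_FILE_OTHER_NODE"),
        os.environ.get("HG_FILE_BASE_NODE"))
    sys.exit(1)
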
@@ -1759,6 +1759,7 @@
raise
except Exception, inst:
err(_("unpacking changeset %s: %s") % (short(n), inst))
+ continue
neededmanifests[changes[0]] = n
@@ -1796,10 +1797,14 @@
raise
except Exception, inst:
err(_("unpacking manifest %s: %s") % (short(n), inst))
+ continue
- ff = [ l.split('\0') for l in delta.splitlines() ]
- for f, fn in ff:
- filenodes.setdefault(f, {})[bin(fn[:40])] = 1
+ try:
+ ff = [ l.split('\0') for l in delta.splitlines() ]
+ for f, fn in ff:
+ filenodes.setdefault(f, {})[bin(fn[:40])] = 1
+ except (ValueError, TypeError), inst:
+ err(_("broken delta in manifest %s: %s") % (short(n), inst))
self.ui.status(_("crosschecking files in changesets and manifests\n"))
@@ -1823,6 +1828,9 @@
if f == "/dev/null":
continue
files += 1
+ if not f:
+ err(_("file without name in manifest %s") % short(n))
+ continue
fl = self.file(f)
checksize(fl, f)
@@ -1840,7 +1848,7 @@
del filenodes[f][n]
flr = fl.linkrev(n)
- if flr not in filelinkrevs[f]:
+ if flr not in filelinkrevs.get(f, []):
err(_("%s:%s points to unexpected changeset %d")
% (f, short(n), flr))
else:
@@ -1875,3 +1883,13 @@
if errors[0]:
self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
return 1
+
+# used to avoid circular references so destructors work
+def aftertrans(base):
+ p = base
+ def a():
+ util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
+ util.rename(os.path.join(p, "journal.dirstate"),
+ os.path.join(p, "undo.dirstate"))
+ return a
+
--- a/mercurial/lock.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/lock.py Sun Mar 12 16:21:59 2006 -0800
@@ -5,8 +5,8 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-import errno, os, time
-import util
+from demandload import *
+demandload(globals(), 'errno os socket time util')
class LockException(Exception):
pass
@@ -16,11 +16,22 @@
pass
class lock(object):
+ # lock is symlink on platforms that support it, file on others.
+
+ # symlink is used because create of directory entry and contents
+ # are atomic even over nfs.
+
+ # old-style lock: symlink to pid
+ # new-style lock: symlink to hostname:pid
+
def __init__(self, file, timeout=-1, releasefn=None):
self.f = file
self.held = 0
self.timeout = timeout
self.releasefn = releasefn
+ self.id = None
+ self.host = None
+ self.pid = None
self.lock()
def __del__(self):
@@ -41,15 +52,50 @@
raise inst
def trylock(self):
- pid = os.getpid()
+ if self.id is None:
+ self.host = socket.gethostname()
+ self.pid = os.getpid()
+ self.id = '%s:%s' % (self.host, self.pid)
+ while not self.held:
+ try:
+ util.makelock(self.id, self.f)
+ self.held = 1
+ except (OSError, IOError), why:
+ if why.errno == errno.EEXIST:
+ locker = self.testlock()
+ if locker:
+ raise LockHeld(locker)
+ else:
+ raise LockUnavailable(why)
+
+ def testlock(self):
+ '''return id of locker if lock is valid, else None.'''
+ # if old-style lock, we cannot tell what machine locker is on.
+ # with new-style lock, if locker is on this machine, we can
+ # see if locker is alive. if locker is on this machine but
+ # not alive, we can safely break lock.
+ locker = util.readlock(self.f)
+ c = locker.find(':')
+ if c == -1:
+ return locker
+ host = locker[:c]
+ if host != self.host:
+ return locker
try:
- util.makelock(str(pid), self.f)
- self.held = 1
- except (OSError, IOError), why:
- if why.errno == errno.EEXIST:
- raise LockHeld(util.readlock(self.f))
- else:
- raise LockUnavailable(why)
+ pid = int(locker[c+1:])
+ except:
+ return locker
+ if util.testpid(pid):
+ return locker
+ # if locker dead, break lock. must do this with another lock
+ # held, or can race and break valid lock.
+ try:
+ l = lock(self.f + '.break')
+ l.trylock()
+ os.unlink(self.f)
+ l.release()
+ except (LockHeld, LockUnavailable):
+ return locker
def release(self):
if self.held:
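
The new lock format stores "hostname:pid", so testlock() can break a lock
left behind by a dead process on the same host while never touching locks it
cannot verify. A POSIX-only sketch of that staleness test (the helper name
and direct use of os.kill are mine; the patch goes through util.testpid and
takes a '.break' lock before unlinking):

    import errno, os, socket

    def lock_is_stale(lockid):
        if ':' not in lockid:
            return False          # old-style lock: pid only, can't verify
        host, pid = lockid.split(':', 1)
        if host != socket.gethostname():
            return False          # other machine, can't verify
        try:
            pid = int(pid)
        except ValueError:
            return False
        try:
            os.kill(pid, 0)       # signal 0: existence check only
            return False          # still running
        except OSError, inst:
            return inst.errno == errno.ESRCH

    print lock_is_stale('%s:%d' % (socket.gethostname(), os.getpid()))  # False
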
--- a/mercurial/packagescan.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/packagescan.py Sun Mar 12 16:21:59 2006 -0800
@@ -16,8 +16,14 @@
""" fake demandload function that collects the required modules """
for m in modules.split():
mod = None
- mod = __import__(m,scope,scope)
- scope[m] = mod
+ try:
+ module, submodules = m.split(':')
+ submodules = submodules.split(',')
+ except:
+ module = m
+ submodules = []
+ mod = __import__(module, scope, scope, submodules)
+ scope[module] = mod
requiredmodules[mod.__name__] = 1
def getmodules(libpath,packagename):
--- a/mercurial/revlog.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/revlog.py Sun Mar 12 16:21:59 2006 -0800
@@ -48,7 +48,7 @@
if t == '\0': return bin
if t == 'x': return zlib.decompress(bin)
if t == 'u': return bin[1:]
- raise RevlogError(_("unknown compression type %s") % t)
+ raise RevlogError(_("unknown compression type %r") % t)
indexformat = ">4l20s20s20s"
--- a/mercurial/statichttprepo.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/statichttprepo.py Sun Mar 12 16:21:59 2006 -0800
@@ -15,8 +15,10 @@
def read(self, size=None):
try:
return httprangereader.httprangereader.read(self, size)
+ except urllib2.HTTPError, inst:
+ raise IOError(None, inst)
except urllib2.URLError, inst:
- raise IOError(None, str(inst))
+ raise IOError(None, inst.reason[1])
def opener(base):
"""return a function that opens files over http"""
--- a/mercurial/transaction.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/transaction.py Sun Mar 12 16:21:59 2006 -0800
@@ -22,6 +22,7 @@
if os.path.exists(journal):
raise AssertionError(_("journal already exists - run hg recover"))
+ self.count = 1
self.report = report
self.opener = opener
self.after = after
@@ -46,7 +47,17 @@
self.file.write("%s\0%d\n" % (file, offset))
self.file.flush()
+ def nest(self):
+ self.count += 1
+ return self
+
+ def running(self):
+ return self.count > 0
+
def close(self):
+ self.count -= 1
+ if self.count != 0:
+ return
self.file.close()
self.entries = []
if self.after:
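
Transactions can now be nested: a second transaction() call while one is
running just bumps a counter via nest(), and only the outermost close()
finishes the journal. A counting-only sketch of that protocol (the real class
also journals file offsets):

    class nestable(object):
        def __init__(self):
            self.count = 1
        def nest(self):
            self.count += 1
            return self
        def running(self):
            return self.count > 0
        def close(self):
            self.count -= 1
            if self.count != 0:
                return            # an outer transaction is still open
            print "outermost close: journal committed"

    t = nestable()
    inner = t.nest()              # second transaction() while t is running
    inner.close()                 # no-op: count drops 2 -> 1
    t.close()                     # really closes
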
--- a/mercurial/ui.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/ui.py Sun Mar 12 16:21:59 2006 -0800
@@ -5,25 +5,41 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-import os, ConfigParser
+import ConfigParser
from i18n import gettext as _
from demandload import *
-demandload(globals(), "re socket sys util")
+demandload(globals(), "os re socket sys util")
class ui(object):
def __init__(self, verbose=False, debug=False, quiet=False,
- interactive=True):
+ interactive=True, parentui=None):
self.overlay = {}
- self.cdata = ConfigParser.SafeConfigParser()
- self.readconfig(util.rcpath)
+ if parentui is None:
+ # this is the parent of all ui children
+ self.parentui = None
+ self.cdata = ConfigParser.SafeConfigParser()
+ self.readconfig(util.rcpath)
+
+ self.quiet = self.configbool("ui", "quiet")
+ self.verbose = self.configbool("ui", "verbose")
+ self.debugflag = self.configbool("ui", "debug")
+ self.interactive = self.configbool("ui", "interactive", True)
- self.quiet = self.configbool("ui", "quiet")
- self.verbose = self.configbool("ui", "verbose")
- self.debugflag = self.configbool("ui", "debug")
- self.interactive = self.configbool("ui", "interactive", True)
+ self.updateopts(verbose, debug, quiet, interactive)
+ self.diffcache = None
+ else:
+ # parentui may point to an ui object which is already a child
+ self.parentui = parentui.parentui or parentui
+ parent_cdata = self.parentui.cdata
+ self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
+ # make interpolation work
+ for section in parent_cdata.sections():
+ self.cdata.add_section(section)
+ for name, value in parent_cdata.items(section, raw=True):
+ self.cdata.set(section, name, value)
- self.updateopts(verbose, debug, quiet, interactive)
- self.diffcache = None
+ def __getattr__(self, key):
+ return getattr(self.parentui, key)
def updateopts(self, verbose=False, debug=False, quiet=False,
interactive=True):
@@ -32,7 +48,7 @@
self.debugflag = (self.debugflag or debug)
self.interactive = (self.interactive and interactive)
- def readconfig(self, fn):
+ def readconfig(self, fn, root=None):
if isinstance(fn, basestring):
fn = [fn]
for f in fn:
@@ -40,6 +56,12 @@
self.cdata.read(f)
except ConfigParser.ParsingError, inst:
raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
+ # translate paths relative to root (or home) into absolute paths
+ if root is None:
+ root = os.path.expanduser('~')
+ for name, path in self.configitems("paths"):
+ if path.find("://") == -1 and not os.path.isabs(path):
+ self.cdata.set("paths", name, os.path.join(root, path))
def setconfig(self, section, name, val):
self.overlay[(section, name)] = val
@@ -48,23 +70,44 @@
if self.overlay.has_key((section, name)):
return self.overlay[(section, name)]
if self.cdata.has_option(section, name):
- return self.cdata.get(section, name)
- return default
+ try:
+ return self.cdata.get(section, name)
+ except ConfigParser.InterpolationError, inst:
+ raise util.Abort(_("Error in configuration:\n%s") % inst)
+ if self.parentui is None:
+ return default
+ else:
+ return self.parentui.config(section, name, default)
def configbool(self, section, name, default=False):
if self.overlay.has_key((section, name)):
return self.overlay[(section, name)]
if self.cdata.has_option(section, name):
- return self.cdata.getboolean(section, name)
- return default
+ try:
+ return self.cdata.getboolean(section, name)
+ except ConfigParser.InterpolationError, inst:
+ raise util.Abort(_("Error in configuration:\n%s") % inst)
+ if self.parentui is None:
+ return default
+ else:
+ return self.parentui.configbool(section, name, default)
def configitems(self, section):
+ items = {}
+ if self.parentui is not None:
+ items = dict(self.parentui.configitems(section))
if self.cdata.has_section(section):
- return self.cdata.items(section)
- return []
+ try:
+ items.update(dict(self.cdata.items(section)))
+ except ConfigParser.InterpolationError, inst:
+ raise util.Abort(_("Error in configuration:\n%s") % inst)
+ x = items.items()
+ x.sort()
+ return x
- def walkconfig(self):
- seen = {}
+ def walkconfig(self, seen=None):
+ if seen is None:
+ seen = {}
for (section, name), value in self.overlay.iteritems():
yield section, name, value
seen[section, name] = 1
@@ -73,6 +116,9 @@
if (section, name) in seen: continue
yield section, name, value.replace('\n', '\\n')
seen[section, name] = 1
+ if self.parentui is not None:
+ for parent in self.parentui.walkconfig(seen):
+ yield parent
def extensions(self):
return self.configitems("extensions")
@@ -107,15 +153,12 @@
if not self.verbose: user = util.shortuser(user)
return user
- def expandpath(self, loc, root=""):
- paths = {}
- for name, path in self.configitems("paths"):
- m = path.find("://")
- if m == -1:
- path = os.path.join(root, path)
- paths[name] = path
+ def expandpath(self, loc):
+ """Return repository location relative to cwd or from [paths]"""
+ if loc.find("://") != -1 or os.path.exists(loc):
+ return loc
- return paths.get(loc, loc)
+ return self.config("paths", loc, loc)
def write(self, *args):
for a in args:
@@ -126,6 +169,12 @@
for a in args:
sys.stderr.write(str(a))
+ def flush(self):
+ try:
+ sys.stdout.flush()
+ finally:
+ sys.stderr.flush()
+
def readline(self):
return sys.stdin.readline()[:-1]
def prompt(self, msg, pat, default="y"):
@@ -157,7 +206,9 @@
os.environ.get("EDITOR", "vi"))
os.environ["HGUSER"] = self.username()
- util.system("%s \"%s\"" % (editor, name), errprefix=_("edit failed"))
+ util.system("%s \"%s\"" % (editor, name),
+ environ={'HGUSER': self.username()},
+ onerr=util.Abort, errprefix=_("edit failed"))
t = open(name).read()
t = re.sub("(?m)^HG:.*\n", "", t)
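
Every repository now gets a child ui object: lookups consult the setconfig()
overlay, then the repo's own hgrc, then fall back to the parent ui that read
the global config. A stripped-down sketch of that resolution order (class
name and values are illustrative; the real child also copies the parent's
ConfigParser data for interpolation):

    class miniui(object):
        def __init__(self, parent=None):
            self.parent = parent
            self.overlay = {}
            self.data = {}
        def setconfig(self, section, name, val):
            self.overlay[(section, name)] = val
        def config(self, section, name, default=None):
            if (section, name) in self.overlay:
                return self.overlay[(section, name)]
            if (section, name) in self.data:
                return self.data[(section, name)]
            if self.parent is not None:
                return self.parent.config(section, name, default)
            return default

    globalui = miniui()
    globalui.data[('ui', 'username')] = 'global user'
    repoui = miniui(parent=globalui)
    print repoui.config('ui', 'username')    # inherited: 'global user'
    repoui.data[('ui', 'username')] = 'repo user'
    print repoui.config('ui', 'username')    # repo hgrc wins: 'repo user'
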
--- a/mercurial/util.py Sun Mar 12 15:58:56 2006 -0800
+++ b/mercurial/util.py Sun Mar 12 16:21:59 2006 -0800
@@ -179,7 +179,7 @@
if root == os.sep:
rootsep = os.sep
else:
- rootsep = root + os.sep
+ rootsep = root + os.sep
name = myname
if not name.startswith(os.sep):
name = os.path.join(root, cwd, name)
@@ -315,15 +315,42 @@
(files and filematch(fn)))),
(inc or exc or (pats and pats != [('glob', '**')])) and True)
-def system(cmd, errprefix=None):
- """execute a shell command that must succeed"""
- rc = os.system(cmd)
- if rc:
- errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
- explain_exit(rc)[0])
- if errprefix:
- errmsg = "%s: %s" % (errprefix, errmsg)
- raise Abort(errmsg)
+def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
+ '''enhanced shell command execution.
+ run with environment maybe modified, maybe in different dir.
+
+ if command fails and onerr is None, return status. if ui object,
+ print error message and return status, else raise onerr object as
+ exception.'''
+ oldenv = {}
+ for k in environ:
+ oldenv[k] = os.environ.get(k)
+ if cwd is not None:
+ oldcwd = os.getcwd()
+ try:
+ for k, v in environ.iteritems():
+ os.environ[k] = str(v)
+ if cwd is not None and oldcwd != cwd:
+ os.chdir(cwd)
+ rc = os.system(cmd)
+ if rc and onerr:
+ errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
+ explain_exit(rc)[0])
+ if errprefix:
+ errmsg = '%s: %s' % (errprefix, errmsg)
+ try:
+ onerr.warn(errmsg + '\n')
+ except AttributeError:
+ raise onerr(errmsg)
+ return rc
+ finally:
+ for k, v in oldenv.iteritems():
+ if v is None:
+ del os.environ[k]
+ else:
+ os.environ[k] = v
+ if cwd is not None and oldcwd != cwd:
+ os.chdir(oldcwd)
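
util.system() now takes an extra environment, a working directory, and an
onerr handler that either warns (when given a ui object) or is raised as an
exception. A usage sketch (the command, variable, and cwd are examples):

    from mercurial import util

    rc = util.system('echo "node is $HG_NODE"',
                     environ={'HG_NODE': '0' * 40},
                     cwd='/tmp',
                     onerr=util.Abort,
                     errprefix='hook failed')
    print 'exit status', rc
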
def rename(src, dst):
"""forcibly rename a file"""
@@ -363,7 +390,14 @@
else:
shutil.copy(src, dst)
-def opener(base):
+def audit_path(path):
+ """Abort if path contains dangerous components"""
+ parts = os.path.normcase(path).split(os.sep)
+ if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
+ or os.pardir in parts):
+ raise Abort(_("path contains illegal component: %s\n") % path)
+
+def opener(base, audit=True):
"""
return a function that opens files relative to base
@@ -371,6 +405,7 @@
remote file access from higher level code.
"""
p = base
+ audit_p = audit
def mktempcopy(name):
d, fn = os.path.split(name)
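
audit_path() is the check behind the new opener(audit=True) default: it
aborts on paths with a drive letter, an absolute or '.hg'-leading first
component, or any '..' component. A quick demonstration of what passes and
what aborts (POSIX paths, chosen for illustration):

    from mercurial import util

    for p in ('data/foo.i', '.hg/hgrc', '../etc/passwd', 'a/../b'):
        try:
            util.audit_path(p)
            print p, '-> ok'
        except util.Abort:
            print p, '-> rejected'
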
@@ -401,6 +436,8 @@
self.close()
def o(path, mode="r", text=False, atomic=False):
+ if audit_p:
+ audit_path(path)
f = os.path.join(p, path)
if not text:
@@ -489,7 +526,7 @@
return pf
try: # ActivePython can create hard links using win32file module
- import win32file
+ import win32api, win32con, win32file
def os_link(src, dst): # NB will only succeed on NTFS
win32file.CreateHardLink(dst, src)
@@ -506,8 +543,18 @@
except:
return os.stat(pathname).st_nlink
+ def testpid(pid):
+ '''return False if pid is dead, True if running or not known'''
+ try:
+ win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION,
+ False, pid)
+ return True
+ except:
+ return True
+
except ImportError:
- pass
+ def testpid(pid):
+ '''return False if pid dead, True if running or not known'''
+ return True
def is_exec(f, last):
return last
@@ -604,6 +651,14 @@
else:
raise
+ def testpid(pid):
+ '''return False if pid dead, True if running or not sure'''
+ try:
+ os.kill(pid, 0)
+ return True
+ except OSError, inst:
+ return inst.errno != errno.ESRCH
+
def explain_exit(code):
"""return a 2-tuple (desc, code) describing a process's status"""
if os.WIFEXITED(code):
@@ -700,3 +755,16 @@
if f >= 0:
user = user[f+1:]
return user
+
+def walkrepos(path):
+ '''yield every hg repository under path, recursively.'''
+ def errhandler(err):
+ if err.filename == path:
+ raise err
+
+ for root, dirs, files in os.walk(path, onerror=errhandler):
+ for d in dirs:
+ if d == '.hg':
+ yield root
+ dirs[:] = []
+ break
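
walkrepos() yields every repository below a directory and stops descending
once it finds a .hg subdirectory; the hgwebdir [collections] support above
builds on it. A usage sketch (the root path is an example):

    from mercurial import util

    for repo in util.walkrepos('/home/user/repos'):
        print repo
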
--- a/setup.py Sun Mar 12 15:58:56 2006 -0800
+++ b/setup.py Sun Mar 12 16:21:59 2006 -0800
@@ -5,8 +5,11 @@
# './setup.py install', or
# './setup.py --help' for more options
+import sys
+if not hasattr(sys, 'version_info') or sys.version_info < (2, 3):
+ raise SystemExit, "Mercurial requires python 2.3 or later."
+
import glob
-import sys
from distutils.core import setup, Extension
from distutils.command.install_data import install_data
--- a/tests/run-tests Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/run-tests Sun Mar 12 16:21:59 2006 -0800
@@ -53,7 +53,16 @@
fi
cd "$TESTDIR"
-PATH="$INST/bin:$PATH"; export PATH
+BINDIR="$INST/bin"
+PATH="$BINDIR:$PATH"; export PATH
+if [ -n "$PYTHON" ]; then
+ {
+ echo "#!/bin/sh"
+ echo "exec \"$PYTHON"'" "$@"'
+ } > "$BINDIR/python"
+ chmod 755 "$BINDIR/python"
+fi
+
PYTHONPATH="$PYTHONDIR"; export PYTHONPATH
run_one() {
--- a/tests/test-clone-r Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-clone-r Sun Mar 12 16:21:59 2006 -0800
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
hg init test
cd test
--- a/tests/test-commit.out Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-commit.out Sun Mar 12 16:21:59 2006 -0800
@@ -1,5 +1,3 @@
-transaction abort!
-rollback completed
abort: impossible time zone offset: 4444444
transaction abort!
rollback completed
@@ -13,4 +11,6 @@
transaction abort!
rollback completed
abort: date exceeds 32 bits: 111111111111
+transaction abort!
+rollback completed
abort: No such file or directory: .../test/bar
--- a/tests/test-help.out Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-help.out Sun Mar 12 16:21:59 2006 -0800
@@ -64,7 +64,6 @@
paths show definition of symbolic path names
pull pull changes from the specified source
push push changes to the specified destination
- rawcommit raw commit interface (DEPRECATED)
recover roll back an interrupted transaction
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
@@ -106,7 +105,6 @@
paths show definition of symbolic path names
pull pull changes from the specified source
push push changes to the specified destination
- rawcommit raw commit interface (DEPRECATED)
recover roll back an interrupted transaction
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
@@ -173,9 +171,9 @@
-r --rev revision
-a --text treat all files as text
- -I --include include names matching the given patterns
-p --show-function show which function each change is in
-w --ignore-all-space ignore white space when comparing lines
+ -I --include include names matching the given patterns
-X --exclude exclude names matching the given patterns
hg status [OPTION]... [FILE]...
--- a/tests/test-hook.out Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-hook.out Sun Mar 12 16:21:59 2006 -0800
@@ -1,23 +1,23 @@
precommit hook: p1=0000000000000000000000000000000000000000 p2=
pretxncommit hook: n=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p1=0000000000000000000000000000000000000000 p2=
0:cb9a9f314b8b
+commit hook: n=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p1=0000000000000000000000000000000000000000 p2=
commit hook b
-commit hook: n=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p1=0000000000000000000000000000000000000000 p2=
precommit hook: p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
pretxncommit hook: n=ab228980c14deea8b9555d91c9581127383e40fd p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
1:ab228980c14d
+commit hook: n=ab228980c14deea8b9555d91c9581127383e40fd p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
commit hook b
-commit hook: n=ab228980c14deea8b9555d91c9581127383e40fd p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
precommit hook: p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
pretxncommit hook: n=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
2:ee9deb46ab31
+commit hook: n=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
commit hook b
-commit hook: n=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 p1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b p2=
precommit hook: p1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 p2=ab228980c14deea8b9555d91c9581127383e40fd
pretxncommit hook: n=07f3376c1e655977439df2a814e3cc14b27abac2 p1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 p2=ab228980c14deea8b9555d91c9581127383e40fd
3:07f3376c1e65
+commit hook: n=07f3376c1e655977439df2a814e3cc14b27abac2 p1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 p2=ab228980c14deea8b9555d91c9581127383e40fd
commit hook b
-commit hook: n=07f3376c1e655977439df2a814e3cc14b27abac2 p1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 p2=ab228980c14deea8b9555d91c9581127383e40fd
prechangegroup hook
changegroup hook: n=ab228980c14deea8b9555d91c9581127383e40fd
incoming hook: n=ab228980c14deea8b9555d91c9581127383e40fd
@@ -34,8 +34,8 @@
precommit hook: p1=07f3376c1e655977439df2a814e3cc14b27abac2 p2=
pretxncommit hook: n=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 p1=07f3376c1e655977439df2a814e3cc14b27abac2 p2=
4:3cd2c6a5a36c
+commit hook: n=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 p1=07f3376c1e655977439df2a814e3cc14b27abac2 p2=
commit hook b
-commit hook: n=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 p1=07f3376c1e655977439df2a814e3cc14b27abac2 p2=
tag hook: t=a n=07f3376c1e655977439df2a814e3cc14b27abac2 l=0
pretag hook: t=la n=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 l=1
tag hook: t=la n=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 l=1
@@ -47,11 +47,14 @@
abort: pretag.forbid hook exited with status 1
4:3cd2c6a5a36c
precommit hook: p1=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 p2=
+pretxncommit hook: n=469a61fe67d64df9a5023e4c2b8a0b85c61e9b69 p1=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 p2=
+5:469a61fe67d6
pretxncommit.forbid hook: tip=5:469a61fe67d6
abort: pretxncommit.forbid hook exited with status 1
transaction abort!
rollback completed
4:3cd2c6a5a36c
+precommit hook: p1=3cd2c6a5a36c5908aad3bc0d717c29873a05dfc2 p2=
precommit.forbid hook
abort: precommit.forbid hook exited with status 1
4:3cd2c6a5a36c
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-push-hook-lock Sun Mar 12 16:21:59 2006 -0800
@@ -0,0 +1,13 @@
+#!/bin/sh
+hg init 1
+echo '[ui]' >> 1/.hg/hgrc
+echo 'timeout = 10' >> 1/.hg/hgrc
+echo foo > 1/foo
+hg --cwd 1 ci -A -m foo
+hg clone 1 2
+hg clone 2 3
+echo '[hooks]' >> 2/.hg/hgrc
+echo 'changegroup.push = hg push -qf ../1' >> 2/.hg/hgrc
+echo bar >> 3/foo
+hg --cwd 3 ci -m bar
+hg --cwd 3 push ../2
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-push-hook-lock.out Sun Mar 12 16:21:59 2006 -0800
@@ -0,0 +1,7 @@
+adding foo
+pushing to ../2
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
--- a/tests/test-push-r Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-push-r Sun Mar 12 16:21:59 2006 -0800
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
hg init test
cd test
--- a/tests/test-rename Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-rename Sun Mar 12 16:21:59 2006 -0800
@@ -171,3 +171,11 @@
hg rename --after d1/bb d1/bc
hg status
hg update -C
+
+echo "# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)"
+hg rename d1/b d1/bb
+echo "some stuff added to d1/bb" >> d1/bb
+hg rename d1/bb d1/b
+hg status
+hg debugstate | grep copy
+hg update -C
--- a/tests/test-rename.out Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-rename.out Sun Mar 12 16:21:59 2006 -0800
@@ -252,3 +252,5 @@
# transitive rename --after
A d1/bc
R d1/b
+# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)
+M d1/b
--- a/tests/test-static-http.out Sun Mar 12 15:58:56 2006 -0800
+++ b/tests/test-static-http.out Sun Mar 12 16:21:59 2006 -0800
@@ -1,4 +1,4 @@
-abort: <urlopen error (111, 'Connection refused')>
+abort: Connection refused
255
ls: copy: No such file or directory
changeset: 0:61c9426e69fe