# HG changeset patch
# User Patrick Mezard
# Date 1179607903 -7200
# Node ID 30e7aa755efdbcb4e6e34fd81451054d2485d492
# Parent af8db3b42a4a1800293dc1290f116a5d5d623a8a# Parent 2d32e3ae01a7b2d67907e95b7ccea19f36bd15ad
Merge with crew-stable.
diff -r 2d32e3ae01a7 -r 30e7aa755efd Makefile
--- a/Makefile Sat May 19 22:47:01 2007 +0200
+++ b/Makefile Sat May 19 22:51:43 2007 +0200
@@ -67,10 +67,10 @@
TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist
tests:
- cd tests && $(PYTHON) run-tests.py
+ cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
test-%:
- cd tests && $(PYTHON) run-tests.py $@
+ cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@
.PHONY: help all local build doc clean install install-bin install-doc \
diff -r 2d32e3ae01a7 -r 30e7aa755efd README
--- a/README Sat May 19 22:47:01 2007 +0200
+++ b/README Sat May 19 22:51:43 2007 +0200
@@ -1,99 +1,10 @@
-MERCURIAL QUICK-START
-
-Setting up Mercurial:
-
- Note: some distributions fails to include bits of distutils by
- default, you'll need python-dev to install. You'll also need a C
- compiler and a 3-way merge tool like merge, tkdiff, or kdiff3.
-
- First, unpack the source:
-
- $ tar xvzf mercurial-.tar.gz
- $ cd mercurial-
-
- When installing, change python to python2.3 or python2.4 if 2.2 is the
- default on your system.
-
- To install system-wide:
-
- $ python setup.py install --force
-
- To install in your home directory (~/bin and ~/lib, actually), run:
-
- $ python setup.py install --home=${HOME} --force
- $ export PYTHONPATH=${HOME}/lib/python # (or lib64/ on some systems)
- $ export PATH=${HOME}/bin:$PATH # add these to your .bashrc
-
- And finally:
-
- $ hg debuginstall # run some basic tests
- $ hg # show help
-
- If you get complaints about missing modules, you probably haven't set
- PYTHONPATH correctly.
-
-Setting up a Mercurial project:
-
- $ hg init project # creates project directory
- $ cd project
- # copy files in, edit them
- $ hg add # add all unknown files
- $ hg commit # commit all changes, edit changelog entry
-
- Mercurial will look for a file named .hgignore in the root of your
- repository which contains a set of regular expressions to ignore in
- file paths.
-
-Branching and merging:
+Basic install:
- $ hg clone project project-work # create a new branch
- $ cd project-work
- $
- $ hg commit
- $ cd ../project
- $ hg pull ../project-work # pull changesets from project-work
- $ hg merge # merge the new tip from project-work into
- # our working directory
- $ hg commit # commit the result of the merge
-
-Importing patches:
-
- Simple:
- $ patch < ../p/foo.patch
- $ hg commit -A
-
- Fast:
- $ cat ../p/patchlist | xargs hg import -p1 -b ../p
-
-Exporting a patch:
-
- (make changes)
- $ hg commit
- $ hg export tip > foo.patch # export latest change
+ $ make # see install targets
+ $ make install # do a system-wide install
+ $ hg debuginstall # sanity-check setup
+ $ hg # see help
-Network support:
-
- # pull from the primary Mercurial repo
- foo$ hg clone http://selenic.com/hg/
- foo$ cd hg
-
- # make your current repo available via http://server:8000/
- foo$ hg serve
-
- # pushing and pulling changes to/from a remote repo with SSH
- foo$ hg push ssh://user@example.com/my/repository
- foo$ hg pull ssh://user@example.com//home/somebody/his/repository
+See http://www.selenic.com/mercurial/ for detailed installation
+instructions, platform-specific notes, and Mercurial user information.
- # merge changes from a remote machine (e.g. running 'hg serve')
- bar$ hg pull http://foo:8000/
- bar$ hg merge # merge changes into your working directory
- bar$ hg commit # commit merge in to your local repository
-
- # Set up a CGI server on your webserver
- foo$ cp hgweb.cgi ~/public_html/hg/index.cgi
- foo$ emacs ~/public_html/hg/index.cgi # adjust the defaults
-
-For more info:
-
- Documentation in doc/
- Mercurial website at http://selenic.com/mercurial
diff -r 2d32e3ae01a7 -r 30e7aa755efd contrib/bash_completion
--- a/contrib/bash_completion Sat May 19 22:47:01 2007 +0200
+++ b/contrib/bash_completion Sat May 19 22:51:43 2007 +0200
@@ -145,6 +145,7 @@
# global options
case "$prev" in
-R|--repository)
+ _hg_paths
_hg_repos
return
;;
@@ -477,3 +478,25 @@
{
_hg_tags
}
+
+
+# transplant
+_hg_cmd_transplant()
+{
+ case "$prev" in
+ -s|--source)
+ _hg_paths
+ _hg_repos
+ return
+ ;;
+ --filter)
+ # standard filename completion
+ return
+ ;;
+ esac
+
+ # all other transplant options values and command parameters are revisions
+ _hg_tags
+ return
+}
+
diff -r 2d32e3ae01a7 -r 30e7aa755efd contrib/churn.py
--- a/contrib/churn.py Sat May 19 22:47:01 2007 +0200
+++ b/contrib/churn.py Sat May 19 22:51:43 2007 +0200
@@ -11,10 +11,9 @@
#
#
-from mercurial.demandload import *
+import sys
from mercurial.i18n import gettext as _
-demandload(globals(), 'time sys signal os')
-demandload(globals(), 'mercurial:hg,mdiff,fancyopts,cmdutil,ui,util,templater,node')
+from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
def __gather(ui, repo, node1, node2):
def dirtywork(f, mmap1, mmap2):
diff -r 2d32e3ae01a7 -r 30e7aa755efd contrib/convert-repo
--- a/contrib/convert-repo Sat May 19 22:47:01 2007 +0200
+++ b/contrib/convert-repo Sat May 19 22:51:43 2007 +0200
@@ -3,29 +3,49 @@
# This is a generalized framework for converting between SCM
# repository formats.
#
-# In its current form, it's hardcoded to convert incrementally between
-# git and Mercurial.
-#
# To use, run:
#
-# convert-repo
+# convert-repo
Configuration notes
-
The default editor for commit messages is 'notepad'. You can set the EDITOR
+
Default editor
+ The default editor for commit messages is 'notepad'. You can set the EDITOR
(or HGEDITOR) environment variable to specify your preference or set it in
- mercurial.ini:
+ mercurial.ini:
[ui]
editor = whatever
+
Configuring a Merge program
+ It should be emphasized that Mercurial by itself doesn't attempt to do a
+ Merge at the file level, neither does it make any attempt to Resolve the conflicts.
+
+ By default, Mercurial will use the merge program defined by the HGMERGE environment
+ variable, or uses the one defined in the mercurial.ini file. (see MergeProgram on the Mercurial Wiki for more information)
Reporting problems
diff -r 2d32e3ae01a7 -r 30e7aa755efd contrib/win32/mercurial.iss
--- a/contrib/win32/mercurial.iss Sat May 19 22:47:01 2007 +0200
+++ b/contrib/win32/mercurial.iss Sat May 19 22:51:43 2007 +0200
@@ -2,7 +2,7 @@
; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
[Setup]
-AppCopyright=Copyright 2005, 2006 Matt Mackall and others
+AppCopyright=Copyright 2005-2007 Matt Mackall and others
AppName=Mercurial
AppVerName=Mercurial snapshot
InfoAfterFile=contrib/win32/postinstall.txt
@@ -18,7 +18,7 @@
DefaultDirName={sd}\Mercurial
SourceDir=C:\hg\hg-release
VersionInfoDescription=Mercurial distributed SCM
-VersionInfoCopyright=Copyright 2005, 2006 Matt Mackall and others
+VersionInfoCopyright=Copyright 2005-2007 Matt Mackall and others
VersionInfoCompany=Matt Mackall and others
InternalCompressLevel=max
SolidCompression=true
diff -r 2d32e3ae01a7 -r 30e7aa755efd doc/Makefile
--- a/doc/Makefile Sat May 19 22:47:01 2007 +0200
+++ b/doc/Makefile Sat May 19 22:51:43 2007 +0200
@@ -2,7 +2,7 @@
MAN=$(SOURCES:%.txt=%)
HTML=$(SOURCES:%.txt=%.html)
PREFIX=/usr/local
-MANDIR=$(PREFIX)/man
+MANDIR=$(PREFIX)/share/man
INSTALL=install -c
all: man html
@@ -36,8 +36,8 @@
install: man
for i in $(MAN) ; do \
subdir=`echo $$i | sed -n 's/..*\.\([0-9]\)$$/man\1/p'` ; \
- mkdir -p $(MANDIR)/$$subdir ; \
- $(INSTALL) $$i $(MANDIR)/$$subdir ; \
+ mkdir -p $(DESTDIR)/$(MANDIR)/$$subdir ; \
+ $(INSTALL) $$i $(DESTDIR)/$(MANDIR)/$$subdir ; \
done
clean:
diff -r 2d32e3ae01a7 -r 30e7aa755efd doc/hgrc.5.txt
--- a/doc/hgrc.5.txt Sat May 19 22:47:01 2007 +0200
+++ b/doc/hgrc.5.txt Sat May 19 22:51:43 2007 +0200
@@ -215,6 +215,15 @@
# (this extension will get loaded from the file specified)
myfeature = ~/.hgext/myfeature.py
+format::
+
+ usestore;;
+ Enable or disable the "store" repository format which improves
+ compatibility with systems that fold case or otherwise mangle
+ filenames. Enabled by default. Disabling this option will allow
+ you to store longer filenames in some situations at the expense of
+ compatibility.
+
hooks::
Commands or Python functions that get automatically executed by
various actions such as starting or finishing a commit. Multiple
@@ -423,6 +432,9 @@
merge;;
The conflict resolution program to use during a manual merge.
Default is "hgmerge".
+ patch;;
+ command to use to apply patches. Look for 'gpatch' or 'patch' in PATH if
+ unset.
quiet;;
Reduce the amount of output printed. True or False. Default is False.
remotecmd;;
@@ -507,6 +519,11 @@
push_ssl;;
Whether to require that inbound pushes be transported over SSL to
prevent password sniffing. Default is true.
+ staticurl;;
+ Base URL to use for static files. If unset, static files (e.g.
+ the hgicon.png favicon) will be served by the CGI script itself.
+ Use this setting to serve them directly with the HTTP server.
+ Example: "http://hgserver/static/"
stripes;;
How many lines a "zebra stripe" should span in multiline output.
Default is 1; set to 0 to disable.
diff -r 2d32e3ae01a7 -r 30e7aa755efd hg
--- a/hg Sat May 19 22:47:01 2007 +0200
+++ b/hg Sat May 19 22:51:43 2007 +0200
@@ -7,6 +7,5 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial import commands
-
-commands.run()
+import mercurial.commands
+mercurial.commands.run()
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/acl.py
--- a/hgext/acl.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/acl.py Sat May 19 22:51:43 2007 +0200
@@ -45,10 +45,10 @@
# glob pattern = user4, user5
# ** = user6
-from mercurial.demandload import *
-from mercurial.i18n import gettext as _
+from mercurial.i18n import _
from mercurial.node import *
-demandload(globals(), 'getpass mercurial:util')
+from mercurial import util
+import getpass
class checker(object):
'''acl checker.'''
@@ -91,7 +91,7 @@
def check(self, node):
'''return if access allowed, raise exception if not.'''
- files = self.repo.changelog.read(node)[3]
+ files = self.repo.changectx(node).files()
if self.deniable:
for f in files:
if self.deny(f):
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/bugzilla.py
--- a/hgext/bugzilla.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/bugzilla.py Sat May 19 22:51:43 2007 +0200
@@ -52,10 +52,10 @@
# [usermap]
# committer_email = bugzilla_user_name
-from mercurial.demandload import *
-from mercurial.i18n import gettext as _
+from mercurial.i18n import _
from mercurial.node import *
-demandload(globals(), 'mercurial:cmdutil,templater,util os re time')
+from mercurial import cmdutil, templater, util
+import os, re, time
MySQLdb = None
@@ -222,7 +222,7 @@
_bug_re = None
_split_re = None
- def find_bug_ids(self, node, desc):
+ def find_bug_ids(self, ctx):
'''find valid bug ids that are referred to in changeset
comments and that do not already have references to this
changeset.'''
@@ -235,7 +235,7 @@
start = 0
ids = {}
while True:
- m = bugzilla._bug_re.search(desc, start)
+ m = bugzilla._bug_re.search(ctx.description(), start)
if not m:
break
start = m.end()
@@ -246,10 +246,10 @@
if ids:
ids = self.filter_real_bug_ids(ids)
if ids:
- ids = self.filter_unknown_bug_ids(node, ids)
+ ids = self.filter_unknown_bug_ids(ctx.node(), ids)
return ids
- def update(self, bugid, node, changes):
+ def update(self, bugid, ctx):
'''update bugzilla bug with reference to changeset.'''
def webroot(root):
@@ -268,7 +268,7 @@
mapfile = self.ui.config('bugzilla', 'style')
tmpl = self.ui.config('bugzilla', 'template')
t = cmdutil.changeset_templater(self.ui, self.repo,
- False, None, mapfile, False)
+ False, mapfile, False)
if not mapfile and not tmpl:
tmpl = _('changeset {node|short} in repo {root} refers '
'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
@@ -276,13 +276,13 @@
tmpl = templater.parsestring(tmpl, quoted=False)
t.use_template(tmpl)
self.ui.pushbuffer()
- t.show(changenode=node, changes=changes,
+ t.show(changenode=ctx.node(), changes=ctx.changeset(),
bug=str(bugid),
hgweb=self.ui.config('web', 'baseurl'),
root=self.repo.root,
webroot=webroot(self.repo.root))
data = self.ui.popbuffer()
- self.add_comment(bugid, data, templater.email(changes[1]))
+ self.add_comment(bugid, data, templater.email(ctx.user()))
def hook(ui, repo, hooktype, node=None, **kwargs):
'''add comment to bugzilla for each changeset that refers to a
@@ -300,12 +300,11 @@
hooktype)
try:
bz = bugzilla(ui, repo)
- bin_node = bin(node)
- changes = repo.changelog.read(bin_node)
- ids = bz.find_bug_ids(bin_node, changes[4])
+ ctx = repo.changectx(node)
+ ids = bz.find_bug_ids(ctx)
if ids:
for id in ids:
- bz.update(id, bin_node, changes)
+ bz.update(id, ctx)
bz.notify(ids)
except MySQLdb.MySQLError, err:
raise util.Abort(_('database error: %s') % err[1])
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/extdiff.py
--- a/hgext/extdiff.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/extdiff.py Sat May 19 22:51:43 2007 +0200
@@ -48,16 +48,15 @@
# needed files, so running the external diff program will actually be
# pretty fast (at least faster than having to compare the entire tree).
-from mercurial.demandload import demandload
-from mercurial.i18n import gettext as _
+from mercurial.i18n import _
from mercurial.node import *
-demandload(globals(), 'mercurial:cmdutil,util os shutil tempfile')
+from mercurial import cmdutil, util
+import os, shutil, tempfile
def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
def snapshot_node(files, node):
'''snapshot files as of some revision'''
- changes = repo.changelog.read(node)
- mf = repo.manifest.read(changes[0])
+ mf = repo.changectx(node).manifest()
dirname = os.path.basename(repo.root)
if dirname == "":
dirname = "root"
@@ -77,7 +76,8 @@
destdir = os.path.dirname(dest)
if not os.path.isdir(destdir):
os.makedirs(destdir)
- repo.wwrite(wfn, repo.file(fn).read(mf[fn]), open(dest, 'wb'))
+ data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
+ open(dest, 'wb').write(data)
return dirname
def snapshot_wdir(files):
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/fetch.py
--- a/hgext/fetch.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/fetch.py Sat May 19 22:51:43 2007 +0200
@@ -5,10 +5,9 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial.demandload import *
-from mercurial.i18n import gettext as _
+from mercurial.i18n import _
from mercurial.node import *
-demandload(globals(), 'mercurial:commands,hg,node,util')
+from mercurial import commands, hg, node, util
def fetch(ui, repo, source='default', **opts):
'''Pull changes from a remote repository, merge new changes if needed.
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/gpg.py
--- a/hgext/gpg.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/gpg.py Sat May 19 22:51:43 2007 +0200
@@ -8,7 +8,7 @@
import os, tempfile, binascii
from mercurial import util
from mercurial import node as hgnode
-from mercurial.i18n import gettext as _
+from mercurial.i18n import _
class gpg:
def __init__(self, path, key=None):
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/graphlog.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/graphlog.py Sat May 19 22:51:43 2007 +0200
@@ -0,0 +1,266 @@
+# ASCII graph log extension for Mercurial
+#
+# Copyright 2007 Joel Rosdahl
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+import sys
+from mercurial.cmdutil import revrange, show_changeset
+from mercurial.i18n import _
+from mercurial.node import nullid, nullrev
+from mercurial.util import Abort
+
+def revision_grapher(repo, start_rev, stop_rev):
+ """incremental revision grapher
+
+ This generator function walks through the revision history from
+ revision start_rev to revision stop_rev (which must be less than
+ or equal to start_rev) and for each revision emits tuples with the
+ following elements:
+
+ - Current revision.
+ - Current node.
+ - Column of the current node in the set of ongoing edges.
+ - Edges; a list of (col, next_col) indicating the edges between
+ the current node and its parents.
+ - Number of columns (ongoing edges) in the current revision.
+ - The difference between the number of columns (ongoing edges)
+ in the next revision and the number of columns (ongoing edges)
+ in the current revision. That is: -1 means one column removed;
+ 0 means no columns added or removed; 1 means one column added.
+ """
+
+ assert start_rev >= stop_rev
+ curr_rev = start_rev
+ revs = []
+ while curr_rev >= stop_rev:
+ node = repo.changelog.node(curr_rev)
+
+ # Compute revs and next_revs.
+ if curr_rev not in revs:
+ # New head.
+ revs.append(curr_rev)
+ rev_index = revs.index(curr_rev)
+ next_revs = revs[:]
+
+ # Add parents to next_revs.
+ parents = get_rev_parents(repo, curr_rev)
+ parents_to_add = []
+ for parent in parents:
+ if parent not in next_revs:
+ parents_to_add.append(parent)
+ parents_to_add.sort()
+ next_revs[rev_index:rev_index + 1] = parents_to_add
+
+ edges = []
+ for parent in parents:
+ edges.append((rev_index, next_revs.index(parent)))
+
+ n_columns_diff = len(next_revs) - len(revs)
+ yield (curr_rev, node, rev_index, edges, len(revs), n_columns_diff)
+
+ revs = next_revs
+ curr_rev -= 1
+
+def get_rev_parents(repo, rev):
+ return [x for x in repo.changelog.parentrevs(rev) if x != nullrev]
+
+def fix_long_right_edges(edges):
+ for (i, (start, end)) in enumerate(edges):
+ if end > start:
+ edges[i] = (start, end + 1)
+
+def draw_edges(edges, nodeline, interline):
+ for (start, end) in edges:
+ if start == end + 1:
+ interline[2 * end + 1] = "/"
+ elif start == end - 1:
+ interline[2 * start + 1] = "\\"
+ elif start == end:
+ interline[2 * start] = "|"
+ else:
+ nodeline[2 * end] = "+"
+ if start > end:
+ (start, end) = (end,start)
+ for i in range(2 * start + 1, 2 * end):
+ if nodeline[i] != "+":
+ nodeline[i] = "-"
+
+def format_line(line, level, logstr):
+ text = "%-*s %s" % (2 * level, "".join(line), logstr)
+ return "%s\n" % text.rstrip()
+
+def get_nodeline_edges_tail(
+ node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
+ if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
+ # Still going in the same non-vertical direction.
+ if n_columns_diff == -1:
+ start = max(node_index + 1, p_node_index)
+ tail = ["|", " "] * (start - node_index - 1)
+ tail.extend(["/", " "] * (n_columns - start))
+ return tail
+ else:
+ return ["\\", " "] * (n_columns - node_index - 1)
+ else:
+ return ["|", " "] * (n_columns - node_index - 1)
+
+def get_padding_line(ni, n_columns, edges):
+ line = []
+ line.extend(["|", " "] * ni)
+ if (ni, ni - 1) in edges or (ni, ni) in edges:
+ # (ni, ni - 1) (ni, ni)
+ # | | | | | | | |
+ # +---o | | o---+
+ # | | c | | c | |
+ # | |/ / | |/ /
+ # | | | | | |
+ c = "|"
+ else:
+ c = " "
+ line.extend([c, " "])
+ line.extend(["|", " "] * (n_columns - ni - 1))
+ return line
+
+def get_limit(limit_opt):
+ if limit_opt:
+ try:
+ limit = int(limit_opt)
+ except ValueError:
+ raise Abort(_("limit must be a positive integer"))
+ if limit <= 0:
+ raise Abort(_("limit must be positive"))
+ else:
+ limit = sys.maxint
+ return limit
+
+def get_revs(repo, rev_opt):
+ if rev_opt:
+ revs = revrange(repo, rev_opt)
+ return (max(revs), min(revs))
+ else:
+ return (repo.changelog.count() - 1, 0)
+
+def graphlog(ui, repo, *args, **opts):
+ """show revision history alongside an ASCII revision graph
+
+ Print a revision history alongside a revision graph drawn with
+ ASCII characters.
+
+ Nodes printed as an @ character are parents of the working
+ directory.
+ """
+
+ limit = get_limit(opts["limit"])
+ (start_rev, stop_rev) = get_revs(repo, opts["rev"])
+ stop_rev = max(stop_rev, start_rev - limit + 1)
+ if start_rev == nullrev:
+ return
+ cs_printer = show_changeset(ui, repo, opts)
+ grapher = revision_grapher(repo, start_rev, stop_rev)
+ repo_parents = repo.dirstate.parents()
+ prev_n_columns_diff = 0
+ prev_node_index = 0
+
+ for (rev, node, node_index, edges, n_columns, n_columns_diff) in grapher:
+ # log_strings is the list of all log strings to draw alongside
+ # the graph.
+ ui.pushbuffer()
+ cs_printer.show(rev, node)
+ log_strings = ui.popbuffer().split("\n")[:-1]
+
+ if n_columns_diff == -1:
+ # Transform
+ #
+ # | | | | | |
+ # o | | into o---+
+ # |X / |/ /
+ # | | | |
+ fix_long_right_edges(edges)
+
+ # add_padding_line says whether to rewrite
+ #
+ # | | | | | | | |
+ # | o---+ into | o---+
+ # | / / | | | # <--- padding line
+ # o | | | / /
+ # o | |
+ add_padding_line = \
+ len(log_strings) > 2 and \
+ n_columns_diff == -1 and \
+ [x for (x, y) in edges if x + 1 < y]
+
+ # fix_nodeline_tail says whether to rewrite
+ #
+ # | | o | | | | o | |
+ # | | |/ / | | |/ /
+ # | o | | into | o / / # <--- fixed nodeline tail
+ # | |/ / | |/ /
+ # o | | o | |
+ fix_nodeline_tail = len(log_strings) <= 2 and not add_padding_line
+
+ # nodeline is the line containing the node character (@ or o).
+ nodeline = ["|", " "] * node_index
+ if node in repo_parents:
+ node_ch = "@"
+ else:
+ node_ch = "o"
+ nodeline.extend([node_ch, " "])
+
+ nodeline.extend(
+ get_nodeline_edges_tail(
+ node_index, prev_node_index, n_columns, n_columns_diff,
+ prev_n_columns_diff, fix_nodeline_tail))
+
+ # shift_interline is the line containing the non-vertical
+ # edges between this entry and the next.
+ shift_interline = ["|", " "] * node_index
+ if n_columns_diff == -1:
+ n_spaces = 1
+ edge_ch = "/"
+ elif n_columns_diff == 0:
+ n_spaces = 2
+ edge_ch = "|"
+ else:
+ n_spaces = 3
+ edge_ch = "\\"
+ shift_interline.extend(n_spaces * [" "])
+ shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
+
+ # Draw edges from the current node to its parents.
+ draw_edges(edges, nodeline, shift_interline)
+
+ # lines is the list of all graph lines to print.
+ lines = [nodeline]
+ if add_padding_line:
+ lines.append(get_padding_line(node_index, n_columns, edges))
+ lines.append(shift_interline)
+
+ # Make sure that there are as many graph lines as there are
+ # log strings.
+ while len(log_strings) < len(lines):
+ log_strings.append("")
+ if len(lines) < len(log_strings):
+ extra_interline = ["|", " "] * (n_columns + n_columns_diff)
+ while len(lines) < len(log_strings):
+ lines.append(extra_interline)
+
+ # Print lines.
+ indentation_level = max(n_columns, n_columns + n_columns_diff)
+ for (line, logstr) in zip(lines, log_strings):
+ ui.write(format_line(line, indentation_level, logstr))
+
+ # ...and start over.
+ prev_node_index = node_index
+ prev_n_columns_diff = n_columns_diff
+
+cmdtable = {
+ "glog":
+ (graphlog,
+ [("l", "limit", "", _("limit number of changes displayed")),
+ ("p", "patch", False, _("show patch")),
+ ("r", "rev", [], _("show the specified revision or range")),
+ ("", "style", "", _("display using template map file")),
+ ("", "template", "", _("display with template"))],
+ "hg glog [OPTIONS]"),
+}
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/hbisect.py
--- a/hgext/hbisect.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/hbisect.py Sat May 19 22:51:43 2007 +0200
@@ -6,9 +6,9 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial.i18n import gettext as _
-from mercurial.demandload import demandload
-demandload(globals(), "os sys sets mercurial:hg,util,commands,cmdutil")
+from mercurial.i18n import _
+from mercurial import hg, util, commands, cmdutil
+import os, sys, sets
versionstr = "0.0.3"
@@ -252,8 +252,21 @@
return 0
def bisect_run(ui, repo, cmd=None, *args):
- """bisect extension: dichotomic search in the DAG of changesets
-for subcommands see "hg bisect help\"
+ """Dichotomic search in the DAG of changesets
+
+This extension helps to find changesets which cause problems.
+To use, mark the earliest changeset you know introduces the problem
+as bad, then mark the latest changeset which is free from the problem
+as good. Bisect will update your working directory to a revision for
+testing. Once you have performed tests, mark the working directory
+as bad or good and bisect will either update to another candidate
+changeset or announce that it has found the bad revision.
+
+Note: bisect expects bad revisions to be descendants of good revisions.
+If you are looking for the point at which a problem was fixed, then make
+the problem-free state "bad" and the problematic state "good."
+
+For subcommands see "hg bisect help\"
"""
def help_(cmd=None, *args):
"""show help for a given bisect subcommand or all subcommands"""
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/hgk.py
--- a/hgext/hgk.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/hgk.py Sat May 19 22:51:43 2007 +0200
@@ -5,26 +5,18 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial.demandload import *
-demandload(globals(), 'time sys signal os')
-demandload(globals(), 'mercurial:hg,fancyopts,commands,ui,util,patch,revlog')
+import sys, os
+from mercurial import hg, fancyopts, commands, ui, util, patch, revlog
def difftree(ui, repo, node1=None, node2=None, *files, **opts):
"""diff trees from two commits"""
def __difftree(repo, node1, node2, files=[]):
- if node2:
- change = repo.changelog.read(node2)
- mmap2 = repo.manifest.read(change[0])
- status = repo.status(node1, node2, files=files)[:5]
- modified, added, removed, deleted, unknown = status
- else:
- status = repo.status(node1, files=files)[:5]
- modified, added, removed, deleted, unknown = status
- if not node1:
- node1 = repo.dirstate.parents()[0]
+ assert node2 is not None
+ mmap = repo.changectx(node1).manifest()
+ mmap2 = repo.changectx(node2).manifest()
+ status = repo.status(node1, node2, files=files)[:5]
+ modified, added, removed, deleted, unknown = status
- change = repo.changelog.read(node1)
- mmap = repo.manifest.read(change[0])
empty = hg.short(hg.nullid)
for f in modified:
@@ -70,32 +62,30 @@
if not opts['stdin']:
break
-def catcommit(repo, n, prefix, changes=None):
+def catcommit(repo, n, prefix, ctx=None):
nlprefix = '\n' + prefix;
- (p1, p2) = repo.changelog.parents(n)
- (h, h1, h2) = map(hg.short, (n, p1, p2))
- (i1, i2) = map(repo.changelog.rev, (p1, p2))
- if not changes:
- changes = repo.changelog.read(n)
- print "tree %s" % (hg.short(changes[0]))
- if i1 != hg.nullrev: print "parent %s" % (h1)
- if i2 != hg.nullrev: print "parent %s" % (h2)
- date_ar = changes[2]
- date = int(float(date_ar[0]))
- lines = changes[4].splitlines()
+ if ctx is None:
+ ctx = repo.changectx(n)
+ (p1, p2) = ctx.parents()
+ print "tree %s" % (hg.short(ctx.changeset()[0])) # use ctx.node() instead ??
+ if p1: print "parent %s" % (hg.short(p1.node()))
+ if p2: print "parent %s" % (hg.short(p2.node()))
+ date = ctx.date()
+ description = ctx.description().replace("\0", "")
+ lines = description.splitlines()
if lines and lines[-1].startswith('committer:'):
committer = lines[-1].split(': ')[1].rstrip()
else:
- committer = changes[1]
+ committer = ctx.user()
- print "author %s %s %s" % (changes[1], date, date_ar[1])
- print "committer %s %s %s" % (committer, date, date_ar[1])
- print "revision %d" % repo.changelog.rev(n)
+ print "author %s %s %s" % (ctx.user(), int(date[0]), date[1])
+ print "committer %s %s %s" % (committer, int(date[0]), date[1])
+ print "revision %d" % ctx.rev()
print ""
if prefix != "":
- print "%s%s" % (prefix, changes[4].replace('\n', nlprefix).strip())
+ print "%s%s" % (prefix, description.replace('\n', nlprefix).strip())
else:
- print changes[4]
+ print description
if prefix:
sys.stdout.write('\0')
@@ -146,8 +136,7 @@
# you can specify a commit to stop at by starting the sha1 with ^
def revtree(args, repo, full="tree", maxnr=0, parents=False):
def chlogwalk():
- ch = repo.changelog
- count = ch.count()
+ count = repo.changelog.count()
i = count
l = [0] * 100
chunk = 100
@@ -163,7 +152,8 @@
l[chunk - x:] = [0] * (chunk - x)
break
if full != None:
- l[x] = ch.read(ch.node(i + x))
+ l[x] = repo.changectx(i + x)
+ l[x].changeset() # force reading
else:
l[x] = 1
for x in xrange(chunk-1, -1, -1):
@@ -217,7 +207,7 @@
# walk the repository looking for commits that are in our
# reachability graph
- for i, changes in chlogwalk():
+ for i, ctx in chlogwalk():
n = repo.changelog.node(i)
mask = is_reachable(want_sha1, reachable, n)
if mask:
@@ -232,13 +222,13 @@
print hg.short(n) + parentstr
elif full == "commit":
print hg.short(n) + parentstr
- catcommit(repo, n, ' ', changes)
+ catcommit(repo, n, ' ', ctx)
else:
(p1, p2) = repo.changelog.parents(n)
(h, h1, h2) = map(hg.short, (n, p1, p2))
(i1, i2) = map(repo.changelog.rev, (p1, p2))
- date = changes[2][0]
+ date = ctx.date()[0]
print "%s %s:%s" % (date, h, mask),
mask = is_reachable(want_sha1, reachable, p1)
if i1 != hg.nullrev and mask > 0:
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/mq.py
--- a/hgext/mq.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/mq.py Sat May 19 22:51:43 2007 +0200
@@ -29,14 +29,16 @@
refresh contents of top applied patch qrefresh
'''
-from mercurial.demandload import *
-from mercurial.i18n import gettext as _
-from mercurial import commands
-demandload(globals(), "os sys re struct traceback errno bz2")
-demandload(globals(), "mercurial:cmdutil,hg,patch,revlog,util,changegroup")
+from mercurial.i18n import _
+from mercurial import commands, cmdutil, hg, patch, revlog, util, changegroup
+import os, sys, re, errno
commands.norepo += " qclone qversion"
+# Patch names looks like unix-file names.
+# They must be joinable with queue directory and result in the patch path.
+normname = util.normpath
+
class statusentry:
def __init__(self, rev, name=None):
if not name:
@@ -304,6 +306,15 @@
message.insert(0, subject)
return (message, comments, user, date, diffstart > 1)
+ def removeundo(self, repo):
+ undo = repo.sjoin('undo')
+ if not os.path.exists(undo):
+ return
+ try:
+ os.unlink(undo)
+ except OSError, inst:
+ self.ui.warn('error removing undo: %s\n' % str(inst))
+
def printdiff(self, repo, node1, node2=None, files=None,
fp=None, changes=None, opts={}):
fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
@@ -328,11 +339,12 @@
hg.clean(repo, head, wlock=wlock)
self.strip(repo, n, update=False, backup='strip', wlock=wlock)
- c = repo.changelog.read(rev)
+ ctx = repo.changectx(rev)
ret = hg.merge(repo, rev, wlock=wlock)
if ret:
raise util.Abort(_("update returned %d") % ret)
- n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
+ n = repo.commit(None, ctx.description(), ctx.user(),
+ force=1, wlock=wlock)
if n == None:
raise util.Abort(_("repo commit failed"))
try:
@@ -346,6 +358,7 @@
patchf.write(comments)
self.printdiff(repo, head, n, fp=patchf)
patchf.close()
+ self.removeundo(repo)
return (0, n)
def qparents(self, repo, rev=None):
@@ -378,6 +391,7 @@
pname = ".hg.patches.merge.marker"
n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
wlock=wlock)
+ self.removeundo(repo)
self.applied.append(statusentry(revlog.hex(n), pname))
self.applied_dirty = 1
@@ -403,6 +417,7 @@
self.applied_dirty = 1
if err:
return (err, head)
+ self.save_dirty()
return (0, head)
def patch(self, repo, patchfile):
@@ -512,6 +527,7 @@
self.ui.warn("fuzz found when applying patch, stopping\n")
err = 1
break
+ self.removeundo(repo)
return (err, n)
def delete(self, repo, patches, opts):
@@ -610,6 +626,7 @@
if r: r.add([patch])
if commitfiles:
self.refresh(repo, short=True)
+ self.removeundo(repo)
def strip(self, repo, rev, update=True, backup="all", wlock=None):
def limitheads(chlog, stop):
@@ -641,15 +658,12 @@
self.ui.warn("saving bundle to %s\n" % name)
return changegroup.writebundle(cg, name, "HG10BZ")
- def stripall(rev, revnum):
- cl = repo.changelog
- c = cl.read(rev)
- mm = repo.manifest.read(c[0])
+ def stripall(revnum):
+ mm = repo.changectx(rev).manifest()
seen = {}
- for x in xrange(revnum, cl.count()):
- c = cl.read(cl.node(x))
- for f in c[3]:
+ for x in xrange(revnum, repo.changelog.count()):
+ for f in repo.changectx(x).files():
if f in seen:
continue
seen[f] = 1
@@ -731,11 +745,12 @@
backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
chgrpfile = bundle(backupch)
- stripall(rev, revnum)
+ stripall(revnum)
change = chlog.read(rev)
chlog.strip(revnum, revnum)
repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
+ self.removeundo(repo)
if saveheads:
self.ui.status("adding branch\n")
commands.unbundle(self.ui, repo, "file:%s" % chgrpfile,
@@ -829,10 +844,29 @@
if not wlock:
wlock = repo.wlock()
patch = self.lookup(patch)
- if patch and self.isapplied(patch):
- raise util.Abort(_("patch %s is already applied") % patch)
+ # Suppose our series file is: A B C and the current 'top' patch is B.
+ # qpush C should be performed (moving forward)
+ # qpush B is a NOP (no change)
+ # qpush A is an error (can't go backwards with qpush)
+ if patch:
+ info = self.isapplied(patch)
+ if info:
+ if info[0] < len(self.applied) - 1:
+ raise util.Abort(_("cannot push to a previous patch: %s") %
+ patch)
+ if info[0] < len(self.series) - 1:
+ self.ui.warn(_('qpush: %s is already at the top\n') % patch)
+ else:
+ self.ui.warn(_('all patches are currently applied\n'))
+ return
+
+ # Following the above example, starting at 'top' of B:
+ # qpush should be performed (pushes C), but a subsequent qpush without
+ # an argument is an error (nothing to apply). This allows a loop
+ # of "...while hg qpush..." to work as it detects an error when done
if self.series_end() == len(self.series):
- raise util.Abort(_("patch series fully applied"))
+ self.ui.warn(_('patch series already fully applied\n'))
+ return 1
if not force:
self.check_localchanges(repo)
@@ -877,14 +911,7 @@
wlock=None):
def getfile(f, rev):
t = repo.file(f).read(rev)
- try:
- repo.wfile(f, "w").write(t)
- except IOError:
- try:
- os.makedirs(os.path.dirname(repo.wjoin(f)))
- except OSError, err:
- if err.errno != errno.EEXIST: raise
- repo.wfile(f, "w").write(t)
+ repo.wfile(f, "w").write(t)
if not wlock:
wlock = repo.wlock()
@@ -896,8 +923,12 @@
info = self.isapplied(patch)
if not info:
raise util.Abort(_("patch %s is not applied") % patch)
+
if len(self.applied) == 0:
- raise util.Abort(_("no patches applied"))
+ # Allow qpop -a to work repeatedly,
+ # but not qpop without an argument
+ self.ui.warn(_("no patches applied\n"))
+ return not all
if not update:
parents = repo.dirstate.parents()
@@ -989,8 +1020,11 @@
if comments:
# Remove existing message.
ci = 0
+ subj = None
for mi in xrange(len(message)):
- while message[mi] != comments[ci]:
+ if comments[ci].lower().startswith('subject: '):
+ subj = comments[ci][9:]
+ while message[mi] != comments[ci] and message[mi] != subj:
ci += 1
del comments[ci]
comments.append(msg)
@@ -1096,7 +1130,7 @@
mm.append(m[i])
del m[i]
repo.dirstate.update(m, 'n')
- repo.dirstate.update(mm, 'n', st_mtime=0)
+ repo.dirstate.update(mm, 'n', st_mtime=-1, st_size=-1)
repo.dirstate.forget(forget)
if not msg:
@@ -1112,6 +1146,7 @@
force=1, wlock=wlock)
self.applied[-1] = statusentry(revlog.hex(n), patchfn)
self.applied_dirty = 1
+ self.removeundo(repo)
else:
self.printdiff(repo, patchparent, fp=patchf)
patchf.close()
@@ -1132,9 +1167,13 @@
self.push(repo, force=True, wlock=wlock)
def init(self, repo, create=False):
- if os.path.isdir(self.path):
+ if not create and os.path.isdir(self.path):
raise util.Abort(_("patch queue directory already exists"))
- os.mkdir(self.path)
+ try:
+ os.mkdir(self.path)
+ except OSError, inst:
+ if inst.errno != errno.EEXIST or not create:
+ raise
if create:
return self.qrepo(create=True)
@@ -1287,6 +1326,7 @@
return 1
self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
self.applied_dirty = 1
+ self.removeundo(repo)
def full_series_end(self):
if len(self.applied) > 0:
@@ -1391,7 +1431,7 @@
lastparent = p1
if not patchname:
- patchname = '%d.diff' % r
+ patchname = normname('%d.diff' % r)
checkseries(patchname)
checkfile(patchname)
self.full_series.insert(0, patchname)
@@ -1413,7 +1453,7 @@
if filename == '-':
raise util.Abort(_('-e is incompatible with import from -'))
if not patchname:
- patchname = filename
+ patchname = normname(filename)
if not os.path.isfile(self.join(patchname)):
raise util.Abort(_("patch %s does not exist") % patchname)
else:
@@ -1427,7 +1467,7 @@
except IOError:
raise util.Abort(_("unable to read %s") % patchname)
if not patchname:
- patchname = os.path.basename(filename)
+ patchname = normname(os.path.basename(filename))
checkfile(patchname)
patchf = self.opener(patchname, "w")
patchf.write(text)
@@ -1515,13 +1555,16 @@
r = q.init(repo, create=opts['create_repo'])
q.save_dirty()
if r:
- fp = r.wopener('.hgignore', 'w')
- print >> fp, 'syntax: glob'
- print >> fp, 'status'
- print >> fp, 'guards'
- fp.close()
- r.wopener('series', 'w').close()
+ if not os.path.exists(r.wjoin('.hgignore')):
+ fp = r.wopener('.hgignore', 'w')
+ fp.write('syntax: glob\n')
+ fp.write('status\n')
+ fp.write('guards\n')
+ fp.close()
+ if not os.path.exists(r.wjoin('series')):
+ r.wopener('series', 'w').close()
r.add(['.hgignore', 'series'])
+ commands.add(ui, r)
return 0
def clone(ui, source, dest=None, **opts):
@@ -1640,6 +1683,9 @@
If any file patterns are provided, the refreshed patch will contain only
the modifications that match those patterns; the remaining modifications
will remain in the working directory.
+
+ hg add/remove/copy/rename work as usual, though you might want to use
+ git-style patches (--git or [diff] git=1) to track copies and renames.
"""
q = repo.mq
message = commands.logmessage(opts)
@@ -1717,6 +1763,17 @@
q.delete(repo, patches, opts)
q.save_dirty()
+def goto(ui, repo, patch, **opts):
+ '''push or pop patches until named patch is at top of stack'''
+ q = repo.mq
+ patch = q.lookup(patch)
+ if q.isapplied(patch):
+ ret = q.pop(repo, patch, force=opts['force'])
+ else:
+ ret = q.push(repo, patch, force=opts['force'])
+ q.save_dirty()
+ return ret
+
def guard(ui, repo, *args, **opts):
'''set or print guards for a patch
@@ -1811,7 +1868,8 @@
if opts['all']:
if not q.series:
- raise util.Abort(_('no patches in series'))
+ ui.warn(_('no patches in series\n'))
+ return 0
patch = q.series[-1]
if opts['merge']:
if opts['name']:
@@ -1836,9 +1894,10 @@
localupdate = False
else:
q = repo.mq
- q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
+ ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
+ all=opts['all'])
q.save_dirty()
- return 0
+ return ret
def rename(ui, repo, patch, name=None, **opts):
"""rename a patch
@@ -1861,7 +1920,7 @@
patch = q.lookup('qtip')
absdest = q.join(name)
if os.path.isdir(absdest):
- name = os.path.join(name, os.path.basename(patch))
+ name = normname(os.path.join(name, os.path.basename(patch)))
absdest = q.join(name)
if os.path.exists(absdest):
raise util.Abort(_('%s already exists') % absdest)
@@ -2066,7 +2125,7 @@
return super(mqrepo, self).commit(*args, **opts)
def push(self, remote, force=False, revs=None):
- if self.mq.applied and not force:
+ if self.mq.applied and not force and not revs:
raise util.Abort(_('source has mq patches applied'))
return super(mqrepo, self).push(remote, force, revs)
@@ -2080,14 +2139,15 @@
if not q.applied:
return tagscache
- mqtags = [(patch.rev, patch.name) for patch in q.applied]
+ mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
mqtags.append((mqtags[-1][0], 'qtip'))
mqtags.append((mqtags[0][0], 'qbase'))
+ mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
for patch in mqtags:
if patch[1] in tagscache:
self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
else:
- tagscache[patch[1]] = revlog.bin(patch[0])
+ tagscache[patch[1]] = patch[0]
return tagscache
@@ -2154,6 +2214,8 @@
('k', 'keep', None, _('keep folded patch files'))
] + commands.commitopts,
'hg qfold [-e] [-m ] [-l 1:
+ raise util.Abort(_("too many destinations"))
+ dest = revs and revs[0] or None
+ revs = []
+
+ if opts.get('rev'):
+ if revs:
+ raise util.Abort(_('use only one form to specify the revision'))
+ revs = opts.get('rev')
+
+ if opts.get('outgoing'):
+ revs = outgoing(dest, opts.get('rev'))
+ if opts.get('bundle'):
+ opts['revs'] = revs
+
+ # start
start_time = util.makedate()
def genmsgid(id):
return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
- patches = []
-
- class exportee:
- def __init__(self, container):
- self.lines = []
- self.container = container
- self.name = 'email'
-
- def write(self, data):
- self.lines.append(data)
-
- def close(self):
- self.container.append(''.join(self.lines).split('\n'))
- self.lines = []
-
- commands.export(ui, repo, *revs, **{'output': exportee(patches),
- 'switch_parent': False,
- 'text': None,
- 'git': opts.get('git')})
-
- jumbo = []
- msgs = []
-
- ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
-
- for p, i in zip(patches, xrange(len(patches))):
- jumbo.extend(p)
- msgs.append(makepatch(p, i + 1, len(patches)))
-
sender = (opts['from'] or ui.config('email', 'from') or
ui.config('patchbomb', 'from') or
prompt('From', ui.username()))
@@ -209,6 +259,7 @@
ui.config('patchbomb', opt) or
prompt(prpt, default = default)).split(',')
return [a.strip() for a in addrs if a.strip()]
+
to = getaddrs('to', 'To')
cc = getaddrs('cc', 'Cc', '')
@@ -216,38 +267,82 @@
ui.config('patchbomb', 'bcc') or '').split(',')
bcc = [a.strip() for a in bcc if a.strip()]
- if len(patches) > 1:
- ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
+ def getexportmsgs():
+ patches = []
- tlen = len(str(len(patches)))
+ class exportee:
+ def __init__(self, container):
+ self.lines = []
+ self.container = container
+ self.name = 'email'
+
+ def write(self, data):
+ self.lines.append(data)
+
+ def close(self):
+ self.container.append(''.join(self.lines).split('\n'))
+ self.lines = []
- subj = '[PATCH %0*d of %d] %s' % (
- tlen, 0,
- len(patches),
- opts['subject'] or
- prompt('Subject:', rest = ' [PATCH %0*d of %d] ' % (tlen, 0,
- len(patches))))
+ commands.export(ui, repo, *revs, **{'output': exportee(patches),
+ 'switch_parent': False,
+ 'text': None,
+ 'git': opts.get('git')})
+
+ jumbo = []
+ msgs = []
- ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
+ ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
- body = []
+ for p, i in zip(patches, xrange(len(patches))):
+ jumbo.extend(p)
+ msgs.append(makepatch(p, i + 1, len(patches)))
+
+ if len(patches) > 1:
+ tlen = len(str(len(patches)))
- while True:
- try: l = raw_input()
- except EOFError: break
- if l == '.': break
- body.append(l)
+ subj = '[PATCH %0*d of %d] %s' % (
+ tlen, 0,
+ len(patches),
+ opts['subject'] or
+ prompt('Subject:', rest = ' [PATCH %0*d of %d] ' % (tlen, 0,
+ len(patches))))
+
+ body = ''
+ if opts['diffstat']:
+ d = cdiffstat(_('Final summary:\n'), jumbo)
+ if d: body = '\n' + d
+
+ ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
+ body = ui.edit(body, sender)
+
+ msg = email.MIMEText.MIMEText(body)
+ msg['Subject'] = subj
+
+ msgs.insert(0, msg)
+ return msgs
- if opts['diffstat']:
- d = cdiffstat(_('Final summary:\n'), jumbo)
- if d: body.append('\n' + d)
+ def getbundlemsgs(bundle):
+ subj = opts['subject'] or \
+ prompt('Subject:', default='A bundle for your repository')
+ ui.write(_('\nWrite the introductory message for the bundle.\n\n'))
+ body = ui.edit('', sender)
- body = '\n'.join(body) + '\n'
+ msg = email.MIMEMultipart.MIMEMultipart()
+ if body:
+ msg.attach(email.MIMEText.MIMEText(body, 'plain'))
+ datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
+ datapart.set_payload(bundle)
+ datapart.add_header('Content-Disposition', 'attachment',
+ filename='bundle.hg')
+ email.Encoders.encode_base64(datapart)
+ msg.attach(datapart)
+ msg['Subject'] = subj
+ return [msg]
- msg = email.MIMEText.MIMEText(body)
- msg['Subject'] = subj
-
- msgs.insert(0, msg)
+ if opts.get('bundle'):
+ msgs = getbundlemsgs(getbundle(dest))
+ else:
+ msgs = getexportmsgs()
ui.write('\n')
@@ -310,7 +405,14 @@
('', 'plain', None, 'omit hg patch header'),
('n', 'test', None, 'print messages that would be sent'),
('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
+ ('o', 'outgoing', None, _('send changes not found in the target repository')),
+ ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
+ ('r', 'rev', [], _('a revision to send')),
('s', 'subject', '', 'subject of first message (intro or single patch)'),
- ('t', 'to', [], 'email addresses of recipients')],
- "hg email [OPTION]... [REV]...")
+ ('t', 'to', [], 'email addresses of recipients'),
+ ('', 'force', None, _('run even when remote repository is unrelated (with -b)')),
+ ('', 'base', [],
+ _('a base changeset to specify instead of a destination (with -b)'))]
+ + commands.remoteopts,
+ "hg email [OPTION]... [DEST]...")
}
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/purge.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/purge.py Sat May 19 22:51:43 2007 +0200
@@ -0,0 +1,159 @@
+# Copyright (C) 2006 - Marco Barisione
+#
+# This is a small extension for Mercurial (http://www.selenic.com/mercurial)
+# that removes files not known to mercurial
+#
+# This program was inspired by the "cvspurge" script contained in CVS utilities
+# (http://www.red-bean.com/cvsutils/).
+#
+# To enable the "purge" extension put these lines in your ~/.hgrc:
+# [extensions]
+# hgext.purge =
+#
+# For help on the usage of "hg purge" use:
+# hg help purge
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+from mercurial import hg, util
+from mercurial.i18n import _
+import os
+
+def dopurge(ui, repo, dirs=None, act=True, abort_on_err=False, eol='\n',
+ force=False):
+ def error(msg):
+ if abort_on_err:
+ raise util.Abort(msg)
+ else:
+ ui.warn(_('warning: %s\n') % msg)
+
+ def remove(remove_func, name):
+ if act:
+ try:
+ remove_func(os.path.join(repo.root, name))
+ except OSError, e:
+ error(_('%s cannot be removed') % name)
+ else:
+ ui.write('%s%s' % (name, eol))
+
+ directories = []
+ files = []
+ missing = []
+ roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs)
+ for src, f, st in repo.dirstate.statwalk(files=roots, match=match,
+ ignored=True, directories=True):
+ if src == 'd':
+ directories.append(f)
+ elif src == 'm':
+ missing.append(f)
+ elif src == 'f' and f not in repo.dirstate:
+ files.append(f)
+
+ _check_missing(ui, repo, missing, force)
+
+ directories.sort()
+
+ for f in files:
+ if f not in repo.dirstate:
+ ui.note(_('Removing file %s\n') % f)
+ remove(os.remove, f)
+
+ for f in directories[::-1]:
+ if not os.listdir(repo.wjoin(f)):
+ ui.note(_('Removing directory %s\n') % f)
+ remove(os.rmdir, f)
+
+def _check_missing(ui, repo, missing, force=False):
+ """Abort if there is the chance of having problems with name-mangling fs
+
+ In a name mangling filesystem (e.g. a case insensitive one)
+ dirstate.walk() can yield filenames different from the ones
+ stored in the dirstate. This already confuses the status and
+ add commands, but with purge this may cause data loss.
+
+ To prevent this, _check_missing will abort if there are missing
+ files. The force option will let the user skip the check if he
+ knows it is safe.
+
+ Even with the force option this function will check if any of the
+ missing files is still available in the working dir: if so there
+ may be some problem with the underlying filesystem, so it
+ aborts unconditionally."""
+
+ found = [f for f in missing if util.lexists(repo.wjoin(f))]
+
+ if found:
+ if not ui.quiet:
+ ui.warn(_("The following tracked files weren't listed by the "
+ "filesystem, but could still be found:\n"))
+ for f in found:
+ ui.warn("%s\n" % f)
+ if util.checkfolding(repo.path):
+ ui.warn(_("This is probably due to a case-insensitive "
+ "filesystem\n"))
+ raise util.Abort(_("purging on name mangling filesystems is not "
+ "yet fully supported"))
+
+ if missing and not force:
+ raise util.Abort(_("there are missing files in the working dir and "
+ "purge still has problems with them due to name "
+ "mangling filesystems. "
+ "Use --force if you know what you are doing"))
+
+
+def purge(ui, repo, *dirs, **opts):
+ '''removes files not tracked by mercurial
+
+ Delete files not known to mercurial, this is useful to test local and
+ uncommitted changes in the otherwise clean source tree.
+
+ This means that purge will delete:
+ - Unknown files: files marked with "?" by "hg status"
+ - Ignored files: files usually ignored by Mercurial because they match
+ a pattern in a ".hgignore" file
+ - Empty directories: in fact Mercurial ignores directories unless they
+ contain files under source control management
+ But it will leave untouched:
+ - Unmodified tracked files
+ - Modified tracked files
+ - New files added to the repository (with "hg add")
+
+ If directories are given on the command line, only files in these
+ directories are considered.
+
+ Be careful with purge, you could irreversibly delete some files you
+ forgot to add to the repository. If you only want to print the list of
+ files that this program would delete use the --print option.
+ '''
+ act = not opts['print']
+ abort_on_err = bool(opts['abort_on_err'])
+ eol = opts['print0'] and '\0' or '\n'
+ if eol == '\0':
+ # --print0 implies --print
+ act = False
+ force = bool(opts['force'])
+ dopurge(ui, repo, dirs, act, abort_on_err, eol, force)
+
+
+cmdtable = {
+ 'purge':
+ (purge,
+ [('a', 'abort-on-err', None, _('abort if an error occurs')),
+ ('f', 'force', None, _('purge even when missing files are detected')),
+ ('p', 'print', None, _('print the file names instead of deleting them')),
+ ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
+ ' (implies -p)'))],
+ _('hg purge [OPTION]... [DIR]...'))
+}
diff -r 2d32e3ae01a7 -r 30e7aa755efd hgext/transplant.py
--- a/hgext/transplant.py Sat May 19 22:47:01 2007 +0200
+++ b/hgext/transplant.py Sat May 19 22:51:43 2007 +0200
@@ -5,11 +5,10 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial.demandload import *
-from mercurial.i18n import gettext as _
-demandload(globals(), 'os tempfile')
-demandload(globals(), 'mercurial:bundlerepo,cmdutil,commands,hg,merge,patch')
-demandload(globals(), 'mercurial:revlog,util')
+from mercurial.i18n import _
+import os, tempfile
+from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
+from mercurial import patch, revlog, util
'''patch transplanting tool
@@ -120,7 +119,8 @@
if pulls:
if source != repo:
repo.pull(source, heads=pulls, lock=lock)
- merge.update(repo, pulls[-1], wlock=wlock)
+ merge.update(repo, pulls[-1], False, False, None,
+ wlock=wlock)
p1, p2 = repo.dirstate.parents()
pulls = []
@@ -151,10 +151,10 @@
log=opts.get('log'),
filter=opts.get('filter'),
lock=lock, wlock=wlock)
- if domerge:
+ if n and domerge:
self.ui.status(_('%s merged at %s\n') % (revstr,
revlog.short(n)))
- else:
+ elif n:
self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
revlog.short(n)))
finally:
@@ -162,7 +162,7 @@
os.unlink(patchfile)
if pulls:
repo.pull(source, heads=pulls, lock=lock)
- merge.update(repo, pulls[-1], wlock=wlock)
+ merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
finally:
self.saveseries(revmap, merges)
self.transplants.write()
@@ -217,7 +217,7 @@
files=files)
if not files:
self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
- return
+ return None
finally:
files = patch.updatedir(self.ui, repo, files, wlock=wlock)
except Exception, inst:
@@ -473,7 +473,7 @@
bundle = None
if not source.local():
cg = source.changegroup(incoming, 'incoming')
- bundle = commands.write_bundle(cg, compress=False)
+ bundle = changegroup.writebundle(cg, None, 'HG10UN')
source = bundlerepo.bundlerepository(ui, repo.root, bundle)
return (source, incoming, bundle)
@@ -575,6 +575,7 @@
tp.apply(repo, source, revmap, merges, opts)
finally:
if bundle:
+ source.close()
os.unlink(bundle)
cmdtable = {
@@ -588,5 +589,5 @@
('', 'log', None, _('append transplant info to log message')),
('c', 'continue', None, _('continue last transplant session after repair')),
('', 'filter', '', _('filter changesets through FILTER'))],
- _('hg transplant [-s REPOSITORY] [-b BRANCH] [-p REV] [-m REV] [-n] REV...'))
+ _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
}
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/appendfile.py
--- a/mercurial/appendfile.py Sat May 19 22:47:01 2007 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,162 +0,0 @@
-# appendfile.py - special classes to make repo updates atomic
-#
-# Copyright 2006 Vadim Gelfer
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-from demandload import *
-demandload(globals(), "cStringIO changelog errno manifest os tempfile util")
-
-# writes to metadata files are ordered. reads: changelog, manifest,
-# normal files. writes: normal files, manifest, changelog.
-
-# manifest contains pointers to offsets in normal files. changelog
-# contains pointers to offsets in manifest. if reader reads old
-# changelog while manifest or normal files are written, it has no
-# pointers into new parts of those files that are maybe not consistent
-# yet, so will not read them.
-
-# localrepo.addchangegroup thinks it writes changelog first, then
-# manifest, then normal files (this is order they are available, and
-# needed for computing linkrev fields), but uses appendfile to hide
-# updates from readers. data not written to manifest or changelog
-# until all normal files updated. write manifest first, then
-# changelog.
-
-# with this write ordering, readers cannot see inconsistent view of
-# repo during update.
-
-class appendfile(object):
- '''implement enough of file protocol to append to revlog file.
- appended data is written to temp file. reads and seeks span real
- file and temp file. readers cannot see appended data until
- writedata called.'''
-
- def __init__(self, fp, tmpname):
- if tmpname:
- self.tmpname = tmpname
- self.tmpfp = util.posixfile(self.tmpname, 'ab+')
- else:
- fd, self.tmpname = tempfile.mkstemp(prefix="hg-appendfile-")
- os.close(fd)
- self.tmpfp = util.posixfile(self.tmpname, 'ab+')
- self.realfp = fp
- self.offset = fp.tell()
- # real file is not written by anyone else. cache its size so
- # seek and read can be fast.
- self.realsize = util.fstat(fp).st_size
- self.name = fp.name
-
- def end(self):
- self.tmpfp.flush() # make sure the stat is correct
- return self.realsize + util.fstat(self.tmpfp).st_size
-
- def tell(self):
- return self.offset
-
- def flush(self):
- self.tmpfp.flush()
-
- def close(self):
- self.realfp.close()
- self.tmpfp.close()
-
- def seek(self, offset, whence=0):
- '''virtual file offset spans real file and temp file.'''
- if whence == 0:
- self.offset = offset
- elif whence == 1:
- self.offset += offset
- elif whence == 2:
- self.offset = self.end() + offset
-
- if self.offset < self.realsize:
- self.realfp.seek(self.offset)
- else:
- self.tmpfp.seek(self.offset - self.realsize)
-
- def read(self, count=-1):
- '''only trick here is reads that span real file and temp file.'''
- fp = cStringIO.StringIO()
- old_offset = self.offset
- if self.offset < self.realsize:
- s = self.realfp.read(count)
- fp.write(s)
- self.offset += len(s)
- if count > 0:
- count -= len(s)
- if count != 0:
- if old_offset != self.offset:
- self.tmpfp.seek(self.offset - self.realsize)
- s = self.tmpfp.read(count)
- fp.write(s)
- self.offset += len(s)
- return fp.getvalue()
-
- def write(self, s):
- '''append to temp file.'''
- self.tmpfp.seek(0, 2)
- self.tmpfp.write(s)
- # all writes are appends, so offset must go to end of file.
- self.offset = self.realsize + self.tmpfp.tell()
-
-class appendopener(object):
- '''special opener for files that only read or append.'''
-
- def __init__(self, opener):
- self.realopener = opener
- # key: file name, value: appendfile name
- self.tmpnames = {}
-
- def __call__(self, name, mode='r'):
- '''open file.'''
-
- assert mode in 'ra+'
- try:
- realfp = self.realopener(name, 'r')
- except IOError, err:
- if err.errno != errno.ENOENT: raise
- realfp = self.realopener(name, 'w+')
- tmpname = self.tmpnames.get(name)
- fp = appendfile(realfp, tmpname)
- if tmpname is None:
- self.tmpnames[name] = fp.tmpname
- return fp
-
- def writedata(self):
- '''copy data from temp files to real files.'''
- # write .d file before .i file.
- tmpnames = self.tmpnames.items()
- tmpnames.sort()
- for name, tmpname in tmpnames:
- ifp = open(tmpname, 'rb')
- ofp = self.realopener(name, 'a')
- for chunk in util.filechunkiter(ifp):
- ofp.write(chunk)
- ifp.close()
- os.unlink(tmpname)
- del self.tmpnames[name]
- ofp.close()
-
- def cleanup(self):
- '''delete temp files (this discards unwritten data!)'''
- for tmpname in self.tmpnames.values():
- os.unlink(tmpname)
-
-# files for changelog and manifest are in different appendopeners, so
-# not mixed up together.
-
-class appendchangelog(changelog.changelog, appendopener):
- def __init__(self, opener, version):
- appendopener.__init__(self, opener)
- changelog.changelog.__init__(self, self, version)
- def checkinlinesize(self, fp, tr):
- return
-
-class appendmanifest(manifest.manifest, appendopener):
- def __init__(self, opener, version):
- appendopener.__init__(self, opener)
- manifest.manifest.__init__(self, self, version)
- def checkinlinesize(self, fp, tr):
- return
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/archival.py
--- a/mercurial/archival.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/archival.py Sat May 19 22:51:43 2007 +0200
@@ -5,10 +5,9 @@
# This software may be used and distributed according to the terms of
# the GNU General Public License, incorporated herein by reference.
-from demandload import *
-from i18n import gettext as _
+from i18n import _
from node import *
-demandload(globals(), 'cStringIO os stat tarfile time util zipfile')
+import cStringIO, os, stat, tarfile, time, util, zipfile
def tidyprefix(dest, prefix, suffixes):
'''choose prefix to use for names in archive. make sure prefix is
@@ -156,15 +155,12 @@
def write(name, mode, data):
if matchfn and not matchfn(name): return
if decode:
- fp = cStringIO.StringIO()
- repo.wwrite(name, data, fp)
- data = fp.getvalue()
+ data = repo.wwritedata(name, data)
archiver.addfile(name, mode, data)
- change = repo.changelog.read(node)
- mn = change[0]
- archiver = archivers[kind](dest, prefix, mtime or change[2][0])
- m = repo.manifest.read(mn)
+ ctx = repo.changectx(node)
+ archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
+ m = ctx.manifest()
items = m.items()
items.sort()
write('.hg_archival.txt', 0644,
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/bdiff.c
--- a/mercurial/bdiff.c Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/bdiff.c Sat May 19 22:51:43 2007 +0200
@@ -33,7 +33,11 @@
}
#else
#include
+#ifdef __BEOS__
+#include
+#else
#include
+#endif
#include
#endif
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/bundlerepo.py
--- a/mercurial/bundlerepo.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/bundlerepo.py Sat May 19 22:51:43 2007 +0200
@@ -11,14 +11,13 @@
"""
from node import *
-from i18n import gettext as _
-from demandload import demandload
-demandload(globals(), "changegroup util os struct bz2 tempfile")
+from i18n import _
+import changegroup, util, os, struct, bz2, tempfile
import localrepo, changelog, manifest, filelog, revlog
class bundlerevlog(revlog.revlog):
- def __init__(self, opener, indexfile, datafile, bundlefile,
+ def __init__(self, opener, indexfile, bundlefile,
linkmapper=None):
# How it works:
# to retrieve a revision, we need to know the offset of
@@ -29,7 +28,7 @@
# len(index[r]). If the tuple is bigger than 7, it is a bundle
# (it is bigger since we store the node to which the delta is)
#
- revlog.revlog.__init__(self, opener, indexfile, datafile)
+ revlog.revlog.__init__(self, opener, indexfile)
self.bundlefile = bundlefile
self.basemap = {}
def chunkpositer():
@@ -50,7 +49,7 @@
continue
for p in (p1, p2):
if not p in self.nodemap:
- raise revlog.RevlogError(_("unknown parent %s") % short(p1))
+ raise revlog.LookupError(_("unknown parent %s") % short(p1))
if linkmapper is None:
link = n
else:
@@ -141,20 +140,19 @@
class bundlechangelog(bundlerevlog, changelog.changelog):
def __init__(self, opener, bundlefile):
changelog.changelog.__init__(self, opener)
- bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
- bundlefile)
+ bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
class bundlemanifest(bundlerevlog, manifest.manifest):
def __init__(self, opener, bundlefile, linkmapper):
manifest.manifest.__init__(self, opener)
- bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
- bundlefile, linkmapper)
+ bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
+ linkmapper)
class bundlefilelog(bundlerevlog, filelog.filelog):
def __init__(self, opener, path, bundlefile, linkmapper):
filelog.filelog.__init__(self, opener, path)
- bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
- bundlefile, linkmapper)
+ bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
+ linkmapper)
class bundlerepository(localrepo.localrepository):
def __init__(self, ui, path, bundlename):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/changegroup.py
--- a/mercurial/changegroup.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/changegroup.py Sat May 19 22:51:43 2007 +0200
@@ -6,9 +6,9 @@
This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
"""
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "struct os bz2 zlib util tempfile")
+
+from i18n import _
+import struct, os, bz2, zlib, util, tempfile
def getchunk(source):
"""get a chunk from a changegroup"""
@@ -67,8 +67,6 @@
cleanup = None
try:
if filename:
- if os.path.exists(filename):
- raise util.Abort(_("file '%s' already exists") % filename)
fh = open(filename, "wb")
else:
fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/changelog.py
--- a/mercurial/changelog.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/changelog.py Sat May 19 22:51:43 2007 +0200
@@ -6,9 +6,8 @@
# of the GNU General Public License, incorporated herein by reference.
from revlog import *
-from i18n import gettext as _
-from demandload import demandload
-demandload(globals(), "os time util")
+from i18n import _
+import os, time, util
def _string_escape(text):
"""
@@ -27,10 +26,100 @@
def _string_unescape(text):
return text.decode('string_escape')
+class appender:
+ '''the changelog index must be update last on disk, so we use this class
+ to delay writes to it'''
+ def __init__(self, fp, buf):
+ self.data = buf
+ self.fp = fp
+ self.offset = fp.tell()
+ self.size = util.fstat(fp).st_size
+
+ def end(self):
+ return self.size + len("".join(self.data))
+ def tell(self):
+ return self.offset
+ def flush(self):
+ pass
+ def close(self):
+ close(self.fp)
+
+ def seek(self, offset, whence=0):
+ '''virtual file offset spans real file and data'''
+ if whence == 0:
+ self.offset = offset
+ elif whence == 1:
+ self.offset += offset
+ elif whence == 2:
+ self.offset = self.end() + offset
+ if self.offset < self.size:
+ self.fp.seek(self.offset)
+
+ def read(self, count=-1):
+ '''only trick here is reads that span real file and data'''
+ ret = ""
+ old_offset = self.offset
+ if self.offset < self.size:
+ s = self.fp.read(count)
+ ret = s
+ self.offset += len(s)
+ if count > 0:
+ count -= len(s)
+ if count != 0:
+ doff = self.offset - self.size
+ self.data.insert(0, "".join(self.data))
+ del self.data[1:]
+ s = self.data[0][doff:doff+count]
+ self.offset += len(s)
+ ret += s
+ return ret
+
+ def write(self, s):
+ self.data.append(s)
+ self.offset += len(s)
+
class changelog(revlog):
- def __init__(self, opener, defversion=REVLOGV0):
- revlog.__init__(self, opener, "00changelog.i", "00changelog.d",
- defversion)
+ def __init__(self, opener):
+ revlog.__init__(self, opener, "00changelog.i")
+
+ def delayupdate(self):
+ "delay visibility of index updates to other readers"
+ self._realopener = self.opener
+ self.opener = self._delayopener
+ self._delaycount = self.count()
+ self._delaybuf = []
+ self._delayname = None
+
+ def finalize(self, tr):
+ "finalize index updates"
+ self.opener = self._realopener
+ # move redirected index data back into place
+ if self._delayname:
+ util.rename(self._delayname + ".a", self._delayname)
+ elif self._delaybuf:
+ fp = self.opener(self.indexfile, 'a')
+ fp.write("".join(self._delaybuf))
+ fp.close()
+ del self._delaybuf
+ # split when we're done
+ self.checkinlinesize(tr)
+
+ def _delayopener(self, name, mode='r'):
+ fp = self._realopener(name, mode)
+ # only divert the index
+ if not name == self.indexfile:
+ return fp
+ # if we're doing an initial clone, divert to another file
+ if self._delaycount == 0:
+ self._delayname = fp.name
+ return self._realopener(name + ".a", mode)
+ # otherwise, divert to memory
+ return appender(fp, self._delaybuf)
+
+ def checkinlinesize(self, tr, fp=None):
+ if self.opener == self._delayopener:
+ return
+ return revlog.checkinlinesize(self, tr, fp)
def decode_extra(self, text):
extra = {}
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/cmdutil.py
--- a/mercurial/cmdutil.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/cmdutil.py Sat May 19 22:51:43 2007 +0200
@@ -5,11 +5,9 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
from node import *
-from i18n import gettext as _
-demandload(globals(), 'os sys')
-demandload(globals(), 'mdiff util templater patch')
+from i18n import _
+import os, sys, mdiff, bdiff, util, templater, patch
revrangesep = ':'
@@ -127,42 +125,45 @@
pathname),
mode)
-def matchpats(repo, pats=[], opts={}, head='', globbed=False):
+def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
cwd = repo.getcwd()
- if not pats and cwd:
- opts['include'] = [os.path.join(cwd, i)
- for i in opts.get('include', [])]
- opts['exclude'] = [os.path.join(cwd, x)
- for x in opts.get('exclude', [])]
- cwd = ''
- return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
- opts.get('exclude'), head, globbed=globbed)
+ return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
+ opts.get('exclude'), globbed=globbed,
+ default=default)
-def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None,
- globbed=False):
- files, matchfn, anypats = matchpats(repo, pats, opts, head,
- globbed=globbed)
+def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
+ default=None):
+ files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
+ default=default)
exact = dict.fromkeys(files)
for src, fn in repo.walk(node=node, files=files, match=matchfn,
badmatch=badmatch):
yield src, fn, util.pathto(repo.root, repo.getcwd(), fn), fn in exact
def findrenames(repo, added=None, removed=None, threshold=0.5):
+ '''find renamed files -- yields (before, after, score) tuples'''
if added is None or removed is None:
added, removed = repo.status()[1:3]
- changes = repo.changelog.read(repo.dirstate.parents()[0])
- mf = repo.manifest.read(changes[0])
+ ctx = repo.changectx()
for a in added:
aa = repo.wread(a)
- bestscore, bestname = None, None
+ bestname, bestscore = None, threshold
for r in removed:
- rr = repo.file(r).read(mf[r])
- delta = mdiff.textdiff(aa, rr)
- if len(delta) < len(aa):
- myscore = 1.0 - (float(len(delta)) / len(aa))
- if bestscore is None or myscore > bestscore:
- bestscore, bestname = myscore, r
- if bestname and bestscore >= threshold:
+ rr = ctx.filectx(r).data()
+
+ # bdiff.blocks() returns blocks of matching lines
+ # count the number of bytes in each
+ equal = 0
+ alines = mdiff.splitnewlines(aa)
+ matches = bdiff.blocks(aa, rr)
+ for x1,x2,y1,y2 in matches:
+ for line in alines[x1:x2]:
+ equal += len(line)
+
+ myscore = equal*2.0 / (len(aa)+len(rr))
+ if myscore >= bestscore:
+ bestname, bestscore = r, myscore
+ if bestname:
yield bestname, a, bestscore
def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
@@ -179,7 +180,8 @@
mapping[abs] = rel, exact
if repo.ui.verbose or not exact:
repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
- if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
+ islink = os.path.islink(rel)
+ if repo.dirstate.state(abs) != 'r' and not islink and not os.path.exists(rel):
remove.append(abs)
mapping[abs] = rel, exact
if repo.ui.verbose or not exact:
@@ -198,15 +200,58 @@
if not dry_run:
repo.copy(old, new, wlock=wlock)
+def service(opts, parentfn=None, initfn=None, runfn=None):
+ '''Run a command as a service.'''
+
+ if opts['daemon'] and not opts['daemon_pipefds']:
+ rfd, wfd = os.pipe()
+ args = sys.argv[:]
+ args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
+ pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
+ args[0], args)
+ os.close(wfd)
+ os.read(rfd, 1)
+ if parentfn:
+ return parentfn(pid)
+ else:
+ os._exit(0)
+
+ if initfn:
+ initfn()
+
+ if opts['pid_file']:
+ fp = open(opts['pid_file'], 'w')
+ fp.write(str(os.getpid()) + '\n')
+ fp.close()
+
+ if opts['daemon_pipefds']:
+ rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
+ os.close(rfd)
+ try:
+ os.setsid()
+ except AttributeError:
+ pass
+ os.write(wfd, 'y')
+ os.close(wfd)
+ sys.stdout.flush()
+ sys.stderr.flush()
+ fd = os.open(util.nulldev, os.O_RDWR)
+ if fd != 0: os.dup2(fd, 0)
+ if fd != 1: os.dup2(fd, 1)
+ if fd != 2: os.dup2(fd, 2)
+ if fd not in (0, 1, 2): os.close(fd)
+
+ if runfn:
+ return runfn()
+
class changeset_printer(object):
'''show changeset information when templating not requested.'''
- def __init__(self, ui, repo, patch, brinfo, buffered):
+ def __init__(self, ui, repo, patch, buffered):
self.ui = ui
self.repo = repo
self.buffered = buffered
self.patch = patch
- self.brinfo = brinfo
self.header = {}
self.hunk = {}
self.lastheader = None
@@ -271,11 +316,6 @@
for parent in parents:
self.ui.write(_("parent: %d:%s\n") % parent)
- if self.brinfo:
- br = self.repo.branchlookup([changenode])
- if br:
- self.ui.write(_("branch: %s\n") % " ".join(br[changenode]))
-
if self.ui.debugflag:
self.ui.write(_("manifest: %d:%s\n") %
(self.repo.manifest.rev(changes[0]), hex(changes[0])))
@@ -323,8 +363,8 @@
class changeset_templater(changeset_printer):
'''format changeset information.'''
- def __init__(self, ui, repo, patch, brinfo, mapfile, buffered):
- changeset_printer.__init__(self, ui, repo, patch, brinfo, buffered)
+ def __init__(self, ui, repo, patch, mapfile, buffered):
+ changeset_printer.__init__(self, ui, repo, patch, buffered)
filters = templater.common_filters.copy()
filters['formatnode'] = (ui.debugflag and (lambda x: x)
or (lambda x: x[:12]))
@@ -414,12 +454,6 @@
if branch != 'default':
branch = util.tolocal(branch)
return showlist('branch', [branch], plural='branches', **args)
- # add old style branches if requested
- if self.brinfo:
- br = self.repo.branchlookup([changenode])
- if changenode in br:
- return showlist('branch', br[changenode],
- plural='branches', **args)
def showparents(**args):
parents = [[('rev', log.rev(p)), ('node', hex(p))]
@@ -533,11 +567,6 @@
if opts.get('patch'):
patch = matchfn or util.always
- br = None
- if opts.get('branches'):
- ui.warn(_("the --branches option is deprecated, "
- "please use 'hg branches' instead\n"))
- br = True
tmpl = opts.get('template')
mapfile = None
if tmpl:
@@ -559,12 +588,12 @@
or templater.templatepath(mapfile))
if mapname: mapfile = mapname
try:
- t = changeset_templater(ui, repo, patch, br, mapfile, buffered)
+ t = changeset_templater(ui, repo, patch, mapfile, buffered)
except SyntaxError, inst:
raise util.Abort(inst.args[0])
if tmpl: t.use_template(tmpl)
return t
- return changeset_printer(ui, repo, patch, br, buffered)
+ return changeset_printer(ui, repo, patch, buffered)
def finddate(ui, repo, date):
"""Find the tipmost changeset that matches the given date spec"""
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/commands.py
--- a/mercurial/commands.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/commands.py Sat May 19 22:51:43 2007 +0200
@@ -5,14 +5,14 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
+import demandimport; demandimport.enable()
from node import *
-from i18n import gettext as _
-demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
-demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
-demandload(globals(), "difflib patch time help mdiff tempfile")
-demandload(globals(), "traceback errno version atexit socket")
-demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
+from i18n import _
+import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
+import fancyopts, ui, hg, util, lock, revlog, bundlerepo
+import difflib, patch, time, help, mdiff, tempfile
+import traceback, errno, version, atexit, socket
+import archival, changegroup, cmdutil, hgweb.server, sshserver
class UnknownCommand(Exception):
"""Exception raised if command is not in the command table."""
@@ -240,22 +240,28 @@
if op1 != node:
if opts['merge']:
ui.status(_('merging with changeset %s\n') % nice(op1))
- n = _lookup(repo, hex(op1))
- hg.merge(repo, n)
+ hg.merge(repo, hex(op1))
else:
ui.status(_('the backout changeset is a new head - '
'do not forget to merge\n'))
ui.status(_('(use "backout --merge" '
'if you want to auto-merge)\n'))
-def branch(ui, repo, label=None):
+def branch(ui, repo, label=None, **opts):
"""set or show the current branch name
With , set the current branch name. Otherwise, show the
current branch name.
+
+ Unless --force is specified, branch will not let you set a
+ branch name that shadows an existing branch.
"""
if label:
+ if not opts.get('force') and label in repo.branchtags():
+ if label not in [p.branch() for p in repo.workingctx().parents()]:
+ raise util.Abort(_('a branch of the same name already exists'
+ ' (use --force to override)'))
repo.dirstate.setbranch(util.fromlocal(label))
else:
ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
@@ -424,6 +430,8 @@
files = modified + added + removed
slist = None
for f in fns:
+ if f == '.':
+ continue
if f not in files:
rf = repo.wjoin(f)
if f in unknown:
@@ -495,7 +503,7 @@
util.localpath(prevsrc)))
return
if (not opts['after'] and os.path.exists(reltarget) or
- opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
+ opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
if not opts['force']:
ui.warn(_('%s: not overwriting - file exists\n') %
reltarget)
@@ -519,7 +527,7 @@
restore = False
finally:
if restore:
- repo.remove([abstarget], wlock)
+ repo.remove([abstarget], wlock=wlock)
except IOError, inst:
if inst.errno == errno.ENOENT:
ui.warn(_('%s: deleted in working copy\n') % relsrc)
@@ -658,7 +666,7 @@
def debugancestor(ui, index, rev1, rev2):
"""find the ancestor revision of two revisions in a given index"""
- r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
ui.write("%d:%s\n" % (r.rev(a), hex(a)))
@@ -683,15 +691,12 @@
clist.sort()
ui.write("%s\n" % "\n".join(clist))
-def debugrebuildstate(ui, repo, rev=None):
+def debugrebuildstate(ui, repo, rev=""):
"""rebuild the dirstate as it would look like for the given revision"""
- if not rev:
+ if rev == "":
rev = repo.changelog.tip()
- else:
- rev = repo.lookup(rev)
- change = repo.changelog.read(rev)
- n = change[0]
- files = repo.manifest.read(n)
+ ctx = repo.changectx(rev)
+ files = ctx.manifest()
wlock = repo.wlock()
repo.dirstate.rebuild(rev, files)
@@ -702,10 +707,8 @@
dc = repo.dirstate.map
keys = dc.keys()
keys.sort()
- m1n = repo.changelog.read(parent1)[0]
- m2n = repo.changelog.read(parent2)[0]
- m1 = repo.manifest.read(m1n)
- m2 = repo.manifest.read(m2n)
+ m1 = repo.changectx(parent1).manifest()
+ m2 = repo.changectx(parent2).manifest()
errors = 0
for f in dc:
state = repo.dirstate.state(f)
@@ -791,9 +794,8 @@
ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
def debugdata(ui, file_, rev):
- """dump the contents of an data file revision"""
- r = revlog.revlog(util.opener(os.getcwd(), audit=False),
- file_[:-2] + ".i", file_, 0)
+ """dump the contents of a data file revision"""
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
try:
ui.write(r.revision(r.lookup(rev)))
except KeyError:
@@ -813,7 +815,7 @@
def debugindex(ui, file_):
"""dump the contents of an index file"""
- r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
ui.write(" rev offset length base linkrev" +
" nodeid p1 p2\n")
for i in xrange(r.count()):
@@ -825,7 +827,7 @@
def debugindexdot(ui, file_):
"""dump an index DAG as a .dot file"""
- r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
+ r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
ui.write("digraph G {\n")
for i in xrange(r.count()):
node = r.node(i)
@@ -879,8 +881,10 @@
# patch
ui.status(_("Checking patch...\n"))
path = os.environ.get('PATH', '')
- patcher = util.find_in_path('gpatch', path,
- util.find_in_path('patch', path, None))
+ patcher = ui.config('ui', 'patch')
+ if not patcher:
+ patcher = util.find_in_path('gpatch', path,
+ util.find_in_path('patch', path, None))
if not patcher:
ui.write(_(" Can't find patch or gpatch in PATH\n"))
ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
@@ -889,27 +893,28 @@
# actually attempt a patch here
a = "1\n2\n3\n4\n"
b = "1\n2\n3\ninsert\n4\n"
- d = mdiff.unidiff(a, None, b, None, "a")
fa = writetemp(a)
+ d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
fd = writetemp(d)
- fp = os.popen('%s %s %s' % (patcher, fa, fd))
- files = []
- output = ""
- for line in fp:
- output += line
- if line.startswith('patching file '):
- pf = util.parse_patch_output(line.rstrip())
- files.append(pf)
- if files != [fa]:
- ui.write(_(" unexpected patch output!"))
- ui.write(_(" (you may have an incompatible version of patch)\n"))
- ui.write(output)
+
+ files = {}
+ try:
+ patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
+ except util.Abort, e:
+ ui.write(_(" patch call failed:\n"))
+ ui.write(" " + str(e) + "\n")
problems += 1
- a = file(fa).read()
- if a != b:
- ui.write(_(" patch test failed!"))
- ui.write(_(" (you may have an incompatible version of patch)\n"))
- problems += 1
+ else:
+ if list(files) != [os.path.basename(fa)]:
+ ui.write(_(" unexpected patch output!"))
+ ui.write(_(" (you may have an incompatible version of patch)\n"))
+ problems += 1
+ a = file(fa).read()
+ if a != b:
+ ui.write(_(" patch test failed!"))
+ ui.write(_(" (you may have an incompatible version of patch)\n"))
+ problems += 1
+
os.unlink(fa)
os.unlink(fd)
@@ -1158,13 +1163,14 @@
prev = {}
def display(fn, rev, states, prevstates):
- counts = {'-': 0, '+': 0}
+ found = False
filerevmatches = {}
- if incrementing or not opts['all']:
- a, b, r = prevstates, states, rev
+ r = prev.get(fn, -1)
+ if opts['all']:
+ iter = difflinestates(states, prevstates)
else:
- a, b, r = states, prevstates, prev.get(fn, -1)
- for change, l in difflinestates(a, b):
+ iter = [('', l) for l in prevstates]
+ for change, l in iter:
cols = [fn, str(r)]
if opts['line_number']:
cols.append(str(l.linenum))
@@ -1180,19 +1186,17 @@
else:
cols.append(l.line)
ui.write(sep.join(cols), eol)
- counts[change] += 1
- return counts['+'], counts['-']
+ found = True
+ return found
fstate = {}
skip = {}
get = util.cachefunc(lambda r: repo.changectx(r).changeset())
changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
- count = 0
- incrementing = False
+ found = False
follow = opts.get('follow')
for st, rev, fns in changeiter:
if st == 'window':
- incrementing = rev
matches.clear()
elif st == 'add':
mf = repo.changectx(rev).manifest()
@@ -1218,10 +1222,10 @@
if copy:
skip[copy] = True
continue
- if incrementing or not opts['all'] or fstate[fn]:
- pos, neg = display(fn, rev, m, fstate[fn])
- count += pos + neg
- if pos and not opts['all']:
+ if fn in prev or fstate[fn]:
+ r = display(fn, rev, m, fstate[fn])
+ found = found or r
+ if r and not opts['all']:
skip[fn] = True
if copy:
skip[copy] = True
@@ -1230,15 +1234,14 @@
fstate[copy] = m
prev[fn] = rev
- if not incrementing:
- fstate = fstate.items()
- fstate.sort()
- for fn, state in fstate:
- if fn in skip:
- continue
- if fn not in copies.get(prev[fn], {}):
- display(fn, rev, {}, state)
- return (count == 0 and 1) or 0
+ fstate = fstate.items()
+ fstate.sort()
+ for fn, state in fstate:
+ if fn in skip:
+ continue
+ if fn not in copies.get(prev[fn], {}):
+ found = display(fn, rev, {}, state) or found
+ return (not found and 1) or 0
def heads(ui, repo, **opts):
"""show current repository heads
@@ -1485,15 +1488,21 @@
text/plain body parts before first diff are added to commit
message.
- If imported patch was generated by hg export, user and description
+ If the imported patch was generated by hg export, user and description
from patch override values from message headers and body. Values
given on command line with -m and -u override these.
+ If --exact is specified, import will set the working directory
+ to the parent of each patch before applying it, and will abort
+ if the resulting changeset has a different ID than the one
+ recorded in the patch. This may happen due to character set
+ problems or other deficiencies in the text patch format.
+
To read a patch from standard input, use patch name "-".
"""
patches = (patch1,) + patches
- if not opts['force']:
+ if opts.get('exact') or not opts['force']:
bail_if_changed(repo)
d = opts["base"]
@@ -1507,10 +1516,10 @@
if pf == '-':
ui.status(_("applying patch from stdin\n"))
- tmpname, message, user, date = patch.extract(ui, sys.stdin)
+ tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
else:
ui.status(_("applying %s\n") % p)
- tmpname, message, user, date = patch.extract(ui, file(pf))
+ tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf))
if tmpname is None:
raise util.Abort(_('no diffs found'))
@@ -1528,13 +1537,37 @@
message = None
ui.debug(_('message:\n%s\n') % message)
+ wp = repo.workingctx().parents()
+ if opts.get('exact'):
+ if not nodeid or not p1:
+ raise util.Abort(_('not a mercurial patch'))
+ p1 = repo.lookup(p1)
+ p2 = repo.lookup(p2 or hex(nullid))
+
+ if p1 != wp[0].node():
+ hg.clean(repo, p1, wlock=wlock)
+ repo.dirstate.setparents(p1, p2)
+ repo.dirstate.setbranch(branch or 'default')
+ elif p2:
+ try:
+ p1 = repo.lookup(p1)
+ p2 = repo.lookup(p2)
+ if p1 == wp[0].node():
+ repo.dirstate.setparents(p1, p2)
+ except hg.RepoError:
+ pass
+
files = {}
try:
fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
files=files)
finally:
files = patch.updatedir(ui, repo, files, wlock=wlock)
- repo.commit(files, message, user, date, wlock=wlock, lock=lock)
+ n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
+ if opts.get('exact'):
+ if hex(n) != nodeid:
+ repo.rollback(wlock=wlock, lock=lock)
+ raise util.Abort(_('patch is damaged or loses information'))
finally:
os.unlink(tmpname)
@@ -1554,10 +1587,15 @@
setremoteconfig(ui, opts)
other = hg.repository(ui, source)
+ ui.status(_('comparing with %s\n') % source)
incoming = repo.findincoming(other, force=opts["force"])
if not incoming:
+ try:
+ os.unlink(opts["bundle"])
+ except:
+ pass
ui.status(_("no changes found\n"))
- return
+ return 1
cleanup = None
try:
@@ -1613,9 +1651,8 @@
Print all files under Mercurial control whose names match the
given patterns.
- This command searches the current directory and its
- subdirectories. To search an entire repository, move to the root
- of the repository.
+ This command searches the entire repository by default. To search
+ just the current directory and its subdirectories, use "--include .".
If no patterns are given to match, this command prints all file
names.
@@ -1632,14 +1669,21 @@
else:
node = None
+ ret = 1
for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
- head='(?:.*/|)'):
+ badmatch=util.always,
+ default='relglob'):
+ if src == 'b':
+ continue
if not node and repo.dirstate.state(abs) == '?':
continue
if opts['fullpath']:
ui.write(os.path.join(repo.root, abs), end)
else:
ui.write(((pats and rel) or abs), end)
+ ret = 0
+
+ return ret
def log(ui, repo, *pats, **opts):
"""show revision history of entire repository or files
@@ -1722,7 +1766,6 @@
if opts["date"]:
df = util.matchdate(opts["date"])
-
displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
for st, rev, fns in changeiter:
if st == 'add':
@@ -1789,7 +1832,7 @@
ui.write("%3s " % (m.execf(f) and "755" or "644"))
ui.write("%s\n" % f)
-def merge(ui, repo, node=None, force=None, branch=None):
+def merge(ui, repo, node=None, force=None):
"""merge working directory with another revision
Merge the contents of the current working directory and the
@@ -1803,9 +1846,7 @@
revision to merge with must be provided.
"""
- if node or branch:
- node = _lookup(repo, node, branch)
- else:
+ if not node:
heads = repo.heads()
if len(heads) > 2:
raise util.Abort(_('repo has %d heads - '
@@ -1837,10 +1878,11 @@
revs = [repo.lookup(rev) for rev in opts['rev']]
other = hg.repository(ui, dest)
+ ui.status(_('comparing with %s\n') % dest)
o = repo.findoutgoing(other, force=opts['force'])
if not o:
ui.status(_("no changes found\n"))
- return
+ return 1
o = repo.changelog.nodesbetween(o, revs)[0]
if opts['newest_first']:
o.reverse()
@@ -2043,7 +2085,9 @@
This only removes files from the current branch, not from the
entire project history. If the files still exist in the working
directory, they will be deleted from it. If invoked with --after,
- files that have been manually deleted are marked as removed.
+ files are marked as removed, but not actually unlinked unless --force
+ is also given. Without exact file names, --after will only mark
+ files as removed if they are no longer in the working directory.
This command schedules the files to be removed at the next commit.
To undo a remove before that, see hg revert.
@@ -2061,9 +2105,7 @@
remove, forget = [], []
for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
reason = None
- if abs not in deleted and opts['after']:
- reason = _('is still present')
- elif abs in modified and not opts['force']:
+ if abs in modified and not opts['force']:
reason = _('is modified (use -f to force removal)')
elif abs in added:
if opts['force']:
@@ -2072,6 +2114,8 @@
reason = _('has been marked for add (use -f to force removal)')
elif abs in unknown:
reason = _('is not managed')
+ elif opts['after'] and not exact and abs not in deleted:
+ continue
elif abs in removed:
continue
if reason:
@@ -2082,7 +2126,7 @@
ui.status(_('removing %s\n') % rel)
remove.append(abs)
repo.forget(forget)
- repo.remove(remove, unlink=not opts['after'])
+ repo.remove(remove, unlink=opts['force'] or not opts['after'])
def rename(ui, repo, *pats, **opts):
"""rename files; equivalent of copy + remove
@@ -2106,7 +2150,7 @@
ui.status(_('removing %s\n') % rel)
names.append(abs)
if not opts.get('dry_run'):
- repo.remove(names, True, wlock)
+ repo.remove(names, True, wlock=wlock)
return errs
def revert(ui, repo, *pats, **opts):
@@ -2153,8 +2197,9 @@
if not opts['rev'] and p2 != nullid:
raise util.Abort(_('uncommitted merge - please provide a '
'specific revision'))
- node = repo.changectx(opts['rev']).node()
- mf = repo.manifest.read(repo.changelog.read(node)[0])
+ ctx = repo.changectx(opts['rev'])
+ node = ctx.node()
+ mf = ctx.manifest()
if node == parent:
pmf = mf
else:
@@ -2228,7 +2273,8 @@
def handle(xlist, dobackup):
xlist[0].append(abs)
update[abs] = 1
- if dobackup and not opts['no_backup'] and os.path.exists(rel):
+ if (dobackup and not opts['no_backup'] and
+ (os.path.islink(rel) or os.path.exists(rel))):
bakname = "%s.orig" % rel
ui.note(_('saving current version of %s as %s\n') %
(rel, bakname))
@@ -2254,7 +2300,7 @@
if pmf is None:
# only need parent manifest in this unlikely case,
# so do not read by default
- pmf = repo.manifest.read(repo.changelog.read(parent)[0])
+ pmf = repo.changectx(parent).manifest()
if abs in pmf:
if mfentry:
# if version of file is same in parent and target
@@ -2335,44 +2381,27 @@
raise hg.RepoError(_("There is no Mercurial repository here"
" (.hg not found)"))
- if opts['daemon'] and not opts['daemon_pipefds']:
- rfd, wfd = os.pipe()
- args = sys.argv[:]
- args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
- pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
- args[0], args)
- os.close(wfd)
- os.read(rfd, 1)
- os._exit(0)
-
- httpd = hgweb.server.create_server(parentui, repo)
-
- if ui.verbose:
- if httpd.port != 80:
- ui.status(_('listening at http://%s:%d/\n') %
- (httpd.addr, httpd.port))
- else:
- ui.status(_('listening at http://%s/\n') % httpd.addr)
-
- if opts['pid_file']:
- fp = open(opts['pid_file'], 'w')
- fp.write(str(os.getpid()) + '\n')
- fp.close()
-
- if opts['daemon_pipefds']:
- rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
- os.close(rfd)
- os.write(wfd, 'y')
- os.close(wfd)
- sys.stdout.flush()
- sys.stderr.flush()
- fd = os.open(util.nulldev, os.O_RDWR)
- if fd != 0: os.dup2(fd, 0)
- if fd != 1: os.dup2(fd, 1)
- if fd != 2: os.dup2(fd, 2)
- if fd not in (0, 1, 2): os.close(fd)
-
- httpd.serve_forever()
+ class service:
+ def init(self):
+ try:
+ self.httpd = hgweb.server.create_server(parentui, repo)
+ except socket.error, inst:
+ raise util.Abort(_('cannot start server: ') + inst.args[1])
+
+ if not ui.verbose: return
+
+ if httpd.port != 80:
+ ui.status(_('listening at http://%s:%d/\n') %
+ (httpd.addr, httpd.port))
+ else:
+ ui.status(_('listening at http://%s/\n') % httpd.addr)
+
+ def run(self):
+ self.httpd.serve_forever()
+
+ service = service()
+
+ cmdutil.service(opts, initfn=service.init, runfn=service.run)
def status(ui, repo, *pats, **opts):
"""show changed files in the working directory
@@ -2498,9 +2527,10 @@
hexfunc = ui.debugflag and hex or short
for t, n in l:
try:
+ hn = hexfunc(n)
r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
- except KeyError:
- r = " ?:?"
+ except revlog.LookupError:
+ r = " ?:%s" % hn
if ui.quiet:
ui.write("%s\n" % t)
else:
@@ -2528,10 +2558,11 @@
modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
return postincoming(ui, repo, modheads, opts['update'])
-def update(ui, repo, node=None, clean=False, branch=None, date=None):
+def update(ui, repo, node=None, clean=False, date=None):
"""update working directory
- Update the working directory to the specified revision.
+ Update the working directory to the specified revision, or the
+ tip of the current branch if none is specified.
If there are no outstanding changes in the working directory and
there is a linear relationship between the current version and the
@@ -2548,43 +2579,11 @@
raise util.Abort(_("you can't specify a revision and a date"))
node = cmdutil.finddate(ui, repo, date)
- node = _lookup(repo, node, branch)
if clean:
return hg.clean(repo, node)
else:
return hg.update(repo, node)
-def _lookup(repo, node, branch=None):
- if branch:
- repo.ui.warn(_("the --branch option is deprecated, "
- "please use 'hg branch' instead\n"))
- br = repo.branchlookup(branch=branch)
- found = []
- for x in br:
- if branch in br[x]:
- found.append(x)
- if len(found) > 1:
- repo.ui.warn(_("Found multiple heads for %s\n") % branch)
- for x in found:
- cmdutil.show_changeset(ui, repo, {}).show(changenode=x)
- raise util.Abort("")
- if len(found) == 1:
- node = found[0]
- repo.ui.warn(_("Using head %s for branch %s\n")
- % (short(node), branch))
- else:
- raise util.Abort(_("branch %s not found") % branch)
- else:
- if node:
- node = repo.lookup(node)
- else:
- wc = repo.workingctx()
- try:
- node = repo.branchtags()[wc.branch()]
- except KeyError:
- raise util.Abort(_("branch %s not found") % wc.branch())
- return node
-
def verify(ui, repo):
"""verify the integrity of the repository
@@ -2685,7 +2684,10 @@
('u', 'user', '', _('record user as committer')),
] + walkopts + commitopts,
_('hg backout [OPTION]... REV')),
- "branch": (branch, [], _('hg branch [NAME]')),
+ "branch": (branch,
+ [('f', 'force', None,
+ _('set branch name even if it shadows an existing branch'))],
+ _('hg branch [NAME]')),
"branches": (branches, [], _('hg branches')),
"bundle":
(bundle,
@@ -2790,8 +2792,7 @@
_('hg grep [OPTION]... PATTERN [FILE]...')),
"heads":
(heads,
- [('b', 'branches', None, _('show branches (DEPRECATED)')),
- ('', 'style', '', _('display using template map file')),
+ [('', 'style', '', _('display using template map file')),
('r', 'rev', '', _('show only heads which are descendants of rev')),
('', 'template', '', _('display with template'))],
_('hg heads [-r REV]')),
@@ -2802,9 +2803,11 @@
[('p', 'strip', 1,
_('directory strip option for patch. This has the same\n'
'meaning as the corresponding patch option')),
- ('b', 'base', '', _('base path (DEPRECATED)')),
+ ('b', 'base', '', _('base path')),
('f', 'force', None,
- _('skip check for outstanding uncommitted changes'))] + commitopts,
+ _('skip check for outstanding uncommitted changes')),
+ ('', 'exact', None,
+ _('apply patch to the nodes from which it was generated'))] + commitopts,
_('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
"incoming|in": (incoming,
[('M', 'no-merges', None, _('do not show merges')),
@@ -2834,8 +2837,7 @@
_('hg locate [OPTION]... [PATTERN]...')),
"^log|history":
(log,
- [('b', 'branches', None, _('show branches (DEPRECATED)')),
- ('f', 'follow', None,
+ [('f', 'follow', None,
_('follow changeset history, or file history across copies and renames')),
('', 'follow-first', None,
_('only follow the first parent of merge changesets')),
@@ -2856,8 +2858,7 @@
"manifest": (manifest, [], _('hg manifest [REV]')),
"^merge":
(merge,
- [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
- ('f', 'force', None, _('force a merge with outstanding changes'))],
+ [('f', 'force', None, _('force a merge with outstanding changes'))],
_('hg merge [-f] [REV]')),
"outgoing|out": (outgoing,
[('M', 'no-merges', None, _('do not show merges')),
@@ -2872,8 +2873,7 @@
_('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
"^parents":
(parents,
- [('b', 'branches', None, _('show branches (DEPRECATED)')),
- ('r', 'rev', '', _('show parents from the specified rev')),
+ [('r', 'rev', '', _('show parents from the specified rev')),
('', 'style', '', _('display using template map file')),
('', 'template', '', _('display with template'))],
_('hg parents [-r REV] [FILE]')),
@@ -2978,8 +2978,7 @@
"tags": (tags, [], _('hg tags')),
"tip":
(tip,
- [('b', 'branches', None, _('show branches (DEPRECATED)')),
- ('', 'style', '', _('display using template map file')),
+ [('', 'style', '', _('display using template map file')),
('p', 'patch', None, _('show patch')),
('', 'template', '', _('display with template'))],
_('hg tip [-p]')),
@@ -2990,9 +2989,7 @@
_('hg unbundle [-u] FILE')),
"^update|up|checkout|co":
(update,
- [('b', 'branch', '',
- _('checkout the head of a specific branch (DEPRECATED)')),
- ('C', 'clean', None, _('overwrite locally modified files')),
+ [('C', 'clean', None, _('overwrite locally modified files')),
('d', 'date', '', _('tipmost revision matching date'))],
_('hg update [-C] [-d DATE] [REV]')),
"verify": (verify, [], _('hg verify')),
@@ -3149,9 +3146,10 @@
if reposetup:
hg.repo_setup_hooks.append(reposetup)
cmdtable = getattr(mod, 'cmdtable', {})
- for t in cmdtable:
- if t in table:
- ui.warn(_("module %s overrides %s\n") % (name, t))
+ overrides = [cmd for cmd in cmdtable if cmd in table]
+ if overrides:
+ ui.warn(_("extension '%s' overrides commands: %s\n")
+ % (name, " ".join(overrides)))
table.update(cmdtable)
def parseconfig(config):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/context.py
--- a/mercurial/context.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/context.py Sat May 19 22:51:43 2007 +0200
@@ -6,9 +6,8 @@
# of the GNU General Public License, incorporated herein by reference.
from node import *
-from i18n import gettext as _
-from demandload import demandload
-demandload(globals(), "ancestor bdiff repo revlog util os")
+from i18n import _
+import ancestor, bdiff, repo, revlog, util, os, errno
class changectx(object):
"""A changecontext object makes access to data related to a particular
@@ -84,21 +83,22 @@
try:
return self._manifest[path]
except KeyError:
- raise repo.LookupError(_("'%s' not found in manifest") % path)
+ raise revlog.LookupError(_("'%s' not found in manifest") % path)
if '_manifestdelta' in self.__dict__ or path in self.files():
if path in self._manifestdelta:
return self._manifestdelta[path]
node, flag = self._repo.manifest.find(self._changeset[0], path)
if not node:
- raise repo.LookupError(_("'%s' not found in manifest") % path)
+ raise revlog.LookupError(_("'%s' not found in manifest") % path)
return node
- def filectx(self, path, fileid=None):
+ def filectx(self, path, fileid=None, filelog=None):
"""get a file context from this changeset"""
if fileid is None:
fileid = self.filenode(path)
- return filectx(self._repo, path, fileid=fileid, changectx=self)
+ return filectx(self._repo, path, fileid=fileid,
+ changectx=self, filelog=filelog)
def filectxs(self):
"""generate a file context for each file in this changeset's
@@ -126,16 +126,18 @@
self._repo = repo
self._path = path
- assert changeid is not None or fileid is not None
+ assert (changeid is not None
+ or fileid is not None
+ or changectx is not None)
if filelog:
self._filelog = filelog
- if changectx:
- self._changectx = changectx
- self._changeid = changectx.node()
if fileid is None:
- self._changeid = changeid
+ if changectx is None:
+ self._changeid = changeid
+ else:
+ self._changectx = changectx
else:
self._fileid = fileid
@@ -150,13 +152,10 @@
self._changeid = self._filelog.linkrev(self._filenode)
return self._changeid
elif name == '_filenode':
- try:
- if '_fileid' in self.__dict__:
- self._filenode = self._filelog.lookup(self._fileid)
- else:
- self._filenode = self._changectx.filenode(self._path)
- except revlog.RevlogError, inst:
- raise repo.LookupError(str(inst))
+ if '_fileid' in self.__dict__:
+ self._filenode = self._filelog.lookup(self._fileid)
+ else:
+ self._filenode = self._changectx.filenode(self._path)
return self._filenode
elif name == '_filerev':
self._filerev = self._filelog.rev(self._filenode)
@@ -168,7 +167,7 @@
try:
n = self._filenode
return True
- except repo.LookupError:
+ except revlog.LookupError:
# file is missing
return False
@@ -379,13 +378,15 @@
"""generate a manifest corresponding to the working directory"""
man = self._parents[0].manifest().copy()
+ is_exec = util.execfunc(self._repo.root, man.execf)
+ is_link = util.linkfunc(self._repo.root, man.linkf)
copied = self._repo.dirstate.copies()
modified, added, removed, deleted, unknown = self._status[:5]
for i, l in (("a", added), ("m", modified), ("u", unknown)):
for f in l:
man[f] = man.get(copied.get(f, f), nullid) + i
try:
- man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f)))
+ man.set(f, is_exec(f), is_link(f))
except OSError:
pass
@@ -420,9 +421,10 @@
def children(self):
return []
- def filectx(self, path):
+ def filectx(self, path, filelog=None):
"""get a file context from the working directory"""
- return workingfilectx(self._repo, path, workingctx=self)
+ return workingfilectx(self._repo, path, workingctx=self,
+ filelog=filelog)
def ancestor(self, c2):
"""return the ancestor context of self and c2"""
@@ -480,7 +482,7 @@
rp = self._repopath
if rp == self._path:
return None
- return rp, self._workingctx._parents._manifest.get(rp, nullid)
+ return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
def parents(self):
'''return parent filectxs, following copies if necessary'''
@@ -501,5 +503,12 @@
return []
def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
+ def date(self):
+ t, tz = self._changectx.date()
+ try:
+ return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
+ except OSError, err:
+ if err.errno != errno.ENOENT: raise
+ return (t, tz)
def cmp(self, text): return self._repo.wread(self._path) == text
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/demandimport.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/demandimport.py Sat May 19 22:51:43 2007 +0200
@@ -0,0 +1,115 @@
+# demandimport.py - global demand-loading of modules for Mercurial
+#
+# Copyright 2006 Matt Mackall
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+'''
+demandimport - automatic demandloading of modules
+
+To enable this module, do:
+
+ import demandimport; demandimport.enable()
+
+Imports of the following forms will be demand-loaded:
+
+ import a, b.c
+ import a.b as c
+ from a import b,c # a will be loaded immediately
+
+These imports will not be delayed:
+
+ from a import *
+ b = __import__(a)
+'''
+
+_origimport = __import__
+
+class _demandmod(object):
+ """module demand-loader and proxy"""
+ def __init__(self, name, globals, locals):
+ if '.' in name:
+ head, rest = name.split('.', 1)
+ after = [rest]
+ else:
+ head = name
+ after = []
+ object.__setattr__(self, "_data", (head, globals, locals, after))
+ object.__setattr__(self, "_module", None)
+ def _extend(self, name):
+ """add to the list of submodules to load"""
+ self._data[3].append(name)
+ def _load(self):
+ if not self._module:
+ head, globals, locals, after = self._data
+ mod = _origimport(head, globals, locals)
+ # load submodules
+ def subload(mod, p):
+ h, t = p, None
+ if '.' in p:
+ h, t = p.split('.', 1)
+ if not hasattr(mod, h):
+ setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__))
+ elif t:
+ subload(getattr(mod, h), t)
+
+ for x in after:
+ subload(mod, x)
+
+ # are we in the locals dictionary still?
+ if locals and locals.get(head) == self:
+ locals[head] = mod
+ object.__setattr__(self, "_module", mod)
+ def __repr__(self):
+ return "" % self._data[0]
+ def __call__(self, *args, **kwargs):
+ raise TypeError("'unloaded module' object is not callable")
+ def __getattribute__(self, attr):
+ if attr in ('_data', '_extend', '_load', '_module'):
+ return object.__getattribute__(self, attr)
+ self._load()
+ return getattr(self._module, attr)
+ def __setattr__(self, attr, val):
+ self._load()
+ setattr(self._module, attr, val)
+
+def _demandimport(name, globals=None, locals=None, fromlist=None):
+ if not locals or name in ignore or fromlist == ('*',):
+ # these cases we can't really delay
+ return _origimport(name, globals, locals, fromlist)
+ elif not fromlist:
+ # import a [as b]
+ if '.' in name: # a.b
+ base, rest = name.split('.', 1)
+ # email.__init__ loading email.mime
+ if globals and globals.get('__name__', None) == base:
+ return _origimport(name, globals, locals, fromlist)
+ # if a is already demand-loaded, add b to its submodule list
+ if base in locals:
+ if isinstance(locals[base], _demandmod):
+ locals[base]._extend(rest)
+ return locals[base]
+ return _demandmod(name, globals, locals)
+ else:
+ # from a import b,c,d
+ mod = _origimport(name, globals, locals)
+ # recurse down the module chain
+ for comp in name.split('.')[1:]:
+ mod = getattr(mod, comp)
+ for x in fromlist:
+ # set requested submodules for demand load
+ if not(hasattr(mod, x)):
+ setattr(mod, x, _demandmod(x, mod.__dict__, mod.__dict__))
+ return mod
+
+ignore = ['_hashlib', '_xmlplus', 'fcntl', 'win32com.gen_py']
+
+def enable():
+ "enable global demand-loading of modules"
+ __builtins__["__import__"] = _demandimport
+
+def disable():
+ "disable global demand-loading of modules"
+ __builtins__["__import__"] = _origimport
+
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/demandload.py
--- a/mercurial/demandload.py Sat May 19 22:47:01 2007 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,135 +0,0 @@
-'''Demand load modules when used, not when imported.'''
-
-__author__ = '''Copyright 2006 Vadim Gelfer .
-This software may be used and distributed according to the terms
-of the GNU General Public License, incorporated herein by reference.'''
-
-# this is based on matt's original demandload module. it is a
-# complete rewrite. some time, we may need to support syntax of
-# "import foo as bar".
-
-class _importer(object):
- '''import a module. it is not imported until needed, and is
- imported at most once per scope.'''
-
- def __init__(self, scope, modname, fromlist):
- '''scope is context (globals() or locals()) in which import
- should be made. modname is name of module to import.
- fromlist is list of modules for "from foo import ..."
- emulation.'''
-
- self.scope = scope
- self.modname = modname
- self.fromlist = fromlist
- self.mod = None
-
- def module(self):
- '''import the module if needed, and return.'''
- if self.mod is None:
- self.mod = __import__(self.modname, self.scope, self.scope,
- self.fromlist)
- del self.modname, self.fromlist
- return self.mod
-
-class _replacer(object):
- '''placeholder for a demand loaded module. demandload puts this in
- a target scope. when an attribute of this object is looked up,
- this object is replaced in the target scope with the actual
- module.
-
- we use __getattribute__ to avoid namespace clashes between
- placeholder object and real module.'''
-
- def __init__(self, importer, target):
- self.importer = importer
- self.target = target
- # consider case where we do this:
- # demandload(globals(), 'foo.bar foo.quux')
- # foo will already exist in target scope when we get to
- # foo.quux. so we remember that we will need to demandload
- # quux into foo's scope when we really load it.
- self.later = []
-
- def module(self):
- return object.__getattribute__(self, 'importer').module()
-
- def __getattribute__(self, key):
- '''look up an attribute in a module and return it. replace the
- name of the module in the caller\'s dict with the actual
- module.'''
-
- module = object.__getattribute__(self, 'module')()
- target = object.__getattribute__(self, 'target')
- importer = object.__getattribute__(self, 'importer')
- later = object.__getattribute__(self, 'later')
-
- if later:
- demandload(module.__dict__, ' '.join(later))
-
- importer.scope[target] = module
-
- return getattr(module, key)
-
-class _replacer_from(_replacer):
- '''placeholder for a demand loaded module. used for "from foo
- import ..." emulation. semantics of this are different than
- regular import, so different implementation needed.'''
-
- def module(self):
- importer = object.__getattribute__(self, 'importer')
- target = object.__getattribute__(self, 'target')
-
- return getattr(importer.module(), target)
-
- def __call__(self, *args, **kwargs):
- target = object.__getattribute__(self, 'module')()
- return target(*args, **kwargs)
-
-def demandload(scope, modules):
- '''import modules into scope when each is first used.
-
- scope should be the value of globals() in the module calling this
- function, or locals() in the calling function.
-
- modules is a string listing module names, separated by white
- space. names are handled like this:
-
- foo import foo
- foo bar import foo, bar
- foo@bar import foo as bar
- foo.bar import foo.bar
- foo:bar from foo import bar
- foo:bar,quux from foo import bar, quux
- foo.bar:quux from foo.bar import quux'''
-
- for mod in modules.split():
- col = mod.find(':')
- if col >= 0:
- fromlist = mod[col+1:].split(',')
- mod = mod[:col]
- else:
- fromlist = []
- as_ = None
- if '@' in mod:
- mod, as_ = mod.split("@")
- importer = _importer(scope, mod, fromlist)
- if fromlist:
- for name in fromlist:
- scope[name] = _replacer_from(importer, name)
- else:
- dot = mod.find('.')
- if dot >= 0:
- basemod = mod[:dot]
- val = scope.get(basemod)
- # if base module has already been demandload()ed,
- # remember to load this submodule into its namespace
- # when needed.
- if isinstance(val, _replacer):
- later = object.__getattribute__(val, 'later')
- later.append(mod[dot+1:])
- continue
- else:
- basemod = mod
- if not as_:
- as_ = basemod
- scope[as_] = _replacer(importer, as_)
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/dirstate.py
--- a/mercurial/dirstate.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/dirstate.py Sat May 19 22:51:43 2007 +0200
@@ -8,9 +8,9 @@
"""
from node import *
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "struct os time bisect stat strutil util re errno")
+from i18n import _
+import struct, os, time, bisect, stat, strutil, util, re, errno
+import cStringIO
class dirstate(object):
format = ">cllll"
@@ -21,6 +21,7 @@
self.dirty = 0
self.ui = ui
self.map = None
+ self.fp = None
self.pl = None
self.dirs = None
self.copymap = {}
@@ -136,12 +137,28 @@
self.lazyread()
return self[key]
+ _unknown = ('?', 0, 0, 0)
+
+ def get(self, key):
+ try:
+ return self[key]
+ except KeyError:
+ return self._unknown
+
def __contains__(self, key):
self.lazyread()
return key in self.map
def parents(self):
- self.lazyread()
+ if self.pl is None:
+ self.pl = [nullid, nullid]
+ try:
+ self.fp = self.opener('dirstate')
+ st = self.fp.read(40)
+ if len(st) == 40:
+ self.pl = st[:20], st[20:40]
+ except IOError, err:
+ if err.errno != errno.ENOENT: raise
return self.pl
def branch(self):
@@ -205,12 +222,27 @@
self.map = {}
self.pl = [nullid, nullid]
try:
- st = self.opener("dirstate").read()
+ if self.fp:
+ self.fp.seek(0)
+ st = self.fp.read()
+ self.fp = None
+ else:
+ st = self.opener("dirstate").read()
if st:
self.parse(st)
except IOError, err:
if err.errno != errno.ENOENT: raise
+ def reload(self):
+ def mtime():
+ m = self.map and self.map.get('.hgignore')
+ return m and m[-1]
+
+ old_mtime = self.ignorefunc and mtime()
+ self.read()
+ if old_mtime != mtime():
+ self.ignorefunc = None
+
def copy(self, source, dest):
self.lazyread()
self.markdirty()
@@ -317,15 +349,17 @@
def write(self):
if not self.dirty:
return
- st = self.opener("dirstate", "w", atomictemp=True)
- st.write("".join(self.pl))
- for f, e in self.map.items():
+ cs = cStringIO.StringIO()
+ cs.write("".join(self.pl))
+ for f, e in self.map.iteritems():
c = self.copied(f)
if c:
f = f + "\0" + c
e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
- st.write(e + f)
- st.rename()
+ cs.write(e)
+ cs.write(f)
+ st = self.opener("dirstate", "w", atomic=True)
+ st.write(cs.getvalue())
self.dirty = 0
def filterfiles(self, files):
@@ -359,14 +393,13 @@
return ret
def supported_type(self, f, st, verbose=False):
- if stat.S_ISREG(st.st_mode):
+ if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
return True
if verbose:
kind = 'unknown'
if stat.S_ISCHR(st.st_mode): kind = _('character device')
elif stat.S_ISBLK(st.st_mode): kind = _('block device')
elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
- elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
elif stat.S_ISDIR(st.st_mode): kind = _('directory')
self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
@@ -380,7 +413,7 @@
yield src, f
def statwalk(self, files=None, match=util.always, ignored=False,
- badmatch=None):
+ badmatch=None, directories=False):
'''
walk recursively through the directory tree, finding all files
matched by the match function
@@ -388,6 +421,7 @@
results are yielded in a tuple (src, filename, st), where src
is one of:
'f' the file was found in the directory tree
+ 'd' the file is a directory of the tree
'm' the file was only in the dirstate and not in the tree
'b' file was not found and matched badmatch
@@ -408,7 +442,10 @@
return False
return match(file_)
- if ignored: imatch = match
+ ignore = self.ignore
+ if ignored:
+ imatch = match
+ ignore = util.never
# self.root may end with a path separator when self.root == '/'
common_prefix_len = len(self.root)
@@ -417,6 +454,8 @@
# recursion free walker, faster than os.walk.
def findfiles(s):
work = [s]
+ if directories:
+ yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
while work:
top = work.pop()
names = os.listdir(top)
@@ -441,9 +480,10 @@
# don't trip over symlinks
st = os.lstat(p)
if stat.S_ISDIR(st.st_mode):
- ds = util.pconvert(os.path.join(nd, f +'/'))
- if imatch(ds):
+ if not ignore(np):
work.append(p)
+ if directories:
+ yield 'd', np, st
if imatch(np) and np in dc:
yield 'm', np, st
elif imatch(np):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/filelog.py
--- a/mercurial/filelog.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/filelog.py Sat May 19 22:51:43 2007 +0200
@@ -6,15 +6,12 @@
# of the GNU General Public License, incorporated herein by reference.
from revlog import *
-from demandload import *
-demandload(globals(), "os")
+import os
class filelog(revlog):
- def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
+ def __init__(self, opener, path):
revlog.__init__(self, opener,
- "/".join(("data", self.encodedir(path + ".i"))),
- "/".join(("data", self.encodedir(path + ".d"))),
- defversion)
+ "/".join(("data", self.encodedir(path + ".i"))))
# This avoids a collision between a file named foo and a dir named
# foo.i or foo.d
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/hg.py
--- a/mercurial/hg.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/hg.py Sat May 19 22:51:43 2007 +0200
@@ -8,10 +8,11 @@
from node import *
from repo import *
-from demandload import *
-from i18n import gettext as _
-demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
-demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify")
+from i18n import _
+import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
+import errno, lock, os, shutil, util
+import merge as _merge
+import verify as _verify
def _local(path):
return (os.path.isfile(util.drop_scheme('file', path)) and
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/hgweb/__init__.py
--- a/mercurial/hgweb/__init__.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/hgweb/__init__.py Sat May 19 22:51:43 2007 +0200
@@ -6,6 +6,11 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial.demandload import demandload
-demandload(globals(), "mercurial.hgweb.hgweb_mod:hgweb")
-demandload(globals(), "mercurial.hgweb.hgwebdir_mod:hgwebdir")
+import hgweb_mod, hgwebdir_mod
+
+def hgweb(*args, **kwargs):
+ return hgweb_mod.hgweb(*args, **kwargs)
+
+def hgwebdir(*args, **kwargs):
+ return hgwebdir_mod.hgwebdir(*args, **kwargs)
+
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/hgweb/common.py
--- a/mercurial/hgweb/common.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/hgweb/common.py Sat May 19 22:51:43 2007 +0200
@@ -7,7 +7,6 @@
# of the GNU General Public License, incorporated herein by reference.
import os, mimetypes
-import os.path
def get_mtime(repo_path):
store_path = os.path.join(repo_path, ".hg")
@@ -39,7 +38,7 @@
os.stat(path)
ct = mimetypes.guess_type(path)[0] or "text/plain"
req.header([('Content-type', ct),
- ('Content-length', os.path.getsize(path))])
+ ('Content-length', str(os.path.getsize(path)))])
return file(path, 'rb').read()
except (TypeError, OSError):
# illegal fname or unreadable file
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/hgweb/hgweb_mod.py
--- a/mercurial/hgweb/hgweb_mod.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/hgweb/hgweb_mod.py Sat May 19 22:51:43 2007 +0200
@@ -6,17 +6,13 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-import os
-import os.path
-import mimetypes
-from mercurial.demandload import demandload
-demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
-demandload(globals(), 'urllib bz2')
-demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone,patch")
-demandload(globals(), "mercurial:revlog,templater")
-demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile,style_map")
+import os, mimetypes, re, zlib, mimetools, cStringIO, sys
+import tempfile, urllib, bz2
from mercurial.node import *
from mercurial.i18n import gettext as _
+from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
+from mercurial import revlog, templater
+from common import get_mtime, staticfile, style_map
def _up(p):
if p[0] != "/":
@@ -172,14 +168,10 @@
yield self.t("diffline", line=l)
r = self.repo
- cl = r.changelog
- mf = r.manifest
- change1 = cl.read(node1)
- change2 = cl.read(node2)
- mmap1 = mf.read(change1[0])
- mmap2 = mf.read(change2[0])
- date1 = util.datestr(change1[2])
- date2 = util.datestr(change2[2])
+ c1 = r.changectx(node1)
+ c2 = r.changectx(node2)
+ date1 = util.datestr(c1.date())
+ date2 = util.datestr(c2.date())
modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
if files:
@@ -188,17 +180,17 @@
diffopts = patch.diffopts(self.repo.ui, untrusted=True)
for f in modified:
- to = r.file(f).read(mmap1[f])
- tn = r.file(f).read(mmap2[f])
+ to = c1.filectx(f).data()
+ tn = c2.filectx(f).data()
yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
opts=diffopts), f, tn)
for f in added:
to = None
- tn = r.file(f).read(mmap2[f])
+ tn = c2.filectx(f).data()
yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
opts=diffopts), f, tn)
for f in removed:
- to = r.file(f).read(mmap1[f])
+ to = c1.filectx(f).data()
tn = None
yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
opts=diffopts), f, tn)
@@ -497,8 +489,6 @@
archives=self.archivelist(hex(node)))
def tags(self):
- cl = self.repo.changelog
-
i = self.repo.tagslist()
i.reverse()
@@ -509,7 +499,7 @@
continue
yield {"parity": self.stripes(parity),
"tag": k,
- "date": cl.read(n)[2],
+ "date": self.repo.changectx(n).date(),
"node": hex(n)}
parity += 1
@@ -519,8 +509,6 @@
entriesnotip=lambda **x: entries(True, **x))
def summary(self):
- cl = self.repo.changelog
-
i = self.repo.tagslist()
i.reverse()
@@ -535,69 +523,64 @@
if count > 10: # limit to 10 tags
break;
- c = cl.read(n)
- t = c[2]
-
yield self.t("tagentry",
- parity = self.stripes(parity),
- tag = k,
- node = hex(n),
- date = t)
+ parity=self.stripes(parity),
+ tag=k,
+ node=hex(n),
+ date=self.repo.changectx(n).date())
parity += 1
- def heads(**map):
+
+ def branches(**map):
parity = 0
- count = 0
- for node in self.repo.heads():
- count += 1
- if count > 10:
- break;
+ b = self.repo.branchtags()
+ l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
+ l.sort()
- ctx = self.repo.changectx(node)
+ for r,n,t in l:
+ ctx = self.repo.changectx(n)
yield {'parity': self.stripes(parity),
- 'branch': ctx.branch(),
- 'node': hex(node),
+ 'branch': t,
+ 'node': hex(n),
'date': ctx.date()}
parity += 1
def changelist(**map):
parity = 0
- cl = self.repo.changelog
l = [] # build a list in forward order for efficiency
for i in xrange(start, end):
- n = cl.node(i)
- changes = cl.read(n)
- hn = hex(n)
- t = changes[2]
+ ctx = self.repo.changectx(i)
+ hn = hex(ctx.node())
l.insert(0, self.t(
'shortlogentry',
- parity = parity,
- author = changes[1],
- desc = changes[4],
- date = t,
- rev = i,
- node = hn))
+ parity=parity,
+ author=ctx.user(),
+ desc=ctx.description(),
+ date=ctx.date(),
+ rev=i,
+ node=hn))
parity = 1 - parity
yield l
+ cl = self.repo.changelog
count = cl.count()
start = max(0, count - self.maxchanges)
end = min(count, start + self.maxchanges)
yield self.t("summary",
- desc = self.config("web", "description", "unknown"),
- owner = (self.config("ui", "username") or # preferred
- self.config("web", "contact") or # deprecated
- self.config("web", "author", "unknown")), # also
- lastchange = cl.read(cl.tip())[2],
- tags = tagentries,
- heads = heads,
- shortlog = changelist,
- node = hex(cl.tip()),
+ desc=self.config("web", "description", "unknown"),
+ owner=(self.config("ui", "username") or # preferred
+ self.config("web", "contact") or # deprecated
+ self.config("web", "author", "unknown")), # also
+ lastchange=cl.read(cl.tip())[2],
+ tags=tagentries,
+ branches=branches,
+ shortlog=changelist,
+ node=hex(cl.tip()),
archives=self.archivelist("tip"))
def filediff(self, fctx):
@@ -623,9 +606,13 @@
'zip': ('application/zip', 'zip', '.zip', None),
}
- def archive(self, req, cnode, type_):
+ def archive(self, req, id, type_):
reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
- name = "%s-%s" % (reponame, short(cnode))
+ cnode = self.repo.lookup(id)
+ arch_version = id
+ if cnode == id or id == 'tip':
+ arch_version = short(cnode)
+ name = "%s-%s" % (reponame, arch_version)
mimetype, artype, extension, encoding = self.archive_specs[type_]
headers = [('Content-type', mimetype),
('Content-disposition', 'attachment; filename=%s%s' %
@@ -789,6 +776,9 @@
port = req.env["SERVER_PORT"]
port = port != "80" and (":" + port) or ""
urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
+ staticurl = self.config("web", "staticurl") or req.url + 'static/'
+ if not staticurl.endswith('/'):
+ staticurl += '/'
if not self.reponame:
self.reponame = (self.config("web", "name")
@@ -797,6 +787,7 @@
self.t = templater.templater(mapfile, templater.common_filters,
defaults={"url": req.url,
+ "staticurl": staticurl,
"urlbase": urlbase,
"repo": self.reponame,
"header": header,
@@ -876,7 +867,7 @@
try:
req.write(self.filerevision(self.filectx(req)))
return
- except hg.RepoError:
+ except revlog.LookupError:
pass
req.write(self.manifest(self.changectx(req), path))
@@ -1005,12 +996,11 @@
req.write(z.flush())
def do_archive(self, req):
- changeset = self.repo.lookup(req.form['node'][0])
type_ = req.form['type'][0]
allowed = self.configlist("web", "allow_archive")
if (type_ in self.archives and (type_ in allowed or
self.configbool("web", "allow" + type_, False))):
- self.archive(req, changeset, type_)
+ self.archive(req, req.form['node'][0], type_)
return
req.write(self.t("error"))
@@ -1028,7 +1018,7 @@
def do_capabilities(self, req):
caps = ['lookup', 'changegroupsubset']
if self.configbool('server', 'uncompressed'):
- caps.append('stream=%d' % self.repo.revlogversion)
+ caps.append('stream=%d' % self.repo.changelog.version)
# XXX: make configurable and/or share code with do_unbundle:
unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
if unbundleversions:
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/hgweb/hgwebdir_mod.py
--- a/mercurial/hgweb/hgwebdir_mod.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py Sat May 19 22:51:43 2007 +0200
@@ -6,13 +6,12 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-import os
-from mercurial.demandload import demandload
-demandload(globals(), "mimetools cStringIO")
-demandload(globals(), "mercurial:ui,hg,util,templater")
-demandload(globals(), "mercurial.hgweb.hgweb_mod:hgweb")
-demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile,style_map")
+from mercurial import demandimport; demandimport.enable()
+import os, mimetools, cStringIO
from mercurial.i18n import gettext as _
+from mercurial import ui, hg, util, templater
+from common import get_mtime, staticfile, style_map
+from hgweb_mod import hgweb
# This is a stopgap
class hgwebdir(object):
@@ -31,8 +30,11 @@
self.repos = cleannames(config.items())
self.repos.sort()
else:
- cp = util.configparser()
- cp.read(config)
+ if isinstance(config, util.configparser):
+ cp = config
+ else:
+ cp = util.configparser()
+ cp.read(config)
self.repos = []
if cp.has_section('web'):
if cp.has_option('web', 'motd'):
@@ -86,6 +88,10 @@
if not url.endswith('/'):
url += '/'
+ staticurl = config('web', 'staticurl') or url + 'static/'
+ if not staticurl.endswith('/'):
+ staticurl += '/'
+
style = self.style
if style is None:
style = config('web', 'style', '')
@@ -96,7 +102,8 @@
defaults={"header": header,
"footer": footer,
"motd": motd,
- "url": url})
+ "url": url,
+ "staticurl": staticurl})
def archivelist(ui, nodeid, url):
allowed = ui.configlist("web", "allow_archive", untrusted=True)
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/hgweb/request.py
--- a/mercurial/hgweb/request.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/hgweb/request.py Sat May 19 22:51:43 2007 +0200
@@ -6,8 +6,7 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial.demandload import demandload
-demandload(globals(), "socket sys cgi os errno")
+import socket, cgi, errno
from mercurial.i18n import gettext as _
class wsgiapplication(object):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/hgweb/server.py
--- a/mercurial/hgweb/server.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/hgweb/server.py Sat May 19 22:51:43 2007 +0200
@@ -6,11 +6,11 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from mercurial.demandload import demandload
-import os, sys, errno
-demandload(globals(), "urllib BaseHTTPServer socket SocketServer traceback")
-demandload(globals(), "mercurial:ui,hg,util,templater")
-demandload(globals(), "hgweb_mod:hgweb hgwebdir_mod:hgwebdir request:wsgiapplication")
+import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
+from mercurial import ui, hg, util, templater
+from hgweb_mod import hgweb
+from hgwebdir_mod import hgwebdir
+from request import wsgiapplication
from mercurial.i18n import gettext as _
def _splitURI(uri):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/httprepo.py
--- a/mercurial/httprepo.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/httprepo.py Sat May 19 22:51:43 2007 +0200
@@ -8,10 +8,9 @@
from node import *
from remoterepo import *
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "hg os urllib urllib2 urlparse zlib util httplib")
-demandload(globals(), "errno keepalive tempfile socket changegroup")
+from i18n import _
+import hg, os, urllib, urllib2, urlparse, zlib, util, httplib
+import errno, keepalive, tempfile, socket, changegroup
class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
def __init__(self, ui):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/i18n.py
--- a/mercurial/i18n.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/i18n.py Sat May 19 22:51:43 2007 +0200
@@ -7,9 +7,7 @@
of the GNU General Public License, incorporated herein by reference.
"""
-# the import from gettext is _really_ slow
-# for now we use a dummy function
-gettext = lambda x: x
-#import gettext
-#t = gettext.translation('hg', '/usr/share/locale', fallback=1)
-#gettext = t.gettext
+import gettext
+t = gettext.translation('hg', fallback=1)
+gettext = t.gettext
+_ = gettext
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/localrepo.py
--- a/mercurial/localrepo.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/localrepo.py Sat May 19 22:51:43 2007 +0200
@@ -6,13 +6,11 @@
# of the GNU General Public License, incorporated herein by reference.
from node import *
-from i18n import gettext as _
-from demandload import *
-import repo
-demandload(globals(), "appendfile changegroup")
-demandload(globals(), "changelog dirstate filelog manifest context")
-demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
-demandload(globals(), "os revlog time util")
+from i18n import _
+import repo, changegroup
+import changelog, dirstate, filelog, manifest, context
+import re, lock, transaction, tempfile, stat, mdiff, errno, ui
+import os, revlog, time, util
class localrepository(repo.repository):
capabilities = ('lookup', 'changegroupsubset')
@@ -43,17 +41,19 @@
if not os.path.exists(path):
os.mkdir(path)
os.mkdir(self.path)
- os.mkdir(os.path.join(self.path, "store"))
- requirements = ("revlogv1", "store")
+ requirements = ["revlogv1"]
+ if parentui.configbool('format', 'usestore', True):
+ os.mkdir(os.path.join(self.path, "store"))
+ requirements.append("store")
+ # create an invalid changelog
+ self.opener("00changelog.i", "a").write(
+ '\0\0\0\2' # represents revlogv2
+ ' dummy changelog to prevent using the old repo layout'
+ )
reqfile = self.opener("requires", "w")
for r in requirements:
reqfile.write("%s\n" % r)
reqfile.close()
- # create an invalid changelog
- self.opener("00changelog.i", "a").write(
- '\0\0\0\2' # represents revlogv2
- ' dummy changelog to prevent using the old repo layout'
- )
else:
raise repo.RepoError(_("repository %s not found") % path)
elif create:
@@ -88,39 +88,18 @@
except IOError:
pass
- v = self.ui.configrevlog()
- self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
- self.revlogv1 = self.revlogversion != revlog.REVLOGV0
- fl = v.get('flags', None)
- flags = 0
- if fl != None:
- for x in fl.split():
- flags |= revlog.flagstr(x)
- elif self.revlogv1:
- flags = revlog.REVLOG_DEFAULT_FLAGS
-
- v = self.revlogversion | flags
- self.manifest = manifest.manifest(self.sopener, v)
- self.changelog = changelog.changelog(self.sopener, v)
+ self.changelog = changelog.changelog(self.sopener)
+ self.sopener.defversion = self.changelog.version
+ self.manifest = manifest.manifest(self.sopener)
fallback = self.ui.config('ui', 'fallbackencoding')
if fallback:
util._fallbackencoding = fallback
- # the changelog might not have the inline index flag
- # on. If the format of the changelog is the same as found in
- # .hgrc, apply any flags found in the .hgrc as well.
- # Otherwise, just version from the changelog
- v = self.changelog.version
- if v == self.revlogversion:
- v |= flags
- self.revlogversion = v
-
self.tagscache = None
self.branchcache = None
self.nodetagscache = None
- self.encodepats = None
- self.decodepats = None
+ self.filterpats = {}
self.transhandle = None
self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
@@ -140,32 +119,34 @@
be run as hooks without wrappers to convert return values.'''
self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
- d = funcname.rfind('.')
- if d == -1:
- raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
- % (hname, funcname))
- modname = funcname[:d]
- try:
- obj = __import__(modname)
- except ImportError:
+ obj = funcname
+ if not callable(obj):
+ d = funcname.rfind('.')
+ if d == -1:
+ raise util.Abort(_('%s hook is invalid ("%s" not in '
+ 'a module)') % (hname, funcname))
+ modname = funcname[:d]
try:
- # extensions are loaded with hgext_ prefix
- obj = __import__("hgext_%s" % modname)
+ obj = __import__(modname)
except ImportError:
+ try:
+ # extensions are loaded with hgext_ prefix
+ obj = __import__("hgext_%s" % modname)
+ except ImportError:
+ raise util.Abort(_('%s hook is invalid '
+ '(import of "%s" failed)') %
+ (hname, modname))
+ try:
+ for p in funcname.split('.')[1:]:
+ obj = getattr(obj, p)
+ except AttributeError, err:
raise util.Abort(_('%s hook is invalid '
- '(import of "%s" failed)') %
- (hname, modname))
- try:
- for p in funcname.split('.')[1:]:
- obj = getattr(obj, p)
- except AttributeError, err:
- raise util.Abort(_('%s hook is invalid '
- '("%s" is not defined)') %
- (hname, funcname))
- if not callable(obj):
- raise util.Abort(_('%s hook is invalid '
- '("%s" is not callable)') %
- (hname, funcname))
+ '("%s" is not defined)') %
+ (hname, funcname))
+ if not callable(obj):
+ raise util.Abort(_('%s hook is invalid '
+ '("%s" is not callable)') %
+ (hname, funcname))
try:
r = obj(ui=self.ui, repo=self, hooktype=name, **args)
except (KeyboardInterrupt, util.SignalInterrupt):
@@ -203,7 +184,9 @@
if hname.split(".", 1)[0] == name and cmd]
hooks.sort()
for hname, cmd in hooks:
- if cmd.startswith('python:'):
+ if callable(cmd):
+ r = callhook(hname, cmd) or r
+ elif cmd.startswith('python:'):
r = callhook(hname, cmd[7:].strip()) or r
else:
r = runhook(hname, cmd) or r
@@ -211,6 +194,37 @@
tag_disallowed = ':\r\n'
+ def _tag(self, name, node, message, local, user, date, parent=None):
+ use_dirstate = parent is None
+
+ for c in self.tag_disallowed:
+ if c in name:
+ raise util.Abort(_('%r cannot be used in a tag name') % c)
+
+ self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
+
+ if local:
+ # local tags are stored in the current charset
+ self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
+ self.hook('tag', node=hex(node), tag=name, local=local)
+ return
+
+ # committed tags are stored in UTF-8
+ line = '%s %s\n' % (hex(node), util.fromlocal(name))
+ if use_dirstate:
+ self.wfile('.hgtags', 'ab').write(line)
+ else:
+ ntags = self.filectx('.hgtags', parent).data()
+ self.wfile('.hgtags', 'ab').write(ntags + line)
+ if use_dirstate and self.dirstate.state('.hgtags') == '?':
+ self.add(['.hgtags'])
+
+ tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
+
+ self.hook('tag', node=hex(node), tag=name, local=local)
+
+ return tagnode
+
def tag(self, name, node, message, local, user, date):
'''tag a revision with a symbolic name.
@@ -229,31 +243,13 @@
date: date tuple to use if committing'''
- for c in self.tag_disallowed:
- if c in name:
- raise util.Abort(_('%r cannot be used in a tag name') % c)
-
- self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
-
- if local:
- # local tags are stored in the current charset
- self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
- self.hook('tag', node=hex(node), tag=name, local=local)
- return
-
for x in self.status()[:5]:
if '.hgtags' in x:
raise util.Abort(_('working copy of .hgtags is changed '
'(please commit .hgtags manually)'))
- # committed tags are stored in UTF-8
- line = '%s %s\n' % (hex(node), util.fromlocal(name))
- self.wfile('.hgtags', 'ab').write(line)
- if self.dirstate.state('.hgtags') == '?':
- self.add(['.hgtags'])
- self.commit(['.hgtags'], message, user, date)
- self.hook('tag', node=hex(node), tag=name, local=local)
+ self._tag(name, node, message, local, user, date)
def tags(self):
'''return a mapping of tag to node'''
@@ -344,7 +340,7 @@
rev = c.rev()
try:
fnode = c.filenode('.hgtags')
- except repo.LookupError:
+ except revlog.LookupError:
continue
ret.append((rev, node, fnode))
if fnode in last:
@@ -477,7 +473,7 @@
def file(self, f):
if f[0] == '/':
f = f[1:]
- return filelog.filelog(self.sopener, f, self.revlogversion)
+ return filelog.filelog(self.sopener, f)
def changectx(self, changeid=None):
return context.changectx(self, changeid)
@@ -509,17 +505,18 @@
def wfile(self, f, mode='r'):
return self.wopener(f, mode)
- def wread(self, filename):
- if self.encodepats == None:
+ def _link(self, f):
+ return os.path.islink(self.wjoin(f))
+
+ def _filter(self, filter, filename, data):
+ if filter not in self.filterpats:
l = []
- for pat, cmd in self.ui.configitems("encode"):
+ for pat, cmd in self.ui.configitems(filter):
mf = util.matcher(self.root, "", [pat], [], [])[1]
l.append((mf, cmd))
- self.encodepats = l
+ self.filterpats[filter] = l
- data = self.wopener(filename, 'r').read()
-
- for mf, cmd in self.encodepats:
+ for mf, cmd in self.filterpats[filter]:
if mf(filename):
self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
data = util.filter(data, cmd)
@@ -527,23 +524,36 @@
return data
- def wwrite(self, filename, data, fd=None):
- if self.decodepats == None:
- l = []
- for pat, cmd in self.ui.configitems("decode"):
- mf = util.matcher(self.root, "", [pat], [], [])[1]
- l.append((mf, cmd))
- self.decodepats = l
+ def wread(self, filename):
+ if self._link(filename):
+ data = os.readlink(self.wjoin(filename))
+ else:
+ data = self.wopener(filename, 'r').read()
+ return self._filter("encode", filename, data)
- for mf, cmd in self.decodepats:
- if mf(filename):
- self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
- data = util.filter(data, cmd)
- break
+ def wwrite(self, filename, data, flags):
+ data = self._filter("decode", filename, data)
+ if "l" in flags:
+ f = self.wjoin(filename)
+ try:
+ os.unlink(f)
+ except OSError:
+ pass
+ d = os.path.dirname(f)
+ if not os.path.exists(d):
+ os.makedirs(d)
+ os.symlink(data, f)
+ else:
+ try:
+ if self._link(filename):
+ os.unlink(self.wjoin(filename))
+ except OSError:
+ pass
+ self.wopener(filename, 'w').write(data)
+ util.set_exec(self.wjoin(filename), "x" in flags)
- if fd:
- return fd.write(data)
- return self.wopener(filename, 'w').write(data)
+ def wwritedata(self, filename, data):
+ return self._filter("decode", filename, data)
def transaction(self):
tr = self.transhandle
@@ -576,10 +586,11 @@
self.ui.warn(_("no interrupted transaction available\n"))
return False
- def rollback(self, wlock=None):
+ def rollback(self, wlock=None, lock=None):
if not wlock:
wlock = self.wlock()
- l = self.lock()
+ if not lock:
+ lock = self.lock()
if os.path.exists(self.sjoin("undo")):
self.ui.status(_("rolling back last transaction\n"))
transaction.rollback(self.sopener, self.sjoin("undo"))
@@ -590,7 +601,7 @@
self.ui.warn(_("no rollback information available\n"))
def wreload(self):
- self.dirstate.read()
+ self.dirstate.reload()
def reload(self):
self.changelog.load()
@@ -683,11 +694,11 @@
changelist.append(fn)
return fl.add(t, meta, transaction, linkrev, fp1, fp2)
- def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
+ def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
if p1 is None:
p1, p2 = self.dirstate.parents()
return self.commit(files=files, text=text, user=user, date=date,
- p1=p1, p2=p2, wlock=wlock)
+ p1=p1, p2=p2, wlock=wlock, extra=extra)
def commit(self, files=None, text="", user=None, date=None,
match=util.always, force=False, lock=None, wlock=None,
@@ -761,12 +772,14 @@
new = {}
linkrev = self.changelog.count()
commit.sort()
+ is_exec = util.execfunc(self.root, m1.execf)
+ is_link = util.linkfunc(self.root, m1.linkf)
for f in commit:
self.ui.note(f + "\n")
try:
new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
- m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
- except IOError:
+ m1.set(f, is_exec(f), is_link(f))
+ except (OSError, IOError):
if use_dirstate:
self.ui.warn(_("trouble committing %s!\n") % f)
raise
@@ -776,11 +789,13 @@
# update manifest
m1.update(new)
remove.sort()
+ removed = []
for f in remove:
if f in m1:
del m1[f]
- mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
+ removed.append(f)
+ mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
# add changeset
new = new.keys()
@@ -795,8 +810,10 @@
edittext.append("HG: user: %s" % user)
if p2 != nullid:
edittext.append("HG: branch merge")
+ if branchname:
+ edittext.append("HG: branch %s" % util.tolocal(branchname))
edittext.extend(["HG: changed %s" % f for f in changed])
- edittext.extend(["HG: removed %s" % f for f in remove])
+ edittext.extend(["HG: removed %s" % f for f in removed])
if not changed and not remove:
edittext.append("HG: no files changed")
edittext.append("")
@@ -814,17 +831,20 @@
text = '\n'.join(lines)
if branchname:
extra["branch"] = branchname
- n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
+ n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
user, date, extra)
self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
parent2=xp2)
tr.close()
+ if self.branchcache and "branch" in extra:
+ self.branchcache[util.tolocal(extra["branch"])] = n
+
if use_dirstate or update_dirstate:
self.dirstate.setparents(n)
if use_dirstate:
self.dirstate.update(new, "n")
- self.dirstate.forget(remove)
+ self.dirstate.forget(removed)
self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
return n
@@ -844,7 +864,13 @@
if node:
fdict = dict.fromkeys(files)
- for fn in self.manifest.read(self.changelog.read(node)[0]):
+ # for dirstate.walk, files=['.'] means "walk the whole tree".
+ # follow that here, too
+ fdict.pop('.', None)
+ mdict = self.manifest.read(self.changelog.read(node)[0])
+ mfiles = mdict.keys()
+ mfiles.sort()
+ for fn in mfiles:
for ffn in fdict:
# match if the file is the exact name or a directory
if ffn == fn or fn.startswith("%s/" % ffn):
@@ -852,7 +878,9 @@
break
if match(fn):
yield 'm', fn
- for fn in fdict:
+ ffiles = fdict.keys()
+ ffiles.sort()
+ for fn in ffiles:
if badmatch and badmatch(fn):
if match(fn):
yield 'b', fn
@@ -871,9 +899,9 @@
If node2 is None, compare node1 with working directory.
"""
- def fcmp(fn, mf):
+ def fcmp(fn, getnode):
t1 = self.wread(fn)
- return self.file(fn).cmp(mf.get(fn, nullid), t1)
+ return self.file(fn).cmp(getnode(fn), t1)
def mfmatches(node):
change = self.changelog.read(node)
@@ -896,13 +924,10 @@
# all the revisions in parent->child order.
mf1 = mfmatches(node1)
+ mywlock = False
+
# are we comparing the working directory?
if not node2:
- if not wlock:
- try:
- wlock = self.wlock(wait=0)
- except lock.LockException:
- wlock = None
(lookup, modified, added, removed, deleted, unknown,
ignored, clean) = self.dirstate.status(files, match,
list_ignored, list_clean)
@@ -911,25 +936,38 @@
if compareworking:
if lookup:
# do a full compare of any files that might have changed
- mf2 = mfmatches(self.dirstate.parents()[0])
+ mnode = self.changelog.read(self.dirstate.parents()[0])[0]
+ getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
+ nullid)
for f in lookup:
- if fcmp(f, mf2):
+ if fcmp(f, getnode):
modified.append(f)
else:
clean.append(f)
- if wlock is not None:
+ if not wlock and not mywlock:
+ mywlock = True
+ try:
+ wlock = self.wlock(wait=0)
+ except lock.LockException:
+ pass
+ if wlock:
self.dirstate.update([f], "n")
else:
# we are comparing working dir against non-parent
# generate a pseudo-manifest for the working dir
# XXX: create it in dirstate.py ?
mf2 = mfmatches(self.dirstate.parents()[0])
+ is_exec = util.execfunc(self.root, mf2.execf)
+ is_link = util.linkfunc(self.root, mf2.linkf)
for f in lookup + modified + added:
mf2[f] = ""
- mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
+ mf2.set(f, is_exec(f), is_link(f))
for f in removed:
if f in mf2:
del mf2[f]
+
+ if mywlock and wlock:
+ wlock.release()
else:
# we are comparing two revisions
mf2 = mfmatches(node2)
@@ -942,10 +980,12 @@
# reasonable order
mf2keys = mf2.keys()
mf2keys.sort()
+ getnode = lambda fn: mf1.get(fn, nullid)
for fn in mf2keys:
if mf1.has_key(fn):
if mf1.flags(fn) != mf2.flags(fn) or \
- (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
+ (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
+ fcmp(fn, getnode))):
modified.append(fn)
elif list_clean:
clean.append(fn)
@@ -965,11 +1005,12 @@
wlock = self.wlock()
for f in list:
p = self.wjoin(f)
- if not os.path.exists(p):
+ islink = os.path.islink(p)
+ if not islink and not os.path.exists(p):
self.ui.warn(_("%s does not exist!\n") % f)
- elif not os.path.isfile(p):
- self.ui.warn(_("%s not added: only files supported currently\n")
- % f)
+ elif not islink and not os.path.isfile(p):
+ self.ui.warn(_("%s not added: only files and symlinks "
+ "supported currently\n") % f)
elif self.dirstate.state(f) in 'an':
self.ui.warn(_("%s already tracked!\n") % f)
else:
@@ -995,8 +1036,7 @@
if not wlock:
wlock = self.wlock()
for f in list:
- p = self.wjoin(f)
- if os.path.exists(p):
+ if unlink and os.path.exists(self.wjoin(f)):
self.ui.warn(_("%s still exists!\n") % f)
elif self.dirstate.state(f) == 'a':
self.dirstate.forget([f])
@@ -1016,16 +1056,16 @@
self.ui.warn("%s not removed!\n" % f)
else:
t = self.file(f).read(m[f])
- self.wwrite(f, t)
- util.set_exec(self.wjoin(f), m.execf(f))
+ self.wwrite(f, t, m.flags(f))
self.dirstate.update([f], "n")
def copy(self, source, dest, wlock=None):
p = self.wjoin(dest)
- if not os.path.exists(p):
+ if not (os.path.exists(p) or os.path.islink(p)):
self.ui.warn(_("%s does not exist!\n") % dest)
- elif not os.path.isfile(p):
- self.ui.warn(_("copy failed: %s is not a file\n") % dest)
+ elif not (os.path.isfile(p) or os.path.islink(p)):
+ self.ui.warn(_("copy failed: %s is not a file or a "
+ "symbolic link\n") % dest)
else:
if not wlock:
wlock = self.wlock()
@@ -1040,112 +1080,6 @@
heads.sort()
return [n for (r, n) in heads]
- # branchlookup returns a dict giving a list of branches for
- # each head. A branch is defined as the tag of a node or
- # the branch of the node's parents. If a node has multiple
- # branch tags, tags are eliminated if they are visible from other
- # branch tags.
- #
- # So, for this graph: a->b->c->d->e
- # \ /
- # aa -----/
- # a has tag 2.6.12
- # d has tag 2.6.13
- # e would have branch tags for 2.6.12 and 2.6.13. Because the node
- # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
- # from the list.
- #
- # It is possible that more than one head will have the same branch tag.
- # callers need to check the result for multiple heads under the same
- # branch tag if that is a problem for them (ie checkout of a specific
- # branch).
- #
- # passing in a specific branch will limit the depth of the search
- # through the parents. It won't limit the branches returned in the
- # result though.
- def branchlookup(self, heads=None, branch=None):
- if not heads:
- heads = self.heads()
- headt = [ h for h in heads ]
- chlog = self.changelog
- branches = {}
- merges = []
- seenmerge = {}
-
- # traverse the tree once for each head, recording in the branches
- # dict which tags are visible from this head. The branches
- # dict also records which tags are visible from each tag
- # while we traverse.
- while headt or merges:
- if merges:
- n, found = merges.pop()
- visit = [n]
- else:
- h = headt.pop()
- visit = [h]
- found = [h]
- seen = {}
- while visit:
- n = visit.pop()
- if n in seen:
- continue
- pp = chlog.parents(n)
- tags = self.nodetags(n)
- if tags:
- for x in tags:
- if x == 'tip':
- continue
- for f in found:
- branches.setdefault(f, {})[n] = 1
- branches.setdefault(n, {})[n] = 1
- break
- if n not in found:
- found.append(n)
- if branch in tags:
- continue
- seen[n] = 1
- if pp[1] != nullid and n not in seenmerge:
- merges.append((pp[1], [x for x in found]))
- seenmerge[n] = 1
- if pp[0] != nullid:
- visit.append(pp[0])
- # traverse the branches dict, eliminating branch tags from each
- # head that are visible from another branch tag for that head.
- out = {}
- viscache = {}
- for h in heads:
- def visible(node):
- if node in viscache:
- return viscache[node]
- ret = {}
- visit = [node]
- while visit:
- x = visit.pop()
- if x in viscache:
- ret.update(viscache[x])
- elif x not in ret:
- ret[x] = 1
- if x in branches:
- visit[len(visit):] = branches[x].keys()
- viscache[node] = ret
- return ret
- if h not in branches:
- continue
- # O(n^2), but somewhat limited. This only searches the
- # tags visible from a specific head, not all the tags in the
- # whole repo.
- for b in branches[h]:
- vis = False
- for bb in branches[h].keys():
- if b != bb:
- if b in visible(bb):
- vis = True
- break
- if not vis:
- l = out.setdefault(h, [])
- l[len(l):] = self.nodetags(b)
- return out
-
def branches(self, nodes):
if not nodes:
nodes = [self.changelog.tip()]
@@ -1858,55 +1792,45 @@
# write changelog data to temp files so concurrent readers will not see
# inconsistent view
- cl = None
- try:
- cl = appendfile.appendchangelog(self.sopener,
- self.changelog.version)
+ cl = self.changelog
+ cl.delayupdate()
+ oldheads = len(cl.heads())
- oldheads = len(cl.heads())
+ # pull off the changeset group
+ self.ui.status(_("adding changesets\n"))
+ cor = cl.count() - 1
+ chunkiter = changegroup.chunkiter(source)
+ if cl.addgroup(chunkiter, csmap, tr, 1) is None:
+ raise util.Abort(_("received changelog group is empty"))
+ cnr = cl.count() - 1
+ changesets = cnr - cor
- # pull off the changeset group
- self.ui.status(_("adding changesets\n"))
- cor = cl.count() - 1
- chunkiter = changegroup.chunkiter(source)
- if cl.addgroup(chunkiter, csmap, tr, 1) is None:
- raise util.Abort(_("received changelog group is empty"))
- cnr = cl.count() - 1
- changesets = cnr - cor
+ # pull off the manifest group
+ self.ui.status(_("adding manifests\n"))
+ chunkiter = changegroup.chunkiter(source)
+ # no need to check for empty manifest group here:
+ # if the result of the merge of 1 and 2 is the same in 3 and 4,
+ # no new manifest will be created and the manifest group will
+ # be empty during the pull
+ self.manifest.addgroup(chunkiter, revmap, tr)
- # pull off the manifest group
- self.ui.status(_("adding manifests\n"))
+ # process the files
+ self.ui.status(_("adding file changes\n"))
+ while 1:
+ f = changegroup.getchunk(source)
+ if not f:
+ break
+ self.ui.debug(_("adding %s revisions\n") % f)
+ fl = self.file(f)
+ o = fl.count()
chunkiter = changegroup.chunkiter(source)
- # no need to check for empty manifest group here:
- # if the result of the merge of 1 and 2 is the same in 3 and 4,
- # no new manifest will be created and the manifest group will
- # be empty during the pull
- self.manifest.addgroup(chunkiter, revmap, tr)
-
- # process the files
- self.ui.status(_("adding file changes\n"))
- while 1:
- f = changegroup.getchunk(source)
- if not f:
- break
- self.ui.debug(_("adding %s revisions\n") % f)
- fl = self.file(f)
- o = fl.count()
- chunkiter = changegroup.chunkiter(source)
- if fl.addgroup(chunkiter, revmap, tr) is None:
- raise util.Abort(_("received file revlog group is empty"))
- revisions += fl.count() - o
- files += 1
-
- cl.writedata()
- finally:
- if cl:
- cl.cleanup()
+ if fl.addgroup(chunkiter, revmap, tr) is None:
+ raise util.Abort(_("received file revlog group is empty"))
+ revisions += fl.count() - o
+ files += 1
# make changelog see real files again
- self.changelog = changelog.changelog(self.sopener,
- self.changelog.version)
- self.changelog.checkinlinesize(tr)
+ cl.finalize(tr)
newheads = len(self.changelog.heads())
heads = ""
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/lock.py
--- a/mercurial/lock.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/lock.py Sat May 19 22:51:43 2007 +0200
@@ -5,8 +5,7 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import *
-demandload(globals(), 'errno os socket time util')
+import errno, os, socket, time, util
class LockException(IOError):
def __init__(self, errno, strerror, filename, desc):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/mail.py
--- a/mercurial/mail.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/mail.py Sat May 19 22:51:43 2007 +0200
@@ -5,9 +5,8 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "os re smtplib templater util socket")
+from i18n import _
+import os, smtplib, templater, util, socket
def _smtp(ui):
'''send mail using smtp.'''
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/manifest.py
--- a/mercurial/manifest.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/manifest.py Sat May 19 22:51:43 2007 +0200
@@ -6,10 +6,8 @@
# of the GNU General Public License, incorporated herein by reference.
from revlog import *
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "array bisect struct")
-demandload(globals(), "mdiff")
+from i18n import _
+import array, bisect, struct, mdiff
class manifestdict(dict):
def __init__(self, mapping=None, flags=None):
@@ -37,11 +35,10 @@
return manifestdict(dict.copy(self), dict.copy(self._flags))
class manifest(revlog):
- def __init__(self, opener, defversion=REVLOGV0):
+ def __init__(self, opener):
self.mapcache = None
self.listcache = None
- revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
- defversion)
+ revlog.__init__(self, opener, "00manifest.i")
def parselines(self, lines):
for l in lines.splitlines(1):
@@ -108,7 +105,7 @@
def find(self, node, f):
'''look up entry for a single file efficiently.
- return (node, flag) pair if found, (None, None) if not.'''
+ return (node, flags) pair if found, (None, None) if not.'''
if self.mapcache and node == self.mapcache[0]:
return self.mapcache[1].get(f), self.mapcache[1].flags(f)
text = self.revision(node)
@@ -117,7 +114,7 @@
return None, None
l = text[start:end]
f, n = l.split('\0')
- return bin(n[:40]), n[40:-1] == 'x'
+ return bin(n[:40]), n[40:-1]
def add(self, map, transaction, link, p1=None, p2=None,
changed=None):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/mdiff.py
--- a/mercurial/mdiff.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/mdiff.py Sat May 19 22:51:43 2007 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
-import bdiff, mpatch
-demandload(globals(), "re struct util md5")
+import bdiff, mpatch, re, struct, util, md5
def splitnewlines(text):
'''like str.splitlines, but only split on newlines.'''
@@ -252,6 +250,10 @@
def patch(a, bin):
return mpatch.patches(a, [bin])
+# similar to difflib.SequenceMatcher.get_matching_blocks
+def get_matching_blocks(a, b):
+ return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)]
+
patches = mpatch.patches
patchedsize = mpatch.patchedsize
textdiff = bdiff.bdiff
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/merge.py
--- a/mercurial/merge.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/merge.py Sat May 19 22:51:43 2007 +0200
@@ -6,9 +6,8 @@
# of the GNU General Public License, incorporated herein by reference.
from node import *
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "errno util os tempfile context")
+from i18n import _
+import errno, util, os, tempfile, context
def filemerge(repo, fw, fo, wctx, mctx):
"""perform a 3-way merge in the working directory
@@ -21,8 +20,9 @@
def temp(prefix, ctx):
pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
(fd, name) = tempfile.mkstemp(prefix=pre)
+ data = repo.wwritedata(ctx.path(), ctx.data())
f = os.fdopen(fd, "wb")
- repo.wwrite(ctx.path(), ctx.data(), f)
+ f.write(data)
f.close()
return name
@@ -256,12 +256,17 @@
copy = {}
def fmerge(f, f2=None, fa=None):
- """merge executable flags"""
+ """merge flags"""
if not f2:
f2 = f
fa = f
a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
- return ((a^b) | (a^c)) ^ a
+ if ((a^b) | (a^c)) ^ a:
+ return 'x'
+ a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
+ if ((a^b) | (a^c)) ^ a:
+ return 'l'
+ return ''
def act(msg, m, f, *args):
repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
@@ -286,21 +291,21 @@
# is remote's version newer?
# or are we going back in time and clean?
elif overwrite or m2[f] != a or (backwards and not n[20:]):
- act("remote is newer", "g", f, m2.execf(f))
+ act("remote is newer", "g", f, m2.flags(f))
# local is newer, not overwrite, check mode bits
- elif fmerge(f) != m1.execf(f):
- act("update permissions", "e", f, m2.execf(f))
+ elif fmerge(f) != m1.flags(f):
+ act("update permissions", "e", f, m2.flags(f))
# contents same, check mode bits
- elif m1.execf(f) != m2.execf(f):
- if overwrite or fmerge(f) != m1.execf(f):
- act("update permissions", "e", f, m2.execf(f))
+ elif m1.flags(f) != m2.flags(f):
+ if overwrite or fmerge(f) != m1.flags(f):
+ act("update permissions", "e", f, m2.flags(f))
elif f in copied:
continue
elif f in copy:
f2 = copy[f]
if f2 not in m2: # directory rename
act("remote renamed directory to " + f2, "d",
- f, None, f2, m1.execf(f))
+ f, None, f2, m1.flags(f))
elif f2 in m1: # case 2 A,B/B/B
act("local copied to " + f2, "m",
f, f2, f, fmerge(f, f2, f2), False)
@@ -331,7 +336,7 @@
f2 = copy[f]
if f2 not in m1: # directory rename
act("local renamed directory to " + f2, "d",
- None, f, f2, m2.execf(f))
+ None, f, f2, m2.flags(f))
elif f2 in m2: # rename case 1, A/A,B/A
act("remote copied to " + f, "m",
f2, f, f, fmerge(f2, f, f2), False)
@@ -340,14 +345,14 @@
f2, f, f, fmerge(f2, f, f2), True)
elif f in ma:
if overwrite or backwards:
- act("recreating", "g", f, m2.execf(f))
+ act("recreating", "g", f, m2.flags(f))
elif n != ma[f]:
if repo.ui.prompt(
(_("remote changed %s which local deleted\n") % f) +
_("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
- act("prompt recreating", "g", f, m2.execf(f))
+ act("prompt recreating", "g", f, m2.flags(f))
else:
- act("remote created", "g", f, m2.execf(f))
+ act("remote created", "g", f, m2.flags(f))
return action
@@ -371,7 +376,7 @@
(f, inst.strerror))
removed += 1
elif m == "m": # merge
- f2, fd, flag, move = a[2:]
+ f2, fd, flags, move = a[2:]
r = filemerge(repo, f, f2, wctx, mctx)
if r > 0:
unresolved += 1
@@ -382,35 +387,32 @@
merged += 1
if f != fd:
repo.ui.debug(_("copying %s to %s\n") % (f, fd))
- repo.wwrite(fd, repo.wread(f))
+ repo.wwrite(fd, repo.wread(f), flags)
if move:
repo.ui.debug(_("removing %s\n") % f)
os.unlink(repo.wjoin(f))
- util.set_exec(repo.wjoin(fd), flag)
+ util.set_exec(repo.wjoin(fd), "x" in flags)
elif m == "g": # get
- flag = a[2]
+ flags = a[2]
repo.ui.note(_("getting %s\n") % f)
t = mctx.filectx(f).data()
- repo.wwrite(f, t)
- util.set_exec(repo.wjoin(f), flag)
+ repo.wwrite(f, t, flags)
updated += 1
elif m == "d": # directory rename
- f2, fd, flag = a[2:]
+ f2, fd, flags = a[2:]
if f:
repo.ui.note(_("moving %s to %s\n") % (f, fd))
t = wctx.filectx(f).data()
- repo.wwrite(fd, t)
- util.set_exec(repo.wjoin(fd), flag)
+ repo.wwrite(fd, t, flags)
util.unlink(repo.wjoin(f))
if f2:
repo.ui.note(_("getting %s to %s\n") % (f2, fd))
t = mctx.filectx(f2).data()
- repo.wwrite(fd, t)
- util.set_exec(repo.wjoin(fd), flag)
+ repo.wwrite(fd, t, flags)
updated += 1
elif m == "e": # exec
- flag = a[2]
- util.set_exec(repo.wjoin(f), flag)
+ flags = a[2]
+ util.set_exec(repo.wjoin(f), flags)
return updated, merged, removed, unresolved
@@ -480,21 +482,32 @@
if not wlock:
wlock = repo.wlock()
+ wc = repo.workingctx()
+ if node is None:
+ # tip of current branch
+ try:
+ node = repo.branchtags()[wc.branch()]
+ except KeyError:
+ raise util.Abort(_("branch %s not found") % wc.branch())
overwrite = force and not branchmerge
forcemerge = force and branchmerge
- wc = repo.workingctx()
pl = wc.parents()
p1, p2 = pl[0], repo.changectx(node)
pa = p1.ancestor(p2)
fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
+ fastforward = False
### check phase
if not overwrite and len(pl) > 1:
raise util.Abort(_("outstanding uncommitted merges"))
if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
if branchmerge:
- raise util.Abort(_("there is nothing to merge, just use "
- "'hg update' or look at 'hg heads'"))
+ if p1.branch() != p2.branch():
+ fastforward = True
+ branchmerge = False
+ else:
+ raise util.Abort(_("there is nothing to merge, just use "
+ "'hg update' or look at 'hg heads'"))
elif not (overwrite or branchmerge):
raise util.Abort(_("update spans branches, use 'hg merge' "
"or 'hg update -C' to lose changes"))
@@ -523,7 +536,7 @@
if not partial:
recordupdates(repo, action, branchmerge)
repo.dirstate.setparents(fp1, fp2)
- if not branchmerge:
+ if not branchmerge and not fastforward:
repo.dirstate.setbranch(p2.branch())
repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/mpatch.c
--- a/mercurial/mpatch.c Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/mpatch.c Sat May 19 22:51:43 2007 +0200
@@ -42,7 +42,11 @@
#else
/* not windows */
# include
-# include
+# ifdef __BEOS__
+# include
+# else
+# include
+# endif
# include
#endif
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/node.py
--- a/mercurial/node.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/node.py Sat May 19 22:51:43 2007 +0200
@@ -7,8 +7,7 @@
of the GNU General Public License, incorporated herein by reference.
"""
-from demandload import demandload
-demandload(globals(), "binascii")
+import binascii
nullrev = -1
nullid = "\0" * 20
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/packagescan.py
--- a/mercurial/packagescan.py Sat May 19 22:47:01 2007 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,131 +0,0 @@
-# packagescan.py - Helper module for identifing used modules.
-# Used for the py2exe distutil.
-# This module must be the first mercurial module imported in setup.py
-#
-# Copyright 2005, 2006 Volker Kleinfeld
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-import glob
-import os
-import sys
-import ihooks
-import types
-import string
-
-# Install this module as fake demandload module
-sys.modules['mercurial.demandload'] = sys.modules[__name__]
-
-# Requiredmodules contains the modules imported by demandload.
-# Please note that demandload can be invoked before the
-# mercurial.packagescan.scan method is invoked in case a mercurial
-# module is imported.
-requiredmodules = {}
-def demandload(scope, modules):
- """ fake demandload function that collects the required modules
- foo import foo
- foo bar import foo, bar
- foo.bar import foo.bar
- foo@bar import foo as bar
- foo:bar from foo import bar
- foo:bar,quux from foo import bar, quux
- foo.bar:quux from foo.bar import quux"""
-
- for m in modules.split():
- mod = None
- try:
- module, fromlist = m.split(':')
- fromlist = fromlist.split(',')
- except:
- module = m
- fromlist = []
- as_ = None
- if '@' in module:
- module, as_ = module.split('@')
- mod = __import__(module, scope, scope, fromlist)
- if fromlist == []:
- # mod is only the top package, but we need all packages
- comp = module.split('.')
- i = 1
- mn = comp[0]
- while True:
- # mn and mod.__name__ might not be the same
- if not as_:
- as_ = mn
- scope[as_] = mod
- requiredmodules[mod.__name__] = 1
- if len(comp) == i: break
- mod = getattr(mod, comp[i])
- mn = string.join(comp[:i+1],'.')
- i += 1
- else:
- # mod is the last package in the component list
- requiredmodules[mod.__name__] = 1
- for f in fromlist:
- scope[f] = getattr(mod, f)
- if type(scope[f]) == types.ModuleType:
- requiredmodules[scope[f].__name__] = 1
-
-class SkipPackage(Exception):
- def __init__(self, reason):
- self.reason = reason
-
-scan_in_progress = False
-
-def scan(libpath, packagename):
- """ helper for finding all required modules of package """
- global scan_in_progress
- scan_in_progress = True
- # Use the package in the build directory
- libpath = os.path.abspath(libpath)
- sys.path.insert(0, libpath)
- packdir = os.path.join(libpath, packagename.replace('.', '/'))
- # A normal import would not find the package in
- # the build directory. ihook is used to force the import.
- # After the package is imported the import scope for
- # the following imports is settled.
- p = importfrom(packdir)
- globals()[packagename] = p
- sys.modules[packagename] = p
- # Fetch the python modules in the package
- cwd = os.getcwd()
- os.chdir(packdir)
- pymodulefiles = glob.glob('*.py')
- extmodulefiles = glob.glob('*.pyd')
- os.chdir(cwd)
- # Import all python modules and by that run the fake demandload
- for m in pymodulefiles:
- if m == '__init__.py': continue
- tmp = {}
- mname, ext = os.path.splitext(m)
- fullname = packagename+'.'+mname
- try:
- __import__(fullname, tmp, tmp)
- except SkipPackage, inst:
- print >> sys.stderr, 'skipping %s: %s' % (fullname, inst.reason)
- continue
- requiredmodules[fullname] = 1
- # Import all extension modules and by that run the fake demandload
- for m in extmodulefiles:
- tmp = {}
- mname, ext = os.path.splitext(m)
- fullname = packagename+'.'+mname
- __import__(fullname, tmp, tmp)
- requiredmodules[fullname] = 1
-
-def getmodules():
- return requiredmodules.keys()
-
-def importfrom(filename):
- """
- import module/package from a named file and returns the module.
- It does not check on sys.modules or includes the module in the scope.
- """
- loader = ihooks.BasicModuleLoader()
- path, file = os.path.split(filename)
- name, ext = os.path.splitext(file)
- m = loader.find_module_in_dir(name, path)
- if not m:
- raise ImportError, name
- m = loader.load_module(name, m)
- return m
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/patch.py
--- a/mercurial/patch.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/patch.py Sat May 19 22:51:43 2007 +0200
@@ -5,12 +5,11 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
-from i18n import gettext as _
+from i18n import _
from node import *
-demandload(globals(), "base85 cmdutil mdiff util")
-demandload(globals(), "cStringIO email.Parser errno os popen2 re shutil sha")
-demandload(globals(), "sys tempfile zlib")
+import base85, cmdutil, mdiff, util, context, revlog
+import cStringIO, email.Parser, os, popen2, re, sha
+import sys, tempfile, zlib
# helper functions
@@ -34,11 +33,11 @@
def extract(ui, fileobj):
'''extract patch from data read from fileobj.
- patch can be normal patch or contained in email message.
+ patch can be a normal patch or contained in an email message.
- return tuple (filename, message, user, date). any item in returned
- tuple can be None. if filename is None, fileobj did not contain
- patch. caller must unlink filename when done.'''
+ return tuple (filename, message, user, date, node, p1, p2).
+ Any item in the returned tuple can be None. If filename is None,
+ fileobj did not contain a patch. Caller must unlink filename when done.'''
# attempt to detect the start of a patch
# (this heuristic is borrowed from quilt)
@@ -49,16 +48,21 @@
fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
tmpfp = os.fdopen(fd, 'w')
try:
- hgpatch = False
-
msg = email.Parser.Parser().parse(fileobj)
message = msg['Subject']
user = msg['From']
# should try to parse msg['Date']
date = None
+ nodeid = None
+ branch = None
+ parents = []
if message:
+ if message.startswith('[PATCH'):
+ pend = message.find(']')
+ if pend >= 0:
+ message = message[pend+1:].lstrip()
message = message.replace('\n\t', ' ')
ui.debug('Subject: %s\n' % message)
if user:
@@ -74,6 +78,9 @@
payload = part.get_payload(decode=True)
m = diffre.search(payload)
if m:
+ hgpatch = False
+ ignoretext = False
+
ui.debug(_('found patch at byte %d\n') % m.start(0))
diffs_seen += 1
cfp = cStringIO.StringIO()
@@ -93,7 +100,15 @@
ui.debug('From: %s\n' % user)
elif line.startswith("# Date "):
date = line[7:]
- if not line.startswith('# '):
+ elif line.startswith("# Branch "):
+ branch = line[9:]
+ elif line.startswith("# Node ID "):
+ nodeid = line[10:]
+ elif line.startswith("# Parent "):
+ parents.append(line[10:])
+ elif line == '---' and 'git-send-email' in msg['X-Mailer']:
+ ignoretext = True
+ if not line.startswith('# ') and not ignoretext:
cfp.write(line)
cfp.write('\n')
message = cfp.getvalue()
@@ -111,8 +126,10 @@
tmpfp.close()
if not diffs_seen:
os.unlink(tmpname)
- return None, message, user, date
- return tmpname, message, user, date
+ return None, message, user, date, branch, None, None, None
+ p1 = parents and parents.pop(0) or None
+ p2 = parents and parents.pop(0) or None
+ return tmpname, message, user, date, branch, nodeid, p1, p2
GP_PATCH = 1 << 0 # we have to run patch
GP_FILTER = 1 << 1 # there's some copy/rename operation
@@ -279,9 +296,14 @@
"""patch and updates the files and fuzz variables"""
fuzz = False
- patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''),
- 'patch')
args = []
+ patcher = ui.config('ui', 'patch')
+ if not patcher:
+ patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''),
+ 'patch')
+ if util.needbinarypatch():
+ args.append('--binary')
+
if cwd:
args.append('-d %s' % util.shellquote(cwd))
fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
@@ -378,8 +400,9 @@
dst = os.path.join(repo.root, gp.path)
# patch won't create empty files
if ctype == 'ADD' and not os.path.exists(dst):
- repo.wwrite(gp.path, '')
- util.set_exec(dst, x)
+ repo.wwrite(gp.path, '', x and 'x' or '')
+ else:
+ util.set_exec(dst, x)
cmdutil.addremove(repo, cfiles, wlock=wlock)
files = patches.keys()
files.extend([r for r in removes if r not in files])
@@ -441,107 +464,65 @@
if not node1:
node1 = repo.dirstate.parents()[0]
- clcache = {}
- def getchangelog(n):
- if n not in clcache:
- clcache[n] = repo.changelog.read(n)
- return clcache[n]
- mcache = {}
- def getmanifest(n):
- if n not in mcache:
- mcache[n] = repo.manifest.read(n)
- return mcache[n]
- fcache = {}
- def getfile(f):
- if f not in fcache:
- fcache[f] = repo.file(f)
- return fcache[f]
+ ccache = {}
+ def getctx(r):
+ if r not in ccache:
+ ccache[r] = context.changectx(repo, r)
+ return ccache[r]
+
+ flcache = {}
+ def getfilectx(f, ctx):
+ flctx = ctx.filectx(f, filelog=flcache.get(f))
+ if f not in flcache:
+ flcache[f] = flctx._filelog
+ return flctx
# reading the data for node1 early allows it to play nicely
# with repo.status and the revlog cache.
- change = getchangelog(node1)
- mmap = getmanifest(change[0])
- date1 = util.datestr(change[2])
+ ctx1 = context.changectx(repo, node1)
+ # force manifest reading
+ man1 = ctx1.manifest()
+ date1 = util.datestr(ctx1.date())
if not changes:
changes = repo.status(node1, node2, files, match=match)[:5]
modified, added, removed, deleted, unknown = changes
- if files:
- def filterfiles(filters):
- l = [x for x in filters if x in files]
-
- for t in files:
- if not t.endswith("/"):
- t += "/"
- l += [x for x in filters if x.startswith(t)]
- return l
-
- modified, added, removed = map(filterfiles, (modified, added, removed))
if not modified and not added and not removed:
return
- # returns False if there was no rename between n1 and n2
- # returns None if the file was created between n1 and n2
- # returns the (file, node) present in n1 that was renamed to f in n2
- def renamedbetween(f, n1, n2):
- r1, r2 = map(repo.changelog.rev, (n1, n2))
+ if node2:
+ ctx2 = context.changectx(repo, node2)
+ else:
+ ctx2 = context.workingctx(repo)
+ man2 = ctx2.manifest()
+
+ # returns False if there was no rename between ctx1 and ctx2
+ # returns None if the file was created between ctx1 and ctx2
+ # returns the (file, node) present in ctx1 that was renamed to f in ctx2
+ def renamed(f):
+ startrev = ctx1.rev()
+ c = ctx2
+ crev = c.rev()
+ if crev is None:
+ crev = repo.changelog.count()
orig = f
- src = None
- while r2 > r1:
- cl = getchangelog(n2)
- if f in cl[3]:
- m = getmanifest(cl[0])
+ while crev > startrev:
+ if f in c.files():
try:
- src = getfile(f).renamed(m[f])
- except KeyError:
+ src = getfilectx(f, c).renamed()
+ except revlog.LookupError:
return None
if src:
f = src[0]
- n2 = repo.changelog.parents(n2)[0]
- r2 = repo.changelog.rev(n2)
- cl = getchangelog(n1)
- m = getmanifest(cl[0])
- if f not in m:
+ crev = c.parents()[0].rev()
+ # try to reuse
+ c = getctx(crev)
+ if f not in man1:
return None
if f == orig:
return False
- return f, m[f]
-
- if node2:
- change = getchangelog(node2)
- mmap2 = getmanifest(change[0])
- _date2 = util.datestr(change[2])
- def date2(f):
- return _date2
- def read(f):
- return getfile(f).read(mmap2[f])
- def renamed(f):
- return renamedbetween(f, node1, node2)
- else:
- tz = util.makedate()[1]
- _date2 = util.datestr()
- def date2(f):
- try:
- return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
- except OSError, err:
- if err.errno != errno.ENOENT: raise
- return _date2
- def read(f):
- return repo.wread(f)
- def renamed(f):
- src = repo.dirstate.copied(f)
- parent = repo.dirstate.parents()[0]
- if src:
- f = src
- of = renamedbetween(f, node1, parent)
- if of or of is None:
- return of
- elif src:
- cl = getchangelog(parent)[0]
- return (src, getmanifest(cl)[src])
- else:
- return None
+ return f
if repo.ui.quiet:
r = None
@@ -555,20 +536,21 @@
src = renamed(f)
if src:
copied[f] = src
- srcs = [x[1][0] for x in copied.items()]
+ srcs = [x[1] for x in copied.items()]
all = modified + added + removed
all.sort()
gone = {}
+
for f in all:
to = None
tn = None
dodiff = True
header = []
- if f in mmap:
- to = getfile(f).read(mmap[f])
+ if f in man1:
+ to = getfilectx(f, ctx1).data()
if f not in removed:
- tn = read(f)
+ tn = getfilectx(f, ctx2).data()
if opts.git:
def gitmode(x):
return x and '100755' or '100644'
@@ -579,13 +561,10 @@
a, b = f, f
if f in added:
- if node2:
- mode = gitmode(mmap2.execf(f))
- else:
- mode = gitmode(util.is_exec(repo.wjoin(f), None))
+ mode = gitmode(man2.execf(f))
if f in copied:
- a, arev = copied[f]
- omode = gitmode(mmap.execf(a))
+ a = copied[f]
+ omode = gitmode(man1.execf(a))
addmodehdr(header, omode, mode)
if a in removed and a not in gone:
op = 'rename'
@@ -594,7 +573,7 @@
op = 'copy'
header.append('%s from %s\n' % (op, a))
header.append('%s to %s\n' % (op, f))
- to = getfile(a).read(arev)
+ to = getfilectx(a, ctx1).data()
else:
header.append('new file mode %s\n' % mode)
if util.binary(tn):
@@ -603,14 +582,11 @@
if f in srcs:
dodiff = False
else:
- mode = gitmode(mmap.execf(f))
+ mode = gitmode(man1.execf(f))
header.append('deleted file mode %s\n' % mode)
else:
- omode = gitmode(mmap.execf(f))
- if node2:
- nmode = gitmode(mmap2.execf(f))
- else:
- nmode = gitmode(util.is_exec(repo.wjoin(f), mmap.execf(f)))
+ omode = gitmode(man1.execf(f))
+ nmode = gitmode(man2.execf(f))
addmodehdr(header, omode, nmode)
if util.binary(to) or util.binary(tn):
dodiff = 'binary'
@@ -620,7 +596,10 @@
if dodiff == 'binary':
text = b85diff(fp, to, tn)
else:
- text = mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts)
+ text = mdiff.unidiff(to, date1,
+ # ctx2 date may be dynamic
+ tn, util.datestr(ctx2.date()),
+ f, r, opts=opts)
if text or len(header) > 1:
fp.write(''.join(header))
fp.write(text)
@@ -632,27 +611,31 @@
total = len(revs)
revwidth = max([len(str(rev)) for rev in revs])
- def single(node, seqno, fp):
- parents = [p for p in repo.changelog.parents(node) if p != nullid]
+ def single(rev, seqno, fp):
+ ctx = repo.changectx(rev)
+ node = ctx.node()
+ parents = [p.node() for p in ctx.parents() if p]
+ branch = ctx.branch()
if switch_parent:
parents.reverse()
prev = (parents and parents[0]) or nullid
- change = repo.changelog.read(node)
if not fp:
fp = cmdutil.make_file(repo, template, node, total=total,
seqno=seqno, revwidth=revwidth)
- if fp not in (sys.stdout, repo.ui):
+ if fp != sys.stdout and hasattr(fp, 'name'):
repo.ui.note("%s\n" % fp.name)
fp.write("# HG changeset patch\n")
- fp.write("# User %s\n" % change[1])
- fp.write("# Date %d %d\n" % change[2])
+ fp.write("# User %s\n" % ctx.user())
+ fp.write("# Date %d %d\n" % ctx.date())
+ if branch and (branch != 'default'):
+ fp.write("# Branch %s\n" % branch)
fp.write("# Node ID %s\n" % hex(node))
fp.write("# Parent %s\n" % hex(prev))
if len(parents) > 1:
fp.write("# Parent %s\n" % hex(parents[1]))
- fp.write(change[4].rstrip())
+ fp.write(ctx.description().rstrip())
fp.write("\n\n")
diff(repo, prev, node, fp=fp, opts=opts)
@@ -660,7 +643,7 @@
fp.close()
for seqno, rev in enumerate(revs):
- single(repo.lookup(rev), seqno+1, fp)
+ single(rev, seqno+1, fp)
def diffstat(patchlines):
if not util.find_in_path('diffstat', os.environ.get('PATH', '')):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/repo.py
--- a/mercurial/repo.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/repo.py Sat May 19 22:51:43 2007 +0200
@@ -9,9 +9,6 @@
class RepoError(Exception):
pass
-class LookupError(RepoError):
- pass
-
class repository(object):
def capable(self, name):
'''tell whether repo supports named capability.
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/revlog.py
--- a/mercurial/revlog.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/revlog.py Sat May 19 22:51:43 2007 +0200
@@ -11,10 +11,9 @@
"""
from node import *
-from i18n import gettext as _
-from demandload import demandload
-demandload(globals(), "binascii changegroup errno ancestor mdiff os")
-demandload(globals(), "sha struct util zlib")
+from i18n import _
+import binascii, changegroup, errno, ancestor, mdiff, os
+import sha, struct, util, zlib
# revlog version strings
REVLOGV0 = 0
@@ -27,11 +26,6 @@
REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
-def flagstr(flag):
- if flag == "inline":
- return REVLOGNGINLINEDATA
- raise RevlogError(_("unknown revlog flag %s") % flag)
-
def hash(text, p1, p2):
"""generate a hash from the given text and its parent hashes
@@ -147,6 +141,9 @@
lend = len(data) / self.s
i = blockstart / self.s
off = 0
+ # lazyindex supports __delitem__
+ if lend > len(self.index) - i:
+ lend = len(self.index) - i
for x in xrange(lend):
if self.index[i + x] == None:
b = data[off : off + self.s]
@@ -282,6 +279,7 @@
del self.p.map[key]
class RevlogError(Exception): pass
+class LookupError(RevlogError): pass
class revlog(object):
"""
@@ -308,8 +306,7 @@
remove data, and can use some simple techniques to avoid the need
for locking while reading.
"""
- def __init__(self, opener, indexfile, datafile,
- defversion=REVLOG_DEFAULT_VERSION):
+ def __init__(self, opener, indexfile):
"""
create a revlog object
@@ -317,13 +314,17 @@
and can be used to implement COW semantics or the like.
"""
self.indexfile = indexfile
- self.datafile = datafile
+ self.datafile = indexfile[:-2] + ".d"
self.opener = opener
self.indexstat = None
self.cache = None
self.chunkcache = None
- self.defversion = defversion
+ self.defversion = REVLOG_DEFAULT_VERSION
+ if hasattr(opener, "defversion"):
+ self.defversion = opener.defversion
+ if self.defversion & REVLOGNG:
+ self.defversion |= REVLOGNGINLINEDATA
self.load()
def load(self):
@@ -475,7 +476,7 @@
try:
return self.nodemap[node]
except KeyError:
- raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
+ raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
def linkrev(self, node):
return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
def parents(self, node):
@@ -770,7 +771,7 @@
node = id
r = self.rev(node) # quick search the index
return node
- except RevlogError:
+ except LookupError:
pass # may be partial hex id
try:
# str(rev)
@@ -799,7 +800,7 @@
for n in self.nodemap:
if n.startswith(bin_id) and hex(n).startswith(id):
if node is not None:
- raise RevlogError(_("Ambiguous identifier"))
+ raise LookupError(_("Ambiguous identifier"))
node = n
if node is not None:
return node
@@ -819,7 +820,7 @@
if n:
return n
- raise RevlogError(_("No match found"))
+ raise LookupError(_("No match found"))
def cmp(self, node, text):
"""compare text with a given file revision"""
@@ -1159,13 +1160,13 @@
for p in (p1, p2):
if not p in self.nodemap:
- raise RevlogError(_("unknown parent %s") % short(p))
+ raise LookupError(_("unknown parent %s") % short(p))
if not chain:
# retrieve the parent revision of the delta chain
chain = p1
if not chain in self.nodemap:
- raise RevlogError(_("unknown base %s") % short(chain[:4]))
+ raise LookupError(_("unknown base %s") % short(chain[:4]))
# full versions are inserted when the needed deltas become
# comparable to the uncompressed text or when the previous
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/sshrepo.py
--- a/mercurial/sshrepo.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/sshrepo.py Sat May 19 22:51:43 2007 +0200
@@ -7,9 +7,8 @@
from node import *
from remoterepo import *
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "hg os re stat util")
+from i18n import _
+import hg, os, re, stat, util
class sshrepository(remoterepository):
def __init__(self, ui, path, create=0):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/sshserver.py
--- a/mercurial/sshserver.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/sshserver.py Sat May 19 22:51:43 2007 +0200
@@ -6,10 +6,9 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
-from i18n import gettext as _
+from i18n import _
from node import *
-demandload(globals(), "os streamclone sys tempfile util")
+import os, streamclone, sys, tempfile, util
class sshserver(object):
def __init__(self, ui, repo):
@@ -74,7 +73,7 @@
caps = ['unbundle', 'lookup', 'changegroupsubset']
if self.ui.configbool('server', 'uncompressed'):
- caps.append('stream=%d' % self.repo.revlogversion)
+ caps.append('stream=%d' % self.repo.changelog.version)
self.respond("capabilities: %s\n" % (' '.join(caps),))
def do_lock(self):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/statichttprepo.py
--- a/mercurial/statichttprepo.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/statichttprepo.py Sat May 19 22:51:43 2007 +0200
@@ -7,10 +7,9 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import *
-from i18n import gettext as _
-demandload(globals(), "changelog filelog httprangereader")
-demandload(globals(), "repo localrepo manifest os urllib urllib2 util")
+from i18n import _
+import changelog, filelog, httprangereader
+import repo, localrepo, manifest, os, urllib, urllib2, util
class rangereader(httprangereader.httprangereader):
def read(self, size=None):
@@ -33,7 +32,6 @@
def __init__(self, ui, path):
self._url = path
self.ui = ui
- self.revlogversion = 0
self.path = (path + "/.hg")
self.opener = opener(self.path)
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/streamclone.py
--- a/mercurial/streamclone.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/streamclone.py Sat May 19 22:51:43 2007 +0200
@@ -5,9 +5,8 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
-from i18n import gettext as _
-demandload(globals(), "os stat util lock")
+from i18n import _
+import os, stat, util, lock
# if server supports streaming clone, it advertises "stream"
# capability with value that is version+flags of repo it is serving.
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/templater.py
--- a/mercurial/templater.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/templater.py Sat May 19 22:51:43 2007 +0200
@@ -5,10 +5,9 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
-from i18n import gettext as _
+from i18n import _
from node import *
-demandload(globals(), "cgi re sys os time urllib util textwrap")
+import cgi, re, sys, os, time, urllib, util, textwrap
def parsestring(s, quoted=True):
'''parse a string using simple c-like syntax.
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/transaction.py
--- a/mercurial/transaction.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/transaction.py Sat May 19 22:51:43 2007 +0200
@@ -11,9 +11,8 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from demandload import demandload
-from i18n import gettext as _
-demandload(globals(), 'os')
+from i18n import _
+import os
class transaction(object):
def __init__(self, report, opener, journal, after=None):
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/ui.py
--- a/mercurial/ui.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/ui.py Sat May 19 22:51:43 2007 +0200
@@ -5,10 +5,9 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "errno getpass os re socket sys tempfile")
-demandload(globals(), "ConfigParser traceback util")
+from i18n import _
+import errno, getpass, os, re, socket, sys, tempfile
+import ConfigParser, traceback, util
def dupconfig(orig):
new = util.configparser(orig.defaults())
@@ -310,7 +309,7 @@
sections.sort()
for section in sections:
for name, value in self.configitems(section, untrusted):
- yield section, name, value.replace('\n', '\\n')
+ yield section, name, str(value).replace('\n', '\\n')
def extensions(self):
result = self.configitems("extensions")
@@ -326,12 +325,6 @@
result.append(os.path.expanduser(value))
return result
- def configrevlog(self):
- result = {}
- for key, value in self.configitems("revlog"):
- result[key.lower()] = value
- return result
-
def username(self):
"""Return default username to be used in commits.
@@ -388,6 +381,9 @@
if not sys.stdout.closed: sys.stdout.flush()
for a in args:
sys.stderr.write(str(a))
+ # stderr may be buffered under win32 when redirected to files,
+ # including stdout.
+ if not sys.stderr.closed: sys.stderr.flush()
except IOError, inst:
if inst.errno != errno.EPIPE:
raise
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/util.py
--- a/mercurial/util.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/util.py Sat May 19 22:51:43 2007 +0200
@@ -12,10 +12,9 @@
platform-specific details from the core.
"""
-from i18n import gettext as _
-from demandload import *
-demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
-demandload(globals(), "os threading time calendar ConfigParser locale glob")
+from i18n import _
+import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
+import os, threading, time, calendar, ConfigParser, locale, glob
try:
_encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
@@ -117,11 +116,23 @@
class SignalInterrupt(Exception):
"""Exception raised on SIGTERM and SIGHUP."""
-# like SafeConfigParser but with case-sensitive keys
+# differences from SafeConfigParser:
+# - case-sensitive keys
+# - allows values that are not strings (this means that you may not
+# be able to save the configuration to a file)
class configparser(ConfigParser.SafeConfigParser):
def optionxform(self, optionstr):
return optionstr
+ def set(self, section, option, value):
+ return ConfigParser.ConfigParser.set(self, section, option, value)
+
+ def _interpolate(self, section, option, rawval, vars):
+ if not isinstance(rawval, basestring):
+ return rawval
+ return ConfigParser.SafeConfigParser._interpolate(self, section,
+ option, rawval, vars)
+
def cachefunc(func):
'''cache the result of function calls'''
# XXX doesn't handle keywords args
@@ -200,18 +211,6 @@
return fn(s, cmd[len(name):].lstrip())
return pipefilter(s, cmd)
-def find_in_path(name, path, default=None):
- '''find name in search path. path can be string (will be split
- with os.pathsep), or iterable thing that returns strings. if name
- found, return path to name. else return default.'''
- if isinstance(path, str):
- path = path.split(os.pathsep)
- for p in path:
- p_name = os.path.join(p, name)
- if os.path.exists(p_name):
- return p_name
- return default
-
def binary(s):
"""return true if a string is binary data using diff's heuristic"""
if s and '\0' in s[:4096]:
@@ -387,16 +386,17 @@
raise Abort('%s not under root' % myname)
-def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
- return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
+def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
+ return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
-def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='',
- src=None, globbed=False):
- if not globbed:
+def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
+ globbed=False, default=None):
+ default = default or 'relpath'
+ if default == 'relpath' and not globbed:
names = expand_glob(names)
- return _matcher(canonroot, cwd, names, inc, exc, head, 'relpath', src)
+ return _matcher(canonroot, cwd, names, inc, exc, default, src)
-def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
+def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
"""build a function to match a set of file patterns
arguments:
@@ -405,26 +405,30 @@
names - patterns to find
inc - patterns to include
exc - patterns to exclude
- head - a regex to prepend to patterns to control whether a match is rooted
+ dflt_pat - if a pattern in names has no explicit type, assume this one
+ src - where these patterns came from (e.g. .hgignore)
a pattern is one of:
- 'glob:'
- 're:'
- 'path:'
- 'relglob:'
- 'relpath:'
- 'relre:'
- ''
+ 'glob:' - a glob relative to cwd
+ 're:' - a regular expression
+ 'path:' - a path relative to canonroot
+ 'relglob:' - an unrooted glob (*.c matches C files in all dirs)
+ 'relpath:' - a path relative to cwd
+ 'relre:' - a regexp that doesn't have to match the start of a name
+ '' - one of the cases above, selected by the dflt_pat argument
returns:
a 3-tuple containing
- - list of explicit non-pattern names passed in
+ - list of roots (places where one should start a recursive walk of the fs);
+ this often matches the explicit non-pattern names passed in, but also
+ includes the initial part of glob: patterns that has no glob characters
- a bool match(filename) function
- a bool indicating if any patterns were passed in
+ """
- todo:
- make head regex a rooted bool
- """
+ # a common case: no patterns at all
+ if not names and not inc and not exc:
+ return [], always, False
def contains_glob(name):
for c in name:
@@ -433,84 +437,92 @@
def regex(kind, name, tail):
'''convert a pattern into a regular expression'''
+ if not name:
+ return ''
if kind == 're':
return name
elif kind == 'path':
return '^' + re.escape(name) + '(?:/|$)'
elif kind == 'relglob':
- return head + globre(name, '(?:|.*/)', tail)
+ return globre(name, '(?:|.*/)', tail)
elif kind == 'relpath':
- return head + re.escape(name) + tail
+ return re.escape(name) + '(?:/|$)'
elif kind == 'relre':
if name.startswith('^'):
return name
return '.*' + name
- return head + globre(name, '', tail)
+ return globre(name, '', tail)
def matchfn(pats, tail):
"""build a matching function from a set of patterns"""
if not pats:
return
- matches = []
- for k, p in pats:
- try:
- pat = '(?:%s)' % regex(k, p, tail)
- matches.append(re.compile(pat).match)
- except re.error:
- if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
- else: raise Abort("invalid pattern (%s): %s" % (k, p))
-
- def buildfn(text):
- for m in matches:
- r = m(text)
- if r:
- return r
-
- return buildfn
+ try:
+ pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
+ return re.compile(pat).match
+ except re.error:
+ for k, p in pats:
+ try:
+ re.compile('(?:%s)' % regex(k, p, tail))
+ except re.error:
+ if src:
+ raise Abort("%s: invalid pattern (%s): %s" %
+ (src, k, p))
+ else:
+ raise Abort("invalid pattern (%s): %s" % (k, p))
+ raise Abort("invalid pattern")
def globprefix(pat):
'''return the non-glob prefix of a path, e.g. foo/* -> foo'''
root = []
- for p in pat.split(os.sep):
+ for p in pat.split('/'):
if contains_glob(p): break
root.append(p)
- return '/'.join(root)
+ return '/'.join(root) or '.'
+
+ def normalizepats(names, default):
+ pats = []
+ roots = []
+ anypats = False
+ for kind, name in [patkind(p, default) for p in names]:
+ if kind in ('glob', 'relpath'):
+ name = canonpath(canonroot, cwd, name)
+ elif kind in ('relglob', 'path'):
+ name = normpath(name)
+
+ pats.append((kind, name))
- pats = []
- files = []
- roots = []
- for kind, name in [patkind(p, dflt_pat) for p in names]:
- if kind in ('glob', 'relpath'):
- name = canonpath(canonroot, cwd, name)
- if name == '':
- kind, name = 'glob', '**'
- if kind in ('glob', 'path', 're'):
- pats.append((kind, name))
- if kind == 'glob':
- root = globprefix(name)
- if root: roots.append(root)
- elif kind == 'relpath':
- files.append((kind, name))
- roots.append(name)
+ if kind in ('glob', 're', 'relglob', 'relre'):
+ anypats = True
+
+ if kind == 'glob':
+ root = globprefix(name)
+ roots.append(root)
+ elif kind in ('relpath', 'path'):
+ roots.append(name or '.')
+ elif kind == 'relglob':
+ roots.append('.')
+ return roots, pats, anypats
+
+ roots, pats, anypats = normalizepats(names, dflt_pat)
patmatch = matchfn(pats, '$') or always
- filematch = matchfn(files, '(?:/|$)') or always
incmatch = always
if inc:
- inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
+ dummy, inckinds, dummy = normalizepats(inc, 'glob')
incmatch = matchfn(inckinds, '(?:/|$)')
excmatch = lambda fn: False
if exc:
- exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
+ dummy, exckinds, dummy = normalizepats(exc, 'glob')
excmatch = matchfn(exckinds, '(?:/|$)')
- return (roots,
- lambda fn: (incmatch(fn) and not excmatch(fn) and
- (fn.endswith('/') or
- (not pats and not files) or
- (pats and patmatch(fn)) or
- (files and filematch(fn)))),
- (inc or exc or (pats and pats != [('glob', '**')])) and True)
+ if not names and inc and not exc:
+ # common case: hgignore patterns
+ match = incmatch
+ else:
+ match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
+
+ return (roots, match, (inc or exc or anypats) and True)
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
'''enhanced shell command execution.
@@ -559,6 +571,15 @@
if cwd is not None and oldcwd != cwd:
os.chdir(oldcwd)
+# os.path.lexists is not available on python2.3
+def lexists(filename):
+ "test whether a file with this name exists. does not follow symlinks"
+ try:
+ os.lstat(filename)
+ except:
+ return False
+ return True
+
def rename(src, dst):
"""forcibly rename a file"""
try:
@@ -588,11 +609,18 @@
def copyfile(src, dest):
"copy a file, preserving mode"
- try:
- shutil.copyfile(src, dest)
- shutil.copymode(src, dest)
- except shutil.Error, inst:
- raise Abort(str(inst))
+ if os.path.islink(src):
+ try:
+ os.unlink(dest)
+ except:
+ pass
+ os.symlink(os.readlink(src), dest)
+ else:
+ try:
+ shutil.copyfile(src, dest)
+ shutil.copymode(src, dest)
+ except shutil.Error, inst:
+ raise Abort(str(inst))
def copyfiles(src, dst, hardlink=None):
"""Copy a directory tree using hardlinks if possible"""
@@ -724,12 +752,54 @@
except:
return True
+def checkexec(path):
+ """
+ Check whether the given path is on a filesystem with UNIX-like exec flags
+
+ Requires a directory (like /foo/.hg)
+ """
+ fh, fn = tempfile.mkstemp("", "", path)
+ os.close(fh)
+ m = os.stat(fn).st_mode
+ os.chmod(fn, m ^ 0111)
+ r = (os.stat(fn).st_mode != m)
+ os.unlink(fn)
+ return r
+
+def execfunc(path, fallback):
+ '''return an is_exec() function with default to fallback'''
+ if checkexec(path):
+ return lambda x: is_exec(os.path.join(path, x))
+ return fallback
+
+def checklink(path):
+ """check whether the given path is on a symlink-capable filesystem"""
+ # mktemp is not racy because symlink creation will fail if the
+ # file already exists
+ name = tempfile.mktemp(dir=path)
+ try:
+ os.symlink(".", name)
+ os.unlink(name)
+ return True
+ except (OSError, AttributeError):
+ return False
+
+def linkfunc(path, fallback):
+ '''return an is_link() function with default to fallback'''
+ if checklink(path):
+ return lambda x: os.path.islink(os.path.join(path, x))
+ return fallback
+
_umask = os.umask(0)
os.umask(_umask)
+def needbinarypatch():
+ """return True if patches should be applied in binary mode by default."""
+ return os.name == 'nt'
+
# Platform specific variants
if os.name == 'nt':
- demandload(globals(), "msvcrt")
+ import msvcrt
nulldev = 'NUL:'
class winstdout:
@@ -770,19 +840,18 @@
except:
return [r'c:\mercurial\mercurial.ini']
- def os_rcpath():
- '''return default os-specific hgrc search path'''
- path = system_rcpath()
- path.append(user_rcpath())
+ def user_rcpath():
+ '''return os-specific hgrc search path to the user dir'''
+ try:
+ userrc = user_rcpath_win32()
+ except:
+ userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
+ path = [userrc]
userprofile = os.environ.get('USERPROFILE')
if userprofile:
path.append(os.path.join(userprofile, 'mercurial.ini'))
return path
- def user_rcpath():
- '''return os-specific hgrc search path to the user dir'''
- return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
-
def parse_patch_output(output_line):
"""parses the output produced by patch and returns the file name"""
pf = output_line[14:]
@@ -794,10 +863,10 @@
'''return False if pid dead, True if running or not known'''
return True
- def is_exec(f, last):
- return last
+ def set_exec(f, mode):
+ pass
- def set_exec(f, mode):
+ def set_link(f, mode):
pass
def set_binary(fd):
@@ -843,6 +912,30 @@
# username and groupname functions above, too.
def isowner(fp, st=None):
return True
+
+ def find_in_path(name, path, default=None):
+ '''find name in search path. path can be string (will be split
+ with os.pathsep), or iterable thing that returns strings. if name
+ found, return path to name. else return default. name is looked up
+ using cmd.exe rules, using PATHEXT.'''
+ if isinstance(path, str):
+ path = path.split(os.pathsep)
+
+ pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
+ pathext = pathext.lower().split(os.pathsep)
+ isexec = os.path.splitext(name)[1].lower() in pathext
+
+ for p in path:
+ p_name = os.path.join(p, name)
+
+ if isexec and os.path.exists(p_name):
+ return p_name
+
+ for ext in pathext:
+ p_name_ext = p_name + ext
+ if os.path.exists(p_name_ext):
+ return p_name_ext
+ return default
try:
# override functions with win32 versions if possible
@@ -865,18 +958,18 @@
pass
return rcs
- def os_rcpath():
- '''return default os-specific hgrc search path'''
+ def system_rcpath():
path = []
# old mod_python does not set sys.argv
if len(getattr(sys, 'argv', [])) > 0:
path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
'/../etc/mercurial'))
path.extend(rcfiles('/etc/mercurial'))
- path.append(os.path.expanduser('~/.hgrc'))
- path = [os.path.normpath(f) for f in path]
return path
+ def user_rcpath():
+ return [os.path.expanduser('~/.hgrc')]
+
def parse_patch_output(output_line):
"""parses the output produced by patch and returns the file name"""
pf = output_line[14:]
@@ -884,7 +977,7 @@
pf = pf[1:-1] # Remove the quotes
return pf
- def is_exec(f, last):
+ def is_exec(f):
"""check whether a file is executable"""
return (os.lstat(f).st_mode & 0100 != 0)
@@ -899,6 +992,26 @@
else:
os.chmod(f, s & 0666)
+ def set_link(f, mode):
+ """make a file a symbolic link/regular file
+
+ if a file is changed to a link, its contents become the link data
+ if a link is changed to a file, its link data become its contents
+ """
+
+ m = os.path.islink(f)
+ if m == bool(mode):
+ return
+
+ if mode: # switch file to link
+ data = file(f).read()
+ os.unlink(f)
+ os.symlink(data, f)
+ else:
+ data = os.readlink(f)
+ os.unlink(f)
+ file(f, "w").write(data)
+
def set_binary(fd):
pass
@@ -961,6 +1074,18 @@
if st is None:
st = fstat(fp)
return st.st_uid == os.getuid()
+
+ def find_in_path(name, path, default=None):
+ '''find name in search path. path can be string (will be split
+ with os.pathsep), or iterable thing that returns strings. if name
+ found, return path to name. else return default.'''
+ if isinstance(path, str):
+ path = path.split(os.pathsep)
+ for p in path:
+ p_name = os.path.join(p, name)
+ if os.path.exists(p_name):
+ return p_name
+ return default
def _buildencodefun():
e = '_'
@@ -1061,11 +1186,19 @@
class atomicfile(atomictempfile):
"""the file will only be copied on close"""
def __init__(self, name, mode):
+ self._err = False
atomictempfile.__init__(self, name, mode)
+ def write(self, s):
+ try:
+ atomictempfile.write(self, s)
+ except:
+ self._err = True
+ raise
def close(self):
self.rename()
def __del__(self):
- self.rename()
+ if not self._err:
+ self.rename()
def o(path, mode="r", text=False, atomic=False, atomictemp=False):
if audit_p:
@@ -1339,6 +1472,13 @@
_rcpath = None
+def os_rcpath():
+ '''return default os-specific hgrc search path'''
+ path = system_rcpath()
+ path.extend(user_rcpath())
+ path = [os.path.normpath(f) for f in path]
+ return path
+
def rcpath():
'''return hgrc search path. if env var HGRCPATH is set, use it.
for each item in path, if directory, use files ending in .rc,
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/util_win32.py
--- a/mercurial/util_win32.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/util_win32.py Sat May 19 22:51:43 2007 +0200
@@ -13,10 +13,10 @@
import win32api
-from demandload import *
-from i18n import gettext as _
-demandload(globals(), 'errno os pywintypes win32con win32file win32process')
-demandload(globals(), 'cStringIO win32com.shell:shell,shellcon winerror')
+from i18n import _
+import errno, os, pywintypes, win32con, win32file, win32process
+import cStringIO, winerror
+from win32com.shell import shell,shellcon
class WinError:
winerror_map = {
@@ -187,7 +187,7 @@
filename = win32api.GetModuleFileName(0)
return [os.path.join(os.path.dirname(filename), 'mercurial.ini')]
-def user_rcpath():
+def user_rcpath_win32():
'''return os-specific hgrc search path to the user dir'''
userdir = os.path.expanduser('~')
if userdir == '~':
diff -r 2d32e3ae01a7 -r 30e7aa755efd mercurial/verify.py
--- a/mercurial/verify.py Sat May 19 22:47:01 2007 +0200
+++ b/mercurial/verify.py Sat May 19 22:51:43 2007 +0200
@@ -6,7 +6,7 @@
# of the GNU General Public License, incorporated herein by reference.
from node import *
-from i18n import gettext as _
+from i18n import _
import revlog, mdiff
def verify(repo):
@@ -17,6 +17,8 @@
warnings = [0]
neededmanifests = {}
+ lock = repo.lock()
+
def err(msg):
repo.ui.warn(msg + "\n")
errors[0] += 1
@@ -39,8 +41,8 @@
elif revlogv1:
warn(_("warning: `%s' uses revlog format 0") % name)
- revlogv1 = repo.revlogversion != revlog.REVLOGV0
- if repo.ui.verbose or revlogv1 != repo.revlogv1:
+ revlogv1 = repo.changelog.version != revlog.REVLOGV0
+ if repo.ui.verbose or not revlogv1:
repo.ui.status(_("repository uses revlog format %d\n") %
(revlogv1 and 1 or 0))
diff -r 2d32e3ae01a7 -r 30e7aa755efd setup.py
--- a/setup.py Sat May 19 22:47:01 2007 +0200
+++ b/setup.py Sat May 19 22:51:43 2007 +0200
@@ -13,9 +13,11 @@
from distutils.core import setup, Extension
from distutils.command.install_data import install_data
-# mercurial.packagescan must be the first mercurial module imported
-import mercurial.packagescan
import mercurial.version
+import mercurial.demandimport
+mercurial.demandimport.enable = lambda: None
+
+extra = {}
# py2exe needs to be installed to work
try:
@@ -35,34 +37,10 @@
except ImportError:
pass
- # Due to the use of demandload py2exe is not finding the modules.
- # packagescan.getmodules creates a list of modules included in
- # the mercurial package plus dependant modules.
- from py2exe.build_exe import py2exe as build_exe
+ extra['console'] = ['hg']
- class py2exe_for_demandload(build_exe):
- """ overwrites the py2exe command class for getting the build
- directory and for setting the 'includes' option."""
- def initialize_options(self):
- self.build_lib = None
- build_exe.initialize_options(self)
- def finalize_options(self):
- # Get the build directory, ie. where to search for modules.
- self.set_undefined_options('build',
- ('build_lib', 'build_lib'))
- # Sets the 'includes' option with the list of needed modules
- if not self.includes:
- self.includes = []
- else:
- self.includes = self.includes.split(',')
- mercurial.packagescan.scan(self.build_lib, 'mercurial')
- mercurial.packagescan.scan(self.build_lib, 'mercurial.hgweb')
- mercurial.packagescan.scan(self.build_lib, 'hgext')
- self.includes += mercurial.packagescan.getmodules()
- build_exe.finalize_options(self)
except ImportError:
- py2exe_for_demandload = None
-
+ pass
# specify version string, otherwise 'hg identify' will be used:
version = ''
@@ -75,10 +53,6 @@
mercurial.version.remember_version(version)
cmdclass = {'install_data': install_package_data}
-py2exe_opts = {}
-if py2exe_for_demandload is not None:
- cmdclass['py2exe'] = py2exe_for_demandload
- py2exe_opts['console'] = ['hg']
setup(name='mercurial',
version=mercurial.version.get_version(),
@@ -100,4 +74,4 @@
license='COPYING',
readme='contrib/macosx/Readme.html',
welcome='contrib/macosx/Welcome.html')),
- **py2exe_opts)
+ **extra)
diff -r 2d32e3ae01a7 -r 30e7aa755efd templates/gitweb/header.tmpl
--- a/templates/gitweb/header.tmpl Sat May 19 22:47:01 2007 +0200
+++ b/templates/gitweb/header.tmpl Sat May 19 22:51:43 2007 +0200
@@ -4,7 +4,7 @@
-
+
-
+
diff -r 2d32e3ae01a7 -r 30e7aa755efd templates/gitweb/map
--- a/templates/gitweb/map Sat May 19 22:47:01 2007 +0200
+++ b/templates/gitweb/map Sat May 19 22:51:43 2007 +0200
@@ -39,7 +39,7 @@
fileannotatechild = '