--- a/hgext/convert/subversion.py Fri Dec 07 02:29:55 2007 -0600
+++ b/hgext/convert/subversion.py Fri Dec 07 14:59:33 2007 -0600
@@ -610,7 +610,7 @@
# Example SVN datetime. Includes microseconds.
# ISO-8601 conformant
# '2007-01-04T17:35:00.902377Z'
- date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
+ date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
log = message and self.recode(message)
author = author and self.recode(author) or ''
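Note (illustration only, not part of the patch): the old slice stopped one character short of the seconds field, and because strptime-style parsing happily accepts a single-digit seconds value the truncation parsed without error but recorded the wrong time. Assuming util.parsedate defers to the strptime format given in the call above:

    svn_date = '2007-01-04T17:35:59.902377Z'
    print svn_date[:18]      # '2007-01-04T17:35:5'  -- seconds field cut short
    print svn_date[:19]      # '2007-01-04T17:35:59' -- full "%Y-%m-%dT%H:%M:%S" value

    import time
    print time.strptime(svn_date[:18], "%Y-%m-%dT%H:%M:%S").tm_sec   # 5 (silently wrong)
    print time.strptime(svn_date[:19], "%Y-%m-%dT%H:%M:%S").tm_sec   # 59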
--- a/hgext/highlight.py Fri Dec 07 02:29:55 2007 -0600
+++ b/hgext/highlight.py Fri Dec 07 14:59:33 2007 -0600
@@ -82,14 +82,17 @@
return highlight(rawtext, lexer, formatter)
-def filerevision_pygments(self, fctx):
+def filerevision_pygments(self, tmpl, fctx):
"""Reimplement hgweb.filerevision to use syntax highlighting"""
- filename = fctx.path()
+ f = fctx.path()
rawtext = fctx.data()
text = rawtext
- mt = mimetypes.guess_type(filename)[0]
+ fl = fctx.filelog()
+ n = fctx.filenode()
+
+ mt = mimetypes.guess_type(f)[0]
if util.binary(text):
mt = mt or 'application/octet-stream'
@@ -107,36 +110,35 @@
style = self.config("web", "pygments_style", "colorful")
- text_formatted = lines(pygments_format(filename, text,
+ text_formatted = lines(pygments_format(f, text,
forcetext=forcetext,
stripecount=self.stripecount,
style=style))
# override per-line template
- self.t.cache['fileline'] = '#line#'
+ tmpl.cache['fileline'] = '#line#'
# append a <link ...> to the syntax highlighting css
- old_header = ''.join(self.t('header'))
+ old_header = ''.join(tmpl('header'))
if SYNTAX_CSS not in old_header:
new_header = old_header + SYNTAX_CSS
- self.t.cache['header'] = new_header
+ tmpl.cache['header'] = new_header
- yield self.t("filerevision",
- file=filename,
- path=hgweb_mod._up(filename), # fixme: make public
- text=text_formatted,
- raw=rawtext,
- mimetype=mt,
- rev=fctx.rev(),
- node=hex(fctx.node()),
- author=fctx.user(),
- date=fctx.date(),
- desc=fctx.description(),
- parent=self.siblings(fctx.parents()),
- child=self.siblings(fctx.children()),
- rename=self.renamelink(fctx.filelog(),
- fctx.filenode()),
- permissions=fctx.manifest().flags(filename))
+ yield tmpl("filerevision",
+ file=f,
+ path=hgweb_mod._up(f), # fixme: make public
+ text=text_formatted,
+ raw=rawtext,
+ mimetype=mt,
+ rev=fctx.rev(),
+ node=hex(fctx.node()),
+ author=fctx.user(),
+ date=fctx.date(),
+ desc=fctx.description(),
+ parent=self.siblings(fctx.parents()),
+ child=self.siblings(fctx.children()),
+ rename=self.renamelink(fl, n),
+ permissions=fctx.manifest().flags(f))
# monkeypatch in the new version
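A sketch of the wiring that comment refers to (the actual assignment sits outside this hunk; the target name is assumed from the hgweb_mod import used above). The point of the signature change is that hgweb now hands its templater to the method explicitly, so the replacement must accept the same (self, tmpl, fctx) arguments:

    # hypothetical monkeypatch line, mirroring the comment above
    hgweb_mod.hgweb.filerevision = filerevision_pygments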
--- a/mercurial/hgweb/hgweb_mod.py Fri Dec 07 02:29:55 2007 -0600
+++ b/mercurial/hgweb/hgweb_mod.py Fri Dec 07 14:59:33 2007 -0600
@@ -6,14 +6,28 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
-import errno, os, mimetypes, re, zlib, mimetools, cStringIO, sys
-import tempfile, urllib, bz2
+import os, mimetypes, re, mimetools, cStringIO
from mercurial.node import *
-from mercurial.i18n import gettext as _
-from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
+from mercurial import mdiff, ui, hg, util, archival, patch
from mercurial import revlog, templater
-from common import ErrorResponse, get_mtime, staticfile, style_map, paritygen
+from common import ErrorResponse, get_mtime, style_map, paritygen
from request import wsgirequest
+import webcommands, protocol
+
+shortcuts = {
+ 'cl': [('cmd', ['changelog']), ('rev', None)],
+ 'sl': [('cmd', ['shortlog']), ('rev', None)],
+ 'cs': [('cmd', ['changeset']), ('node', None)],
+ 'f': [('cmd', ['file']), ('filenode', None)],
+ 'fl': [('cmd', ['filelog']), ('filenode', None)],
+ 'fd': [('cmd', ['filediff']), ('node', None)],
+ 'fa': [('cmd', ['annotate']), ('filenode', None)],
+ 'mf': [('cmd', ['manifest']), ('manifest', None)],
+ 'ca': [('cmd', ['archive']), ('node', None)],
+ 'tags': [('cmd', ['tags'])],
+ 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
+ 'static': [('cmd', ['static']), ('file', None)]
+}
def _up(p):
if p[0] != "/":
@@ -107,17 +121,200 @@
self.allowpull = self.configbool("web", "allowpull", True)
self.encoding = self.config("web", "encoding", util._encoding)
+ def run(self):
+ if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
+ raise RuntimeError("This function is only intended to be called while running as a CGI script.")
+ import mercurial.hgweb.wsgicgi as wsgicgi
+ wsgicgi.launch(self)
+
+ def __call__(self, env, respond):
+ req = wsgirequest(env, respond)
+ self.run_wsgi(req)
+ return req
+
+ def run_wsgi(self, req):
+
+ self.refresh()
+
+ # expand form shortcuts
+
+ for k in shortcuts.iterkeys():
+ if k in req.form:
+ for name, value in shortcuts[k]:
+ if value is None:
+ value = req.form[k]
+ req.form[name] = value
+ del req.form[k]
+
+ # work with CGI variables to create coherent structure
+ # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
+
+ req.url = req.env['SCRIPT_NAME']
+ if not req.url.endswith('/'):
+ req.url += '/'
+ if req.env.has_key('REPO_NAME'):
+ req.url += req.env['REPO_NAME'] + '/'
+
+ if req.env.get('PATH_INFO'):
+ parts = req.env.get('PATH_INFO').strip('/').split('/')
+ repo_parts = req.env.get('REPO_NAME', '').split('/')
+ if parts[:len(repo_parts)] == repo_parts:
+ parts = parts[len(repo_parts):]
+ query = '/'.join(parts)
+ else:
+ query = req.env['QUERY_STRING'].split('&', 1)[0]
+ query = query.split(';', 1)[0]
+
+ # translate user-visible url structure to internal structure
+
+ args = query.split('/', 2)
+ if 'cmd' not in req.form and args and args[0]:
+
+ cmd = args.pop(0)
+ style = cmd.rfind('-')
+ if style != -1:
+ req.form['style'] = [cmd[:style]]
+ cmd = cmd[style+1:]
+
+ # avoid accepting e.g. style parameter as command
+ if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
+ req.form['cmd'] = [cmd]
+
+ if args and args[0]:
+ node = args.pop(0)
+ req.form['node'] = [node]
+ if args:
+ req.form['file'] = args
+
+ if cmd == 'static':
+ req.form['file'] = req.form['node']
+ elif cmd == 'archive':
+ fn = req.form['node'][0]
+ for type_, spec in self.archive_specs.iteritems():
+ ext = spec[2]
+ if fn.endswith(ext):
+ req.form['node'] = [fn[:-len(ext)]]
+ req.form['type'] = [type_]
+
+ # actually process the request
+
+ try:
+
+ cmd = req.form.get('cmd', [''])[0]
+ if hasattr(protocol, cmd):
+ method = getattr(protocol, cmd)
+ method(self, req)
+ else:
+ tmpl = self.templater(req)
+ if cmd == '':
+ req.form['cmd'] = [tmpl.cache['default']]
+ cmd = req.form['cmd'][0]
+ method = getattr(webcommands, cmd)
+ method(self, req, tmpl)
+ del tmpl
+
+ except revlog.LookupError, err:
+ req.respond(404, tmpl(
+ 'error', error='revision not found: %s' % err.name))
+ except (hg.RepoError, revlog.RevlogError), inst:
+ req.respond('500 Internal Server Error',
+ tmpl('error', error=str(inst)))
+ except ErrorResponse, inst:
+ req.respond(inst.code, tmpl('error', error=inst.message))
+ except AttributeError:
+ req.respond(400, tmpl('error', error='No such method: ' + cmd))
+
+ def templater(self, req):
+
+ # determine scheme, port and server name
+ # this is needed to create absolute urls
+
+ proto = req.env.get('wsgi.url_scheme')
+ if proto == 'https':
+ proto = 'https'
+ default_port = "443"
+ else:
+ proto = 'http'
+ default_port = "80"
+
+ port = req.env["SERVER_PORT"]
+ port = port != default_port and (":" + port) or ""
+ urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
+ staticurl = self.config("web", "staticurl") or req.url + 'static/'
+ if not staticurl.endswith('/'):
+ staticurl += '/'
+
+ # some functions for the templater
+
+ def header(**map):
+ header_file = cStringIO.StringIO(
+ ''.join(tmpl("header", encoding=self.encoding, **map)))
+ msg = mimetools.Message(header_file, 0)
+ req.header(msg.items())
+ yield header_file.read()
+
+ def rawfileheader(**map):
+ req.header([('Content-type', map['mimetype']),
+ ('Content-disposition', 'filename=%s' % map['file']),
+ ('Content-length', str(len(map['raw'])))])
+ yield ''
+
+ def footer(**map):
+ yield tmpl("footer", **map)
+
+ def motd(**map):
+ yield self.config("web", "motd", "")
+
+ def sessionvars(**map):
+ fields = []
+ if req.form.has_key('style'):
+ style = req.form['style'][0]
+ if style != self.config('web', 'style', ''):
+ fields.append(('style', style))
+
+ separator = req.url[-1] == '?' and ';' or '?'
+ for name, value in fields:
+ yield dict(name=name, value=value, separator=separator)
+ separator = ';'
+
+ # figure out which style to use
+
+ style = self.config("web", "style", "")
+ if req.form.has_key('style'):
+ style = req.form['style'][0]
+ mapfile = style_map(self.templatepath, style)
+
+ if not self.reponame:
+ self.reponame = (self.config("web", "name")
+ or req.env.get('REPO_NAME')
+ or req.url.strip('/') or self.repo.root)
+
+ # create the templater
+
+ tmpl = templater.templater(mapfile, templater.common_filters,
+ defaults={"url": req.url,
+ "staticurl": staticurl,
+ "urlbase": urlbase,
+ "repo": self.reponame,
+ "header": header,
+ "footer": footer,
+ "motd": motd,
+ "rawfileheader": rawfileheader,
+ "sessionvars": sessionvars
+ })
+ return tmpl
+
def archivelist(self, nodeid):
allowed = self.configlist("web", "allow_archive")
for i, spec in self.archive_specs.iteritems():
if i in allowed or self.configbool("web", "allow" + i):
yield {"type" : i, "extension" : spec[2], "node" : nodeid}
- def listfilediffs(self, files, changeset):
+ def listfilediffs(self, tmpl, files, changeset):
for f in files[:self.maxfiles]:
- yield self.t("filedifflink", node=hex(changeset), file=f)
+ yield tmpl("filedifflink", node=hex(changeset), file=f)
if len(files) > self.maxfiles:
- yield self.t("fileellipses")
+ yield tmpl("fileellipses")
def siblings(self, siblings=[], hiderev=None, **args):
siblings = [s for s in siblings if s.node() != nullid]
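A worked example of the PATH_INFO rewriting added in run_wsgi above (URL and file name are hypothetical). A request for /hg/myrepo/gitweb-log/tip/src/main.c with REPO_NAME=myrepo leaves query = 'gitweb-log/tip/src/main.c', which is split into style, command and arguments:

    query = 'gitweb-log/tip/src/main.c'
    form = {}

    args = query.split('/', 2)             # ['gitweb-log', 'tip', 'src/main.c']
    cmd = args.pop(0)                      # 'gitweb-log'
    dash = cmd.rfind('-')
    if dash != -1:
        form['style'] = [cmd[:dash]]       # ['gitweb']
        cmd = cmd[dash + 1:]               # 'log'
    # run_wsgi only accepts cmd if webcommands or protocol defines it
    form['cmd'] = [cmd]
    if args and args[0]:
        form['node'] = [args.pop(0)]       # ['tip']
    if args:
        form['file'] = args                # ['src/main.c']

    print form
    # -> style ['gitweb'], cmd ['log'], node ['tip'], file ['src/main.c']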
@@ -149,11 +346,11 @@
branches.append({"name": branch})
return branches
- def showtag(self, t1, node=nullid, **args):
+ def showtag(self, tmpl, t1, node=nullid, **args):
for t in self.repo.nodetags(node):
- yield self.t(t1, tag=t, **args)
+ yield tmpl(t1, tag=t, **args)
- def diff(self, node1, node2, files):
+ def diff(self, tmpl, node1, node2, files):
def filterfiles(filters, files):
l = [x for x in files if x in filters]
@@ -165,22 +362,22 @@
parity = paritygen(self.stripecount)
def diffblock(diff, f, fn):
- yield self.t("diffblock",
- lines=prettyprintlines(diff),
- parity=parity.next(),
- file=f,
- filenode=hex(fn or nullid))
+ yield tmpl("diffblock",
+ lines=prettyprintlines(diff),
+ parity=parity.next(),
+ file=f,
+ filenode=hex(fn or nullid))
def prettyprintlines(diff):
for l in diff.splitlines(1):
if l.startswith('+'):
- yield self.t("difflineplus", line=l)
+ yield tmpl("difflineplus", line=l)
elif l.startswith('-'):
- yield self.t("difflineminus", line=l)
+ yield tmpl("difflineminus", line=l)
elif l.startswith('@'):
- yield self.t("difflineat", line=l)
+ yield tmpl("difflineat", line=l)
else:
- yield self.t("diffline", line=l)
+ yield tmpl("diffline", line=l)
r = self.repo
c1 = r.changectx(node1)
@@ -210,7 +407,7 @@
yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
opts=diffopts), f, tn)
- def changelog(self, ctx, shortlog=False):
+ def changelog(self, tmpl, ctx, shortlog=False):
def changelist(limit=0,**map):
cl = self.repo.changelog
l = [] # build a list in forward order for efficiency
@@ -225,7 +422,7 @@
"changelogtag": self.showtag("changelogtag",n),
"desc": ctx.description(),
"date": ctx.date(),
- "files": self.listfilediffs(ctx.files(), n),
+ "files": self.listfilediffs(tmpl, ctx.files(), n),
"rev": i,
"node": hex(n),
"tags": self.nodetagsdict(n),
@@ -248,15 +445,15 @@
changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
- yield self.t(shortlog and 'shortlog' or 'changelog',
- changenav=changenav,
- node=hex(cl.tip()),
- rev=pos, changesets=count,
- entries=lambda **x: changelist(limit=0,**x),
- latestentry=lambda **x: changelist(limit=1,**x),
- archives=self.archivelist("tip"))
+ yield tmpl(shortlog and 'shortlog' or 'changelog',
+ changenav=changenav,
+ node=hex(cl.tip()),
+ rev=pos, changesets=count,
+ entries=lambda **x: changelist(limit=0,**x),
+ latestentry=lambda **x: changelist(limit=1,**x),
+ archives=self.archivelist("tip"))
- def search(self, query):
+ def search(self, tmpl, query):
def changelist(**map):
cl = self.repo.changelog
@@ -287,19 +484,19 @@
count += 1
n = ctx.node()
- yield self.t('searchentry',
- parity=parity.next(),
- author=ctx.user(),
- parent=self.siblings(ctx.parents()),
- child=self.siblings(ctx.children()),
- changelogtag=self.showtag("changelogtag",n),
- desc=ctx.description(),
- date=ctx.date(),
- files=self.listfilediffs(ctx.files(), n),
- rev=ctx.rev(),
- node=hex(n),
- tags=self.nodetagsdict(n),
- branches=self.nodebranchdict(ctx))
+ yield tmpl('searchentry',
+ parity=parity.next(),
+ author=ctx.user(),
+ parent=self.siblings(ctx.parents()),
+ child=self.siblings(ctx.children()),
+ changelogtag=self.showtag(tmpl, "changelogtag",n),
+ desc=ctx.description(),
+ date=ctx.date(),
+ files=self.listfilediffs(tmpl, ctx.files(), n),
+ rev=ctx.rev(),
+ node=hex(n),
+ tags=self.nodetagsdict(n),
+ branches=self.nodebranchdict(ctx))
if count >= self.maxchanges:
break
@@ -307,13 +504,13 @@
cl = self.repo.changelog
parity = paritygen(self.stripecount)
- yield self.t('search',
- query=query,
- node=hex(cl.tip()),
- entries=changelist,
- archives=self.archivelist("tip"))
+ yield tmpl('search',
+ query=query,
+ node=hex(cl.tip()),
+ entries=changelist,
+ archives=self.archivelist("tip"))
- def changeset(self, ctx):
+ def changeset(self, tmpl, ctx):
n = ctx.node()
parents = ctx.parents()
p1 = parents[0].node()
@@ -321,29 +518,29 @@
files = []
parity = paritygen(self.stripecount)
for f in ctx.files():
- files.append(self.t("filenodelink",
- node=hex(n), file=f,
- parity=parity.next()))
+ files.append(tmpl("filenodelink",
+ node=hex(n), file=f,
+ parity=parity.next()))
def diff(**map):
- yield self.diff(p1, n, None)
+ yield self.diff(tmpl, p1, n, None)
- yield self.t('changeset',
- diff=diff,
- rev=ctx.rev(),
- node=hex(n),
- parent=self.siblings(parents),
- child=self.siblings(ctx.children()),
- changesettag=self.showtag("changesettag",n),
- author=ctx.user(),
- desc=ctx.description(),
- date=ctx.date(),
- files=files,
- archives=self.archivelist(hex(n)),
- tags=self.nodetagsdict(n),
- branches=self.nodebranchdict(ctx))
+ yield tmpl('changeset',
+ diff=diff,
+ rev=ctx.rev(),
+ node=hex(n),
+ parent=self.siblings(parents),
+ child=self.siblings(ctx.children()),
+ changesettag=self.showtag(tmpl, "changesettag",n),
+ author=ctx.user(),
+ desc=ctx.description(),
+ date=ctx.date(),
+ files=files,
+ archives=self.archivelist(hex(n)),
+ tags=self.nodetagsdict(n),
+ branches=self.nodebranchdict(ctx))
- def filelog(self, fctx):
+ def filelog(self, tmpl, fctx):
f = fctx.path()
fl = fctx.filelog()
count = fl.count()
@@ -380,11 +577,11 @@
nodefunc = lambda x: fctx.filectx(fileid=x)
nav = revnavgen(pos, pagelen, count, nodefunc)
- yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
- entries=lambda **x: entries(limit=0, **x),
- latestentry=lambda **x: entries(limit=1, **x))
+ yield tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
+ entries=lambda **x: entries(limit=0, **x),
+ latestentry=lambda **x: entries(limit=1, **x))
- def filerevision(self, fctx):
+ def filerevision(self, tmpl, fctx):
f = fctx.path()
text = fctx.data()
fl = fctx.filelog()
@@ -404,23 +601,23 @@
"linenumber": "% 6d" % (l + 1),
"parity": parity.next()}
- yield self.t("filerevision",
- file=f,
- path=_up(f),
- text=lines(),
- raw=rawtext,
- mimetype=mt,
- rev=fctx.rev(),
- node=hex(fctx.node()),
- author=fctx.user(),
- date=fctx.date(),
- desc=fctx.description(),
- parent=self.siblings(fctx.parents()),
- child=self.siblings(fctx.children()),
- rename=self.renamelink(fl, n),
- permissions=fctx.manifest().flags(f))
+ yield tmpl("filerevision",
+ file=f,
+ path=_up(f),
+ text=lines(),
+ raw=rawtext,
+ mimetype=mt,
+ rev=fctx.rev(),
+ node=hex(fctx.node()),
+ author=fctx.user(),
+ date=fctx.date(),
+ desc=fctx.description(),
+ parent=self.siblings(fctx.parents()),
+ child=self.siblings(fctx.children()),
+ rename=self.renamelink(fl, n),
+ permissions=fctx.manifest().flags(f))
- def fileannotate(self, fctx):
+ def fileannotate(self, tmpl, fctx):
f = fctx.path()
n = fctx.filenode()
fl = fctx.filelog()
@@ -442,21 +639,21 @@
"file": f.path(),
"line": l}
- yield self.t("fileannotate",
- file=f,
- annotate=annotate,
- path=_up(f),
- rev=fctx.rev(),
- node=hex(fctx.node()),
- author=fctx.user(),
- date=fctx.date(),
- desc=fctx.description(),
- rename=self.renamelink(fl, n),
- parent=self.siblings(fctx.parents()),
- child=self.siblings(fctx.children()),
- permissions=fctx.manifest().flags(f))
+ yield tmpl("fileannotate",
+ file=f,
+ annotate=annotate,
+ path=_up(f),
+ rev=fctx.rev(),
+ node=hex(fctx.node()),
+ author=fctx.user(),
+ date=fctx.date(),
+ desc=fctx.description(),
+ rename=self.renamelink(fl, n),
+ parent=self.siblings(fctx.parents()),
+ child=self.siblings(fctx.children()),
+ permissions=fctx.manifest().flags(f))
- def manifest(self, ctx, path):
+ def manifest(self, tmpl, ctx, path):
mf = ctx.manifest()
node = ctx.node()
@@ -510,19 +707,19 @@
"path": "%s%s" % (abspath, f),
"basename": f[:-1]}
- yield self.t("manifest",
- rev=ctx.rev(),
- node=hex(node),
- path=abspath,
- up=_up(abspath),
- upparity=parity.next(),
- fentries=filelist,
- dentries=dirlist,
- archives=self.archivelist(hex(node)),
- tags=self.nodetagsdict(node),
- branches=self.nodebranchdict(ctx))
+ yield tmpl("manifest",
+ rev=ctx.rev(),
+ node=hex(node),
+ path=abspath,
+ up=_up(abspath),
+ upparity=parity.next(),
+ fentries=filelist,
+ dentries=dirlist,
+ archives=self.archivelist(hex(node)),
+ tags=self.nodetagsdict(node),
+ branches=self.nodebranchdict(ctx))
- def tags(self):
+ def tags(self, tmpl):
i = self.repo.tagslist()
i.reverse()
parity = paritygen(self.stripecount)
@@ -540,13 +737,13 @@
"date": self.repo.changectx(n).date(),
"node": hex(n)}
- yield self.t("tags",
- node=hex(self.repo.changelog.tip()),
- entries=lambda **x: entries(False,0, **x),
- entriesnotip=lambda **x: entries(True,0, **x),
- latestentry=lambda **x: entries(True,1, **x))
+ yield tmpl("tags",
+ node=hex(self.repo.changelog.tip()),
+ entries=lambda **x: entries(False,0, **x),
+ entriesnotip=lambda **x: entries(True,0, **x),
+ latestentry=lambda **x: entries(True,1, **x))
- def summary(self):
+ def summary(self, tmpl):
i = self.repo.tagslist()
i.reverse()
@@ -561,11 +758,11 @@
if count > 10: # limit to 10 tags
break;
- yield self.t("tagentry",
- parity=parity.next(),
- tag=k,
- node=hex(n),
- date=self.repo.changectx(n).date())
+ yield tmpl("tagentry",
+ parity=parity.next(),
+ tag=k,
+ node=hex(n),
+ date=self.repo.changectx(n).date())
def branches(**map):
@@ -591,8 +788,8 @@
n = ctx.node()
hn = hex(n)
- l.insert(0, self.t(
- 'shortlogentry',
+ l.insert(0, tmpl(
+ 'shortlogentry',
parity=parity.next(),
author=ctx.user(),
desc=ctx.description(),
@@ -609,34 +806,34 @@
start = max(0, count - self.maxchanges)
end = min(count, start + self.maxchanges)
- yield self.t("summary",
- desc=self.config("web", "description", "unknown"),
- owner=(self.config("ui", "username") or # preferred
- self.config("web", "contact") or # deprecated
- self.config("web", "author", "unknown")), # also
- lastchange=cl.read(cl.tip())[2],
- tags=tagentries,
- branches=branches,
- shortlog=changelist,
- node=hex(cl.tip()),
- archives=self.archivelist("tip"))
+ yield tmpl("summary",
+ desc=self.config("web", "description", "unknown"),
+ owner=(self.config("ui", "username") or # preferred
+ self.config("web", "contact") or # deprecated
+ self.config("web", "author", "unknown")), # also
+ lastchange=cl.read(cl.tip())[2],
+ tags=tagentries,
+ branches=branches,
+ shortlog=changelist,
+ node=hex(cl.tip()),
+ archives=self.archivelist("tip"))
- def filediff(self, fctx):
+ def filediff(self, tmpl, fctx):
n = fctx.node()
path = fctx.path()
parents = fctx.parents()
p1 = parents and parents[0].node() or nullid
def diff(**map):
- yield self.diff(p1, n, [path])
+ yield self.diff(tmpl, p1, n, [path])
- yield self.t("filediff",
- file=path,
- node=hex(n),
- rev=fctx.rev(),
- parent=self.siblings(parents),
- child=self.siblings(fctx.children()),
- diff=diff)
+ yield tmpl("filediff",
+ file=path,
+ node=hex(n),
+ rev=fctx.rev(),
+ parent=self.siblings(parents),
+ child=self.siblings(fctx.children()),
+ diff=diff)
archive_specs = {
'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
@@ -644,7 +841,7 @@
'zip': ('application/zip', 'zip', '.zip', None),
}
- def archive(self, req, key, type_):
+ def archive(self, tmpl, req, key, type_):
reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
cnode = self.repo.lookup(key)
arch_version = key
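Related illustration (not part of the patch): archive links fuse the node and archive type into one file name, and the loop near the top of run_wsgi splits them apart again with the extension column of archive_specs before archive() is reached:

    archive_specs = {'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None)}

    fn = 'tip.tar.bz2'                       # e.g. requested as archive/tip.tar.bz2
    for type_, spec in archive_specs.iteritems():
        ext = spec[2]
        if fn.endswith(ext):
            print fn[:-len(ext)], type_      # -> tip bz2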
@@ -668,191 +865,6 @@
path = path.lstrip('/')
return util.canonpath(self.repo.root, '', path)
- def run(self):
- if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
- raise RuntimeError("This function is only intended to be called while running as a CGI script.")
- import mercurial.hgweb.wsgicgi as wsgicgi
- wsgicgi.launch(self)
-
- def __call__(self, env, respond):
- req = wsgirequest(env, respond)
- self.run_wsgi(req)
- return req
-
- def run_wsgi(self, req):
- def header(**map):
- header_file = cStringIO.StringIO(
- ''.join(self.t("header", encoding=self.encoding, **map)))
- msg = mimetools.Message(header_file, 0)
- req.header(msg.items())
- yield header_file.read()
-
- def rawfileheader(**map):
- req.header([('Content-type', map['mimetype']),
- ('Content-disposition', 'filename=%s' % map['file']),
- ('Content-length', str(len(map['raw'])))])
- yield ''
-
- def footer(**map):
- yield self.t("footer", **map)
-
- def motd(**map):
- yield self.config("web", "motd", "")
-
- def expand_form(form):
- shortcuts = {
- 'cl': [('cmd', ['changelog']), ('rev', None)],
- 'sl': [('cmd', ['shortlog']), ('rev', None)],
- 'cs': [('cmd', ['changeset']), ('node', None)],
- 'f': [('cmd', ['file']), ('filenode', None)],
- 'fl': [('cmd', ['filelog']), ('filenode', None)],
- 'fd': [('cmd', ['filediff']), ('node', None)],
- 'fa': [('cmd', ['annotate']), ('filenode', None)],
- 'mf': [('cmd', ['manifest']), ('manifest', None)],
- 'ca': [('cmd', ['archive']), ('node', None)],
- 'tags': [('cmd', ['tags'])],
- 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
- 'static': [('cmd', ['static']), ('file', None)]
- }
-
- for k in shortcuts.iterkeys():
- if form.has_key(k):
- for name, value in shortcuts[k]:
- if value is None:
- value = form[k]
- form[name] = value
- del form[k]
-
- def rewrite_request(req):
- '''translate new web interface to traditional format'''
-
- req.url = req.env['SCRIPT_NAME']
- if not req.url.endswith('/'):
- req.url += '/'
- if req.env.has_key('REPO_NAME'):
- req.url += req.env['REPO_NAME'] + '/'
-
- if req.env.get('PATH_INFO'):
- parts = req.env.get('PATH_INFO').strip('/').split('/')
- repo_parts = req.env.get('REPO_NAME', '').split('/')
- if parts[:len(repo_parts)] == repo_parts:
- parts = parts[len(repo_parts):]
- query = '/'.join(parts)
- else:
- query = req.env['QUERY_STRING'].split('&', 1)[0]
- query = query.split(';', 1)[0]
-
- if req.form.has_key('cmd'):
- # old style
- return
-
- args = query.split('/', 2)
- if not args or not args[0]:
- return
-
- cmd = args.pop(0)
- style = cmd.rfind('-')
- if style != -1:
- req.form['style'] = [cmd[:style]]
- cmd = cmd[style+1:]
- # avoid accepting e.g. style parameter as command
- if hasattr(self, 'do_' + cmd):
- req.form['cmd'] = [cmd]
-
- if args and args[0]:
- node = args.pop(0)
- req.form['node'] = [node]
- if args:
- req.form['file'] = args
-
- if cmd == 'static':
- req.form['file'] = req.form['node']
- elif cmd == 'archive':
- fn = req.form['node'][0]
- for type_, spec in self.archive_specs.iteritems():
- ext = spec[2]
- if fn.endswith(ext):
- req.form['node'] = [fn[:-len(ext)]]
- req.form['type'] = [type_]
-
- def sessionvars(**map):
- fields = []
- if req.form.has_key('style'):
- style = req.form['style'][0]
- if style != self.config('web', 'style', ''):
- fields.append(('style', style))
-
- separator = req.url[-1] == '?' and ';' or '?'
- for name, value in fields:
- yield dict(name=name, value=value, separator=separator)
- separator = ';'
-
- self.refresh()
-
- expand_form(req.form)
- rewrite_request(req)
-
- style = self.config("web", "style", "")
- if req.form.has_key('style'):
- style = req.form['style'][0]
- mapfile = style_map(self.templatepath, style)
-
- proto = req.env.get('wsgi.url_scheme')
- if proto == 'https':
- proto = 'https'
- default_port = "443"
- else:
- proto = 'http'
- default_port = "80"
-
- port = req.env["SERVER_PORT"]
- port = port != default_port and (":" + port) or ""
- urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
- staticurl = self.config("web", "staticurl") or req.url + 'static/'
- if not staticurl.endswith('/'):
- staticurl += '/'
-
- if not self.reponame:
- self.reponame = (self.config("web", "name")
- or req.env.get('REPO_NAME')
- or req.url.strip('/')
- or os.path.basename(self.repo.root))
-
- self.t = templater.templater(mapfile, templater.common_filters,
- defaults={"url": req.url,
- "staticurl": staticurl,
- "urlbase": urlbase,
- "repo": self.reponame,
- "header": header,
- "footer": footer,
- "motd": motd,
- "rawfileheader": rawfileheader,
- "sessionvars": sessionvars
- })
-
- try:
- if not req.form.has_key('cmd'):
- req.form['cmd'] = [self.t.cache['default']]
-
- cmd = req.form['cmd'][0]
-
- try:
- method = getattr(self, 'do_' + cmd)
- method(req)
- except revlog.LookupError, err:
- req.respond(404, self.t(
- 'error', error='revision not found: %s' % err.name))
- except (hg.RepoError, revlog.RevlogError), inst:
- req.respond('500 Internal Server Error',
- self.t('error', error=str(inst)))
- except ErrorResponse, inst:
- req.respond(inst.code, self.t('error', error=inst.message))
- except AttributeError:
- req.respond(400,
- self.t('error', error='No such method: ' + cmd))
- finally:
- self.t = None
-
def changectx(self, req):
if req.form.has_key('node'):
changeid = req.form['node'][0]
@@ -884,181 +896,6 @@
return fctx
- def do_log(self, req):
- if req.form.has_key('file') and req.form['file'][0]:
- self.do_filelog(req)
- else:
- self.do_changelog(req)
-
- def do_rev(self, req):
- self.do_changeset(req)
-
- def do_file(self, req):
- path = self.cleanpath(req.form.get('file', [''])[0])
- if path:
- try:
- req.write(self.filerevision(self.filectx(req)))
- return
- except revlog.LookupError:
- pass
-
- req.write(self.manifest(self.changectx(req), path))
-
- def do_diff(self, req):
- self.do_filediff(req)
-
- def do_changelog(self, req, shortlog = False):
- if req.form.has_key('node'):
- ctx = self.changectx(req)
- else:
- if req.form.has_key('rev'):
- hi = req.form['rev'][0]
- else:
- hi = self.repo.changelog.count() - 1
- try:
- ctx = self.repo.changectx(hi)
- except hg.RepoError:
- req.write(self.search(hi)) # XXX redirect to 404 page?
- return
-
- req.write(self.changelog(ctx, shortlog = shortlog))
-
- def do_shortlog(self, req):
- self.do_changelog(req, shortlog = True)
-
- def do_changeset(self, req):
- req.write(self.changeset(self.changectx(req)))
-
- def do_manifest(self, req):
- req.write(self.manifest(self.changectx(req),
- self.cleanpath(req.form['path'][0])))
-
- def do_tags(self, req):
- req.write(self.tags())
-
- def do_summary(self, req):
- req.write(self.summary())
-
- def do_filediff(self, req):
- req.write(self.filediff(self.filectx(req)))
-
- def do_annotate(self, req):
- req.write(self.fileannotate(self.filectx(req)))
-
- def do_filelog(self, req):
- req.write(self.filelog(self.filectx(req)))
-
- def do_lookup(self, req):
- try:
- r = hex(self.repo.lookup(req.form['key'][0]))
- success = 1
- except Exception,inst:
- r = str(inst)
- success = 0
- resp = "%s %s\n" % (success, r)
- req.httphdr("application/mercurial-0.1", length=len(resp))
- req.write(resp)
-
- def do_heads(self, req):
- resp = " ".join(map(hex, self.repo.heads())) + "\n"
- req.httphdr("application/mercurial-0.1", length=len(resp))
- req.write(resp)
-
- def do_branches(self, req):
- nodes = []
- if req.form.has_key('nodes'):
- nodes = map(bin, req.form['nodes'][0].split(" "))
- resp = cStringIO.StringIO()
- for b in self.repo.branches(nodes):
- resp.write(" ".join(map(hex, b)) + "\n")
- resp = resp.getvalue()
- req.httphdr("application/mercurial-0.1", length=len(resp))
- req.write(resp)
-
- def do_between(self, req):
- if req.form.has_key('pairs'):
- pairs = [map(bin, p.split("-"))
- for p in req.form['pairs'][0].split(" ")]
- resp = cStringIO.StringIO()
- for b in self.repo.between(pairs):
- resp.write(" ".join(map(hex, b)) + "\n")
- resp = resp.getvalue()
- req.httphdr("application/mercurial-0.1", length=len(resp))
- req.write(resp)
-
- def do_changegroup(self, req):
- req.httphdr("application/mercurial-0.1")
- nodes = []
- if not self.allowpull:
- return
-
- if req.form.has_key('roots'):
- nodes = map(bin, req.form['roots'][0].split(" "))
-
- z = zlib.compressobj()
- f = self.repo.changegroup(nodes, 'serve')
- while 1:
- chunk = f.read(4096)
- if not chunk:
- break
- req.write(z.compress(chunk))
-
- req.write(z.flush())
-
- def do_changegroupsubset(self, req):
- req.httphdr("application/mercurial-0.1")
- bases = []
- heads = []
- if not self.allowpull:
- return
-
- if req.form.has_key('bases'):
- bases = [bin(x) for x in req.form['bases'][0].split(' ')]
- if req.form.has_key('heads'):
- heads = [bin(x) for x in req.form['heads'][0].split(' ')]
-
- z = zlib.compressobj()
- f = self.repo.changegroupsubset(bases, heads, 'serve')
- while 1:
- chunk = f.read(4096)
- if not chunk:
- break
- req.write(z.compress(chunk))
-
- req.write(z.flush())
-
- def do_archive(self, req):
- type_ = req.form['type'][0]
- allowed = self.configlist("web", "allow_archive")
- if (type_ in self.archives and (type_ in allowed or
- self.configbool("web", "allow" + type_, False))):
- self.archive(req, req.form['node'][0], type_)
- return
-
- req.respond(400, self.t('error',
- error='Unsupported archive type: %s' % type_))
-
- def do_static(self, req):
- fname = req.form['file'][0]
- # a repo owner may set web.static in .hg/hgrc to get any file
- # readable by the user running the CGI script
- static = self.config("web", "static",
- os.path.join(self.templatepath, "static"),
- untrusted=False)
- req.write(staticfile(static, fname, req))
-
- def do_capabilities(self, req):
- caps = ['lookup', 'changegroupsubset']
- if self.configbool('server', 'uncompressed'):
- caps.append('stream=%d' % self.repo.changelog.version)
- # XXX: make configurable and/or share code with do_unbundle:
- unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
- if unbundleversions:
- caps.append('unbundle=%s' % ','.join(unbundleversions))
- resp = ' '.join(caps)
- req.httphdr("application/mercurial-0.1", length=len(resp))
- req.write(resp)
-
def check_perm(self, req, op, default):
'''check permission for operation based on user auth.
return true if op allowed, else false.
@@ -1072,138 +909,3 @@
allow = self.configlist('web', 'allow_' + op)
return (allow and (allow == ['*'] or user in allow)) or default
-
- def do_unbundle(self, req):
- def bail(response, headers={}):
- length = int(req.env['CONTENT_LENGTH'])
- for s in util.filechunkiter(req, limit=length):
- # drain incoming bundle, else client will not see
- # response when run outside cgi script
- pass
- req.httphdr("application/mercurial-0.1", headers=headers)
- req.write('0\n')
- req.write(response)
-
- # require ssl by default, auth info cannot be sniffed and
- # replayed
- ssl_req = self.configbool('web', 'push_ssl', True)
- if ssl_req:
- if req.env.get('wsgi.url_scheme') != 'https':
- bail(_('ssl required\n'))
- return
- proto = 'https'
- else:
- proto = 'http'
-
- # do not allow push unless explicitly allowed
- if not self.check_perm(req, 'push', False):
- bail(_('push not authorized\n'),
- headers={'status': '401 Unauthorized'})
- return
-
- their_heads = req.form['heads'][0].split(' ')
-
- def check_heads():
- heads = map(hex, self.repo.heads())
- return their_heads == [hex('force')] or their_heads == heads
-
- # fail early if possible
- if not check_heads():
- bail(_('unsynced changes\n'))
- return
-
- req.httphdr("application/mercurial-0.1")
-
- # do not lock repo until all changegroup data is
- # streamed. save to temporary file.
-
- fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
- fp = os.fdopen(fd, 'wb+')
- try:
- length = int(req.env['CONTENT_LENGTH'])
- for s in util.filechunkiter(req, limit=length):
- fp.write(s)
-
- try:
- lock = self.repo.lock()
- try:
- if not check_heads():
- req.write('0\n')
- req.write(_('unsynced changes\n'))
- return
-
- fp.seek(0)
- header = fp.read(6)
- if not header.startswith("HG"):
- # old client with uncompressed bundle
- def generator(f):
- yield header
- for chunk in f:
- yield chunk
- elif not header.startswith("HG10"):
- req.write("0\n")
- req.write(_("unknown bundle version\n"))
- return
- elif header == "HG10GZ":
- def generator(f):
- zd = zlib.decompressobj()
- for chunk in f:
- yield zd.decompress(chunk)
- elif header == "HG10BZ":
- def generator(f):
- zd = bz2.BZ2Decompressor()
- zd.decompress("BZ")
- for chunk in f:
- yield zd.decompress(chunk)
- elif header == "HG10UN":
- def generator(f):
- for chunk in f:
- yield chunk
- else:
- req.write("0\n")
- req.write(_("unknown bundle compression type\n"))
- return
- gen = generator(util.filechunkiter(fp, 4096))
-
- # send addchangegroup output to client
-
- old_stdout = sys.stdout
- sys.stdout = cStringIO.StringIO()
-
- try:
- url = 'remote:%s:%s' % (proto,
- req.env.get('REMOTE_HOST', ''))
- try:
- ret = self.repo.addchangegroup(
- util.chunkbuffer(gen), 'serve', url)
- except util.Abort, inst:
- sys.stdout.write("abort: %s\n" % inst)
- ret = 0
- finally:
- val = sys.stdout.getvalue()
- sys.stdout = old_stdout
- req.write('%d\n' % ret)
- req.write(val)
- finally:
- del lock
- except (OSError, IOError), inst:
- req.write('0\n')
- filename = getattr(inst, 'filename', '')
- # Don't send our filesystem layout to the client
- if filename.startswith(self.repo.root):
- filename = filename[len(self.repo.root)+1:]
- else:
- filename = ''
- error = getattr(inst, 'strerror', 'Unknown error')
- if inst.errno == errno.ENOENT:
- code = 404
- else:
- code = 500
- req.respond(code, '%s: %s\n' % (error, filename))
- finally:
- fp.close()
- os.unlink(tempname)
-
- def do_stream_out(self, req):
- req.httphdr("application/mercurial-0.1")
- streamclone.stream_out(self.repo, req, untrusted=True)
--- a/mercurial/hgweb/hgwebdir_mod.py Fri Dec 07 02:29:55 2007 -0600
+++ b/mercurial/hgweb/hgwebdir_mod.py Fri Dec 07 14:59:33 2007 -0600
@@ -20,7 +20,8 @@
return [(util.pconvert(name).strip('/'), path)
for name, path in items]
- self.parentui = parentui
+ self.parentui = parentui or ui.ui(report_untrusted=False,
+ interactive=False)
self.motd = None
self.style = None
self.stripecount = None
@@ -69,50 +70,66 @@
return req
def run_wsgi(self, req):
- def header(**map):
- header_file = cStringIO.StringIO(
- ''.join(tmpl("header", encoding=util._encoding, **map)))
- msg = mimetools.Message(header_file, 0)
- req.header(msg.items())
- yield header_file.read()
- def footer(**map):
- yield tmpl("footer", **map)
+ try:
+ try:
- def motd(**map):
- if self.motd is not None:
- yield self.motd
- else:
- yield config('web', 'motd', '')
+ virtual = req.env.get("PATH_INFO", "").strip('/')
+
+ # a static file
+ if virtual.startswith('static/') or 'static' in req.form:
+ static = os.path.join(templater.templatepath(), 'static')
+ if virtual.startswith('static/'):
+ fname = virtual[7:]
+ else:
+ fname = req.form['static'][0]
+ req.write(staticfile(static, fname, req))
+ return
- parentui = self.parentui or ui.ui(report_untrusted=False,
- interactive=False)
-
- def config(section, name, default=None, untrusted=True):
- return parentui.config(section, name, default, untrusted)
+ # top-level index
+ elif not virtual:
+ tmpl = self.templater(req)
+ self.makeindex(req, tmpl)
+ return
- url = req.env.get('SCRIPT_NAME', '')
- if not url.endswith('/'):
- url += '/'
-
- staticurl = config('web', 'staticurl') or url + 'static/'
- if not staticurl.endswith('/'):
- staticurl += '/'
+ # nested indexes and hgwebs
+ repos = dict(self.repos)
+ while virtual:
+ real = repos.get(virtual)
+ if real:
+ req.env['REPO_NAME'] = virtual
+ try:
+ repo = hg.repository(self.parentui, real)
+ hgweb(repo).run_wsgi(req)
+ return
+ except IOError, inst:
+ raise ErrorResponse(500, inst.strerror)
+ except hg.RepoError, inst:
+ raise ErrorResponse(500, str(inst))
- style = self.style
- if style is None:
- style = config('web', 'style', '')
- if req.form.has_key('style'):
- style = req.form['style'][0]
- if self.stripecount is None:
- self.stripecount = int(config('web', 'stripes', 1))
- mapfile = style_map(templater.templatepath(), style)
- tmpl = templater.templater(mapfile, templater.common_filters,
- defaults={"header": header,
- "footer": footer,
- "motd": motd,
- "url": url,
- "staticurl": staticurl})
+ # browse subdirectories
+ subdir = virtual + '/'
+ if [r for r in repos if r.startswith(subdir)]:
+ tmpl = self.templater(req)
+ self.makeindex(req, tmpl, subdir)
+ return
+
+ up = virtual.rfind('/')
+ if up < 0:
+ break
+ virtual = virtual[:up]
+
+ # prefixes not found
+ tmpl = self.templater(req)
+ req.respond(404, tmpl("notfound", repo=virtual))
+
+ except ErrorResponse, err:
+ tmpl = self.templater(req)
+ req.respond(err.code, tmpl('error', error=err.message or ''))
+ finally:
+ tmpl = None
+
+ def makeindex(self, req, tmpl, subdir=""):
def archivelist(ui, nodeid, url):
allowed = ui.configlist("web", "allow_archive", untrusted=True)
@@ -142,7 +159,7 @@
continue
name = name[len(subdir):]
- u = ui.ui(parentui=parentui)
+ u = ui.ui(parentui=self.parentui)
try:
u.readconfig(os.path.join(path, '.hg', 'hgrc'))
except Exception, e:
@@ -196,67 +213,65 @@
row['parity'] = parity.next()
yield row
- def makeindex(req, subdir=""):
- sortable = ["name", "description", "contact", "lastchange"]
- sortcolumn, descending = self.repos_sorted
- if req.form.has_key('sort'):
- sortcolumn = req.form['sort'][0]
- descending = sortcolumn.startswith('-')
- if descending:
- sortcolumn = sortcolumn[1:]
- if sortcolumn not in sortable:
- sortcolumn = ""
+ sortable = ["name", "description", "contact", "lastchange"]
+ sortcolumn, descending = self.repos_sorted
+ if req.form.has_key('sort'):
+ sortcolumn = req.form['sort'][0]
+ descending = sortcolumn.startswith('-')
+ if descending:
+ sortcolumn = sortcolumn[1:]
+ if sortcolumn not in sortable:
+ sortcolumn = ""
- sort = [("sort_%s" % column,
- "%s%s" % ((not descending and column == sortcolumn)
- and "-" or "", column))
- for column in sortable]
- req.write(tmpl("index", entries=entries, subdir=subdir,
- sortcolumn=sortcolumn, descending=descending,
- **dict(sort)))
+ sort = [("sort_%s" % column,
+ "%s%s" % ((not descending and column == sortcolumn)
+ and "-" or "", column))
+ for column in sortable]
+ req.write(tmpl("index", entries=entries, subdir=subdir,
+ sortcolumn=sortcolumn, descending=descending,
+ **dict(sort)))
+
+ def templater(self, req):
+
+ def header(**map):
+ header_file = cStringIO.StringIO(
+ ''.join(tmpl("header", encoding=util._encoding, **map)))
+ msg = mimetools.Message(header_file, 0)
+ req.header(msg.items())
+ yield header_file.read()
+
+ def footer(**map):
+ yield tmpl("footer", **map)
- try:
- try:
- virtual = req.env.get("PATH_INFO", "").strip('/')
- if virtual.startswith('static/'):
- static = os.path.join(templater.templatepath(), 'static')
- fname = virtual[7:]
- req.write(staticfile(static, fname, req))
- elif virtual:
- repos = dict(self.repos)
- while virtual:
- real = repos.get(virtual)
- if real:
- req.env['REPO_NAME'] = virtual
- try:
- repo = hg.repository(parentui, real)
- hgweb(repo).run_wsgi(req)
- return
- except IOError, inst:
- raise ErrorResponse(500, inst.strerror)
- except hg.RepoError, inst:
- raise ErrorResponse(500, str(inst))
+ def motd(**map):
+ if self.motd is not None:
+ yield self.motd
+ else:
+ yield config('web', 'motd', '')
+
+ def config(section, name, default=None, untrusted=True):
+ return self.parentui.config(section, name, default, untrusted)
+
+ url = req.env.get('SCRIPT_NAME', '')
+ if not url.endswith('/'):
+ url += '/'
- # browse subdirectories
- subdir = virtual + '/'
- if [r for r in repos if r.startswith(subdir)]:
- makeindex(req, subdir)
- return
-
- up = virtual.rfind('/')
- if up < 0:
- break
- virtual = virtual[:up]
+ staticurl = config('web', 'staticurl') or url + 'static/'
+ if not staticurl.endswith('/'):
+ staticurl += '/'
- req.respond(404, tmpl("notfound", repo=virtual))
- else:
- if req.form.has_key('static'):
- static = os.path.join(templater.templatepath(), "static")
- fname = req.form['static'][0]
- req.write(staticfile(static, fname, req))
- else:
- makeindex(req)
- except ErrorResponse, err:
- req.respond(err.code, tmpl('error', error=err.message or ''))
- finally:
- tmpl = None
+ style = self.style
+ if style is None:
+ style = config('web', 'style', '')
+ if req.form.has_key('style'):
+ style = req.form['style'][0]
+ if self.stripecount is None:
+ self.stripecount = int(config('web', 'stripes', 1))
+ mapfile = style_map(templater.templatepath(), style)
+ tmpl = templater.templater(mapfile, templater.common_filters,
+ defaults={"header": header,
+ "footer": footer,
+ "motd": motd,
+ "url": url,
+ "staticurl": staticurl})
+ return tmpl
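A standalone sketch of the prefix walk run_wsgi now uses to find a nested repository (repository table and request path are made up); each pass chops the last path component off `virtual` until it matches a configured repo, a sub-directory index, or nothing:

    repos = {'team/project': '/srv/hg/project'}

    virtual = 'team/project/rev/tip'          # from PATH_INFO
    while virtual:
        if virtual in repos:
            print 'serve', repos[virtual], 'with REPO_NAME =', virtual
            break
        up = virtual.rfind('/')
        if up < 0:
            print 'no repository matches'
            break
        virtual = virtual[:up]                # .../rev/tip -> .../rev -> team/project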
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/hgweb/protocol.py Fri Dec 07 14:59:33 2007 -0600
@@ -0,0 +1,237 @@
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import cStringIO, zlib, bz2, tempfile, errno, os, sys
+from mercurial import util, streamclone
+from mercurial.i18n import gettext as _
+from mercurial.node import *
+
+def lookup(web, req):
+ try:
+ r = hex(web.repo.lookup(req.form['key'][0]))
+ success = 1
+ except Exception,inst:
+ r = str(inst)
+ success = 0
+ resp = "%s %s\n" % (success, r)
+ req.httphdr("application/mercurial-0.1", length=len(resp))
+ req.write(resp)
+
+def heads(web, req):
+ resp = " ".join(map(hex, web.repo.heads())) + "\n"
+ req.httphdr("application/mercurial-0.1", length=len(resp))
+ req.write(resp)
+
+def branches(web, req):
+ nodes = []
+ if req.form.has_key('nodes'):
+ nodes = map(bin, req.form['nodes'][0].split(" "))
+ resp = cStringIO.StringIO()
+ for b in web.repo.branches(nodes):
+ resp.write(" ".join(map(hex, b)) + "\n")
+ resp = resp.getvalue()
+ req.httphdr("application/mercurial-0.1", length=len(resp))
+ req.write(resp)
+
+def between(web, req):
+ if req.form.has_key('pairs'):
+ pairs = [map(bin, p.split("-"))
+ for p in req.form['pairs'][0].split(" ")]
+ resp = cStringIO.StringIO()
+ for b in web.repo.between(pairs):
+ resp.write(" ".join(map(hex, b)) + "\n")
+ resp = resp.getvalue()
+ req.httphdr("application/mercurial-0.1", length=len(resp))
+ req.write(resp)
+
+def changegroup(web, req):
+ req.httphdr("application/mercurial-0.1")
+ nodes = []
+ if not web.allowpull:
+ return
+
+ if req.form.has_key('roots'):
+ nodes = map(bin, req.form['roots'][0].split(" "))
+
+ z = zlib.compressobj()
+ f = web.repo.changegroup(nodes, 'serve')
+ while 1:
+ chunk = f.read(4096)
+ if not chunk:
+ break
+ req.write(z.compress(chunk))
+
+ req.write(z.flush())
+
+def changegroupsubset(web, req):
+ req.httphdr("application/mercurial-0.1")
+ bases = []
+ heads = []
+ if not web.allowpull:
+ return
+
+ if req.form.has_key('bases'):
+ bases = [bin(x) for x in req.form['bases'][0].split(' ')]
+ if req.form.has_key('heads'):
+ heads = [bin(x) for x in req.form['heads'][0].split(' ')]
+
+ z = zlib.compressobj()
+ f = web.repo.changegroupsubset(bases, heads, 'serve')
+ while 1:
+ chunk = f.read(4096)
+ if not chunk:
+ break
+ req.write(z.compress(chunk))
+
+ req.write(z.flush())
+
+def capabilities(web, req):
+ caps = ['lookup', 'changegroupsubset']
+ if web.configbool('server', 'uncompressed'):
+ caps.append('stream=%d' % web.repo.changelog.version)
+ # XXX: make configurable and/or share code with do_unbundle:
+ unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
+ if unbundleversions:
+ caps.append('unbundle=%s' % ','.join(unbundleversions))
+ resp = ' '.join(caps)
+ req.httphdr("application/mercurial-0.1", length=len(resp))
+ req.write(resp)
+
+def unbundle(web, req):
+ def bail(response, headers={}):
+ length = int(req.env['CONTENT_LENGTH'])
+ for s in util.filechunkiter(req, limit=length):
+ # drain incoming bundle, else client will not see
+ # response when run outside cgi script
+ pass
+ req.httphdr("application/mercurial-0.1", headers=headers)
+ req.write('0\n')
+ req.write(response)
+
+ # require ssl by default, auth info cannot be sniffed and
+ # replayed
+ ssl_req = web.configbool('web', 'push_ssl', True)
+ if ssl_req:
+ if req.env.get('wsgi.url_scheme') != 'https':
+ bail(_('ssl required\n'))
+ return
+ proto = 'https'
+ else:
+ proto = 'http'
+
+ # do not allow push unless explicitly allowed
+ if not web.check_perm(req, 'push', False):
+ bail(_('push not authorized\n'),
+ headers={'status': '401 Unauthorized'})
+ return
+
+ their_heads = req.form['heads'][0].split(' ')
+
+ def check_heads():
+ heads = map(hex, web.repo.heads())
+ return their_heads == [hex('force')] or their_heads == heads
+
+ # fail early if possible
+ if not check_heads():
+ bail(_('unsynced changes\n'))
+ return
+
+ req.httphdr("application/mercurial-0.1")
+
+ # do not lock repo until all changegroup data is
+ # streamed. save to temporary file.
+
+ fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
+ fp = os.fdopen(fd, 'wb+')
+ try:
+ length = int(req.env['CONTENT_LENGTH'])
+ for s in util.filechunkiter(req, limit=length):
+ fp.write(s)
+
+ try:
+ lock = web.repo.lock()
+ try:
+ if not check_heads():
+ req.write('0\n')
+ req.write(_('unsynced changes\n'))
+ return
+
+ fp.seek(0)
+ header = fp.read(6)
+ if not header.startswith("HG"):
+ # old client with uncompressed bundle
+ def generator(f):
+ yield header
+ for chunk in f:
+ yield chunk
+ elif not header.startswith("HG10"):
+ req.write("0\n")
+ req.write(_("unknown bundle version\n"))
+ return
+ elif header == "HG10GZ":
+ def generator(f):
+ zd = zlib.decompressobj()
+ for chunk in f:
+ yield zd.decompress(chunk)
+ elif header == "HG10BZ":
+ def generator(f):
+ zd = bz2.BZ2Decompressor()
+ zd.decompress("BZ")
+ for chunk in f:
+ yield zd.decompress(chunk)
+ elif header == "HG10UN":
+ def generator(f):
+ for chunk in f:
+ yield chunk
+ else:
+ req.write("0\n")
+ req.write(_("unknown bundle compression type\n"))
+ return
+ gen = generator(util.filechunkiter(fp, 4096))
+
+ # send addchangegroup output to client
+
+ old_stdout = sys.stdout
+ sys.stdout = cStringIO.StringIO()
+
+ try:
+ url = 'remote:%s:%s' % (proto,
+ req.env.get('REMOTE_HOST', ''))
+ try:
+ ret = web.repo.addchangegroup(
+ util.chunkbuffer(gen), 'serve', url)
+ except util.Abort, inst:
+ sys.stdout.write("abort: %s\n" % inst)
+ ret = 0
+ finally:
+ val = sys.stdout.getvalue()
+ sys.stdout = old_stdout
+ req.write('%d\n' % ret)
+ req.write(val)
+ finally:
+ del lock
+ except (OSError, IOError), inst:
+ req.write('0\n')
+ filename = getattr(inst, 'filename', '')
+ # Don't send our filesystem layout to the client
+ if filename.startswith(web.repo.root):
+ filename = filename[len(web.repo.root)+1:]
+ else:
+ filename = ''
+ error = getattr(inst, 'strerror', 'Unknown error')
+ if inst.errno == errno.ENOENT:
+ code = 404
+ else:
+ code = 500
+ req.respond(code, '%s: %s\n' % (error, filename))
+ finally:
+ fp.close()
+ os.unlink(tempname)
+
+def stream_out(web, req):
+ req.httphdr("application/mercurial-0.1")
+ streamclone.stream_out(web.repo, req, untrusted=True)
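For illustration (standalone, not part of the patch): an HG10BZ bundle is the four bytes "HG10" followed directly by a bz2 stream, so the stream's own "BZ" magic doubles as the last two bytes of the 6-byte header that unbundle() reads. That is why the decompressor above is primed with "BZ" before the rest of the spooled file is fed to it:

    import bz2
    from cStringIO import StringIO

    payload = bz2.compress("dummy changegroup data")   # begins with the "BZ" magic
    wire = "HG10" + payload                            # what the client sends

    fp = StringIO(wire)
    header = fp.read(6)                                # "HG10BZ" -- magic consumed
    zd = bz2.BZ2Decompressor()
    zd.decompress("BZ")                                # give the magic back
    print zd.decompress(fp.read())                     # -> dummy changegroup data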
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/hgweb/webcommands.py Fri Dec 07 14:59:33 2007 -0600
@@ -0,0 +1,92 @@
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os
+from mercurial import hg, revlog
+from common import staticfile
+
+def log(web, req, tmpl):
+ if req.form.has_key('file') and req.form['file'][0]:
+ filelog(web, req, tmpl)
+ else:
+ changelog(web, req, tmpl)
+
+def file(web, req, tmpl):
+ path = web.cleanpath(req.form.get('file', [''])[0])
+ if path:
+ try:
+ req.write(web.filerevision(tmpl, web.filectx(req)))
+ return
+ except revlog.LookupError:
+ pass
+
+ req.write(web.manifest(tmpl, web.changectx(req), path))
+
+def changelog(web, req, tmpl, shortlog = False):
+ if req.form.has_key('node'):
+ ctx = web.changectx(req)
+ else:
+ if req.form.has_key('rev'):
+ hi = req.form['rev'][0]
+ else:
+ hi = web.repo.changelog.count() - 1
+ try:
+ ctx = web.repo.changectx(hi)
+ except hg.RepoError:
+ req.write(web.search(tmpl, hi)) # XXX redirect to 404 page?
+ return
+
+ req.write(web.changelog(tmpl, ctx, shortlog = shortlog))
+
+def shortlog(web, req, tmpl):
+ changelog(web, req, tmpl, shortlog = True)
+
+def changeset(web, req, tmpl):
+ req.write(web.changeset(tmpl, web.changectx(req)))
+
+rev = changeset
+
+def manifest(web, req, tmpl):
+ req.write(web.manifest(tmpl, web.changectx(req),
+ web.cleanpath(req.form['path'][0])))
+
+def tags(web, req, tmpl):
+ req.write(web.tags(tmpl))
+
+def summary(web, req, tmpl):
+ req.write(web.summary(tmpl))
+
+def filediff(web, req, tmpl):
+ req.write(web.filediff(tmpl, web.filectx(req)))
+
+diff = filediff
+
+def annotate(web, req, tmpl):
+ req.write(web.fileannotate(tmpl, web.filectx(req)))
+
+def filelog(web, req, tmpl):
+ req.write(web.filelog(tmpl, web.filectx(req)))
+
+def archive(web, req, tmpl):
+ type_ = req.form['type'][0]
+ allowed = web.configlist("web", "allow_archive")
+ if (type_ in web.archives and (type_ in allowed or
+ web.configbool("web", "allow" + type_, False))):
+ web.archive(tmpl, req, req.form['node'][0], type_)
+ return
+
+ req.respond(400, tmpl('error',
+ error='Unsupported archive type: %s' % type_))
+
+def static(web, req, tmpl):
+ fname = req.form['file'][0]
+ # a repo owner may set web.static in .hg/hgrc to get any file
+ # readable by the user running the CGI script
+ static = web.config("web", "static",
+ os.path.join(web.templatepath, "static"),
+ untrusted=False)
+ req.write(staticfile(static, fname, req))
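These handlers all share the (web, req, tmpl) signature so hgweb.run_wsgi can dispatch on the command name with getattr, and the module-level aliases keep the traditional command names working. A quick check (run with mercurial/hgweb on the import path):

    import webcommands

    print getattr(webcommands, 'rev') is webcommands.changeset    # True
    print getattr(webcommands, 'diff') is webcommands.filediff    # True
    print hasattr(webcommands, 'style')                           # False: not accepted as a command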
--- a/mercurial/ui.py Fri Dec 07 02:29:55 2007 -0600
+++ b/mercurial/ui.py Fri Dec 07 14:59:33 2007 -0600
@@ -403,7 +403,12 @@
readline.read_history_file
except ImportError:
pass
- return raw_input(prompt)
+ line = raw_input(prompt)
+ # When stdin is in binary mode on Windows, it can cause
+ # raw_input() to emit an extra trailing carriage return
+ if os.linesep == '\r\n' and line and line[-1] == '\r':
+ line = line[:-1]
+ return line
def prompt(self, msg, pat=None, default="y", matchflags=0):
if not self.interactive: return default
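Illustration of the trimming just added: only a single trailing carriage return is removed, and the `line and` guard keeps empty input from raising IndexError:

    for line in ['yes\r', 'yes', '']:
        if line and line[-1] == '\r':
            line = line[:-1]
        print repr(line)        # 'yes'  'yes'  ''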
--- a/mercurial/util_win32.py Fri Dec 07 02:29:55 2007 -0600
+++ b/mercurial/util_win32.py Fri Dec 07 14:59:33 2007 -0600
@@ -180,6 +180,17 @@
def system_rcpath_win32():
'''return default os-specific hgrc search path'''
+ proc = win32api.GetCurrentProcess()
+ try:
+ # This will fail on windows < NT
+ filename = win32process.GetModuleFileNameEx(proc, 0)
+ except:
+ filename = win32api.GetModuleFileName(0)
+ # Use mercurial.ini found in directory with hg.exe
+ progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
+ if os.path.isfile(progrc):
+ return [progrc]
+ # else look for a system rcpath in the registry
try:
value = win32api.RegQueryValue(
win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial')
@@ -193,19 +204,12 @@
rcpath.append(os.path.join(p, f))
return rcpath
except pywintypes.error:
- pass
- proc = win32api.GetCurrentProcess()
- try:
- # This will fail on windows < NT
- filename = win32process.GetModuleFileNameEx(proc, 0)
- except:
- filename = win32api.GetModuleFileName(0)
- return [os.path.join(os.path.dirname(filename), 'mercurial.ini')]
+ return []
def user_rcpath_win32():
'''return os-specific hgrc search path to the user dir'''
userdir = os.path.expanduser('~')
- if userdir == '~':
+ if sys.getwindowsversion()[3] != 2 and userdir == '~':
# We are on win < nt: fetch the APPDATA directory location and use
# the parent directory as the user home dir.
appdir = shell.SHGetPathFromIDList(
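A simplified picture of the system hgrc lookup order after this change (win32 calls replaced with plain arguments; the real code obtains them via win32process/win32api as above): a mercurial.ini sitting next to hg.exe now wins outright, and the registry is only a fallback.

    import os

    def system_rcpath_sketch(exe_dir, registry_paths):
        progrc = os.path.join(exe_dir, 'mercurial.ini')
        if os.path.isfile(progrc):
            return [progrc]            # mercurial.ini beside hg.exe wins
        return registry_paths          # else whatever HKLM\SOFTWARE\Mercurial yields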
--- a/tests/coverage.py Fri Dec 07 02:29:55 2007 -0600
+++ b/tests/coverage.py Fri Dec 07 14:59:33 2007 -0600
@@ -22,15 +22,20 @@
# interface and limitations. See [GDR 2001-12-04b] for requirements and
# design.
-"""Usage:
+r"""Usage:
-coverage.py -x MODULE.py [ARG1 ARG2 ...]
+coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...]
Execute module, passing the given command-line arguments, collecting
- coverage data.
+ coverage data. With the -p option, write to a temporary file containing
+ the machine name and process ID.
coverage.py -e
Erase collected coverage data.
+coverage.py -c
+ Collect data from multiple coverage files (as created by -p option above)
+ and store it into a single file representing the union of the coverage.
+
coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ...
Report on the statement coverage for the given files. With the -m
option, show line numbers of the statements that weren't executed.
@@ -49,16 +54,26 @@
Coverage data is saved in the file .coverage by default. Set the
COVERAGE_FILE environment variable to save it somewhere else."""
-__version__ = "2.5.20051204" # see detailed history at the end of this file.
+__version__ = "2.77.20070729" # see detailed history at the end of this file.
import compiler
import compiler.visitor
+import glob
import os
import re
import string
+import symbol
import sys
import threading
+import token
import types
+from socket import gethostname
+
+# Python version compatibility
+try:
+ strclass = basestring # new to 2.3
+except:
+ strclass = str
# 2. IMPLEMENTATION
#
@@ -81,25 +96,29 @@
# names to increase speed.
class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
+ """ A visitor for a parsed Abstract Syntax Tree which finds executable
+ statements.
+ """
def __init__(self, statements, excluded, suite_spots):
compiler.visitor.ASTVisitor.__init__(self)
self.statements = statements
self.excluded = excluded
self.suite_spots = suite_spots
self.excluding_suite = 0
-
+
def doRecursive(self, node):
- self.recordNodeLine(node)
for n in node.getChildNodes():
self.dispatch(n)
visitStmt = visitModule = doRecursive
-
+
def doCode(self, node):
if hasattr(node, 'decorators') and node.decorators:
self.dispatch(node.decorators)
- self.doSuite(node, node.code)
-
+ self.recordAndDispatch(node.code)
+ else:
+ self.doSuite(node, node.code)
+
visitFunction = visitClass = doCode
def getFirstLine(self, node):
@@ -119,17 +138,40 @@
for n in node.getChildNodes():
lineno = max(lineno, self.getLastLine(n))
return lineno
-
+
def doStatement(self, node):
self.recordLine(self.getFirstLine(node))
- visitAssert = visitAssign = visitAssTuple = visitDiscard = visitPrint = \
+ visitAssert = visitAssign = visitAssTuple = visitPrint = \
visitPrintnl = visitRaise = visitSubscript = visitDecorators = \
doStatement
+
+ def visitPass(self, node):
+ # Pass statements have weird interactions with docstrings. If this
+ # pass statement is part of one of those pairs, claim that the statement
+ # is on the later of the two lines.
+ l = node.lineno
+ if l:
+ lines = self.suite_spots.get(l, [l,l])
+ self.statements[lines[1]] = 1
+
+ def visitDiscard(self, node):
+ # Discard nodes are statements that execute an expression, but then
+ # discard the results. This includes function calls, so we can't
+ # ignore them all. But if the expression is a constant, the statement
+ # won't be "executed", so don't count it now.
+ if node.expr.__class__.__name__ != 'Const':
+ self.doStatement(node)
def recordNodeLine(self, node):
- return self.recordLine(node.lineno)
-
+ # Stmt nodes often have None, but shouldn't claim the first line of
+ # their children (because the first child might be an ignorable line
+ # like "global a").
+ if node.__class__.__name__ != 'Stmt':
+ return self.recordLine(self.getFirstLine(node))
+ else:
+ return 0
+
def recordLine(self, lineno):
# Returns a bool, whether the line is included or excluded.
if lineno:
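A quick way to see the distinction visitDiscard draws above, using the same `compiler` package this module already imports: a bare constant expression compiles to Discard(Const) and is skipped, while a function call compiles to Discard(CallFunc) and is counted:

    import compiler

    tree = compiler.parse("3\nfoo()\n")
    for node in tree.node.nodes:
        print node.__class__.__name__, node.expr.__class__.__name__
    # Discard Const      -> not recorded as an executable statement
    # Discard CallFunc   -> recorded, since calls have side effects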
@@ -137,7 +179,7 @@
# keyword.
if lineno in self.suite_spots:
lineno = self.suite_spots[lineno][0]
- # If we're inside an exluded suite, record that this line was
+ # If we're inside an excluded suite, record that this line was
# excluded.
if self.excluding_suite:
self.excluded[lineno] = 1
@@ -153,9 +195,9 @@
self.statements[lineno] = 1
return 1
return 0
-
+
default = recordNodeLine
-
+
def recordAndDispatch(self, node):
self.recordNodeLine(node)
self.dispatch(node)
@@ -166,7 +208,7 @@
self.excluding_suite = 1
self.recordAndDispatch(body)
self.excluding_suite = exsuite
-
+
def doPlainWordSuite(self, prevsuite, suite):
# Finding the exclude lines for else's is tricky, because they aren't
# present in the compiler parse tree. Look at the previous suite,
@@ -180,15 +222,17 @@
break
else:
self.doSuite(None, suite)
-
+
def doElse(self, prevsuite, node):
if node.else_:
self.doPlainWordSuite(prevsuite, node.else_)
-
+
def visitFor(self, node):
self.doSuite(node, node.body)
self.doElse(node.body, node)
+ visitWhile = visitFor
+
def visitIf(self, node):
# The first test has to be handled separately from the rest.
# The first test is credited to the line with the "if", but the others
@@ -198,10 +242,6 @@
self.doSuite(t, n)
self.doElse(node.tests[-1][1], node)
- def visitWhile(self, node):
- self.doSuite(node, node.body)
- self.doElse(node.body, node)
-
def visitTryExcept(self, node):
self.doSuite(node, node.body)
for i in range(len(node.handlers)):
@@ -216,11 +256,14 @@
else:
self.doSuite(a, h)
self.doElse(node.handlers[-1][2], node)
-
+
def visitTryFinally(self, node):
self.doSuite(node, node.body)
self.doPlainWordSuite(node.body, node.final)
-
+
+ def visitWith(self, node):
+ self.doSuite(node, node.body)
+
def visitGlobal(self, node):
# "global" statements don't execute like others (they don't call the
# trace function), so don't record their line numbers.
@@ -228,9 +271,9 @@
the_coverage = None
+class CoverageException(Exception): pass
+
class coverage:
- error = "coverage error"
-
# Name of the cache file (unless environment variable is set).
cache_default = ".coverage"
@@ -240,7 +283,7 @@
# A dictionary with an entry for (Python source file name, line number
# in that file) if that line has been executed.
c = {}
-
+
# A map from canonical Python source file name to a dictionary in
# which there's an entry for each line number that has been
# executed.
@@ -257,53 +300,58 @@
def __init__(self):
global the_coverage
if the_coverage:
- raise self.error, "Only one coverage object allowed."
+ raise CoverageException, "Only one coverage object allowed."
self.usecache = 1
self.cache = None
+ self.parallel_mode = False
self.exclude_re = ''
self.nesting = 0
self.cstack = []
self.xstack = []
- self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.path.sep)
+ self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.sep)
+ self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')
- # t(f, x, y). This method is passed to sys.settrace as a trace function.
- # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
+ # t(f, x, y). This method is passed to sys.settrace as a trace function.
+ # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
# the arguments and return value of the trace function.
# See [van Rossum 2001-07-20a, 3.2] for a description of frame and code
# objects.
-
- def t(self, f, w, a): #pragma: no cover
- #print w, f.f_code.co_filename, f.f_lineno
+
+ def t(self, f, w, unused): #pragma: no cover
if w == 'line':
+ #print "Executing %s @ %d" % (f.f_code.co_filename, f.f_lineno)
self.c[(f.f_code.co_filename, f.f_lineno)] = 1
for c in self.cstack:
c[(f.f_code.co_filename, f.f_lineno)] = 1
return self.t
-
- def help(self, error=None):
+
+ def help(self, error=None): #pragma: no cover
if error:
print error
print
print __doc__
sys.exit(1)
- def command_line(self):
+ def command_line(self, argv, help_fn=None):
import getopt
+ help_fn = help_fn or self.help
settings = {}
optmap = {
'-a': 'annotate',
+ '-c': 'collect',
'-d:': 'directory=',
'-e': 'erase',
'-h': 'help',
'-i': 'ignore-errors',
'-m': 'show-missing',
+ '-p': 'parallel-mode',
'-r': 'report',
'-x': 'execute',
- '-o': 'omit=',
+ '-o:': 'omit=',
}
short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
long_opts = optmap.values()
- options, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
+ options, args = getopt.getopt(argv, short_opts, long_opts)
for o, a in options:
if optmap.has_key(o):
settings[optmap[o]] = 1
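Note that the constructor now installs a default exclusion pattern, so lines carrying a "# pragma: no cover" marker are dropped from reports without any explicit exclude() call. A quick check of what that regex (copied from __init__ above) accepts; the sample lines are invented:

    import re
    pattern = '# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]'
    for line in ['    return 0  # pragma: no cover',
                 '    return 0  #pragma NO COVER',
                 '    return 0  # an ordinary line']:
        print bool(re.search(pattern, line)), line
    # prints: True, True, False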
@@ -312,69 +360,84 @@
elif o[2:] in long_opts:
settings[o[2:]] = 1
elif o[2:] + '=' in long_opts:
- settings[o[2:]] = a
- else:
- self.help("Unknown option: '%s'." % o)
+ settings[o[2:]+'='] = a
+ else: #pragma: no cover
+ pass # Can't get here, because getopt won't return anything unknown.
+
if settings.get('help'):
- self.help()
+ help_fn()
+
for i in ['erase', 'execute']:
- for j in ['annotate', 'report']:
+ for j in ['annotate', 'report', 'collect']:
if settings.get(i) and settings.get(j):
- self.help("You can't specify the '%s' and '%s' "
+ help_fn("You can't specify the '%s' and '%s' "
"options at the same time." % (i, j))
+
args_needed = (settings.get('execute')
or settings.get('annotate')
or settings.get('report'))
- action = settings.get('erase') or args_needed
+ action = (settings.get('erase')
+ or settings.get('collect')
+ or args_needed)
if not action:
- self.help("You must specify at least one of -e, -x, -r, or -a.")
+ help_fn("You must specify at least one of -e, -x, -c, -r, or -a.")
if not args_needed and args:
- self.help("Unexpected arguments %s." % args)
-
+ help_fn("Unexpected arguments: %s" % " ".join(args))
+
+ self.parallel_mode = settings.get('parallel-mode')
self.get_ready()
- self.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
if settings.get('erase'):
self.erase()
if settings.get('execute'):
if not args:
- self.help("Nothing to do.")
+ help_fn("Nothing to do.")
sys.argv = args
self.start()
import __main__
sys.path[0] = os.path.dirname(sys.argv[0])
execfile(sys.argv[0], __main__.__dict__)
+ if settings.get('collect'):
+ self.collect()
if not args:
args = self.cexecuted.keys()
+
ignore_errors = settings.get('ignore-errors')
show_missing = settings.get('show-missing')
- directory = settings.get('directory')
- omit = filter(None, settings.get('omit', '').split(','))
- omit += ['/<'] # Always skip /<string> etc.
+ directory = settings.get('directory=')
+
+ omit = settings.get('omit=')
+ if omit is not None:
+ omit = omit.split(',')
+ else:
+ omit = []
if settings.get('report'):
self.report(args, show_missing, ignore_errors, omit_prefixes=omit)
if settings.get('annotate'):
self.annotate(args, directory, ignore_errors, omit_prefixes=omit)
- def use_cache(self, usecache):
+ def use_cache(self, usecache, cache_file=None):
self.usecache = usecache
-
- def get_ready(self):
+ if cache_file and not self.cache:
+ self.cache_default = cache_file
+
+ def get_ready(self, parallel_mode=False):
if self.usecache and not self.cache:
- self.cache = os.path.abspath(os.environ.get(self.cache_env,
- self.cache_default))
+ self.cache = os.environ.get(self.cache_env, self.cache_default)
+ if self.parallel_mode:
+ self.cache += "." + gethostname() + "." + str(os.getpid())
self.restore()
self.analysis_cache = {}
-
- def start(self):
+
+ def start(self, parallel_mode=False):
self.get_ready()
if self.nesting == 0: #pragma: no cover
sys.settrace(self.t)
if hasattr(threading, 'settrace'):
threading.settrace(self.t)
self.nesting += 1
-
+
def stop(self):
self.nesting -= 1
if self.nesting == 0: #pragma: no cover
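In parallel mode each process writes its own data file, named by appending the hostname and process id to the base cache name, and collect() (the new -c option) later folds those files back into one data set. A hedged sketch of the name get_ready() computes, plus the command-line sequence this supports; the host name, pid and scripts are illustrative:

    import os
    from socket import gethostname

    cache = os.environ.get('COVERAGE_FILE', '.coverage')
    parallel_cache = cache + "." + gethostname() + "." + str(os.getpid())
    # e.g. ".coverage.buildhost.12345"

    # Typical flow: one measured run per process, then combine and report:
    #   coverage.py -x -p prog1.py
    #   coverage.py -x -p prog2.py
    #   coverage.py -c
    #   coverage.py -r -m prog1.py prog2.py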
@@ -383,12 +446,12 @@
threading.settrace(None)
def erase(self):
+ self.get_ready()
self.c = {}
self.analysis_cache = {}
self.cexecuted = {}
if self.cache and os.path.exists(self.cache):
os.remove(self.cache)
- self.exclude_re = ""
def exclude(self, re):
if self.exclude_re:
@@ -398,7 +461,7 @@
def begin_recursive(self):
self.cstack.append(self.c)
self.xstack.append(self.exclude_re)
-
+
def end_recursive(self):
self.c = self.cstack.pop()
self.exclude_re = self.xstack.pop()
@@ -406,8 +469,6 @@
# save(). Save coverage data to the coverage cache.
def save(self):
- # move to directory that must exist.
- os.chdir(os.sep)
if self.usecache and self.cache:
self.canonicalize_filenames()
cache = open(self.cache, 'wb')
@@ -421,17 +482,45 @@
self.c = {}
self.cexecuted = {}
assert self.usecache
- if not os.path.exists(self.cache):
- return
+ if os.path.exists(self.cache):
+ self.cexecuted = self.restore_file(self.cache)
+
+ def restore_file(self, file_name):
try:
- cache = open(self.cache, 'rb')
+ cache = open(file_name, 'rb')
import marshal
cexecuted = marshal.load(cache)
cache.close()
if isinstance(cexecuted, types.DictType):
- self.cexecuted = cexecuted
+ return cexecuted
+ else:
+ return {}
except:
- pass
+ return {}
+
+ # collect(). Collect data from the multiple files produced by parallel mode.
+
+ def collect(self):
+ cache_dir, local = os.path.split(self.cache)
+ for f in os.listdir(cache_dir or '.'):
+ if not f.startswith(local):
+ continue
+
+ full_path = os.path.join(cache_dir, f)
+ cexecuted = self.restore_file(full_path)
+ self.merge_data(cexecuted)
+
+ def merge_data(self, new_data):
+ for file_name, file_data in new_data.items():
+ if self.cexecuted.has_key(file_name):
+ self.merge_file_data(self.cexecuted[file_name], file_data)
+ else:
+ self.cexecuted[file_name] = file_data
+
+ def merge_file_data(self, cache_data, new_data):
+ for line_number in new_data.keys():
+ if not cache_data.has_key(line_number):
+ cache_data[line_number] = new_data[line_number]
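The data being merged here has the same shape as what save() marshals: a map from canonical file name to a map of executed line numbers, each with the value 1. A minimal stand-alone equivalent of merge_data/merge_file_data, using made-up results from two parallel runs (dict.update suffices because the values are always 1):

    run_a = {'/src/mod.py': {1: 1, 2: 1}}
    run_b = {'/src/mod.py': {2: 1, 5: 1}, '/src/other.py': {1: 1}}

    merged = {}
    for data in (run_a, run_b):
        for file_name, file_data in data.items():
            merged.setdefault(file_name, {}).update(file_data)
    # merged == {'/src/mod.py': {1: 1, 2: 1, 5: 1}, '/src/other.py': {1: 1}}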
# canonical_filename(filename). Return a canonical filename for the
# file (that is, an absolute path with no redundant components and
@@ -452,11 +541,14 @@
self.canonical_filename_cache[filename] = cf
return self.canonical_filename_cache[filename]
- # canonicalize_filenames(). Copy results from "c" to "cexecuted",
+ # canonicalize_filenames(). Copy results from "c" to "cexecuted",
# canonicalizing filenames on the way. Clear the "c" map.
def canonicalize_filenames(self):
for filename, lineno in self.c.keys():
+ if filename == '<string>':
+ # Can't do anything useful with exec'd strings, so skip them.
+ continue
f = self.canonical_filename(filename)
if not self.cexecuted.has_key(f):
self.cexecuted[f] = {}
@@ -468,18 +560,20 @@
def morf_filename(self, morf):
if isinstance(morf, types.ModuleType):
if not hasattr(morf, '__file__'):
- raise self.error, "Module has no __file__ attribute."
- file = morf.__file__
+ raise CoverageException, "Module has no __file__ attribute."
+ f = morf.__file__
else:
- file = morf
- return self.canonical_filename(file)
+ f = morf
+ return self.canonical_filename(f)
# analyze_morf(morf). Analyze the module or filename passed as
# the argument. If the source code can't be found, raise an error.
# Otherwise, return a tuple of (1) the canonical filename of the
# source code for the module, (2) a list of lines of statements
- # in the source code, and (3) a list of lines of excluded statements.
-
+ # in the source code, (3) a list of lines of excluded statements,
+ # and (4), a map of line numbers to multi-line line number ranges, for
+ # statements that cross lines.
+
def analyze_morf(self, morf):
if self.analysis_cache.has_key(morf):
return self.analysis_cache[morf]
@@ -487,30 +581,69 @@
ext = os.path.splitext(filename)[1]
if ext == '.pyc':
if not os.path.exists(filename[0:-1]):
- raise self.error, ("No source for compiled code '%s'."
+ raise CoverageException, ("No source for compiled code '%s'."
% filename)
filename = filename[0:-1]
elif ext != '.py':
- raise self.error, "File '%s' not Python source." % filename
+ raise CoverageException, "File '%s' not Python source." % filename
source = open(filename, 'r')
- lines, excluded_lines = self.find_executable_statements(
+ lines, excluded_lines, line_map = self.find_executable_statements(
source.read(), exclude=self.exclude_re
)
source.close()
- result = filename, lines, excluded_lines
+ result = filename, lines, excluded_lines, line_map
self.analysis_cache[morf] = result
return result
+ def first_line_of_tree(self, tree):
+ while True:
+ if len(tree) == 3 and type(tree[2]) == type(1):
+ return tree[2]
+ tree = tree[1]
+
+ def last_line_of_tree(self, tree):
+ while True:
+ if len(tree) == 3 and type(tree[2]) == type(1):
+ return tree[2]
+ tree = tree[-1]
+
+ def find_docstring_pass_pair(self, tree, spots):
+ for i in range(1, len(tree)):
+ if self.is_string_constant(tree[i]) and self.is_pass_stmt(tree[i+1]):
+ first_line = self.first_line_of_tree(tree[i])
+ last_line = self.last_line_of_tree(tree[i+1])
+ self.record_multiline(spots, first_line, last_line)
+
+ def is_string_constant(self, tree):
+ try:
+ return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt
+ except:
+ return False
+
+ def is_pass_stmt(self, tree):
+ try:
+ return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt
+ except:
+ return False
+
+ def record_multiline(self, spots, i, j):
+ for l in range(i, j+1):
+ spots[l] = (i, j)
+
def get_suite_spots(self, tree, spots):
- import symbol, token
+ """ Analyze a parse tree to find suite introducers which span a number
+ of lines.
+ """
for i in range(1, len(tree)):
- if isinstance(tree[i], tuple):
+ if type(tree[i]) == type(()):
if tree[i][0] == symbol.suite:
# Found a suite, look back for the colon and keyword.
lineno_colon = lineno_word = None
for j in range(i-1, 0, -1):
if tree[j][0] == token.COLON:
- lineno_colon = tree[j][2]
+ # Colons are never executed themselves: we want the
+ # line number of the last token before the colon.
+ lineno_colon = self.last_line_of_tree(tree[j-1])
elif tree[j][0] == token.NAME:
if tree[j][1] == 'elif':
# Find the line number of the first non-terminal
@@ -532,8 +665,18 @@
if lineno_colon and lineno_word:
# Found colon and keyword, mark all the lines
# between the two with the two line numbers.
- for l in range(lineno_word, lineno_colon+1):
- spots[l] = (lineno_word, lineno_colon)
+ self.record_multiline(spots, lineno_word, lineno_colon)
+
+ # "pass" statements are tricky: different versions of Python
+ # treat them differently, especially in the common case of a
+ # function with a doc string and a single pass statement.
+ self.find_docstring_pass_pair(tree[i], spots)
+
+ elif tree[i][0] == symbol.simple_stmt:
+ first_line = self.first_line_of_tree(tree[i])
+ last_line = self.last_line_of_tree(tree[i])
+ if first_line != last_line:
+ self.record_multiline(spots, first_line, last_line)
self.get_suite_spots(tree[i], spots)
def find_executable_statements(self, text, exclude=None):
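first_line_of_tree and last_line_of_tree above lean on the tuple shape produced by Python 2's parser module when line numbers are requested: interior nodes are (symbol, child, child, ...) and terminal tokens are (token, string, lineno), hence the "3-tuple whose last element is an int" test. A short look at that structure; the source string is arbitrary:

    import parser

    tree = parser.suite("x = (1 +\n     2)\n").totuple(1)
    # Terminals look like (token.NAME, 'x', 1) or (token.NUMBER, '2', 2).

    def first_line(t):
        # Follow the leftmost branch down to a terminal token.
        while not (len(t) == 3 and type(t[2]) == type(1)):
            t = t[1]
        return t[2]

    print first_line(tree[1])     # 1, the line the statement begins on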
@@ -547,10 +690,13 @@
if reExclude.search(lines[i]):
excluded[i+1] = 1
+ # Parse the code and analyze the parse tree to find out which statements
+ # are multiline, and where suites begin and end.
import parser
tree = parser.suite(text+'\n\n').totuple(1)
self.get_suite_spots(tree, suite_spots)
-
+ #print "Suite spots:", suite_spots
+
# Use the compiler module to parse the text and find the executable
# statements. We add newlines to be impervious to final partial lines.
statements = {}
@@ -562,7 +708,7 @@
lines.sort()
excluded_lines = excluded.keys()
excluded_lines.sort()
- return lines, excluded_lines
+ return lines, excluded_lines, suite_spots
# format_lines(statements, lines). Format a list of line numbers
# for printing by coalescing groups of lines as long as the lines
@@ -595,7 +741,8 @@
return "%d" % start
else:
return "%d-%d" % (start, end)
- return string.join(map(stringify, pairs), ", ")
+ ret = string.join(map(stringify, pairs), ", ")
+ return ret
# Backward compatibility with version 1.
def analysis(self, morf):
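For context, format_lines is what produces the compact Missing column in reports: runs of consecutive missing statement lines collapse into ranges. A simplified sketch of the coalescing; it folds any consecutive integers, whereas the real method only bridges lines that are themselves statements:

    def coalesce(lines):
        pairs = []
        for l in lines:
            if pairs and pairs[-1][1] == l - 1:
                pairs[-1] = (pairs[-1][0], l)
            else:
                pairs.append((l, l))
        def stringify(pair):
            start, end = pair
            if start == end:
                return "%d" % start
            return "%d-%d" % (start, end)
        return ", ".join(map(stringify, pairs))

    print coalesce([1, 2, 5, 7, 8, 9])    # -> "1-2, 5, 7-9"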
@@ -603,13 +750,17 @@
return f, s, m, mf
def analysis2(self, morf):
- filename, statements, excluded = self.analyze_morf(morf)
+ filename, statements, excluded, line_map = self.analyze_morf(morf)
self.canonicalize_filenames()
if not self.cexecuted.has_key(filename):
self.cexecuted[filename] = {}
missing = []
for line in statements:
- if not self.cexecuted[filename].has_key(line):
+ lines = line_map.get(line, [line, line])
+ for l in range(lines[0], lines[1]+1):
+ if self.cexecuted[filename].has_key(l):
+ break
+ else:
missing.append(line)
return (filename, statements, excluded, missing,
self.format_lines(statements, missing))
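The effect of the new line_map in analysis2: a statement that spans several physical lines counts as executed if any line in its (first, last) range appears in the execution data, which keeps results stable across Python versions that report different lines for the same statement. A tiny worked example with invented data:

    line_map = {10: (10, 12)}     # a statement starting at 10 spans lines 10-12
    executed = {12: 1}            # the trace function recorded line 12

    missing = []
    for line in [10]:
        lines = line_map.get(line, [line, line])
        for l in range(lines[0], lines[1] + 1):
            if executed.has_key(l):
                break
        else:
            missing.append(line)
    # missing == []: line 10 is considered covered via line 12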
@@ -647,6 +798,15 @@
def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]):
if not isinstance(morfs, types.ListType):
morfs = [morfs]
+ # On Windows, the shell doesn't expand wildcards. Do it here.
+ globbed = []
+ for morf in morfs:
+ if isinstance(morf, strclass):
+ globbed.extend(glob.glob(morf))
+ else:
+ globbed.append(morf)
+ morfs = globbed
+
morfs = self.filter_by_prefix(morfs, omit_prefixes)
morfs.sort(self.morf_name_compare)
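The glob expansion above only applies to string arguments; module objects pass through untouched (the patch checks against strclass so unicode paths also qualify). A minimal equivalent, using the standard os module as the stand-in module object and an invented wildcard:

    import glob, os

    morfs = ['tests/test-*.py', os]       # a wildcard string and a module
    globbed = []
    for morf in morfs:
        if isinstance(morf, str):
            globbed.extend(glob.glob(morf))   # let Python expand the pattern
        else:
            globbed.append(morf)              # keep modules as-is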
@@ -684,8 +844,8 @@
raise
except:
if not ignore_errors:
- type, msg = sys.exc_info()[0:2]
- print >>file, fmt_err % (name, type, msg)
+ typ, msg = sys.exc_info()[0:2]
+ print >>file, fmt_err % (name, typ, msg)
if len(morfs) > 1:
print >>file, "-" * len(header)
if total_statements > 0:
@@ -713,7 +873,7 @@
except:
if not ignore_errors:
raise
-
+
def annotate_file(self, filename, statements, excluded, missing, directory=None):
source = open(filename, 'r')
if directory:
@@ -741,7 +901,7 @@
if self.blank_re.match(line):
dest.write(' ')
elif self.else_re.match(line):
- # Special logic for lines containing only 'else:'.
+ # Special logic for lines containing only 'else:'.
# See [GDR 2001-12-04b, 3.2].
if i >= len(statements) and j >= len(missing):
dest.write('! ')
@@ -765,18 +925,41 @@
the_coverage = coverage()
# Module functions call methods in the singleton object.
-def use_cache(*args, **kw): return the_coverage.use_cache(*args, **kw)
-def start(*args, **kw): return the_coverage.start(*args, **kw)
-def stop(*args, **kw): return the_coverage.stop(*args, **kw)
-def erase(*args, **kw): return the_coverage.erase(*args, **kw)
-def begin_recursive(*args, **kw): return the_coverage.begin_recursive(*args, **kw)
-def end_recursive(*args, **kw): return the_coverage.end_recursive(*args, **kw)
-def exclude(*args, **kw): return the_coverage.exclude(*args, **kw)
-def analysis(*args, **kw): return the_coverage.analysis(*args, **kw)
-def analysis2(*args, **kw): return the_coverage.analysis2(*args, **kw)
-def report(*args, **kw): return the_coverage.report(*args, **kw)
-def annotate(*args, **kw): return the_coverage.annotate(*args, **kw)
-def annotate_file(*args, **kw): return the_coverage.annotate_file(*args, **kw)
+def use_cache(*args, **kw):
+ return the_coverage.use_cache(*args, **kw)
+
+def start(*args, **kw):
+ return the_coverage.start(*args, **kw)
+
+def stop(*args, **kw):
+ return the_coverage.stop(*args, **kw)
+
+def erase(*args, **kw):
+ return the_coverage.erase(*args, **kw)
+
+def begin_recursive(*args, **kw):
+ return the_coverage.begin_recursive(*args, **kw)
+
+def end_recursive(*args, **kw):
+ return the_coverage.end_recursive(*args, **kw)
+
+def exclude(*args, **kw):
+ return the_coverage.exclude(*args, **kw)
+
+def analysis(*args, **kw):
+ return the_coverage.analysis(*args, **kw)
+
+def analysis2(*args, **kw):
+ return the_coverage.analysis2(*args, **kw)
+
+def report(*args, **kw):
+ return the_coverage.report(*args, **kw)
+
+def annotate(*args, **kw):
+ return the_coverage.annotate(*args, **kw)
+
+def annotate_file(*args, **kw):
+ return the_coverage.annotate_file(*args, **kw)
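These module-level wrappers drive the single shared coverage object, so a program can measure itself programmatically. A minimal hedged usage sketch, assuming this file is importable as coverage; mymodule is a stand-in for whatever code is being measured:

    import coverage

    coverage.erase()
    coverage.start()
    import mymodule                        # hypothetical module under test
    mymodule.main()
    coverage.stop()
    coverage.report(mymodule, show_missing=1)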
# Save coverage data when Python exits. (The atexit module wasn't
# introduced until Python 2.0, so use sys.exitfunc when it's not
@@ -789,7 +972,7 @@
# Command-line interface.
if __name__ == '__main__':
- the_coverage.command_line()
+ the_coverage.command_line(sys.argv[1:])
# A. REFERENCES
@@ -850,7 +1033,7 @@
# Thanks, Allen.
#
# 2005-12-02 NMB Call threading.settrace so that all threads are measured.
-# Thanks Martin Fuzzey. Add a file argument to report so that reports can be
+# Thanks Martin Fuzzey. Add a file argument to report so that reports can be
# captured to a different destination.
#
# 2005-12-03 NMB coverage.py can now measure itself.
@@ -858,10 +1041,46 @@
# 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames,
# and sorting and omitting files to report on.
#
+# 2006-07-23 NMB Applied Joseph Tate's patch for function decorators.
+#
+# 2006-08-21 NMB Applied Sigve Tjora and Mark van der Wal's fixes for argument
+# handling.
+#
+# 2006-08-22 NMB Applied Geoff Bache's parallel mode patch.
+#
+# 2006-08-23 NMB Refactorings to improve testability. Fixes to command-line
+# logic for parallel mode and collect.
+#
+# 2006-08-25 NMB "#pragma: nocover" is excluded by default.
+#
+# 2006-09-10 NMB Properly ignore docstrings and other constant expressions that
+# appear in the middle of a function, a problem reported by Tim Leslie.
+# Minor changes to avoid lint warnings.
+#
+# 2006-09-17 NMB coverage.erase() shouldn't clobber the exclude regex.
+# Change how parallel mode is invoked, and fix erase() so that it erases the
+# cache when called programmatically.
+#
+# 2007-07-21 NMB In reports, ignore code executed from strings, since we can't
+# do anything useful with it anyway.
+# Better file handling on Linux, thanks Guillaume Chazarain.
+# Better shell support on Windows, thanks Noel O'Boyle.
+# Python 2.2 support maintained, thanks Catherine Proulx.
+#
+# 2007-07-22 NMB Python 2.5 now fully supported. The method of dealing with
+# multi-line statements is now less sensitive to the exact line that Python
+# reports during execution. Pass statements are handled specially so that their
+# disappearance during execution won't throw off the measurement.
+#
+# 2007-07-23 NMB Now Python 2.5 is *really* fully supported: the body of the
+# new with statement is counted as executable.
+#
+# 2007-07-29 NMB Better packaging.
+
# C. COPYRIGHT AND LICENCE
#
# Copyright 2001 Gareth Rees. All rights reserved.
-# Copyright 2004-2005 Ned Batchelder. All rights reserved.
+# Copyright 2004-2007 Ned Batchelder. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
@@ -888,4 +1107,4 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#
-# $Id: coverage.py 26 2005-12-04 18:42:44Z ned $
+# $Id: coverage.py 74 2007-07-29 22:28:35Z nedbat $
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgweb-commands Fri Dec 07 14:59:33 2007 -0600
@@ -0,0 +1,53 @@
+#!/bin/sh
+# An attempt at more fully testing the hgweb web interface.
+# The following things are tested elsewhere and are therefore omitted:
+# - archive, tested in test-archive
+# - unbundle, tested in test-push-http
+# - changegroupsubset, tested in test-pull
+
+echo % Set up the repo
+hg init test
+cd test
+mkdir da
+echo foo > da/foo
+echo foo > foo
+hg ci -d'0 0' -Ambase
+hg tag 1.0
+hg serve -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
+cat hg.pid >> $DAEMON_PIDS
+
+echo % Logs and changes
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/foo/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/shortlog/' | sed "s/[0-9]* years/many years/"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/1/?style=raw'
+
+echo % File-related
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo/?style=raw'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/annotate/1/foo/?style=raw'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/?style=raw'
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/filediff/1/foo/?style=raw'
+
+echo % Overviews
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/tags/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb' | sed "s/[0-9]* years ago/long ago/"
+
+echo % capabilities
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/capabilities'
+echo % heads
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/heads'
+echo % lookup
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/lookup/1'
+echo % branches
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/branches'
+echo % changegroup
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/changegroup'
+echo % stream_out
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/stream_out'
+
+echo % Static files
+"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css'
+
+echo % ERRORS ENCOUNTERED
+cat errors.log
Binary file tests/test-hgweb-commands.out has changed
--- a/tests/test-hgwebdir Fri Dec 07 02:29:55 2007 -0600
+++ b/tests/test-hgwebdir Fri Dec 07 14:59:33 2007 -0600
@@ -27,7 +27,7 @@
EOF
hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf paths.conf \
- -A access-paths.log -E error-paths.log
+ -A access-paths.log -E error-paths-1.log
cat hg.pid >> $DAEMON_PIDS
echo % should give a 404 - file does not exist
@@ -48,7 +48,7 @@
EOF
hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
- -A access-paths.log -E error-paths.log
+ -A access-paths.log -E error-paths-2.log
cat hg.pid >> $DAEMON_PIDS
echo % should succeed, slashy names
@@ -75,3 +75,10 @@
"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/file/tip/a?style=raw'
"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/b/file/tip/b?style=raw'
"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/c/file/tip/c?style=raw'
+
+echo % paths errors 1
+cat error-paths-1.log
+echo % paths errors 2
+cat error-paths-2.log
+echo % collections errors
+cat error-collections.log
--- a/tests/test-hgwebdir.out Fri Dec 07 02:29:55 2007 -0600
+++ b/tests/test-hgwebdir.out Fri Dec 07 14:59:33 2007 -0600
@@ -119,3 +119,6 @@
200 Script output follows
c
+% paths errors 1
+% paths errors 2
+% collections errors