Mercurial repository hg-stable

changeset:   1605:428e0a9c9968
description: merge with mainline
author:      Vadim Gelfer <vadim.gelfer@gmail.com>
date:        Wed, 28 Dec 2005 07:22:26 -0800
parents:     da3f1121721b, fb4149eebdd4
children:    ba625c8083d8
diffstat:    7 files changed, 134 insertions(+), 151 deletions(-)
line diff
--- a/hgeditor Wed Dec 28 07:13:52 2005 -0800
+++ b/hgeditor Wed Dec 28 07:22:26 2005 -0800
@@ -1,10 +1,7 @@
 #!/bin/sh
 #
-# This is an example of using HGEDITOR to automate the signing of
-# commits and so on.
-
-# change this to one to turn on GPG support
-SIGN=0
+# This is an example of using HGEDITOR to create of diff to review the
+# changes while commiting.
 
 # If you want to pass your favourite editor some other parameters
 # only for Mercurial, modify this:
@@ -43,12 +40,7 @@
 done
 )
 
-echo > "$HGTMP/msg"
-if [ "$SIGN" == "1" ]; then
-    MANIFEST=`grep '^HG: manifest hash' "$1" | cut -b 19-`
-    echo -e "\nmanifest hash: $MANIFEST" >> "$HGTMP/msg"
-fi
-grep -vE '^(HG: manifest hash .*)?$' "$1" >> "$HGTMP/msg"
+cat "$1" > "$HGTMP/msg"
 
 CHECKSUM=`md5sum "$HGTMP/msg"`
 if [ -s "$HGTMP/diff" ]; then
@@ -58,14 +50,6 @@
 fi
 
 echo "$CHECKSUM" | md5sum -c >/dev/null 2>&1 && exit 13
-if [ "$SIGN" == "1" ]; then
-    {
-        head -n 1 "$HGTMP/msg"
-        echo
-        grep -v "^HG:" "$HGTMP/msg" | gpg -t -a -u "${HGUSER}" --clearsign
-    } > "$HGTMP/msg.gpg" && mv "$HGTMP/msg.gpg" "$1"
-else
-    mv "$HGTMP/msg" "$1"
-fi
+mv "$HGTMP/msg" "$1"
 
 exit $?
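Aside: the unchanged-message guard kept in hgeditor above (take an md5sum of the message before the editor runs, re-check it with md5sum -c afterwards, and exit 13 if nothing changed) is a reusable pattern. Below is a minimal Python sketch of the same idea; edit_and_check and the vi default are illustrative stand-ins, not part of Mercurial.

    import hashlib
    import subprocess
    import sys

    def edit_and_check(path, editor="vi"):
        """Open `path` in an editor and report whether the user changed it."""
        with open(path, "rb") as f:
            before = hashlib.md5(f.read()).hexdigest()

        subprocess.call([editor, path])

        with open(path, "rb") as f:
            after = hashlib.md5(f.read()).hexdigest()

        # Mirror hgeditor's convention: a falsy result means the message was
        # left untouched and the caller should abort the commit.
        return before != after

    if __name__ == "__main__":
        if not edit_and_check(sys.argv[1]):
            sys.exit(13)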
--- a/mercurial/commands.py Wed Dec 28 07:13:52 2005 -0800
+++ b/mercurial/commands.py Wed Dec 28 07:22:26 2005 -0800
@@ -463,7 +463,8 @@
         opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                       longopt and " --%s" % longopt),
                            "%s%s" % (desc,
-                                     default and _(" (default: %s)") % default
+                                     default
+                                     and _(" (default: %s)") % default
                                      or "")))
 
     if opt_output:
@@ -730,7 +731,8 @@
     revs = None
     if opts['rev']:
         if not other.local():
-            raise util.Abort("clone -r not supported yet for remote repositories.")
+            error = "clone -r not supported yet for remote repositories."
+            raise util.Abort(error)
         else:
             revs = [other.lookup(rev) for rev in opts['rev']]
     repo = hg.repository(ui, dest, create=1)
@@ -985,7 +987,8 @@
             ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
             errors += 1
     if errors:
-        raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
+        error = _(".hg/dirstate inconsistent with current parent's manifest")
+        raise util.Abort(error)
 
 def debugconfig(ui):
     """show combined config settings from all hgrc files"""
@@ -1176,7 +1179,8 @@
     revs = list(revrange(ui, repo, changesets))
     total = len(revs)
     revwidth = max(map(len, revs))
-    ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
+    msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
+    ui.note(msg)
     for cset in revs:
         seqno += 1
         doexport(ui, repo, cset, seqno, total, revwidth, opts)
@@ -2019,7 +2023,7 @@
         for f in changes:
             ui.write(format % f)
 
-def tag(ui, repo, name, rev=None, **opts):
+def tag(ui, repo, name, rev_=None, **opts):
     """add a tag for the current tip or a given revision
 
     Name a particular revision using <name>.
@@ -2037,10 +2041,10 @@
     """
     if name == "tip":
        raise util.Abort(_("the name 'tip' is reserved"))
-    if 'rev' in opts:
-        rev = opts['rev']
-    if rev:
-        r = hex(repo.lookup(rev))
+    if opts['rev']:
+        rev_ = opts['rev']
+    if rev_:
+        r = hex(repo.lookup(rev_))
     else:
         r = hex(repo.changelog.tip())
 
@@ -2095,7 +2099,7 @@
     n = repo.changelog.tip()
     show_changeset(ui, repo, changenode=n)
 
-def unbundle(ui, repo, fname):
+def unbundle(ui, repo, fname, **opts):
     """apply a changegroup file
 
     Apply a compressed changegroup file generated by the bundle
@@ -2112,7 +2116,13 @@
             yield zd.decompress(chunk)
 
     bzgen = bzgenerator(util.filechunkiter(f, 4096))
-    repo.addchangegroup(util.chunkbuffer(bzgen))
+    if repo.addchangegroup(util.chunkbuffer(bzgen)):
+        return 1
+
+    if opts['update']:
+        return update(ui, repo)
+    else:
+        ui.status(_("(run 'hg update' to get a working copy)\n"))
 
 def undo(ui, repo):
     """undo the last commit or pull
@@ -2221,8 +2231,10 @@
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('e', 'ssh', "", _('specify ssh command to use')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
-         ('r', 'rev', [], _('a changeset you would like to have after cloning')),
-         ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
+         ('r', 'rev', [],
+          _('a changeset you would like to have after cloning')),
+         ('', 'remotecmd', "",
+          _('specify hg command to run on the remote side'))],
         _('hg clone [OPTION]... SOURCE [DEST]')),
     "^commit|ci": (commit,
@@ -2235,10 +2247,14 @@
          ('u', 'user', "", _('record user as commiter'))],
        _('hg commit [OPTION]... [FILE]...')),
     "copy|cp": (copy,
-        [('I', 'include', [], _('include names matching the given patterns')),
-         ('X', 'exclude', [], _('exclude names matching the given patterns')),
-         ('A', 'after', None, _('record a copy that has already occurred')),
-         ('f', 'force', None, _('forcibly copy over an existing managed file'))],
+        [('I', 'include', [],
+          _('include names matching the given patterns')),
+         ('X', 'exclude', [],
+          _('exclude names matching the given patterns')),
+         ('A', 'after', None,
+          _('record a copy that has already occurred')),
+         ('f', 'force', None,
+          _('forcibly copy over an existing managed file'))],
        _('hg copy [OPTION]... [SOURCE]... DEST')),
     "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
     "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
@@ -2279,7 +2295,8 @@
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('', 'all', None, _('print all revisions that match')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
-         ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
+         ('l', 'files-with-matches', None,
+          _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change'))],
@@ -2293,9 +2310,11 @@
     "identify|id": (identify, [], _('hg identify')),
     "import|patch": (import_,
-        [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
-          _('meaning as the corresponding patch option')),
-         ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
+        [('p', 'strip', 1,
+          _('directory strip option for patch. This has the same\n') +
+          _('meaning as the corresponding patch option')),
+         ('f', 'force', None,
+          _('skip check for outstanding uncommitted changes')),
          ('b', 'base', "", _('base path'))],
        "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
     "incoming|in": (incoming,
@@ -2307,8 +2326,10 @@
     "locate": (locate,
        [('r', 'rev', '', _('search the repository as it stood at rev')),
-         ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
-         ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
+         ('0', 'print0', None,
+          _('end filenames with NUL, for use with xargs')),
+         ('f', 'fullpath', None,
+          _('print complete paths from the filesystem root')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
        _('hg locate [OPTION]... [PATTERN]...')),
@@ -2333,16 +2354,19 @@
     "paths": (paths, [], _('hg paths [NAME]')),
     "^pull": (pull,
-        [('u', 'update', None, _('update the working directory to tip after pull')),
+        [('u', 'update', None,
+          _('update the working directory to tip after pull')),
          ('e', 'ssh', "", _('specify ssh command to use')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
-         ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
+         ('', 'remotecmd', "",
+          _('specify hg command to run on the remote side'))],
        _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
     "^push": (push,
        [('f', 'force', None, _('force push')),
          ('e', 'ssh', "", _('specify ssh command to use')),
-         ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
+         ('', 'remotecmd', "",
+          _('specify hg command to run on the remote side'))],
        _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
     "rawcommit": (rawcommit,
@@ -2355,14 +2379,20 @@
        _('hg rawcommit [OPTION]... [FILE]...')),
     "recover": (recover, [], _("hg recover")),
     "^remove|rm": (remove,
-        [('I', 'include', [], _('include names matching the given patterns')),
-         ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+        [('I', 'include', [],
+          _('include names matching the given patterns')),
+         ('X', 'exclude', [],
+          _('exclude names matching the given patterns'))],
        _("hg remove [OPTION]... FILE...")),
     "rename|mv": (rename,
-        [('I', 'include', [], _('include names matching the given patterns')),
-         ('X', 'exclude', [], _('exclude names matching the given patterns')),
-         ('A', 'after', None, _('record a rename that has already occurred')),
-         ('f', 'force', None, _('forcibly copy over an existing managed file'))],
+        [('I', 'include', [],
+          _('include names matching the given patterns')),
+         ('X', 'exclude', [],
+          _('exclude names matching the given patterns')),
+         ('A', 'after', None,
+          _('record a rename that has already occurred')),
+         ('f', 'force', None,
+          _('forcibly copy over an existing managed file'))],
        _('hg rename [OPTION]... [SOURCE]... DEST')),
     "^revert": (revert,
@@ -2377,7 +2407,8 @@
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
-         ('n', 'name', "", _('name to show in web pages (default: working dir)')),
+         ('n', 'name', "",
+          _('name to show in web pages (default: working dir)')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', "", _('web templates to use')),
          ('', 'style', "", _('template style to use')),
@@ -2390,7 +2421,8 @@
          ('r', 'removed', None, _('show only removed files')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('n', 'no-status', None, _('hide status prefix')),
-         ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
+         ('0', 'print0', None,
+          _('end filenames with NUL, for use with xargs')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
        _("hg status [OPTION]... [FILE]...")),
@@ -2406,8 +2438,9 @@
     "tip": (tip, [], _('hg tip')),
     "unbundle": (unbundle,
-        [],
-        _('hg unbundle FILE')),
+        [('u', 'update', None,
+          _('update the working directory to tip after unbundle'))],
+        _('hg unbundle [-u] FILE')),
     "undo": (undo, [], _('hg undo')),
     "^update|up|checkout|co": (update,
@@ -2423,7 +2456,8 @@
 globalopts = [
     ('R', 'repository', "", _("repository root directory")),
     ('', 'cwd', '', _("change working directory")),
-    ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
+    ('y', 'noninteractive', None,
+     _("do not prompt, assume 'yes' for any required answers")),
     ('q', 'quiet', None, _("suppress output")),
     ('v', 'verbose', None, _("enable additional output")),
     ('', 'debug', None, _("enable debugging output")),
--- a/mercurial/localrepo.py Wed Dec 28 07:13:52 2005 -0800
+++ b/mercurial/localrepo.py Wed Dec 28 07:22:26 2005 -0800
@@ -400,10 +400,6 @@
             fp1 = m1.get(f, nullid)
             fp2 = m2.get(f, nullid)
 
-            # is the same revision on two branches of a merge?
-            if fp2 == fp1:
-                fp2 = nullid
-
             if fp2 != nullid:
                 # is one parent an ancestor of the other?
                 fpa = r.ancestor(fp1, fp2)
@@ -413,7 +409,7 @@
                     fp2 = nullid
 
             # is the file unmodified from the parent?
-            if not meta and t == r.read(fp1):
+            if not meta and t == r.read(fp1) and fp2 == nullid:
                 # record the proper existing parent in manifest
                 # no need to add a revision
                 new[f] = fp1
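The localrepo.py change above narrows the commit-time shortcut that reuses an existing filelog revision: the file's text matching the first parent is no longer enough once a second parent is still pending, so a merge leaves a revision that records both parents. The sketch below is only a restatement of that visible condition; needs_new_filelog_entry is a made-up name for illustration.

    NULLID = b"\x00" * 20

    def needs_new_filelog_entry(text_unchanged, has_metadata, fp2):
        """Must commit write a new file revision instead of reusing fp1?

        Before the patch, text_unchanged alone allowed reuse; after it, a
        pending second parent (fp2 != NULLID) also forces a new revision.
        """
        return has_metadata or not text_unchanged or fp2 != NULLID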
--- a/mercurial/revlog.py Wed Dec 28 07:13:52 2005 -0800
+++ b/mercurial/revlog.py Wed Dec 28 07:22:26 2005 -0800
@@ -188,6 +188,7 @@
         self.datafile = datafile
         self.opener = opener
         self.cache = None
+        self.chunkcache = None
 
         try:
             i = self.opener(self.indexfile).read()
@@ -473,6 +474,35 @@
         """apply a list of patches to a string"""
         return mdiff.patches(t, pl)
 
+    def chunk(self, rev):
+        start, length = self.start(rev), self.length(rev)
+        end = start + length
+
+        def loadcache():
+            cache_length = max(4096 * 1024, length) # 4Mo
+            df = self.opener(self.datafile)
+            df.seek(start)
+            self.chunkcache = (start, df.read(cache_length))
+
+        if not self.chunkcache:
+            loadcache()
+
+        cache_start = self.chunkcache[0]
+        cache_end = cache_start + len(self.chunkcache[1])
+        if start >= cache_start and end <= cache_end:
+            # it is cached
+            offset = start - cache_start
+        else:
+            loadcache()
+            offset = 0
+
+        #def checkchunk():
+        #    df = self.opener(self.datafile)
+        #    df.seek(start)
+        #    return df.read(length)
+        #assert s == checkchunk()
+        return decompress(self.chunkcache[1][offset:offset + length])
+
     def delta(self, node):
         """return or calculate a delta between a node and its predecessor"""
         r = self.rev(node)
@@ -481,10 +511,7 @@
             return self.diff(self.revision(self.node(r - 1)),
                              self.revision(node))
         else:
-            f = self.opener(self.datafile)
-            f.seek(self.start(r))
-            data = f.read(self.length(r))
-            return decompress(data)
+            return self.chunk(r)
 
     def revision(self, node):
         """return an uncompressed revision of a given"""
@@ -494,33 +521,22 @@
         # look up what we need to read
         text = None
         rev = self.rev(node)
-        start, length, base, link, p1, p2, node = self.index[rev]
-        end = start + length
-        if base != rev: start = self.start(base)
+        base = self.base(rev)
 
         # do we have useful data cached?
         if self.cache and self.cache[1] >= base and self.cache[1] < rev:
             base = self.cache[1]
-            start = self.start(base + 1)
             text = self.cache[2]
-            last = 0
-
-        f = self.opener(self.datafile)
-        f.seek(start)
-        data = f.read(end - start)
-
-        if text is None:
-            last = self.length(base)
-            text = decompress(data[:last])
+        else:
+            text = self.chunk(base)
 
         bins = []
         for r in xrange(base + 1, rev + 1):
-            s = self.length(r)
-            bins.append(decompress(data[last:last + s]))
-            last = last + s
+            bins.append(self.chunk(r))
 
         text = mdiff.patches(text, bins)
 
+        p1, p2 = self.parents(node)
         if node != hash(text, p1, p2):
             raise RevlogError(_("integrity check failed on %s:%d") %
                               (self.datafile, rev))
@@ -650,7 +666,7 @@
                 #print "next x"
                 gx = x.next()
 
-    def group(self, nodelist, lookup, infocollect = None):
+    def group(self, nodelist, lookup, infocollect=None):
         """calculate a delta group
 
         Given a list of changeset revs, return a set of deltas and
@@ -660,7 +676,6 @@
         changesets. parent is parent[0]
         """
         revs = [self.rev(n) for n in nodelist]
-        needed = dict.fromkeys(revs, 1)
 
         # if we don't have any revisions touched by these changesets, bail
         if not revs:
@@ -671,88 +686,30 @@
         p = self.parents(self.node(revs[0]))[0]
         revs.insert(0, self.rev(p))
 
-        # for each delta that isn't contiguous in the log, we need to
-        # reconstruct the base, reconstruct the result, and then
-        # calculate the delta.  We also need to do this where we've
-        # stored a full version and not a delta
-        for i in xrange(0, len(revs) - 1):
-            a, b = revs[i], revs[i + 1]
-            if a + 1 != b or self.base(b) == b:
-                for j in xrange(self.base(a), a + 1):
-                    needed[j] = 1
-                for j in xrange(self.base(b), b + 1):
-                    needed[j] = 1
-
-        # calculate spans to retrieve from datafile
-        needed = needed.keys()
-        needed.sort()
-        spans = []
-        oo = -1
-        ol = 0
-        for n in needed:
-            if n < 0: continue
-            o = self.start(n)
-            l = self.length(n)
-            if oo + ol == o: # can we merge with the previous?
-                nl = spans[-1][2]
-                nl.append((n, l))
-                ol += l
-                spans[-1] = (oo, ol, nl)
-            else:
-                oo = o
-                ol = l
-                spans.append((oo, ol, [(n, l)]))
-
-        # read spans in, divide up chunks
-        chunks = {}
-        for span in spans:
-            # we reopen the file for each span to make http happy for now
-            f = self.opener(self.datafile)
-            f.seek(span[0])
-            data = f.read(span[1])
-
-            # divide up the span
-            pos = 0
-            for r, l in span[2]:
-                chunks[r] = decompress(data[pos: pos + l])
-                pos += l
-
         # helper to reconstruct intermediate versions
         def construct(text, base, rev):
-            bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
+            bins = [self.chunk(r) for r in xrange(base + 1, rev + 1)]
             return mdiff.patches(text, bins)
 
         # build deltas
-        deltas = []
         for d in xrange(0, len(revs) - 1):
             a, b = revs[d], revs[d + 1]
-            n = self.node(b)
+            na = self.node(a)
+            nb = self.node(b)
 
             if infocollect is not None:
-                infocollect(n)
+                infocollect(nb)
 
             # do we need to construct a new delta?
             if a + 1 != b or self.base(b) == b:
-                if a >= 0:
-                    base = self.base(a)
-                    ta = chunks[self.base(a)]
-                    ta = construct(ta, base, a)
-                else:
-                    ta = ""
-
-                base = self.base(b)
-                if a > base:
-                    base = a
-                    tb = ta
-                else:
-                    tb = chunks[self.base(b)]
-                tb = construct(tb, base, b)
+                ta = self.revision(na)
+                tb = self.revision(nb)
                 d = self.diff(ta, tb)
             else:
-                d = chunks[b]
+                d = self.chunk(b)
 
-            p = self.parents(n)
-            meta = n + p[0] + p[1] + lookup(n)
+            p = self.parents(nb)
+            meta = nb + p[0] + p[1] + lookup(nb)
             l = struct.pack(">l", len(meta) + len(d) + 4)
             yield l
             yield meta
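The new revlog.chunk() above replaces per-revision seek/read/decompress with a single read-ahead window (self.chunkcache) that later calls slice from, which is what lets revision() and group() drop their hand-rolled span bookkeeping. Below is a simplified, standalone sketch of that caching pattern only; it assumes a plain file on disk, WindowedReader is a made-up name, and decompression is omitted.

    class WindowedReader(object):
        """Serve byte ranges from a file through one read-ahead window.

        Simplified sketch of the strategy used by revlog.chunk() above:
        a single (offset, data) window, refilled whenever a request
        falls outside it.
        """

        WINDOW = 4 * 1024 * 1024  # 4 MB, like the revlog chunk cache

        def __init__(self, path):
            self.path = path
            self.window = None  # (start_offset, data)

        def _fill(self, start, length):
            # Read at least `length` bytes, but prefer a whole window.
            with open(self.path, "rb") as f:
                f.seek(start)
                self.window = (start, f.read(max(self.WINDOW, length)))

        def read(self, start, length):
            end = start + length
            if self.window is not None:
                wstart, data = self.window
                if start >= wstart and end <= wstart + len(data):
                    # request fully covered by the cached window
                    return data[start - wstart:end - wstart]
            self._fill(start, length)
            return self.window[1][:length]

Reconstructing a revision walks consecutive deltas, so adjacent reads usually hit the window instead of reopening and seeking the data file for every chunk, which is the situation the removed span-merging code in group() was working around.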
--- a/mercurial/statichttprepo.py Wed Dec 28 07:13:52 2005 -0800
+++ b/mercurial/statichttprepo.py Wed Dec 28 07:22:26 2005 -0800
@@ -35,6 +35,8 @@
         self.changelog = changelog.changelog(self.opener)
         self.tagscache = None
         self.nodetagscache = None
+        self.encodepats = None
+        self.decodepats = None
 
     def dev(self):
         return -1
--- a/tests/test-tag Wed Dec 28 07:13:52 2005 -0800
+++ b/tests/test-tag Wed Dec 28 07:22:26 2005 -0800
@@ -11,6 +11,13 @@
 echo foo >> .hgtags
 hg tag -d "0 0" "bleah2" || echo "failed"
 
+hg revert .hgtags
+hg tag -d "0 0" -r 0 "bleah0"
+hg tag -l -d "0 0" "bleah1" 1
+
+cat .hgtags
+cat .hg/localtags
+
 hg tag -l 'xx
 newline'
 hg tag -l 'xx:xx'
--- a/tests/test-tag.out Wed Dec 28 07:13:52 2005 -0800
+++ b/tests/test-tag.out Wed Dec 28 07:22:26 2005 -0800
@@ -18,5 +18,8 @@
 abort: working copy of .hgtags is changed (please commit .hgtags manually)
 failed
+acb14030fe0a21b60322c440ad2d20cf7685a376 bleah
+acb14030fe0a21b60322c440ad2d20cf7685a376 bleah0
+863197ef03781c4fc00276d83eb66c4cb9cd91df bleah1
 abort: '\n' cannot be used in a tag name
 abort: ':' cannot be used in a tag name