# HG changeset patch
# User Thomas Arendsen Hein
# Date 1163747214 -3600
# Node ID eb0b4a2d70a9c650d22bad9df0772878af0ac4e0
# Parent  e8730b5b8a32ef74ff9457f8e83346262ec629e7
white space and line break cleanups

diff -r e8730b5b8a32 -r eb0b4a2d70a9 contrib/darcs2hg.py
--- a/contrib/darcs2hg.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/contrib/darcs2hg.py	Fri Nov 17 08:06:54 2006 +0100
@@ -74,8 +74,8 @@
 	changes = cmd("darcs changes --reverse --xml-output", darcsRepo)
 	doc = xml_dom.parseString(changes)
 	for patch_node in doc.childNodes[0].childNodes:
-		name = filter(lambda n:n.nodeName == "name", patch_node.childNodes)
-		comm = filter(lambda n:n.nodeName == "comment", patch_node.childNodes)
+		name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
+		comm = filter(lambda n: n.nodeName == "comment", patch_node.childNodes)
 		if not name:continue
 		else: name = name[0].childNodes[0].data
 		if not comm: comm = ""
@@ -87,7 +87,7 @@

 def darcs_tip(darcs_repo):
 	changes = cmd("darcs changes",darcs_repo,silent=True)
-	changes = filter(lambda l:l.strip().startswith("* "), changes.split("\n"))
+	changes = filter(lambda l: l.strip().startswith("* "), changes.split("\n"))
 	return len(changes)

 def darcs_pull(hg_repo, darcs_repo, chash):
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/ancestor.py
--- a/mercurial/ancestor.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/ancestor.py	Fri Nov 17 08:06:54 2006 +0100
@@ -51,7 +51,7 @@

     def generations(vertex):
         sg, s = None, {}
-        for g,v in ancestors(vertex):
+        for g, v in ancestors(vertex):
             if g != sg:
                 if sg:
                     yield sg, s
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/byterange.py
--- a/mercurial/byterange.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/byterange.py	Fri Nov 17 08:06:54 2006 +0100
@@ -121,7 +121,7 @@
         """
         return (self.realpos - self.firstbyte)

-    def seek(self,offset,whence=0):
+    def seek(self, offset, whence=0):
         """Seek within the byte range.
         Positioning is identical to that described under tell().
         """
@@ -170,7 +170,7 @@
             size = (self.lastbyte - self.realpos)
         return size

-    def _do_seek(self,offset):
+    def _do_seek(self, offset):
         """Seek based on whether wrapped object supports seek().
         offset is relative to the current position (self.realpos).
         """
@@ -179,9 +179,9 @@
             self._poor_mans_seek(offset)
         else:
             self.fo.seek(self.realpos + offset)
-        self.realpos+= offset
+        self.realpos += offset

-    def _poor_mans_seek(self,offset):
+    def _poor_mans_seek(self, offset):
         """Seek by calling the wrapped file objects read() method.
         This is used for file like objects that do not have native
         seek support. The wrapped objects read() method is called
@@ -199,7 +199,7 @@
             buf = self.fo.read(bufsize)
             if len(buf) != bufsize:
                 raise RangeError('Requested Range Not Satisfiable')
-            pos+= bufsize
+            pos += bufsize

 class FileRangeHandler(urllib2.FileHandler):
     """FileHandler subclass that adds Range support.
@@ -221,16 +221,17 @@
         if port or socket.gethostbyname(host) not in self.get_names():
             raise urllib2.URLError('file not on local host')
         fo = open(localfile,'rb')
-        brange = req.headers.get('Range',None)
+        brange = req.headers.get('Range', None)
         brange = range_header_to_tuple(brange)
         assert brange != ()
         if brange:
-            (fb,lb) = brange
-            if lb == '': lb = size
+            (fb, lb) = brange
+            if lb == '':
+                lb = size
             if fb < 0 or fb > size or lb > size:
                 raise RangeError('Requested Range Not Satisfiable')
             size = (lb - fb)
-            fo = RangeableFileObject(fo, (fb,lb))
+            fo = RangeableFileObject(fo, (fb, lb))
         headers = mimetools.Message(StringIO(
             'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
             (mtype or 'text/plain', size, modified)))
@@ -292,18 +293,19 @@

         # -- range support modifications start here
         rest = None
-        range_tup = range_header_to_tuple(req.headers.get('Range',None))
+        range_tup = range_header_to_tuple(req.headers.get('Range', None))
         assert range_tup != ()
         if range_tup:
-            (fb,lb) = range_tup
-            if fb > 0: rest = fb
+            (fb, lb) = range_tup
+            if fb > 0:
+                rest = fb
         # -- range support modifications end here

         fp, retrlen = fw.retrfile(file, type, rest)

         # -- range support modifications start here
         if range_tup:
-            (fb,lb) = range_tup
+            (fb, lb) = range_tup
             if lb == '':
                 if retrlen is None or retrlen == 0:
                     raise RangeError('Requested Range Not Satisfiable due to unobtainable file length.')
@@ -314,7 +316,7 @@
                     raise RangeError('Requested Range Not Satisfiable')
             else:
                 retrlen = lb - fb
-            fp = RangeableFileObject(fp, (0,retrlen))
+            fp = RangeableFileObject(fp, (0, retrlen))
         # -- range support modifications end here

         headers = ""
@@ -340,8 +342,12 @@
     # argument and pass it on to ftp.ntransfercmd
     def retrfile(self, file, type, rest=None):
         self.endtransfer()
-        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
-        else: cmd = 'TYPE ' + type; isdir = 0
+        if type in ('d', 'D'):
+            cmd = 'TYPE A'
+            isdir = 1
+        else:
+            cmd = 'TYPE ' + type
+            isdir = 0
         try:
             self.ftp.voidcmd(cmd)
         except ftplib.all_errors:
@@ -372,8 +378,10 @@
             # Set transfer mode to ASCII!
             self.ftp.voidcmd('TYPE A')
             # Try a directory listing
-            if file: cmd = 'LIST ' + file
-            else: cmd = 'LIST'
+            if file:
+                cmd = 'LIST ' + file
+            else:
+                cmd = 'LIST'
             conn = self.ftp.ntransfercmd(cmd)
         self.busy = 1
         # Pass back both a suitably decorated object and a retrieval length
@@ -401,15 +409,16 @@
     """
     global _rangere
-    if range_header is None: return None
+    if range_header is None:
+        return None
     if _rangere is None:
         import re
         _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)')
     match = _rangere.match(range_header)
     if match:
-        tup = range_tuple_normalize(match.group(1,2))
+        tup = range_tuple_normalize(match.group(1, 2))
         if tup and tup[1]:
-            tup = (tup[0],tup[1]+1)
+            tup = (tup[0], tup[1]+1)
         return tup
     return ()
@@ -418,11 +427,12 @@
     Return a string of the form "bytes=-" or None
     if no range is needed.
     """
-    if range_tup is None: return None
+    if range_tup is None:
+        return None
     range_tup = range_tuple_normalize(range_tup)
     if range_tup:
         if range_tup[1]:
-            range_tup = (range_tup[0],range_tup[1] - 1)
+            range_tup = (range_tup[0], range_tup[1] - 1)
         return 'bytes=%s-%s' % range_tup

 def range_tuple_normalize(range_tup):
@@ -432,19 +442,28 @@
     an int. Finally, return None if the normalized tuple == (0,'')
     as that is equivelant to retrieving the entire file.
""" - if range_tup is None: return None + if range_tup is None: + return None # handle first byte fb = range_tup[0] - if fb in (None,''): fb = 0 - else: fb = int(fb) + if fb in (None, ''): + fb = 0 + else: + fb = int(fb) # handle last byte - try: lb = range_tup[1] - except IndexError: lb = '' + try: + lb = range_tup[1] + except IndexError: + lb = '' else: - if lb is None: lb = '' - elif lb != '': lb = int(lb) + if lb is None: + lb = '' + elif lb != '': + lb = int(lb) # check if range is over the entire file - if (fb,lb) == (0,''): return None + if (fb, lb) == (0, ''): + return None # check that the range is valid - if lb < fb: raise RangeError('Invalid byte range: %s-%s' % (fb,lb)) - return (fb,lb) + if lb < fb: + raise RangeError('Invalid byte range: %s-%s' % (fb, lb)) + return (fb, lb) diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/cmdutil.py --- a/mercurial/cmdutil.py Thu Nov 16 08:52:55 2006 +0100 +++ b/mercurial/cmdutil.py Fri Nov 17 08:06:54 2006 +0100 @@ -93,7 +93,7 @@ if seqno is not None: expander['n'] = lambda: str(seqno) if total is not None and seqno is not None: - expander['n'] = lambda:str(seqno).zfill(len(str(total))) + expander['n'] = lambda: str(seqno).zfill(len(str(total))) if pathname is not None: expander['s'] = lambda: os.path.basename(pathname) expander['d'] = lambda: os.path.dirname(pathname) or '.' diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/commands.py --- a/mercurial/commands.py Thu Nov 16 08:52:55 2006 +0100 +++ b/mercurial/commands.py Fri Nov 17 08:06:54 2006 +0100 @@ -266,7 +266,7 @@ List the repository's named branches. """ b = repo.branchtags() - l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()] + l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()] l.sort() for r, n, t in l: hexfunc = ui.debugflag and hex or short @@ -987,7 +987,7 @@ fstate = {} skip = {} - get = util.cachefunc(lambda r:repo.changectx(r).changeset()) + get = util.cachefunc(lambda r: repo.changectx(r).changeset()) changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) count = 0 incrementing = False @@ -1422,7 +1422,7 @@ files and full commit message is shown. 
""" - get = util.cachefunc(lambda r:repo.changectx(r).changeset()) + get = util.cachefunc(lambda r: repo.changectx(r).changeset()) changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts) if opts['limit']: diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/context.py --- a/mercurial/context.py Thu Nov 16 08:52:55 2006 +0100 +++ b/mercurial/context.py Fri Nov 17 08:06:54 2006 +0100 @@ -69,12 +69,12 @@ def parents(self): """return contexts for each parent changeset""" p = self._repo.changelog.parents(self._node) - return [ changectx(self._repo, x) for x in p ] + return [changectx(self._repo, x) for x in p] def children(self): """return contexts for each child changeset""" c = self._repo.changelog.children(self._node) - return [ changectx(self._repo, x) for x in c ] + return [changectx(self._repo, x) for x in c] def filenode(self, path): if '_manifest' in self.__dict__: @@ -210,20 +210,20 @@ def parents(self): p = self._path fl = self._filelog - pl = [ (p, n, fl) for n in self._filelog.parents(self._filenode) ] + pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)] r = self.renamed() if r: pl[0] = (r[0], r[1], None) - return [ filectx(self._repo, p, fileid=n, filelog=l) - for p,n,l in pl if n != nullid ] + return [filectx(self._repo, p, fileid=n, filelog=l) + for p,n,l in pl if n != nullid] def children(self): # hard for renames c = self._filelog.children(self._filenode) - return [ filectx(self._repo, self._path, fileid=x, - filelog=self._filelog) for x in c ] + return [filectx(self._repo, self._path, fileid=x, + filelog=self._filelog) for x in c] def annotate(self, follow=False): '''returns a list of tuples of (ctx, line) for each line @@ -248,9 +248,9 @@ # we want to reuse filectx objects as much as possible p = f._path if f._filerev is None: # working dir - pl = [ (n.path(), n.filerev()) for n in f.parents() ] + pl = [(n.path(), n.filerev()) for n in f.parents()] else: - pl = [ (p, n) for n in f._filelog.parentrevs(f._filerev) ] + pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)] if follow: r = f.renamed() @@ -313,7 +313,7 @@ # prime the ancestor cache for the working directory for c in (self, fc2): if c._filerev == None: - pl = [ (n.path(), n.filenode()) for n in c.parents() ] + pl = [(n.path(), n.filenode()) for n in c.parents()] acache[(c._path, None)] = pl flcache = {self._path:self._filelog, fc2._path:fc2._filelog} @@ -324,17 +324,17 @@ if f not in flcache: flcache[f] = self._repo.file(f) fl = flcache[f] - pl = [ (f,p) for p in fl.parents(n) if p != nullid ] + pl = [(f, p) for p in fl.parents(n) if p != nullid] re = fl.renamed(n) if re: pl.append(re) - acache[vertex]=pl + acache[vertex] = pl return pl a, b = (self._path, self._filenode), (fc2._path, fc2._filenode) v = ancestor.ancestor(a, b, parents) if v: - f,n = v + f, n = v return filectx(self._repo, f, fileid=n, filelog=flcache[f]) return None @@ -372,7 +372,7 @@ man = self._parents[0].manifest().copy() copied = self._repo.dirstate.copies() modified, added, removed, deleted, unknown = self._status[:5] - for i,l in (("a", added), ("m", modified), ("u", unknown)): + for i, l in (("a", added), ("m", modified), ("u", unknown)): for f in l: man[f] = man.get(copied.get(f, f), nullid) + i man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f))) @@ -480,14 +480,14 @@ rp = self._repopath pcl = self._changectx._parents fl = self._filelog - pl = [ (rp, pcl[0]._manifest.get(rp, nullid), fl) ] + pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)] if len(pcl) > 1: if rp != p: fl = None pl.append((p, 
                        pcl[1]._manifest.get(p, nullid), fl))
-        return [ filectx(self._repo, p, fileid=n, filelog=l)
-                 for p,n,l in pl if n != nullid ]
+        return [filectx(self._repo, p, fileid=n, filelog=l)
+                for p,n,l in pl if n != nullid]

     def children(self):
         return []
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/fancyopts.py
--- a/mercurial/fancyopts.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/fancyopts.py	Fri Nov 17 08:06:54 2006 +0100
@@ -20,7 +20,7 @@

     opts, args = getopt.getopt(args, short, long)
     for opt, arg in opts:
-        if dt[map[opt]] is type(fancyopts): state[map[opt]](state,map[opt],arg)
+        if dt[map[opt]] is type(fancyopts): state[map[opt]](state, map[opt], arg)
         elif dt[map[opt]] is type(1): state[map[opt]] = int(arg)
         elif dt[map[opt]] is type(''): state[map[opt]] = arg
         elif dt[map[opt]] is type([]): state[map[opt]].append(arg)
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/hgweb/hgweb_mod.py
--- a/mercurial/hgweb/hgweb_mod.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/hgweb/hgweb_mod.py	Fri Nov 17 08:06:54 2006 +0100
@@ -139,7 +139,7 @@

     def showtag(self, t1, node=nullid, **args):
         for t in self.repo.nodetags(node):
-            yield self.t(t1, tag=t, **args) 
+            yield self.t(t1, tag=t, **args)

     def diff(self, node1, node2, files):
         def filterfiles(filters, files):
@@ -331,7 +331,7 @@
         count = fl.count()
         pagelen = self.maxshortchanges
         pos = fctx.filerev()
-        start = max(0, pos - pagelen + 1) 
+        start = max(0, pos - pagelen + 1)
         end = min(count, start + pagelen)
         pos = end - 1

@@ -446,7 +446,7 @@

         l = len(path)
         abspath = "/" + path
-        for f,n in mf.items():
+        for f, n in mf.items():
             if f[:l] != path:
                 continue
             remain = f[l:]
@@ -504,8 +504,9 @@

         def entries(notip=False, **map):
             parity = 0
-            for k,n in i:
-                if notip and k == "tip": continue
+            for k, n in i:
+                if notip and k == "tip":
+                    continue
                 yield {"parity": self.stripes(parity),
                        "tag": k,
                        "date": cl.read(n)[2],
@@ -526,7 +527,7 @@
         def tagentries(**map):
             parity = 0
             count = 0
-            for k,n in i:
+            for k, n in i:
                 if k == "tip": # skip tip
                     continue;

@@ -805,7 +806,7 @@
             })

         if not req.form.has_key('cmd'):
-            req.form['cmd'] = [self.t.cache['default'],]
+            req.form['cmd'] = [self.t.cache['default']]

         cmd = req.form['cmd'][0]

diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/hgweb/request.py
--- a/mercurial/hgweb/request.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/hgweb/request.py	Fri Nov 17 08:06:54 2006 +0100
@@ -34,7 +34,7 @@
 class _wsgirequest(object):
     def __init__(self, destination, wsgienv, start_response):
         version = wsgienv['wsgi.version']
-        if (version < (1,0)) or (version >= (2, 0)):
+        if (version < (1, 0)) or (version >= (2, 0)):
             raise RuntimeError("Unknown and unsupported WSGI version %d.%d" \
                                % version)
         self.inp = wsgienv['wsgi.input']
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/hgweb/server.py
--- a/mercurial/hgweb/server.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/hgweb/server.py	Fri Nov 17 08:06:54 2006 +0100
@@ -186,7 +186,8 @@
     if hasattr(os, "fork"):
         _mixin = SocketServer.ForkingMixIn
     else:
-        class _mixin: pass
+        class _mixin:
+            pass

     class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
         def __init__(self, *args, **kargs):
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/hgweb/wsgicgi.py
--- a/mercurial/hgweb/wsgicgi.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/hgweb/wsgicgi.py	Fri Nov 17 08:06:54 2006 +0100
@@ -13,12 +13,12 @@

 def launch(application):
     environ = dict(os.environ.items())
-    environ['wsgi.input'] = sys.stdin
-    environ['wsgi.errors'] = sys.stderr
-    environ['wsgi.version'] = (1,0)
-    environ['wsgi.multithread'] = False
+    environ['wsgi.input'] = sys.stdin
+    environ['wsgi.errors'] = sys.stderr
+    environ['wsgi.version'] = (1, 0)
+    environ['wsgi.multithread'] = False
     environ['wsgi.multiprocess'] = True
-    environ['wsgi.run_once'] = True
+    environ['wsgi.run_once'] = True

     if environ.get('HTTPS','off') in ('on','1'):
         environ['wsgi.url_scheme'] = 'https'
@@ -31,20 +31,20 @@

     def write(data):
         if not headers_set:
-            raise AssertionError("write() before start_response()")
+            raise AssertionError("write() before start_response()")
         elif not headers_sent:
-            # Before the first output, send the stored headers
-            status, response_headers = headers_sent[:] = headers_set
-            out.write('Status: %s\r\n' % status)
-            for header in response_headers:
-                out.write('%s: %s\r\n' % header)
-            out.write('\r\n')
+            # Before the first output, send the stored headers
+            status, response_headers = headers_sent[:] = headers_set
+            out.write('Status: %s\r\n' % status)
+            for header in response_headers:
+                out.write('%s: %s\r\n' % header)
+            out.write('\r\n')
         out.write(data)
         out.flush()

-    def start_response(status,response_headers,exc_info=None):
+    def start_response(status, response_headers, exc_info=None):
         if exc_info:
             try:
                 if headers_sent:
@@ -55,7 +55,7 @@
         elif headers_set:
             raise AssertionError("Headers already set!")

-        headers_set[:] = [status,response_headers]
+        headers_set[:] = [status, response_headers]
         return write

     result = application(environ, start_response)
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/merge.py
--- a/mercurial/merge.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/merge.py	Fri Nov 17 08:06:54 2006 +0100
@@ -133,7 +133,7 @@
     match = {}
     u1 = nonoverlap(m1, m2, ma)
    u2 = nonoverlap(m2, m1, ma)
-    ctx = util.cachefunc(lambda f,n: repo.filectx(f, fileid=n[:20]))
+    ctx = util.cachefunc(lambda f, n: repo.filectx(f, fileid=n[:20]))

     def checkpair(c, f2, man):
         ''' check if an apparent pair actually matches '''
@@ -285,7 +285,7 @@
                 if inst.errno != errno.ENOENT:
                     repo.ui.warn(_("update failed to remove %s: %s!\n") %
                                  (f, inst.strerror))
-            removed +=1
+            removed += 1
         elif m == "m": # merge
             f2, fd, flag, move = a[2:]
             r = filemerge(repo, f, f2, wctx, mctx)
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/packagescan.py
--- a/mercurial/packagescan.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/packagescan.py	Fri Nov 17 08:06:54 2006 +0100
@@ -55,14 +55,14 @@
             scope[as_] = mod
             requiredmodules[mod.__name__] = 1
             if len(comp) == i: break
-            mod = getattr(mod,comp[i])
+            mod = getattr(mod, comp[i])
             mn = string.join(comp[:i+1],'.')
             i += 1
     else:
         # mod is the last package in the component list
         requiredmodules[mod.__name__] = 1
         for f in fromlist:
-            scope[f] = getattr(mod,f)
+            scope[f] = getattr(mod, f)
             if type(scope[f]) == types.ModuleType:
                 requiredmodules[scope[f].__name__] = 1
@@ -72,14 +72,14 @@

 scan_in_progress = False

-def scan(libpath,packagename):
+def scan(libpath, packagename):
     """ helper for finding all required modules of package """
     global scan_in_progress
     scan_in_progress = True
     # Use the package in the build directory
     libpath = os.path.abspath(libpath)
-    sys.path.insert(0,libpath)
-    packdir = os.path.join(libpath,packagename.replace('.', '/'))
+    sys.path.insert(0, libpath)
+    packdir = os.path.join(libpath, packagename.replace('.', '/'))
     # A normal import would not find the package in
     # the build directory. ihook is used to force the import.
     # After the package is imported the import scope for
@@ -97,10 +97,10 @@
     for m in pymodulefiles:
         if m == '__init__.py': continue
         tmp = {}
-        mname,ext = os.path.splitext(m)
+        mname, ext = os.path.splitext(m)
         fullname = packagename+'.'+mname
         try:
-            __import__(fullname,tmp,tmp)
+            __import__(fullname, tmp, tmp)
         except SkipPackage, inst:
             print >> sys.stderr, 'skipping %s: %s' % (fullname, inst.reason)
             continue
@@ -108,9 +108,9 @@
     # Import all extension modules and by that run the fake demandload
     for m in extmodulefiles:
         tmp = {}
-        mname,ext = os.path.splitext(m)
+        mname, ext = os.path.splitext(m)
         fullname = packagename+'.'+mname
-        __import__(fullname,tmp,tmp)
+        __import__(fullname, tmp, tmp)
         requiredmodules[fullname] = 1

 def getmodules():
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/patch.py
--- a/mercurial/patch.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/patch.py	Fri Nov 17 08:06:54 2006 +0100
@@ -143,7 +143,7 @@
         if m:
             if gp:
                 gitpatches.append(gp)
-            src, dst = m.group(1,2)
+            src, dst = m.group(1, 2)
             gp = gitpatch(dst)
             gp.lineno = lineno
         elif gp:
diff -r e8730b5b8a32 -r eb0b4a2d70a9 mercurial/util.py
--- a/mercurial/util.py	Thu Nov 16 08:52:55 2006 +0100
+++ b/mercurial/util.py	Fri Nov 17 08:06:54 2006 +0100
@@ -610,8 +610,8 @@
         return path

     def user_rcpath():
-        '''return os-specific hgrc search path to the user dir'''
-        return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
+        '''return os-specific hgrc search path to the user dir'''
+        return os.path.join(os.path.expanduser('~'), 'mercurial.ini')

     def parse_patch_output(output_line):
         """parses the output produced by patch and returns the file name"""