view tests/test-ancestor.py @ 35428:71427ff1dff8
workers: handling exceptions in windows workers
This adds handling of exceptions thrown by worker threads and re-raises them in the main thread, so failures surface as if the function had run without workers.
If any thread throws, the main thread interrupts the remaining running threads, gives them 5 seconds to handle the interruption, and then raises the first exception received.
We don't need to join threads whose is_alive() is already false. A rough sketch of this pattern follows below.
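The shape of the mechanism, as a minimal sketch (the names Worker, interrupt and runworkers are hypothetical and are not mercurial's actual worker.py API): each worker records the first exception it hits, the driving loop notices it, asks the other threads to stop, waits up to 5 seconds for them, and re-raises the exception in the main thread.

    # Rough sketch only; names are hypothetical, not mercurial's API.
    import threading

    class Worker(threading.Thread):
        def __init__(self, func, items):
            threading.Thread.__init__(self)
            self.func = func
            self.items = items
            self.exception = None      # set if this worker raises
            self._interrupted = False

        def interrupt(self):
            # cooperative interruption: the worker checks this flag between items
            self._interrupted = True

        def run(self):
            try:
                for item in self.items:
                    if self._interrupted:
                        return
                    self.func(item)
            except Exception as e:
                self.exception = e     # surface the failure to the main thread

    def runworkers(threads):
        # poll the workers; re-raise the first exception any of them recorded
        while threads:
            for t in threads[:]:
                t.join(0.05)
                if not t.is_alive():   # finished threads need no further join
                    threads.remove(t)
                if t.exception is not None:
                    for other in threads:
                        other.interrupt()
                    for other in threads:
                        other.join(5)  # give stragglers 5 seconds to wind down
                    raise t.exception

A caller would start the threads and then call runworkers() on the list; if any worker raised, the call re-raises that exception in the main thread, which is the behaviour visible in the traceback below where worker.py re-raises t.exception.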
Test Plan:
Ran multiple updates and sparse profile enable/disable cycles; everything worked well.
Ran the tests on CentOS; all tests passing on @ passed here as well.
Added a forged exception to the worker code; it was properly resurfaced and the remaining workers were killed: P58642088
PS C:\open\<repo>> ..\facebook-hg-rpms\build\hg\hg.exe --config extensions.fsmonitor=! sparse --enable-profile <profile>
updating [==> ] 1300/39166 1m57sException in thread Thread-3:
Traceback (most recent call last):
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\threading.py", line 801, in __bootstrap_inner
    self.run()
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\worker.py", line 244, in run
    raise e
Exception: Forged exception
Exception in thread Thread-2:
Traceback (most recent call last):
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\threading.py", line 801, in __bootstrap_inner
    self.run()
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\worker.py", line 244, in run
    raise e
Exception: Forged exception
<...>
Traceback (most recent call last):
  File "C:\open\facebook-hg-rpms\build\hg\hgexe.py", line 41, in <module>
    dispatch.run()
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 85, in run
    status = (dispatch(req) or 0) & 255
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 173, in dispatch
    ret = _runcatch(req)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 324, in _runcatch
    return _callcatch(ui, _runcatchfunc)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 332, in _callcatch
    return scmutil.callcatch(ui, func)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\scmutil.py", line 154, in callcatch
    return func()
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 314, in _runcatchfunc
    return _dispatch(req)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 951, in _dispatch
    cmdpats, cmdoptions)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\remotefilelog\__init__.py", line 415, in runcommand
    return orig(lui, repo, *args, **kwargs)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\undo.py", line 118, in _runcommandwrapper
    result = orig(lui, repo, cmd, fullargs, *args)
  File "C:\open\facebook-hg-rpms\build\hg\hgext\journal.py", line 84, in runcommand
    return orig(lui, repo, cmd, fullargs, *args)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\perftweaks.py", line 268, in _tracksparseprofiles
    res = runcommand(lui, repo, *args)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\perftweaks.py", line 256, in _trackdirstatesizes
    res = runcommand(lui, repo, *args)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\copytrace.py", line 144, in _runcommand
    return orig(lui, repo, cmd, fullargs, ui, *args, **kwargs)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\fbamend\hiddenoverride.py", line 119, in runcommand
    result = orig(lui, repo, cmd, fullargs, *args)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 712, in runcommand
    ret = _runcommand(ui, options, cmd, d)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 959, in _runcommand
    return cmdfunc()
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\dispatch.py", line 948, in <lambda>
    d = lambda: util.checksignature(func)(ui, *args, **strcmdopt)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\util.py", line 1183, in check
    return func(*args, **kwargs)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\fbsparse.py", line 860, in sparse
    disableprofile=disableprofile, force=force)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\fbsparse.py", line 949, in _config
    len, _refresh(ui, repo, oldstatus, oldsparsematch, force))
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\hgext3rd\fbsparse.py", line 1116, in _refresh
    mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)
  File "C:\open\facebook-hg-rpms\build\hg\hg-python\lib\site-packages\remotefilelog\__init__.py", line 311, in applyupdates
    return orig(repo, actions, wctx, mctx, overwrite, labels=labels)
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\merge.py", line 1464, in applyupdates
    for i, item in prog:
  File "C:\open\facebook-hg-rpms\build\hg\mercurial\worker.py", line 286, in _windowsworker
    raise t.exception
Exception: Forged exception
PS C:\open\ovrsource>
Differential Revision: https://phab.mercurial-scm.org/D1459
author    Wojciech Lis <wlis@fb.com>
date      Mon, 20 Nov 2017 10:27:41 -0800
parents   f501322512b6
children  6754d0c5e1b5
from __future__ import absolute_import, print_function

import binascii
import getopt
import math
import os
import random
import sys
import time

from mercurial.node import nullrev
from mercurial import (
    ancestor,
    debugcommands,
    hg,
    pycompat,
    ui as uimod,
    util,
)

if pycompat.ispy3:
    long = int
    xrange = range

def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
    '''nodes: total number of nodes in the graph
    rootprob: probability that a new node (not 0) will be a root
    mergeprob: probability that, excluding a root a node will be a merge
    prevprob: probability that p1 will be the previous node

    return value is a graph represented as an adjacency list.
    '''
    graph = [None] * nodes
    for i in xrange(nodes):
        if i == 0 or rng.random() < rootprob:
            graph[i] = [nullrev]
        elif i == 1:
            graph[i] = [0]
        elif rng.random() < mergeprob:
            if i == 2 or rng.random() < prevprob:
                # p1 is prev
                p1 = i - 1
            else:
                p1 = rng.randrange(i - 1)
            p2 = rng.choice(list(range(0, p1)) + list(range(p1 + 1, i)))
            graph[i] = [p1, p2]
        elif rng.random() < prevprob:
            graph[i] = [i - 1]
        else:
            graph[i] = [rng.randrange(i - 1)]

    return graph

def buildancestorsets(graph):
    ancs = [None] * len(graph)
    for i in xrange(len(graph)):
        ancs[i] = {i}
        if graph[i] == [nullrev]:
            continue
        for p in graph[i]:
            ancs[i].update(ancs[p])
    return ancs

class naiveincrementalmissingancestors(object):
    def __init__(self, ancs, bases):
        self.ancs = ancs
        self.bases = set(bases)
    def addbases(self, newbases):
        self.bases.update(newbases)
    def removeancestorsfrom(self, revs):
        for base in self.bases:
            if base != nullrev:
                revs.difference_update(self.ancs[base])
        revs.discard(nullrev)
    def missingancestors(self, revs):
        res = set()
        for rev in revs:
            if rev != nullrev:
                res.update(self.ancs[rev])
        for base in self.bases:
            if base != nullrev:
                res.difference_update(self.ancs[base])
        return sorted(res)

def test_missingancestors(seed, rng):
    # empirically observed to take around 1 second
    graphcount = 100
    testcount = 10
    inccount = 10
    nerrs = [0]
    # the default mu and sigma give us a nice distribution of mostly
    # single-digit counts (including 0) with some higher ones
    def lognormrandom(mu, sigma):
        return int(math.floor(rng.lognormvariate(mu, sigma)))

    def samplerevs(nodes, mu=1.1, sigma=0.8):
        count = min(lognormrandom(mu, sigma), len(nodes))
        return rng.sample(nodes, count)

    def err(seed, graph, bases, seq, output, expected):
        if nerrs[0] == 0:
            print('seed:', hex(seed)[:-1], file=sys.stderr)
        if gerrs[0] == 0:
            print('graph:', graph, file=sys.stderr)
        print('* bases:', bases, file=sys.stderr)
        print('* seq: ', seq, file=sys.stderr)
        print('* output: ', output, file=sys.stderr)
        print('* expected:', expected, file=sys.stderr)
        nerrs[0] += 1
        gerrs[0] += 1

    for g in xrange(graphcount):
        graph = buildgraph(rng)
        ancs = buildancestorsets(graph)
        gerrs = [0]
        for _ in xrange(testcount):
            # start from nullrev to include it as a possibility
            graphnodes = range(nullrev, len(graph))
            bases = samplerevs(graphnodes)

            # fast algorithm
            inc = ancestor.incrementalmissingancestors(graph.__getitem__,
                                                       bases)
            # reference slow algorithm
            naiveinc = naiveincrementalmissingancestors(ancs, bases)
            seq = []
            revs = []
            for _ in xrange(inccount):
                if rng.random() < 0.2:
                    newbases = samplerevs(graphnodes)
                    seq.append(('addbases', newbases))
                    inc.addbases(newbases)
                    naiveinc.addbases(newbases)
                if rng.random() < 0.4:
                    # larger set so that there are more revs to remove from
                    revs = samplerevs(graphnodes, mu=1.5)
                    seq.append(('removeancestorsfrom', revs))
                    hrevs = set(revs)
                    rrevs = set(revs)
                    inc.removeancestorsfrom(hrevs)
                    naiveinc.removeancestorsfrom(rrevs)
                    if hrevs != rrevs:
                        err(seed, graph, bases, seq, sorted(hrevs),
                            sorted(rrevs))
                else:
                    revs = samplerevs(graphnodes)
                    seq.append(('missingancestors', revs))
                    h = inc.missingancestors(revs)
                    r = naiveinc.missingancestors(revs)
                    if h != r:
                        err(seed, graph, bases, seq, h, r)

# graph is a dict of child->parent adjacency lists for this graph:
# o  13
# |
# | o  12
# | |
# | | o    11
# | | |\
# | | | | o  10
# | | | | |
# | o---+ |  9
# | | | | |
# o | | | |  8
#  / / / /
# | | o |  7
# | | | |
# o---+ |  6
#  / / /
# | | o  5
# | |/
# | o  4
# | |
# o |  3
# | |
# | o  2
# |/
# o  1
# |
# o  0

graph = {0: [-1], 1: [0], 2: [1], 3: [1], 4: [2], 5: [4], 6: [4],
         7: [4], 8: [-1], 9: [6, 7], 10: [5], 11: [3, 7], 12: [9],
         13: [8]}

def genlazyancestors(revs, stoprev=0, inclusive=False):
    print(("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" %
           (revs, stoprev, inclusive)))
    return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev,
                                  inclusive=inclusive)

def printlazyancestors(s, l):
    print('membership: %r' % [n for n in l if n in s])
    print('iteration: %r' % list(s))

def test_lazyancestors():
    # Empty revs
    s = genlazyancestors([])
    printlazyancestors(s, [3, 0, -1])

    # Standard example
    s = genlazyancestors([11, 13])
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])

    # Standard with ancestry in the initial set (1 is ancestor of 3)
    s = genlazyancestors([1, 3])
    printlazyancestors(s, [1, -1, 0])

    # Including revs
    s = genlazyancestors([11, 13], inclusive=True)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])

    # Test with stoprev
    s = genlazyancestors([11, 13], stoprev=6)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])
    s = genlazyancestors([11, 13], stoprev=6, inclusive=True)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])

# The C gca algorithm requires a real repo. These are textual descriptions of
# DAGs that have been known to be problematic, and, optionally, known pairs
# of revisions and their expected ancestor list.
dagtests = [
    ('+2*2*2/*3/2', {}),
    ('+3*3/*2*2/*4*4/*4/2*4/2*2', {}),
    ('+2*2*/2*4*/4*/3*2/4', {(6, 7): [3, 5]}),
]
def test_gca():
    u = uimod.ui.load()
    for i, (dag, tests) in enumerate(dagtests):
        repo = hg.repository(u, b'gca%d' % i, create=1)
        cl = repo.changelog
        if not util.safehasattr(cl.index, 'ancestors'):
            # C version not available
            return

        debugcommands.debugbuilddag(u, repo, dag)
        # Compare the results of the Python and C versions. This does not
        # include choosing a winner when more than one gca exists -- we make
        # sure both return exactly the same set of gcas.
        # Also compare against expected results, if available.
        for a in cl:
            for b in cl:
                cgcas = sorted(cl.index.ancestors(a, b))
                pygcas = sorted(ancestor.ancestors(cl.parentrevs, a, b))
                expected = None
                if (a, b) in tests:
                    expected = tests[(a, b)]
                if cgcas != pygcas or (expected and cgcas != expected):
                    print("test_gca: for dag %s, gcas for %d, %d:"
                          % (dag, a, b))
                    print("  C returned:      %s" % cgcas)
                    print("  Python returned: %s" % pygcas)
                    if expected:
                        print("  expected:        %s" % expected)

def main():
    seed = None
    opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed='])
    for o, a in opts:
        if o in ('-s', '--seed'):
            seed = long(a, base=0)  # accepts base 10 or 16 strings

    if seed is None:
        try:
            seed = long(binascii.hexlify(os.urandom(16)), 16)
        except AttributeError:
            seed = long(time.time() * 1000)

    rng = random.Random(seed)
    test_missingancestors(seed, rng)
    test_lazyancestors()
    test_gca()

if __name__ == '__main__':
    main()
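Based on the getopt handling in main() above, the script can also be run directly (outside the run-tests.py harness) with an optional seed for the random graph generator; the seed value below is only an example and may be given in base 10 or 16:

    $ python tests/test-ancestor.py --seed 0x1234abcd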