hgext/remotefilelog/connectionpool.py @ 51730:63ede7a43a37 (stable)
demandimport: don't delay threading import
A recent CPython change breaks demandimport by importing threading
locally in importlib.util.LazyLoader.exec_module; add threading (plus
warnings and _weakrefset, which threading imports) to demandimport's
ignore list.
```
Traceback (most recent call last):
  File "/usr/bin/hg", line 57, in <module>
    from mercurial import dispatch
  File "<frozen importlib._bootstrap>", line 1360, in _find_and_load
  File "<frozen importlib._bootstrap>", line 1331, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 935, in _load_unlocked
  File "/usr/lib/python3/dist-packages/hgdemandimport/demandimportpy3.py", line 52, in exec_module
    super().exec_module(module)
  File "<frozen importlib.util>", line 257, in exec_module
  File "<frozen importlib._bootstrap>", line 1360, in _find_and_load
  File "<frozen importlib._bootstrap>", line 1331, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 935, in _load_unlocked
  File "/usr/lib/python3/dist-packages/hgdemandimport/demandimportpy3.py", line 52, in exec_module
    super().exec_module(module)
  File "<frozen importlib.util>", line 267, in exec_module
AttributeError: partially initialized module 'threading' has no attribute 'RLock' (most likely due to a circular import)
```
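
A minimal sketch of the ignore-list change described above, assuming the
ignore list is a plain set of module names (the set name IGNORES and the
elided existing entries are illustrative, not a quote of the actual patch):

```
# Illustrative sketch only: add the modules that LazyLoader.exec_module
# (and threading itself) now import eagerly to demandimport's ignore set,
# so they are never replaced by lazy proxies.  The set name IGNORES is an
# assumption about hgdemandimport's internals.
IGNORES = {
    # ... existing entries ...
    '_weakrefset',  # imported by threading
    'threading',    # imported by importlib.util.LazyLoader.exec_module
    'warnings',     # imported by threading
}
```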
Ref: https://github.com/python/cpython/issues/117983
https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1076449
https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1076747
| author | Julien Cristau <jcristau@debian.org> |
| --- | --- |
| date | Fri, 26 Jul 2024 10:52:28 +0200 |
| parents | d718eddf01d9 |
| children | 493034cc3265 |
```
# connectionpool.py - class for pooling peer connections for reuse
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from mercurial import (
    hg,
    sshpeer,
)

_sshv1peer = sshpeer.sshv1peer


class connectionpool:
    def __init__(self, repo):
        self._repo = repo
        self._pool = dict()

    def get(self, path):
        pathpool = self._pool.get(path)
        if pathpool is None:
            pathpool = list()
            self._pool[path] = pathpool

        conn = None
        if len(pathpool) > 0:
            try:
                conn = pathpool.pop()
                peer = conn.peer
                # If the connection has died, drop it
                if isinstance(peer, _sshv1peer):
                    if peer._subprocess.poll() is not None:
                        conn = None
            except IndexError:
                pass

        if conn is None:
            peer = hg.peer(self._repo.ui, {}, path)
            if hasattr(peer, '_cleanup'):

                class mypeer(peer.__class__):
                    def _cleanup(self, warn=None):
                        # close pipee first so peer.cleanup reading it won't
                        # deadlock, if there are other processes with pipeo
                        # open (i.e. us).
                        if hasattr(self, 'pipee'):
                            self.pipee.close()
                        return super(mypeer, self)._cleanup()

                peer.__class__ = mypeer

            conn = connection(pathpool, peer)

        return conn

    def close(self):
        for pathpool in self._pool.values():
            for conn in pathpool:
                conn.close()
            del pathpool[:]


class connection:
    def __init__(self, pool, peer):
        self._pool = pool
        self.peer = peer

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Only add the connection back to the pool if there was no exception,
        # since an exception could mean the connection is not in a reusable
        # state.
        if type is None:
            self._pool.append(self)
        else:
            self.close()

    def close(self):
        if hasattr(self.peer, 'cleanup'):
            self.peer.cleanup()
```
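
For context, a minimal usage sketch of the pool, assuming an already-open
repo object; the ssh URL below is a placeholder:

```
# Illustrative usage sketch: `repo` is assumed to be an existing
# localrepository object; the URL is a placeholder.
from hgext.remotefilelog.connectionpool import connectionpool

pool = connectionpool(repo)

# connection is a context manager: on a clean exit it is returned to the
# pool for reuse, on an exception it is closed instead.
with pool.get(b'ssh://hg.example.com/repo') as conn:
    caps = conn.peer.capabilities()

# Drop any pooled connections when the repo shuts down.
pool.close()
```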