Compare commits
40 Commits
1c2e174406
...
65370098af
Author | SHA1 | Date |
---|---|---|
|
65370098af | |
|
26e0bc8a45 | |
|
4df5ad147d | |
|
4a36c279a9 | |
|
5875f9b64e | |
|
25149d362b | |
|
463fea62dd | |
|
435058c0ad | |
|
8670dd7ecf | |
|
3fd59f6987 | |
|
7fc5dcd626 | |
|
3303ddee0c | |
|
72bf233e67 | |
|
70d4467c70 | |
|
72442cfce3 | |
|
17fdd55342 | |
|
0cf1edaf19 | |
|
07b1ecc490 | |
|
39d14c50a7 | |
|
0478ae7f80 | |
|
d09219269d | |
|
a712087ccf | |
|
8c7e404cc5 | |
|
d82adaa9b0 | |
|
3eee08cf65 | |
|
f3fb5fc907 | |
|
5311b6a7cb | |
|
4e2c1282dc | |
|
3699f1fcb2 | |
|
4c87ed9732 | |
|
e646ce5c0d | |
|
b6d800954a | |
|
beb7097ab4 | |
|
724c22d266 | |
|
ecd61226d8 | |
|
69fd46e1ce | |
|
af660c1019 | |
|
34e9e529d2 | |
|
816b82f9fe | |
|
e8111e40f9 |
|
@ -62,7 +62,9 @@ async def recv_and_spawn_net_killers(
|
||||||
await ctx.started()
|
await ctx.started()
|
||||||
async with (
|
async with (
|
||||||
ctx.open_stream() as stream,
|
ctx.open_stream() as stream,
|
||||||
trio.open_nursery() as n,
|
trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as tn,
|
||||||
):
|
):
|
||||||
async for i in stream:
|
async for i in stream:
|
||||||
print(f'child echoing {i}')
|
print(f'child echoing {i}')
|
||||||
|
@ -77,11 +79,11 @@ async def recv_and_spawn_net_killers(
|
||||||
i >= break_ipc_after
|
i >= break_ipc_after
|
||||||
):
|
):
|
||||||
broke_ipc = True
|
broke_ipc = True
|
||||||
n.start_soon(
|
tn.start_soon(
|
||||||
iter_ipc_stream,
|
iter_ipc_stream,
|
||||||
stream,
|
stream,
|
||||||
)
|
)
|
||||||
n.start_soon(
|
tn.start_soon(
|
||||||
partial(
|
partial(
|
||||||
break_ipc_then_error,
|
break_ipc_then_error,
|
||||||
stream=stream,
|
stream=stream,
|
||||||
|
|
|
@ -25,7 +25,7 @@ async def bp_then_error(
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
# sync with ``trio``-side (caller) task
|
# sync with `trio`-side (caller) task
|
||||||
to_trio.send_nowait('start')
|
to_trio.send_nowait('start')
|
||||||
|
|
||||||
# NOTE: what happens here inside the hook needs some refinement..
|
# NOTE: what happens here inside the hook needs some refinement..
|
||||||
|
@ -33,8 +33,7 @@ async def bp_then_error(
|
||||||
# we set `Lock.local_task_in_debug = 'sync'`, we probably want
|
# we set `Lock.local_task_in_debug = 'sync'`, we probably want
|
||||||
# some further, at least, meta-data about the task/actor in debug
|
# some further, at least, meta-data about the task/actor in debug
|
||||||
# in terms of making it clear it's `asyncio` mucking about.
|
# in terms of making it clear it's `asyncio` mucking about.
|
||||||
breakpoint()
|
breakpoint() # asyncio-side
|
||||||
|
|
||||||
|
|
||||||
# short checkpoint / delay
|
# short checkpoint / delay
|
||||||
await asyncio.sleep(0.5) # asyncio-side
|
await asyncio.sleep(0.5) # asyncio-side
|
||||||
|
@ -58,7 +57,6 @@ async def trio_ctx(
|
||||||
# this will block until the ``asyncio`` task sends a "first"
|
# this will block until the ``asyncio`` task sends a "first"
|
||||||
# message, see first line in above func.
|
# message, see first line in above func.
|
||||||
async with (
|
async with (
|
||||||
|
|
||||||
to_asyncio.open_channel_from(
|
to_asyncio.open_channel_from(
|
||||||
bp_then_error,
|
bp_then_error,
|
||||||
# raise_after_bp=not bp_before_started,
|
# raise_after_bp=not bp_before_started,
|
||||||
|
@ -69,7 +67,7 @@ async def trio_ctx(
|
||||||
assert first == 'start'
|
assert first == 'start'
|
||||||
|
|
||||||
if bp_before_started:
|
if bp_before_started:
|
||||||
await tractor.pause()
|
await tractor.pause() # trio-side
|
||||||
|
|
||||||
await ctx.started(first) # trio-side
|
await ctx.started(first) # trio-side
|
||||||
|
|
||||||
|
@ -111,7 +109,7 @@ async def main(
|
||||||
|
|
||||||
# pause in parent to ensure no cross-actor
|
# pause in parent to ensure no cross-actor
|
||||||
# locking problems exist!
|
# locking problems exist!
|
||||||
await tractor.pause()
|
await tractor.pause() # trio-root
|
||||||
|
|
||||||
if cancel_from_root:
|
if cancel_from_root:
|
||||||
await ctx.cancel()
|
await ctx.cancel()
|
||||||
|
|
|
@ -21,11 +21,13 @@ async def name_error():
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
"""Test breakpoint in a streaming actor.
|
'''
|
||||||
"""
|
Test breakpoint in a streaming actor.
|
||||||
|
|
||||||
|
'''
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
# loglevel='cancel',
|
loglevel='cancel',
|
||||||
# loglevel='devx',
|
# loglevel='devx',
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
|
|
|
@ -40,7 +40,7 @@ async def main():
|
||||||
"""
|
"""
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
# loglevel='cancel',
|
loglevel='devx',
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
# spawn both actors
|
# spawn both actors
|
||||||
|
|
|
@ -39,7 +39,6 @@ async def main(
|
||||||
loglevel='devx',
|
loglevel='devx',
|
||||||
) as an,
|
) as an,
|
||||||
):
|
):
|
||||||
|
|
||||||
ptl: tractor.Portal = await an.start_actor(
|
ptl: tractor.Portal = await an.start_actor(
|
||||||
'hanger',
|
'hanger',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
|
@ -54,13 +53,16 @@ async def main(
|
||||||
|
|
||||||
print(
|
print(
|
||||||
'Yo my child hanging..?\n'
|
'Yo my child hanging..?\n'
|
||||||
'Sending SIGUSR1 to see a tree-trace!\n'
|
# "i'm a user who wants to see a `stackscope` tree!\n"
|
||||||
)
|
)
|
||||||
|
|
||||||
# XXX simulate the wrapping test's "user actions"
|
# XXX simulate the wrapping test's "user actions"
|
||||||
# (i.e. if a human didn't run this manually but wants to
|
# (i.e. if a human didn't run this manually but wants to
|
||||||
# know what they should do to reproduce test behaviour)
|
# know what they should do to reproduce test behaviour)
|
||||||
if from_test:
|
if from_test:
|
||||||
|
print(
|
||||||
|
f'Sending SIGUSR1 to {cpid!r}!\n'
|
||||||
|
)
|
||||||
os.kill(
|
os.kill(
|
||||||
cpid,
|
cpid,
|
||||||
signal.SIGUSR1,
|
signal.SIGUSR1,
|
||||||
|
|
|
@ -91,7 +91,7 @@ async def main() -> list[int]:
|
||||||
an: ActorNursery
|
an: ActorNursery
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
loglevel='cancel',
|
loglevel='cancel',
|
||||||
debug_mode=True,
|
# debug_mode=True,
|
||||||
) as an:
|
) as an:
|
||||||
|
|
||||||
seed = int(1e3)
|
seed = int(1e3)
|
||||||
|
|
|
@ -3,20 +3,18 @@ import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
|
|
||||||
async def sleepy_jane():
|
async def sleepy_jane() -> None:
|
||||||
uid = tractor.current_actor().uid
|
uid: tuple = tractor.current_actor().uid
|
||||||
print(f'Yo i am actor {uid}')
|
print(f'Yo i am actor {uid}')
|
||||||
await trio.sleep_forever()
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
'''
|
'''
|
||||||
Spawn a flat actor cluster, with one process per
|
Spawn a flat actor cluster, with one process per detected core.
|
||||||
detected core.
|
|
||||||
|
|
||||||
'''
|
'''
|
||||||
portal_map: dict[str, tractor.Portal]
|
portal_map: dict[str, tractor.Portal]
|
||||||
results: dict[str, str]
|
|
||||||
|
|
||||||
# look at this hip new syntax!
|
# look at this hip new syntax!
|
||||||
async with (
|
async with (
|
||||||
|
@ -25,11 +23,16 @@ async def main():
|
||||||
modules=[__name__]
|
modules=[__name__]
|
||||||
) as portal_map,
|
) as portal_map,
|
||||||
|
|
||||||
trio.open_nursery() as n,
|
trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as tn,
|
||||||
):
|
):
|
||||||
|
|
||||||
for (name, portal) in portal_map.items():
|
for (name, portal) in portal_map.items():
|
||||||
n.start_soon(portal.run, sleepy_jane)
|
tn.start_soon(
|
||||||
|
portal.run,
|
||||||
|
sleepy_jane,
|
||||||
|
)
|
||||||
|
|
||||||
await trio.sleep(0.5)
|
await trio.sleep(0.5)
|
||||||
|
|
||||||
|
@ -41,4 +44,4 @@ if __name__ == '__main__':
|
||||||
try:
|
try:
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
pass
|
print('trio cancelled by KBI')
|
||||||
|
|
|
@ -32,25 +32,22 @@ classifiers = [
|
||||||
"Topic :: System :: Distributed Computing",
|
"Topic :: System :: Distributed Computing",
|
||||||
]
|
]
|
||||||
dependencies = [
|
dependencies = [
|
||||||
# trio runtime and friends
|
# trio runtime and friends
|
||||||
# (poetry) proper range specs,
|
# (poetry) proper range specs,
|
||||||
# https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5
|
# https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5
|
||||||
# TODO, for 3.13 we must go go `0.27` which means we have to
|
# TODO, for 3.13 we must go go `0.27` which means we have to
|
||||||
# disable strict egs or port to handling them internally!
|
# disable strict egs or port to handling them internally!
|
||||||
# trio='^0.27'
|
"trio>0.27",
|
||||||
"trio>=0.24,<0.25",
|
|
||||||
"tricycle>=0.4.1,<0.5",
|
"tricycle>=0.4.1,<0.5",
|
||||||
"trio-typing>=0.10.0,<0.11",
|
"trio-typing>=0.10.0,<0.11",
|
||||||
|
|
||||||
"wrapt>=1.16.0,<2",
|
"wrapt>=1.16.0,<2",
|
||||||
"colorlog>=6.8.2,<7",
|
"colorlog>=6.8.2,<7",
|
||||||
|
# built-in multi-actor `pdb` REPL
|
||||||
# built-in multi-actor `pdb` REPL
|
"pdbp>=1.6,<2", # windows only (from `pdbp`)
|
||||||
"pdbp>=1.5.0,<2",
|
"tabcompleter>=1.4.0",
|
||||||
|
# typed IPC msging
|
||||||
# typed IPC msging
|
# TODO, get back on release once 3.13 support is out!
|
||||||
# TODO, get back on release once 3.13 support is out!
|
"msgspec>=0.19.0",
|
||||||
"msgspec",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# ------ project ------
|
# ------ project ------
|
||||||
|
@ -65,30 +62,44 @@ dev = [
|
||||||
# `tractor.devx` tooling
|
# `tractor.devx` tooling
|
||||||
"greenback>=1.2.1,<2",
|
"greenback>=1.2.1,<2",
|
||||||
"stackscope>=0.2.2,<0.3",
|
"stackscope>=0.2.2,<0.3",
|
||||||
|
|
||||||
# xonsh usage/integration (namely as @goodboy's sh of choice Bp)
|
|
||||||
"xonsh>=0.19.1",
|
|
||||||
"xontrib-vox>=0.0.1,<0.0.2",
|
|
||||||
"prompt-toolkit>=3.0.43,<4",
|
|
||||||
"xonsh-vox-tabcomplete>=0.5,<0.6",
|
|
||||||
"pyperclip>=1.9.0",
|
"pyperclip>=1.9.0",
|
||||||
|
"prompt-toolkit>=3.0.50",
|
||||||
|
"xonsh>=0.19.2",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# ------ dependency-groups ------
|
||||||
|
|
||||||
[tool.uv.sources]
|
[tool.uv.sources]
|
||||||
msgspec = { git = "https://github.com/jcrist/msgspec.git" }
|
# XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)`
|
||||||
|
# for the `pp` alias..
|
||||||
|
# pdbp = { path = "../pdbp", editable = true }
|
||||||
|
|
||||||
# ------ tool.uv.sources ------
|
# ------ tool.uv.sources ------
|
||||||
# TODO, distributed (multi-host) extensions
|
# TODO, distributed (multi-host) extensions
|
||||||
# linux kernel networking
|
# linux kernel networking
|
||||||
# 'pyroute2
|
# 'pyroute2
|
||||||
|
|
||||||
|
# ------ tool.uv.sources ------
|
||||||
|
|
||||||
|
[tool.uv]
|
||||||
|
# XXX NOTE, prefer the sys python bc apparently the distis from
|
||||||
|
# `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s
|
||||||
|
# likely due to linking against `libedit` over `readline`..
|
||||||
|
# |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions
|
||||||
|
# |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux
|
||||||
|
#
|
||||||
|
# https://docs.astral.sh/uv/reference/settings/#python-preference
|
||||||
|
python-preference = 'system'
|
||||||
|
|
||||||
|
# ------ tool.uv ------
|
||||||
|
|
||||||
[tool.hatch.build.targets.sdist]
|
[tool.hatch.build.targets.sdist]
|
||||||
include = ["tractor"]
|
include = ["tractor"]
|
||||||
|
|
||||||
[tool.hatch.build.targets.wheel]
|
[tool.hatch.build.targets.wheel]
|
||||||
include = ["tractor"]
|
include = ["tractor"]
|
||||||
|
|
||||||
# ------ dependency-groups ------
|
# ------ tool.hatch ------
|
||||||
|
|
||||||
[tool.towncrier]
|
[tool.towncrier]
|
||||||
package = "tractor"
|
package = "tractor"
|
||||||
|
@ -138,3 +149,5 @@ log_cli = false
|
||||||
# TODO: maybe some of these layout choices?
|
# TODO: maybe some of these layout choices?
|
||||||
# https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules
|
# https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules
|
||||||
# pythonpath = "src"
|
# pythonpath = "src"
|
||||||
|
|
||||||
|
# ------ tool.pytest ------
|
||||||
|
|
|
@ -75,7 +75,10 @@ def pytest_configure(config):
|
||||||
|
|
||||||
@pytest.fixture(scope='session')
|
@pytest.fixture(scope='session')
|
||||||
def debug_mode(request):
|
def debug_mode(request):
|
||||||
return request.config.option.tractor_debug_mode
|
debug_mode: bool = request.config.option.tractor_debug_mode
|
||||||
|
# if debug_mode:
|
||||||
|
# breakpoint()
|
||||||
|
return debug_mode
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='session', autouse=True)
|
@pytest.fixture(scope='session', autouse=True)
|
||||||
|
@ -92,6 +95,12 @@ def spawn_backend(request) -> str:
|
||||||
return request.config.option.spawn_backend
|
return request.config.option.spawn_backend
|
||||||
|
|
||||||
|
|
||||||
|
# @pytest.fixture(scope='function', autouse=True)
|
||||||
|
# def debug_enabled(request) -> str:
|
||||||
|
# from tractor import _state
|
||||||
|
# if _state._runtime_vars['_debug_mode']:
|
||||||
|
# breakpoint()
|
||||||
|
|
||||||
_ci_env: bool = os.environ.get('CI', False)
|
_ci_env: bool = os.environ.get('CI', False)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ from tractor.devx._debug import (
|
||||||
_repl_fail_msg as _repl_fail_msg,
|
_repl_fail_msg as _repl_fail_msg,
|
||||||
_ctlc_ignore_header as _ctlc_ignore_header,
|
_ctlc_ignore_header as _ctlc_ignore_header,
|
||||||
)
|
)
|
||||||
from conftest import (
|
from ..conftest import (
|
||||||
_ci_env,
|
_ci_env,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -30,7 +30,7 @@ from conftest import (
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def spawn(
|
def spawn(
|
||||||
start_method,
|
start_method,
|
||||||
testdir: pytest.Testdir,
|
testdir: pytest.Pytester,
|
||||||
reg_addr: tuple[str, int],
|
reg_addr: tuple[str, int],
|
||||||
|
|
||||||
) -> Callable[[str], None]:
|
) -> Callable[[str], None]:
|
||||||
|
@ -44,16 +44,32 @@ def spawn(
|
||||||
'`pexpect` based tests only supported on `trio` backend'
|
'`pexpect` based tests only supported on `trio` backend'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def unset_colors():
|
||||||
|
'''
|
||||||
|
Python 3.13 introduced colored tracebacks that break patt
|
||||||
|
matching,
|
||||||
|
|
||||||
|
https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS
|
||||||
|
https://docs.python.org/3/using/cmdline.html#using-on-controlling-color
|
||||||
|
|
||||||
|
'''
|
||||||
|
import os
|
||||||
|
os.environ['PYTHON_COLORS'] = '0'
|
||||||
|
|
||||||
def _spawn(
|
def _spawn(
|
||||||
cmd: str,
|
cmd: str,
|
||||||
**mkcmd_kwargs,
|
**mkcmd_kwargs,
|
||||||
):
|
):
|
||||||
|
unset_colors()
|
||||||
return testdir.spawn(
|
return testdir.spawn(
|
||||||
cmd=mk_cmd(
|
cmd=mk_cmd(
|
||||||
cmd,
|
cmd,
|
||||||
**mkcmd_kwargs,
|
**mkcmd_kwargs,
|
||||||
),
|
),
|
||||||
expect_timeout=3,
|
expect_timeout=3,
|
||||||
|
# preexec_fn=unset_colors,
|
||||||
|
# ^TODO? get `pytest` core to expose underlying
|
||||||
|
# `pexpect.spawn()` stuff?
|
||||||
)
|
)
|
||||||
|
|
||||||
# such that test-dep can pass input script name.
|
# such that test-dep can pass input script name.
|
||||||
|
@ -83,6 +99,14 @@ def ctlc(
|
||||||
'https://github.com/goodboy/tractor/issues/320'
|
'https://github.com/goodboy/tractor/issues/320'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if mark.name == 'ctlcs_bish':
|
||||||
|
pytest.skip(
|
||||||
|
f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n'
|
||||||
|
f'The test and/or underlying example script can *sometimes* run fine '
|
||||||
|
f'locally but more then likely until the cpython peeps get their sh#$ together, '
|
||||||
|
f'this test will definitely not behave like `trio` under SIGINT..\n'
|
||||||
|
)
|
||||||
|
|
||||||
if use_ctlc:
|
if use_ctlc:
|
||||||
# XXX: disable pygments highlighting for auto-tests
|
# XXX: disable pygments highlighting for auto-tests
|
||||||
# since some envs (like actions CI) will struggle
|
# since some envs (like actions CI) will struggle
|
||||||
|
|
|
@ -29,7 +29,6 @@ from .conftest import (
|
||||||
_repl_fail_msg,
|
_repl_fail_msg,
|
||||||
)
|
)
|
||||||
from .conftest import (
|
from .conftest import (
|
||||||
_ci_env,
|
|
||||||
expect,
|
expect,
|
||||||
in_prompt_msg,
|
in_prompt_msg,
|
||||||
assert_before,
|
assert_before,
|
||||||
|
@ -310,10 +309,13 @@ def test_subactor_breakpoint(
|
||||||
child.expect(EOF)
|
child.expect(EOF)
|
||||||
|
|
||||||
assert in_prompt_msg(
|
assert in_prompt_msg(
|
||||||
child,
|
child, [
|
||||||
['RemoteActorError:',
|
'MessagingError:',
|
||||||
|
'RemoteActorError:',
|
||||||
"('breakpoint_forever'",
|
"('breakpoint_forever'",
|
||||||
'bdb.BdbQuit',]
|
'bdb.BdbQuit',
|
||||||
|
],
|
||||||
|
pause_on_false=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -6,6 +6,9 @@ All these tests can be understood (somewhat) by running the
|
||||||
equivalent `examples/debugging/` scripts manually.
|
equivalent `examples/debugging/` scripts manually.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
from contextlib import (
|
||||||
|
contextmanager as cm,
|
||||||
|
)
|
||||||
# from functools import partial
|
# from functools import partial
|
||||||
# import itertools
|
# import itertools
|
||||||
import time
|
import time
|
||||||
|
@ -15,7 +18,7 @@ import time
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from pexpect.exceptions import (
|
from pexpect.exceptions import (
|
||||||
# TIMEOUT,
|
TIMEOUT,
|
||||||
EOF,
|
EOF,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -32,7 +35,23 @@ from .conftest import (
|
||||||
# _repl_fail_msg,
|
# _repl_fail_msg,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@cm
|
||||||
|
def maybe_expect_timeout(
|
||||||
|
ctlc: bool = False,
|
||||||
|
) -> None:
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except TIMEOUT:
|
||||||
|
# breakpoint()
|
||||||
|
if ctlc:
|
||||||
|
pytest.xfail(
|
||||||
|
'Some kinda redic threading SIGINT bug i think?\n'
|
||||||
|
'See the notes in `examples/debugging/sync_bp.py`..\n'
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.ctlcs_bish
|
||||||
def test_pause_from_sync(
|
def test_pause_from_sync(
|
||||||
spawn,
|
spawn,
|
||||||
ctlc: bool,
|
ctlc: bool,
|
||||||
|
@ -67,10 +86,10 @@ def test_pause_from_sync(
|
||||||
child.expect(PROMPT)
|
child.expect(PROMPT)
|
||||||
|
|
||||||
# XXX shouldn't see gb loaded message with PDB loglevel!
|
# XXX shouldn't see gb loaded message with PDB loglevel!
|
||||||
assert not in_prompt_msg(
|
# assert not in_prompt_msg(
|
||||||
child,
|
# child,
|
||||||
['`greenback` portal opened!'],
|
# ['`greenback` portal opened!'],
|
||||||
)
|
# )
|
||||||
# should be same root task
|
# should be same root task
|
||||||
assert_before(
|
assert_before(
|
||||||
child,
|
child,
|
||||||
|
@ -162,7 +181,14 @@ def test_pause_from_sync(
|
||||||
)
|
)
|
||||||
|
|
||||||
child.sendline('c')
|
child.sendline('c')
|
||||||
child.expect(EOF)
|
|
||||||
|
# XXX TODO, weird threading bug it seems despite the
|
||||||
|
# `abandon_on_cancel: bool` setting to
|
||||||
|
# `trio.to_thread.run_sync()`..
|
||||||
|
with maybe_expect_timeout(
|
||||||
|
ctlc=ctlc,
|
||||||
|
):
|
||||||
|
child.expect(EOF)
|
||||||
|
|
||||||
|
|
||||||
def expect_any_of(
|
def expect_any_of(
|
||||||
|
@ -220,8 +246,10 @@ def expect_any_of(
|
||||||
return expected_patts
|
return expected_patts
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.ctlcs_bish
|
||||||
def test_sync_pause_from_aio_task(
|
def test_sync_pause_from_aio_task(
|
||||||
spawn,
|
spawn,
|
||||||
|
|
||||||
ctlc: bool
|
ctlc: bool
|
||||||
# ^TODO, fix for `asyncio`!!
|
# ^TODO, fix for `asyncio`!!
|
||||||
):
|
):
|
||||||
|
@ -270,10 +298,12 @@ def test_sync_pause_from_aio_task(
|
||||||
# error raised in `asyncio.Task`
|
# error raised in `asyncio.Task`
|
||||||
"raise ValueError('asyncio side error!')": [
|
"raise ValueError('asyncio side error!')": [
|
||||||
_crash_msg,
|
_crash_msg,
|
||||||
'return await chan.receive()', # `.to_asyncio` impl internals in tb
|
|
||||||
"<Task 'trio_ctx'",
|
"<Task 'trio_ctx'",
|
||||||
"@ ('aio_daemon'",
|
"@ ('aio_daemon'",
|
||||||
"ValueError: asyncio side error!",
|
"ValueError: asyncio side error!",
|
||||||
|
|
||||||
|
# XXX, we no longer show this frame by default!
|
||||||
|
# 'return await chan.receive()', # `.to_asyncio` impl internals in tb
|
||||||
],
|
],
|
||||||
|
|
||||||
# parent-side propagation via actor-nursery/portal
|
# parent-side propagation via actor-nursery/portal
|
||||||
|
@ -325,6 +355,7 @@ def test_sync_pause_from_aio_task(
|
||||||
)
|
)
|
||||||
|
|
||||||
child.sendline('c')
|
child.sendline('c')
|
||||||
|
# with maybe_expect_timeout():
|
||||||
child.expect(EOF)
|
child.expect(EOF)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -15,11 +15,18 @@ TODO:
|
||||||
'''
|
'''
|
||||||
import os
|
import os
|
||||||
import signal
|
import signal
|
||||||
|
import time
|
||||||
|
|
||||||
from .conftest import (
|
from .conftest import (
|
||||||
expect,
|
expect,
|
||||||
assert_before,
|
assert_before,
|
||||||
# in_prompt_msg,
|
in_prompt_msg,
|
||||||
|
PROMPT,
|
||||||
|
_pause_msg,
|
||||||
|
)
|
||||||
|
from pexpect.exceptions import (
|
||||||
|
# TIMEOUT,
|
||||||
|
EOF,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -47,41 +54,39 @@ def test_shield_pause(
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
script_pid: int = child.pid
|
||||||
print(
|
print(
|
||||||
'Sending SIGUSR1 to see a tree-trace!',
|
f'Sending SIGUSR1 to {script_pid}\n'
|
||||||
|
f'(kill -s SIGUSR1 {script_pid})\n'
|
||||||
)
|
)
|
||||||
os.kill(
|
os.kill(
|
||||||
child.pid,
|
script_pid,
|
||||||
signal.SIGUSR1,
|
signal.SIGUSR1,
|
||||||
)
|
)
|
||||||
|
time.sleep(0.2)
|
||||||
expect(
|
expect(
|
||||||
child,
|
child,
|
||||||
# end-of-tree delimiter
|
# end-of-tree delimiter
|
||||||
"------ \('root', ",
|
"end-of-\('root'",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert_before(
|
assert_before(
|
||||||
child,
|
child,
|
||||||
[
|
[
|
||||||
'Trying to dump `stackscope` tree..',
|
# 'Srying to dump `stackscope` tree..',
|
||||||
'Dumping `stackscope` tree for actor',
|
# 'Dumping `stackscope` tree for actor',
|
||||||
"('root'", # uid line
|
"('root'", # uid line
|
||||||
|
|
||||||
|
# TODO!? this used to show?
|
||||||
|
# -[ ] mk reproducable for @oremanj?
|
||||||
|
#
|
||||||
# parent block point (non-shielded)
|
# parent block point (non-shielded)
|
||||||
'await trio.sleep_forever() # in root',
|
# 'await trio.sleep_forever() # in root',
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
# expect(
|
|
||||||
# child,
|
|
||||||
# # relay to the sub should be reported
|
|
||||||
# 'Relaying `SIGUSR1`[10] to sub-actor',
|
|
||||||
# )
|
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
child,
|
child,
|
||||||
# end-of-tree delimiter
|
# end-of-tree delimiter
|
||||||
"------ \('hanger', ",
|
"end-of-\('hanger'",
|
||||||
)
|
)
|
||||||
assert_before(
|
assert_before(
|
||||||
child,
|
child,
|
||||||
|
@ -91,11 +96,11 @@ def test_shield_pause(
|
||||||
|
|
||||||
"('hanger'", # uid line
|
"('hanger'", # uid line
|
||||||
|
|
||||||
|
# TODO!? SEE ABOVE
|
||||||
# hanger LOC where it's shield-halted
|
# hanger LOC where it's shield-halted
|
||||||
'await trio.sleep_forever() # in subactor',
|
# 'await trio.sleep_forever() # in subactor',
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
# breakpoint()
|
|
||||||
|
|
||||||
# simulate the user sending a ctl-c to the hanging program.
|
# simulate the user sending a ctl-c to the hanging program.
|
||||||
# this should result in the terminator kicking in since
|
# this should result in the terminator kicking in since
|
||||||
|
@ -118,3 +123,50 @@ def test_shield_pause(
|
||||||
"'--uid', \"('hanger',",
|
"'--uid', \"('hanger',",
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_breakpoint_hook_restored(
|
||||||
|
spawn,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Ensures our actor runtime sets a custom `breakpoint()` hook
|
||||||
|
on open then restores the stdlib's default on close.
|
||||||
|
|
||||||
|
The hook state validation is done via `assert`s inside the
|
||||||
|
invoked script with only `breakpoint()` (not `tractor.pause()`)
|
||||||
|
calls used.
|
||||||
|
|
||||||
|
'''
|
||||||
|
child = spawn('restore_builtin_breakpoint')
|
||||||
|
|
||||||
|
child.expect(PROMPT)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
_pause_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
"first bp, tractor hook set",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
child.sendline('c')
|
||||||
|
child.expect(PROMPT)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
"last bp, stdlib hook restored",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# since the stdlib hook was already restored there should be NO
|
||||||
|
# `tractor` `log.pdb()` content from console!
|
||||||
|
assert not in_prompt_msg(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
_pause_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
child.sendline('c')
|
||||||
|
child.expect(EOF)
|
||||||
|
|
|
@ -3,7 +3,6 @@ Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la
|
||||||
cancelacion?..
|
cancelacion?..
|
||||||
|
|
||||||
'''
|
'''
|
||||||
import itertools
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from types import ModuleType
|
from types import ModuleType
|
||||||
|
|
||||||
|
@ -230,13 +229,10 @@ def test_ipc_channel_break_during_stream(
|
||||||
# get raw instance from pytest wrapper
|
# get raw instance from pytest wrapper
|
||||||
value = excinfo.value
|
value = excinfo.value
|
||||||
if isinstance(value, ExceptionGroup):
|
if isinstance(value, ExceptionGroup):
|
||||||
value = next(
|
excs = value.exceptions
|
||||||
itertools.dropwhile(
|
assert len(excs) == 1
|
||||||
lambda exc: not isinstance(exc, expect_final_exc),
|
final_exc = excs[0]
|
||||||
value.exceptions,
|
assert isinstance(final_exc, expect_final_exc)
|
||||||
)
|
|
||||||
)
|
|
||||||
assert value
|
|
||||||
|
|
||||||
|
|
||||||
@tractor.context
|
@tractor.context
|
||||||
|
@ -259,15 +255,16 @@ async def break_ipc_after_started(
|
||||||
|
|
||||||
def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages():
|
def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages():
|
||||||
'''
|
'''
|
||||||
Verify that is a subactor's IPC goes down just after bringing up a stream
|
Verify that is a subactor's IPC goes down just after bringing up
|
||||||
the parent can trigger a SIGINT and the child will be reaped out-of-IPC by
|
a stream the parent can trigger a SIGINT and the child will be
|
||||||
the localhost process supervision machinery: aka "zombie lord".
|
reaped out-of-IPC by the localhost process supervision machinery:
|
||||||
|
aka "zombie lord".
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
with trio.fail_after(3):
|
with trio.fail_after(3):
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as an:
|
||||||
portal = await n.start_actor(
|
portal = await an.start_actor(
|
||||||
'ipc_breaker',
|
'ipc_breaker',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
|
|
|
@ -307,7 +307,15 @@ async def inf_streamer(
|
||||||
|
|
||||||
async with (
|
async with (
|
||||||
ctx.open_stream() as stream,
|
ctx.open_stream() as stream,
|
||||||
trio.open_nursery() as tn,
|
|
||||||
|
# XXX TODO, INTERESTING CASE!!
|
||||||
|
# - if we don't collapse the eg then the embedded
|
||||||
|
# `trio.EndOfChannel` doesn't propagate directly to the above
|
||||||
|
# .open_stream() parent, resulting in it also raising instead
|
||||||
|
# of gracefully absorbing as normal.. so how to handle?
|
||||||
|
trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as tn,
|
||||||
):
|
):
|
||||||
async def close_stream_on_sentinel():
|
async def close_stream_on_sentinel():
|
||||||
async for msg in stream:
|
async for msg in stream:
|
||||||
|
|
|
@ -130,7 +130,7 @@ def test_multierror(
|
||||||
try:
|
try:
|
||||||
await portal2.result()
|
await portal2.result()
|
||||||
except tractor.RemoteActorError as err:
|
except tractor.RemoteActorError as err:
|
||||||
assert err.boxed_type == AssertionError
|
assert err.boxed_type is AssertionError
|
||||||
print("Look Maa that first actor failed hard, hehh")
|
print("Look Maa that first actor failed hard, hehh")
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
@ -182,7 +182,7 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay):
|
||||||
|
|
||||||
for exc in exceptions:
|
for exc in exceptions:
|
||||||
assert isinstance(exc, tractor.RemoteActorError)
|
assert isinstance(exc, tractor.RemoteActorError)
|
||||||
assert exc.boxed_type == AssertionError
|
assert exc.boxed_type is AssertionError
|
||||||
|
|
||||||
|
|
||||||
async def do_nothing():
|
async def do_nothing():
|
||||||
|
@ -504,7 +504,9 @@ def test_cancel_via_SIGINT_other_task(
|
||||||
if is_win(): # smh
|
if is_win(): # smh
|
||||||
timeout += 1
|
timeout += 1
|
||||||
|
|
||||||
async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED):
|
async def spawn_and_sleep_forever(
|
||||||
|
task_status=trio.TASK_STATUS_IGNORED
|
||||||
|
):
|
||||||
async with tractor.open_nursery() as tn:
|
async with tractor.open_nursery() as tn:
|
||||||
for i in range(3):
|
for i in range(3):
|
||||||
await tn.run_in_actor(
|
await tn.run_in_actor(
|
||||||
|
@ -517,7 +519,9 @@ def test_cancel_via_SIGINT_other_task(
|
||||||
async def main():
|
async def main():
|
||||||
# should never timeout since SIGINT should cancel the current program
|
# should never timeout since SIGINT should cancel the current program
|
||||||
with trio.fail_after(timeout):
|
with trio.fail_after(timeout):
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as n:
|
||||||
await n.start(spawn_and_sleep_forever)
|
await n.start(spawn_and_sleep_forever)
|
||||||
if 'mp' in spawn_backend:
|
if 'mp' in spawn_backend:
|
||||||
time.sleep(0.1)
|
time.sleep(0.1)
|
||||||
|
@ -610,6 +614,12 @@ def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon(
|
||||||
nurse.start_soon(delayed_kbi)
|
nurse.start_soon(delayed_kbi)
|
||||||
|
|
||||||
await p.run(do_nuthin)
|
await p.run(do_nuthin)
|
||||||
|
|
||||||
|
# need to explicitly re-raise the lone kbi..now
|
||||||
|
except* KeyboardInterrupt as kbi_eg:
|
||||||
|
assert (len(excs := kbi_eg.exceptions) == 1)
|
||||||
|
raise excs[0]
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
duration = time.time() - start
|
duration = time.time() - start
|
||||||
if duration > timeout:
|
if duration > timeout:
|
||||||
|
|
|
@ -874,13 +874,13 @@ def chk_pld_type(
|
||||||
return roundtrip
|
return roundtrip
|
||||||
|
|
||||||
|
|
||||||
def test_limit_msgspec():
|
def test_limit_msgspec(
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_root_actor(
|
async with tractor.open_root_actor(
|
||||||
debug_mode=True
|
debug_mode=debug_mode,
|
||||||
):
|
):
|
||||||
|
|
||||||
# ensure we can round-trip a boxing `PayloadMsg`
|
# ensure we can round-trip a boxing `PayloadMsg`
|
||||||
assert chk_pld_type(
|
assert chk_pld_type(
|
||||||
payload_spec=Any,
|
payload_spec=Any,
|
||||||
|
|
|
@ -95,8 +95,8 @@ async def trio_main(
|
||||||
|
|
||||||
# stash a "service nursery" as "actor local" (aka a Python global)
|
# stash a "service nursery" as "actor local" (aka a Python global)
|
||||||
global _nursery
|
global _nursery
|
||||||
n = _nursery
|
tn = _nursery
|
||||||
assert n
|
assert tn
|
||||||
|
|
||||||
async def consume_stream():
|
async def consume_stream():
|
||||||
async with wrapper_mngr() as stream:
|
async with wrapper_mngr() as stream:
|
||||||
|
@ -104,10 +104,10 @@ async def trio_main(
|
||||||
print(msg)
|
print(msg)
|
||||||
|
|
||||||
# run 2 tasks to ensure broadcaster chan use
|
# run 2 tasks to ensure broadcaster chan use
|
||||||
n.start_soon(consume_stream)
|
tn.start_soon(consume_stream)
|
||||||
n.start_soon(consume_stream)
|
tn.start_soon(consume_stream)
|
||||||
|
|
||||||
n.start_soon(trio_sleep_and_err)
|
tn.start_soon(trio_sleep_and_err)
|
||||||
|
|
||||||
await trio.sleep_forever()
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
@ -117,8 +117,10 @@ async def open_actor_local_nursery(
|
||||||
ctx: tractor.Context,
|
ctx: tractor.Context,
|
||||||
):
|
):
|
||||||
global _nursery
|
global _nursery
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery(
|
||||||
_nursery = n
|
strict_exception_groups=False,
|
||||||
|
) as tn:
|
||||||
|
_nursery = tn
|
||||||
await ctx.started()
|
await ctx.started()
|
||||||
await trio.sleep(10)
|
await trio.sleep(10)
|
||||||
# await trio.sleep(1)
|
# await trio.sleep(1)
|
||||||
|
@ -132,7 +134,7 @@ async def open_actor_local_nursery(
|
||||||
# never yields back.. aka a scenario where the
|
# never yields back.. aka a scenario where the
|
||||||
# ``tractor.context`` task IS NOT in the service n's cancel
|
# ``tractor.context`` task IS NOT in the service n's cancel
|
||||||
# scope.
|
# scope.
|
||||||
n.cancel_scope.cancel()
|
tn.cancel_scope.cancel()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
@ -157,7 +159,7 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio(
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as n:
|
||||||
p = await n.start_actor(
|
p = await n.start_actor(
|
||||||
'nursery_mngr',
|
'nursery_mngr',
|
||||||
infect_asyncio=asyncio_mode,
|
infect_asyncio=asyncio_mode, # TODO, is this enabling debug mode?
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
async with (
|
async with (
|
||||||
|
|
|
@ -181,7 +181,9 @@ async def spawn_and_check_registry(
|
||||||
|
|
||||||
try:
|
try:
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as n:
|
||||||
async with trio.open_nursery() as trion:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as trion:
|
||||||
|
|
||||||
portals = {}
|
portals = {}
|
||||||
for i in range(3):
|
for i in range(3):
|
||||||
|
@ -316,7 +318,9 @@ async def close_chans_before_nursery(
|
||||||
async with portal2.open_stream_from(
|
async with portal2.open_stream_from(
|
||||||
stream_forever
|
stream_forever
|
||||||
) as agen2:
|
) as agen2:
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as n:
|
||||||
n.start_soon(streamer, agen1)
|
n.start_soon(streamer, agen1)
|
||||||
n.start_soon(cancel, use_signal, .5)
|
n.start_soon(cancel, use_signal, .5)
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -19,7 +19,7 @@ from tractor._testing import (
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def run_example_in_subproc(
|
def run_example_in_subproc(
|
||||||
loglevel: str,
|
loglevel: str,
|
||||||
testdir: pytest.Testdir,
|
testdir: pytest.Pytester,
|
||||||
reg_addr: tuple[str, int],
|
reg_addr: tuple[str, int],
|
||||||
):
|
):
|
||||||
|
|
||||||
|
@ -81,28 +81,36 @@ def run_example_in_subproc(
|
||||||
|
|
||||||
# walk yields: (dirpath, dirnames, filenames)
|
# walk yields: (dirpath, dirnames, filenames)
|
||||||
[
|
[
|
||||||
(p[0], f) for p in os.walk(examples_dir()) for f in p[2]
|
(p[0], f)
|
||||||
|
for p in os.walk(examples_dir())
|
||||||
|
for f in p[2]
|
||||||
|
|
||||||
if '__' not in f
|
if (
|
||||||
and f[0] != '_'
|
'__' not in f
|
||||||
and 'debugging' not in p[0]
|
and f[0] != '_'
|
||||||
and 'integration' not in p[0]
|
and 'debugging' not in p[0]
|
||||||
and 'advanced_faults' not in p[0]
|
and 'integration' not in p[0]
|
||||||
and 'multihost' not in p[0]
|
and 'advanced_faults' not in p[0]
|
||||||
|
and 'multihost' not in p[0]
|
||||||
|
)
|
||||||
],
|
],
|
||||||
|
|
||||||
ids=lambda t: t[1],
|
ids=lambda t: t[1],
|
||||||
)
|
)
|
||||||
def test_example(run_example_in_subproc, example_script):
|
def test_example(
|
||||||
"""Load and run scripts from this repo's ``examples/`` dir as a user
|
run_example_in_subproc,
|
||||||
|
example_script,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Load and run scripts from this repo's ``examples/`` dir as a user
|
||||||
would copy and pasing them into their editor.
|
would copy and pasing them into their editor.
|
||||||
|
|
||||||
On windows a little more "finessing" is done to make
|
On windows a little more "finessing" is done to make
|
||||||
``multiprocessing`` play nice: we copy the ``__main__.py`` into the
|
``multiprocessing`` play nice: we copy the ``__main__.py`` into the
|
||||||
test directory and invoke the script as a module with ``python -m
|
test directory and invoke the script as a module with ``python -m
|
||||||
test_example``.
|
test_example``.
|
||||||
"""
|
|
||||||
ex_file = os.path.join(*example_script)
|
'''
|
||||||
|
ex_file: str = os.path.join(*example_script)
|
||||||
|
|
||||||
if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9):
|
if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9):
|
||||||
pytest.skip("2-way streaming example requires py3.9 async with syntax")
|
pytest.skip("2-way streaming example requires py3.9 async with syntax")
|
||||||
|
@ -128,7 +136,8 @@ def test_example(run_example_in_subproc, example_script):
|
||||||
# shouldn't eventually once we figure out what's
|
# shouldn't eventually once we figure out what's
|
||||||
# a better way to be explicit about aio side
|
# a better way to be explicit about aio side
|
||||||
# cancels?
|
# cancels?
|
||||||
and 'asyncio.exceptions.CancelledError' not in last_error
|
and
|
||||||
|
'asyncio.exceptions.CancelledError' not in last_error
|
||||||
):
|
):
|
||||||
raise Exception(errmsg)
|
raise Exception(errmsg)
|
||||||
|
|
||||||
|
|
|
@ -32,6 +32,16 @@ from tractor.trionics import BroadcastReceiver
|
||||||
from tractor._testing import expect_ctxc
|
from tractor._testing import expect_ctxc
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(
|
||||||
|
scope='module',
|
||||||
|
)
|
||||||
|
def delay(debug_mode: bool) -> int:
|
||||||
|
if debug_mode:
|
||||||
|
return 999
|
||||||
|
else:
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
async def sleep_and_err(
|
async def sleep_and_err(
|
||||||
sleep_for: float = 0.1,
|
sleep_for: float = 0.1,
|
||||||
|
|
||||||
|
@ -59,20 +69,26 @@ async def trio_cancels_single_aio_task():
|
||||||
await tractor.to_asyncio.run_task(aio_sleep_forever)
|
await tractor.to_asyncio.run_task(aio_sleep_forever)
|
||||||
|
|
||||||
|
|
||||||
def test_trio_cancels_aio_on_actor_side(reg_addr):
|
def test_trio_cancels_aio_on_actor_side(
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
delay: int,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
'''
|
'''
|
||||||
Spawn an infected actor that is cancelled by the ``trio`` side
|
Spawn an infected actor that is cancelled by the ``trio`` side
|
||||||
task using std cancel scope apis.
|
task using std cancel scope apis.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery(
|
with trio.fail_after(1 + delay):
|
||||||
registry_addrs=[reg_addr]
|
async with tractor.open_nursery(
|
||||||
) as n:
|
registry_addrs=[reg_addr],
|
||||||
await n.run_in_actor(
|
debug_mode=debug_mode,
|
||||||
trio_cancels_single_aio_task,
|
) as an:
|
||||||
infect_asyncio=True,
|
await an.run_in_actor(
|
||||||
)
|
trio_cancels_single_aio_task,
|
||||||
|
infect_asyncio=True,
|
||||||
|
)
|
||||||
|
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
|
@ -116,7 +132,10 @@ async def asyncio_actor(
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
def test_aio_simple_error(reg_addr):
|
def test_aio_simple_error(
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
'''
|
'''
|
||||||
Verify a simple remote asyncio error propagates back through trio
|
Verify a simple remote asyncio error propagates back through trio
|
||||||
to the parent actor.
|
to the parent actor.
|
||||||
|
@ -125,9 +144,10 @@ def test_aio_simple_error(reg_addr):
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
registry_addrs=[reg_addr]
|
registry_addrs=[reg_addr],
|
||||||
) as n:
|
debug_mode=debug_mode,
|
||||||
await n.run_in_actor(
|
) as an:
|
||||||
|
await an.run_in_actor(
|
||||||
asyncio_actor,
|
asyncio_actor,
|
||||||
target='sleep_and_err',
|
target='sleep_and_err',
|
||||||
expect_err='AssertionError',
|
expect_err='AssertionError',
|
||||||
|
@ -153,14 +173,19 @@ def test_aio_simple_error(reg_addr):
|
||||||
assert err.boxed_type is AssertionError
|
assert err.boxed_type is AssertionError
|
||||||
|
|
||||||
|
|
||||||
def test_tractor_cancels_aio(reg_addr):
|
def test_tractor_cancels_aio(
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
'''
|
'''
|
||||||
Verify we can cancel a spawned asyncio task gracefully.
|
Verify we can cancel a spawned asyncio task gracefully.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery(
|
||||||
portal = await n.run_in_actor(
|
debug_mode=debug_mode,
|
||||||
|
) as an:
|
||||||
|
portal = await an.run_in_actor(
|
||||||
asyncio_actor,
|
asyncio_actor,
|
||||||
target='aio_sleep_forever',
|
target='aio_sleep_forever',
|
||||||
expect_err='trio.Cancelled',
|
expect_err='trio.Cancelled',
|
||||||
|
@ -172,7 +197,9 @@ def test_tractor_cancels_aio(reg_addr):
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
def test_trio_cancels_aio(reg_addr):
|
def test_trio_cancels_aio(
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
):
|
||||||
'''
|
'''
|
||||||
Much like the above test with ``tractor.Portal.cancel_actor()``
|
Much like the above test with ``tractor.Portal.cancel_actor()``
|
||||||
except we just use a standard ``trio`` cancellation api.
|
except we just use a standard ``trio`` cancellation api.
|
||||||
|
@ -203,7 +230,8 @@ async def trio_ctx(
|
||||||
|
|
||||||
# this will block until the ``asyncio`` task sends a "first"
|
# this will block until the ``asyncio`` task sends a "first"
|
||||||
# message.
|
# message.
|
||||||
with trio.fail_after(2):
|
delay: int = 999 if tractor.debug_mode() else 1
|
||||||
|
with trio.fail_after(1 + delay):
|
||||||
try:
|
try:
|
||||||
async with (
|
async with (
|
||||||
trio.open_nursery(
|
trio.open_nursery(
|
||||||
|
@ -239,8 +267,10 @@ async def trio_ctx(
|
||||||
ids='parent_actor_cancels_child={}'.format
|
ids='parent_actor_cancels_child={}'.format
|
||||||
)
|
)
|
||||||
def test_context_spawns_aio_task_that_errors(
|
def test_context_spawns_aio_task_that_errors(
|
||||||
reg_addr,
|
reg_addr: tuple[str, int],
|
||||||
|
delay: int,
|
||||||
parent_cancels: bool,
|
parent_cancels: bool,
|
||||||
|
debug_mode: bool,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
Verify that spawning a task via an intertask channel ctx mngr that
|
Verify that spawning a task via an intertask channel ctx mngr that
|
||||||
|
@ -249,13 +279,13 @@ def test_context_spawns_aio_task_that_errors(
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
with trio.fail_after(2):
|
with trio.fail_after(1 + delay):
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as an:
|
||||||
p = await n.start_actor(
|
p = await an.start_actor(
|
||||||
'aio_daemon',
|
'aio_daemon',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
infect_asyncio=True,
|
infect_asyncio=True,
|
||||||
# debug_mode=True,
|
debug_mode=debug_mode,
|
||||||
loglevel='cancel',
|
loglevel='cancel',
|
||||||
)
|
)
|
||||||
async with (
|
async with (
|
||||||
|
@ -322,11 +352,12 @@ async def aio_cancel():
|
||||||
|
|
||||||
def test_aio_cancelled_from_aio_causes_trio_cancelled(
|
def test_aio_cancelled_from_aio_causes_trio_cancelled(
|
||||||
reg_addr: tuple,
|
reg_addr: tuple,
|
||||||
|
delay: int,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
When the `asyncio.Task` cancels itself the `trio` side cshould
|
When the `asyncio.Task` cancels itself the `trio` side should
|
||||||
also cancel and teardown and relay the cancellation cross-process
|
also cancel and teardown and relay the cancellation cross-process
|
||||||
to the caller (parent).
|
to the parent caller.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
|
@ -342,7 +373,7 @@ def test_aio_cancelled_from_aio_causes_trio_cancelled(
|
||||||
# NOTE: normally the `an.__aexit__()` waits on the
|
# NOTE: normally the `an.__aexit__()` waits on the
|
||||||
# portal's result but we do it explicitly here
|
# portal's result but we do it explicitly here
|
||||||
# to avoid indent levels.
|
# to avoid indent levels.
|
||||||
with trio.fail_after(1):
|
with trio.fail_after(1 + delay):
|
||||||
await p.wait_for_result()
|
await p.wait_for_result()
|
||||||
|
|
||||||
with pytest.raises(
|
with pytest.raises(
|
||||||
|
@ -353,11 +384,10 @@ def test_aio_cancelled_from_aio_causes_trio_cancelled(
|
||||||
# might get multiple `trio.Cancelled`s as well inside an inception
|
# might get multiple `trio.Cancelled`s as well inside an inception
|
||||||
err: RemoteActorError|ExceptionGroup = excinfo.value
|
err: RemoteActorError|ExceptionGroup = excinfo.value
|
||||||
if isinstance(err, ExceptionGroup):
|
if isinstance(err, ExceptionGroup):
|
||||||
err = next(itertools.dropwhile(
|
excs = err.exceptions
|
||||||
lambda exc: not isinstance(exc, tractor.RemoteActorError),
|
assert len(excs) == 1
|
||||||
err.exceptions
|
final_exc = excs[0]
|
||||||
))
|
assert isinstance(final_exc, tractor.RemoteActorError)
|
||||||
assert err
|
|
||||||
|
|
||||||
# relayed boxed error should be our `trio`-task's
|
# relayed boxed error should be our `trio`-task's
|
||||||
# cancel-signal-proxy-equivalent of `asyncio.CancelledError`.
|
# cancel-signal-proxy-equivalent of `asyncio.CancelledError`.
|
||||||
|
@ -370,15 +400,18 @@ async def no_to_trio_in_args():
|
||||||
|
|
||||||
|
|
||||||
async def push_from_aio_task(
|
async def push_from_aio_task(
|
||||||
|
|
||||||
sequence: Iterable,
|
sequence: Iterable,
|
||||||
to_trio: trio.abc.SendChannel,
|
to_trio: trio.abc.SendChannel,
|
||||||
expect_cancel: False,
|
expect_cancel: False,
|
||||||
fail_early: bool,
|
fail_early: bool,
|
||||||
|
exit_early: bool,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
# print('trying breakpoint')
|
||||||
|
# breakpoint()
|
||||||
|
|
||||||
# sync caller ctx manager
|
# sync caller ctx manager
|
||||||
to_trio.send_nowait(True)
|
to_trio.send_nowait(True)
|
||||||
|
|
||||||
|
@ -387,10 +420,27 @@ async def push_from_aio_task(
|
||||||
to_trio.send_nowait(i)
|
to_trio.send_nowait(i)
|
||||||
await asyncio.sleep(0.001)
|
await asyncio.sleep(0.001)
|
||||||
|
|
||||||
if i == 50 and fail_early:
|
if (
|
||||||
raise Exception
|
i == 50
|
||||||
|
):
|
||||||
|
if fail_early:
|
||||||
|
print('Raising exc from aio side!')
|
||||||
|
raise Exception
|
||||||
|
|
||||||
print('asyncio streamer complete!')
|
if exit_early:
|
||||||
|
# TODO? really you could enforce the same
|
||||||
|
# SC-proto we use for actors here with asyncio
|
||||||
|
# such that a Return[None] msg would be
|
||||||
|
# implicitly delivered to the trio side?
|
||||||
|
#
|
||||||
|
# XXX => this might be the end-all soln for
|
||||||
|
# converting any-inter-task system (regardless
|
||||||
|
# of maybe-remote runtime or language) to be
|
||||||
|
# SC-compat no?
|
||||||
|
print(f'asyncio breaking early @ {i!r}')
|
||||||
|
break
|
||||||
|
|
||||||
|
print('asyncio streaming complete!')
|
||||||
|
|
||||||
except asyncio.CancelledError:
|
except asyncio.CancelledError:
|
||||||
if not expect_cancel:
|
if not expect_cancel:
|
||||||
|
@ -402,9 +452,10 @@ async def push_from_aio_task(
|
||||||
|
|
||||||
|
|
||||||
async def stream_from_aio(
|
async def stream_from_aio(
|
||||||
exit_early: bool = False,
|
trio_exit_early: bool = False,
|
||||||
raise_err: bool = False,
|
trio_raise_err: bool = False,
|
||||||
aio_raise_err: bool = False,
|
aio_raise_err: bool = False,
|
||||||
|
aio_exit_early: bool = False,
|
||||||
fan_out: bool = False,
|
fan_out: bool = False,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
@ -417,8 +468,17 @@ async def stream_from_aio(
|
||||||
async with to_asyncio.open_channel_from(
|
async with to_asyncio.open_channel_from(
|
||||||
push_from_aio_task,
|
push_from_aio_task,
|
||||||
sequence=seq,
|
sequence=seq,
|
||||||
expect_cancel=raise_err or exit_early,
|
expect_cancel=trio_raise_err or trio_exit_early,
|
||||||
fail_early=aio_raise_err,
|
fail_early=aio_raise_err,
|
||||||
|
exit_early=aio_exit_early,
|
||||||
|
|
||||||
|
# such that we can test exit early cases
|
||||||
|
# for each side explicitly.
|
||||||
|
suppress_graceful_exits=(not(
|
||||||
|
aio_exit_early
|
||||||
|
or
|
||||||
|
trio_exit_early
|
||||||
|
))
|
||||||
|
|
||||||
) as (first, chan):
|
) as (first, chan):
|
||||||
|
|
||||||
|
@ -431,13 +491,19 @@ async def stream_from_aio(
|
||||||
],
|
],
|
||||||
):
|
):
|
||||||
async for value in chan:
|
async for value in chan:
|
||||||
print(f'trio received {value}')
|
print(f'trio received: {value!r}')
|
||||||
|
|
||||||
|
# XXX, debugging EoC not being handled correctly
|
||||||
|
# in `transate_aio_errors()`..
|
||||||
|
# if value is None:
|
||||||
|
# await tractor.pause(shield=True)
|
||||||
|
|
||||||
pulled.append(value)
|
pulled.append(value)
|
||||||
|
|
||||||
if value == 50:
|
if value == 50:
|
||||||
if raise_err:
|
if trio_raise_err:
|
||||||
raise Exception
|
raise Exception
|
||||||
elif exit_early:
|
elif trio_exit_early:
|
||||||
print('`consume()` breaking early!\n')
|
print('`consume()` breaking early!\n')
|
||||||
break
|
break
|
||||||
|
|
||||||
|
@ -454,11 +520,11 @@ async def stream_from_aio(
|
||||||
# tasks are joined..
|
# tasks are joined..
|
||||||
chan.subscribe() as br,
|
chan.subscribe() as br,
|
||||||
|
|
||||||
trio.open_nursery() as n,
|
trio.open_nursery() as tn,
|
||||||
):
|
):
|
||||||
# start 2nd task that get's broadcast the same
|
# start 2nd task that get's broadcast the same
|
||||||
# value set.
|
# value set.
|
||||||
n.start_soon(consume, br)
|
tn.start_soon(consume, br)
|
||||||
await consume(chan)
|
await consume(chan)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
@ -471,10 +537,14 @@ async def stream_from_aio(
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
|
|
||||||
if (
|
if not (
|
||||||
not raise_err and
|
trio_raise_err
|
||||||
not exit_early and
|
or
|
||||||
not aio_raise_err
|
trio_exit_early
|
||||||
|
or
|
||||||
|
aio_raise_err
|
||||||
|
or
|
||||||
|
aio_exit_early
|
||||||
):
|
):
|
||||||
if fan_out:
|
if fan_out:
|
||||||
# we get double the pulled values in the
|
# we get double the pulled values in the
|
||||||
|
@ -484,6 +554,7 @@ async def stream_from_aio(
|
||||||
assert list(sorted(pulled)) == expect
|
assert list(sorted(pulled)) == expect
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
# await tractor.pause()
|
||||||
assert pulled == expect
|
assert pulled == expect
|
||||||
else:
|
else:
|
||||||
assert not fan_out
|
assert not fan_out
|
||||||
|
@ -497,10 +568,13 @@ async def stream_from_aio(
|
||||||
'fan_out', [False, True],
|
'fan_out', [False, True],
|
||||||
ids='fan_out_w_chan_subscribe={}'.format
|
ids='fan_out_w_chan_subscribe={}'.format
|
||||||
)
|
)
|
||||||
def test_basic_interloop_channel_stream(reg_addr, fan_out):
|
def test_basic_interloop_channel_stream(
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
fan_out: bool,
|
||||||
|
):
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as an:
|
||||||
portal = await n.run_in_actor(
|
portal = await an.run_in_actor(
|
||||||
stream_from_aio,
|
stream_from_aio,
|
||||||
infect_asyncio=True,
|
infect_asyncio=True,
|
||||||
fan_out=fan_out,
|
fan_out=fan_out,
|
||||||
|
@ -514,10 +588,10 @@ def test_basic_interloop_channel_stream(reg_addr, fan_out):
|
||||||
# TODO: parametrize the above test and avoid the duplication here?
|
# TODO: parametrize the above test and avoid the duplication here?
|
||||||
def test_trio_error_cancels_intertask_chan(reg_addr):
|
def test_trio_error_cancels_intertask_chan(reg_addr):
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as an:
|
||||||
portal = await n.run_in_actor(
|
portal = await an.run_in_actor(
|
||||||
stream_from_aio,
|
stream_from_aio,
|
||||||
raise_err=True,
|
trio_raise_err=True,
|
||||||
infect_asyncio=True,
|
infect_asyncio=True,
|
||||||
)
|
)
|
||||||
# should trigger remote actor error
|
# should trigger remote actor error
|
||||||
|
@ -530,43 +604,116 @@ def test_trio_error_cancels_intertask_chan(reg_addr):
|
||||||
excinfo.value.boxed_type is Exception
|
excinfo.value.boxed_type is Exception
|
||||||
|
|
||||||
|
|
||||||
def test_trio_closes_early_and_channel_exits(
|
def test_trio_closes_early_causes_aio_checkpoint_raise(
|
||||||
reg_addr: tuple[str, int],
|
reg_addr: tuple[str, int],
|
||||||
|
delay: int,
|
||||||
|
debug_mode: bool,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
Check that if the `trio`-task "exits early" on `async for`ing the
|
Check that if the `trio`-task "exits early and silently" (in this
|
||||||
inter-task-channel (via a `break`) we exit silently from the
|
case during `async for`-ing the inter-task-channel via
|
||||||
`open_channel_from()` block and get a final `Return[None]` msg.
|
a `break`-from-loop), we raise `TrioTaskExited` on the
|
||||||
|
`asyncio`-side which also then bubbles up through the
|
||||||
|
`open_channel_from()` block indicating that the `asyncio.Task`
|
||||||
|
hit a ran another checkpoint despite the `trio.Task` exit.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
with trio.fail_after(2):
|
with trio.fail_after(1 + delay):
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
# debug_mode=True,
|
debug_mode=debug_mode,
|
||||||
# enable_stack_on_sig=True,
|
# enable_stack_on_sig=True,
|
||||||
) as n:
|
) as an:
|
||||||
portal = await n.run_in_actor(
|
portal = await an.run_in_actor(
|
||||||
stream_from_aio,
|
stream_from_aio,
|
||||||
exit_early=True,
|
trio_exit_early=True,
|
||||||
infect_asyncio=True,
|
infect_asyncio=True,
|
||||||
)
|
)
|
||||||
# should raise RAE diectly
|
# should raise RAE diectly
|
||||||
print('waiting on final infected subactor result..')
|
print('waiting on final infected subactor result..')
|
||||||
res: None = await portal.wait_for_result()
|
res: None = await portal.wait_for_result()
|
||||||
assert res is None
|
assert res is None
|
||||||
print('infected subactor returned result: {res!r}\n')
|
print(f'infected subactor returned result: {res!r}\n')
|
||||||
|
|
||||||
# should be a quiet exit on a simple channel exit
|
# should be a quiet exit on a simple channel exit
|
||||||
trio.run(
|
with pytest.raises(RemoteActorError) as excinfo:
|
||||||
main,
|
trio.run(main)
|
||||||
# strict_exception_groups=False,
|
|
||||||
)
|
# ensure remote error is an explicit `AsyncioCancelled` sub-type
|
||||||
|
# which indicates to the aio task that the trio side exited
|
||||||
|
# silently WITHOUT raising a `trio.Cancelled` (which would
|
||||||
|
# normally be raised instead as a `AsyncioCancelled`).
|
||||||
|
excinfo.value.boxed_type is to_asyncio.TrioTaskExited
|
||||||
|
|
||||||
|
|
||||||
def test_aio_errors_and_channel_propagates_and_closes(reg_addr):
|
def test_aio_exits_early_relays_AsyncioTaskExited(
|
||||||
|
# TODO, parametrize the 3 possible trio side conditions:
|
||||||
|
# - trio blocking on receive, aio exits early
|
||||||
|
# - trio cancelled AND aio exits early on its next tick
|
||||||
|
# - trio errors AND aio exits early on its next tick
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
debug_mode: bool,
|
||||||
|
delay: int,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Check that if the `asyncio`-task "exits early and silently" (in this
|
||||||
|
case during `push_from_aio_task()` pushing to the `InterLoopTaskChannel`
|
||||||
|
it `break`s from the loop), we raise `AsyncioTaskExited` on the
|
||||||
|
`trio`-side which then DOES NOT BUBBLE up through the
|
||||||
|
`open_channel_from()` block UNLESS,
|
||||||
|
|
||||||
|
- the trio.Task also errored/cancelled, in which case we wrap
|
||||||
|
both errors in an eg
|
||||||
|
- the trio.Task was blocking on rxing a value from the
|
||||||
|
`InterLoopTaskChannel`.
|
||||||
|
|
||||||
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery() as n:
|
with trio.fail_after(1 + delay):
|
||||||
portal = await n.run_in_actor(
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
# enable_stack_on_sig=True,
|
||||||
|
) as an:
|
||||||
|
portal = await an.run_in_actor(
|
||||||
|
stream_from_aio,
|
||||||
|
infect_asyncio=True,
|
||||||
|
trio_exit_early=False,
|
||||||
|
aio_exit_early=True,
|
||||||
|
)
|
||||||
|
# should raise RAE diectly
|
||||||
|
print('waiting on final infected subactor result..')
|
||||||
|
res: None = await portal.wait_for_result()
|
||||||
|
assert res is None
|
||||||
|
print(f'infected subactor returned result: {res!r}\n')
|
||||||
|
|
||||||
|
# should be a quiet exit on a simple channel exit
|
||||||
|
with pytest.raises(RemoteActorError) as excinfo:
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
exc = excinfo.value
|
||||||
|
|
||||||
|
# TODO, wow bug!
|
||||||
|
# -[ ] bp handler not replaced!?!?
|
||||||
|
# breakpoint()
|
||||||
|
|
||||||
|
# import pdbp; pdbp.set_trace()
|
||||||
|
|
||||||
|
# ensure remote error is an explicit `AsyncioCancelled` sub-type
|
||||||
|
# which indicates to the aio task that the trio side exited
|
||||||
|
# silently WITHOUT raising a `trio.Cancelled` (which would
|
||||||
|
# normally be raised instead as a `AsyncioCancelled`).
|
||||||
|
assert exc.boxed_type is to_asyncio.AsyncioTaskExited
|
||||||
|
|
||||||
|
|
||||||
|
def test_aio_errors_and_channel_propagates_and_closes(
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
) as an:
|
||||||
|
portal = await an.run_in_actor(
|
||||||
stream_from_aio,
|
stream_from_aio,
|
||||||
aio_raise_err=True,
|
aio_raise_err=True,
|
||||||
infect_asyncio=True,
|
infect_asyncio=True,
|
||||||
|
@ -592,7 +739,13 @@ async def aio_echo_server(
|
||||||
to_trio.send_nowait('start')
|
to_trio.send_nowait('start')
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
msg = await from_trio.get()
|
try:
|
||||||
|
msg = await from_trio.get()
|
||||||
|
except to_asyncio.TrioTaskExited:
|
||||||
|
print(
|
||||||
|
'breaking aio echo loop due to `trio` exit!'
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
# echo the msg back
|
# echo the msg back
|
||||||
to_trio.send_nowait(msg)
|
to_trio.send_nowait(msg)
|
||||||
|
@ -641,13 +794,15 @@ async def trio_to_aio_echo_server(
|
||||||
ids='raise_error={}'.format,
|
ids='raise_error={}'.format,
|
||||||
)
|
)
|
||||||
def test_echoserver_detailed_mechanics(
|
def test_echoserver_detailed_mechanics(
|
||||||
reg_addr,
|
reg_addr: tuple[str, int],
|
||||||
|
debug_mode: bool,
|
||||||
raise_error_mid_stream,
|
raise_error_mid_stream,
|
||||||
):
|
):
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery(
|
||||||
p = await n.start_actor(
|
debug_mode=debug_mode,
|
||||||
|
) as an:
|
||||||
|
p = await an.start_actor(
|
||||||
'aio_server',
|
'aio_server',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
infect_asyncio=True,
|
infect_asyncio=True,
|
||||||
|
@ -852,6 +1007,8 @@ def test_sigint_closes_lifetime_stack(
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
|
delay = 999 if tractor.debug_mode() else 1
|
||||||
try:
|
try:
|
||||||
an: tractor.ActorNursery
|
an: tractor.ActorNursery
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
|
@ -902,7 +1059,7 @@ def test_sigint_closes_lifetime_stack(
|
||||||
if wait_for_ctx:
|
if wait_for_ctx:
|
||||||
print('waiting for ctx outcome in parent..')
|
print('waiting for ctx outcome in parent..')
|
||||||
try:
|
try:
|
||||||
with trio.fail_after(1):
|
with trio.fail_after(1 + delay):
|
||||||
await ctx.wait_for_result()
|
await ctx.wait_for_result()
|
||||||
except tractor.ContextCancelled as ctxc:
|
except tractor.ContextCancelled as ctxc:
|
||||||
assert ctxc.canceller == ctx.chan.uid
|
assert ctxc.canceller == ctx.chan.uid
|
||||||
|
|
|
@ -170,7 +170,7 @@ def test_do_not_swallow_error_before_started_by_remote_contextcancelled(
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
rae = excinfo.value
|
rae = excinfo.value
|
||||||
assert rae.boxed_type == TypeError
|
assert rae.boxed_type is TypeError
|
||||||
|
|
||||||
|
|
||||||
@tractor.context
|
@tractor.context
|
||||||
|
|
|
@ -39,7 +39,7 @@ def test_infected_root_actor(
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async def _trio_main():
|
async def _trio_main():
|
||||||
with trio.fail_after(2):
|
with trio.fail_after(2 if not debug_mode else 999):
|
||||||
first: str
|
first: str
|
||||||
chan: to_asyncio.LinkedTaskChannel
|
chan: to_asyncio.LinkedTaskChannel
|
||||||
async with (
|
async with (
|
||||||
|
@ -59,7 +59,11 @@ def test_infected_root_actor(
|
||||||
assert out == i
|
assert out == i
|
||||||
print(f'asyncio echoing {i}')
|
print(f'asyncio echoing {i}')
|
||||||
|
|
||||||
if raise_error_mid_stream and i == 500:
|
if (
|
||||||
|
raise_error_mid_stream
|
||||||
|
and
|
||||||
|
i == 500
|
||||||
|
):
|
||||||
raise raise_error_mid_stream
|
raise raise_error_mid_stream
|
||||||
|
|
||||||
if out is None:
|
if out is None:
|
||||||
|
|
|
@ -2,7 +2,9 @@
|
||||||
Broadcast channels for fan-out to local tasks.
|
Broadcast channels for fan-out to local tasks.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from itertools import cycle
|
from itertools import cycle
|
||||||
import time
|
import time
|
||||||
|
@ -15,6 +17,7 @@ import tractor
|
||||||
from tractor.trionics import (
|
from tractor.trionics import (
|
||||||
broadcast_receiver,
|
broadcast_receiver,
|
||||||
Lagged,
|
Lagged,
|
||||||
|
collapse_eg,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -62,7 +65,7 @@ async def ensure_sequence(
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@acm
|
||||||
async def open_sequence_streamer(
|
async def open_sequence_streamer(
|
||||||
|
|
||||||
sequence: list[int],
|
sequence: list[int],
|
||||||
|
@ -74,9 +77,9 @@ async def open_sequence_streamer(
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=reg_addr,
|
arbiter_addr=reg_addr,
|
||||||
start_method=start_method,
|
start_method=start_method,
|
||||||
) as tn:
|
) as an:
|
||||||
|
|
||||||
portal = await tn.start_actor(
|
portal = await an.start_actor(
|
||||||
'sequence_echoer',
|
'sequence_echoer',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
|
@ -155,9 +158,12 @@ def test_consumer_and_parent_maybe_lag(
|
||||||
) as stream:
|
) as stream:
|
||||||
|
|
||||||
try:
|
try:
|
||||||
async with trio.open_nursery() as n:
|
async with (
|
||||||
|
collapse_eg(),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
|
||||||
n.start_soon(
|
tn.start_soon(
|
||||||
ensure_sequence,
|
ensure_sequence,
|
||||||
stream,
|
stream,
|
||||||
sequence.copy(),
|
sequence.copy(),
|
||||||
|
@ -230,8 +236,8 @@ def test_faster_task_to_recv_is_cancelled_by_slower(
|
||||||
|
|
||||||
) as stream:
|
) as stream:
|
||||||
|
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery() as tn:
|
||||||
n.start_soon(
|
tn.start_soon(
|
||||||
ensure_sequence,
|
ensure_sequence,
|
||||||
stream,
|
stream,
|
||||||
sequence.copy(),
|
sequence.copy(),
|
||||||
|
@ -253,7 +259,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower(
|
||||||
continue
|
continue
|
||||||
|
|
||||||
print('cancelling faster subtask')
|
print('cancelling faster subtask')
|
||||||
n.cancel_scope.cancel()
|
tn.cancel_scope.cancel()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
value = await stream.receive()
|
value = await stream.receive()
|
||||||
|
@ -371,13 +377,13 @@ def test_ensure_slow_consumers_lag_out(
|
||||||
f'on {lags}:{value}')
|
f'on {lags}:{value}')
|
||||||
return
|
return
|
||||||
|
|
||||||
async with trio.open_nursery() as nursery:
|
async with trio.open_nursery() as tn:
|
||||||
|
|
||||||
for i in range(1, num_laggers):
|
for i in range(1, num_laggers):
|
||||||
|
|
||||||
task_name = f'sub_{i}'
|
task_name = f'sub_{i}'
|
||||||
laggers[task_name] = 0
|
laggers[task_name] = 0
|
||||||
nursery.start_soon(
|
tn.start_soon(
|
||||||
partial(
|
partial(
|
||||||
sub_and_print,
|
sub_and_print,
|
||||||
delay=i*0.001,
|
delay=i*0.001,
|
||||||
|
@ -497,6 +503,7 @@ def test_no_raise_on_lag():
|
||||||
# internals when the no raise flag is set.
|
# internals when the no raise flag is set.
|
||||||
loglevel='warning',
|
loglevel='warning',
|
||||||
),
|
),
|
||||||
|
collapse_eg(),
|
||||||
trio.open_nursery() as n,
|
trio.open_nursery() as n,
|
||||||
):
|
):
|
||||||
n.start_soon(slow)
|
n.start_soon(slow)
|
||||||
|
|
|
@ -64,7 +64,9 @@ def test_stashed_child_nursery(use_start_soon):
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
async with (
|
async with (
|
||||||
trio.open_nursery() as pn,
|
trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as pn,
|
||||||
):
|
):
|
||||||
cn = await pn.start(mk_child_nursery)
|
cn = await pn.start(mk_child_nursery)
|
||||||
assert cn
|
assert cn
|
||||||
|
@ -101,6 +103,7 @@ def test_stashed_child_nursery(use_start_soon):
|
||||||
def test_acm_embedded_nursery_propagates_enter_err(
|
def test_acm_embedded_nursery_propagates_enter_err(
|
||||||
canc_from_finally: bool,
|
canc_from_finally: bool,
|
||||||
unmask_from_canc: bool,
|
unmask_from_canc: bool,
|
||||||
|
debug_mode: bool,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
Demo how a masking `trio.Cancelled` could be handled by unmasking from the
|
Demo how a masking `trio.Cancelled` could be handled by unmasking from the
|
||||||
|
@ -174,7 +177,9 @@ def test_acm_embedded_nursery_propagates_enter_err(
|
||||||
await trio.lowlevel.checkpoint()
|
await trio.lowlevel.checkpoint()
|
||||||
|
|
||||||
async def _main():
|
async def _main():
|
||||||
with tractor.devx.open_crash_handler() as bxerr:
|
with tractor.devx.maybe_open_crash_handler(
|
||||||
|
pdb=debug_mode,
|
||||||
|
) as bxerr:
|
||||||
assert not bxerr.value
|
assert not bxerr.value
|
||||||
|
|
||||||
async with (
|
async with (
|
||||||
|
|
|
@ -44,6 +44,7 @@ from ._state import (
|
||||||
current_actor as current_actor,
|
current_actor as current_actor,
|
||||||
is_root_process as is_root_process,
|
is_root_process as is_root_process,
|
||||||
current_ipc_ctx as current_ipc_ctx,
|
current_ipc_ctx as current_ipc_ctx,
|
||||||
|
debug_mode as debug_mode
|
||||||
)
|
)
|
||||||
from ._exceptions import (
|
from ._exceptions import (
|
||||||
ContextCancelled as ContextCancelled,
|
ContextCancelled as ContextCancelled,
|
||||||
|
@ -66,3 +67,4 @@ from ._root import (
|
||||||
from ._ipc import Channel as Channel
|
from ._ipc import Channel as Channel
|
||||||
from ._portal import Portal as Portal
|
from ._portal import Portal as Portal
|
||||||
from ._runtime import Actor as Actor
|
from ._runtime import Actor as Actor
|
||||||
|
from . import hilevel as hilevel
|
||||||
|
|
|
@ -19,10 +19,13 @@ Actor cluster helpers.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
from contextlib import (
|
||||||
from contextlib import asynccontextmanager as acm
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
from multiprocessing import cpu_count
|
from multiprocessing import cpu_count
|
||||||
from typing import AsyncGenerator, Optional
|
from typing import (
|
||||||
|
AsyncGenerator,
|
||||||
|
)
|
||||||
|
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
|
@ -950,7 +950,7 @@ class Context:
|
||||||
# f'Context.cancel() => {self.chan.uid}\n'
|
# f'Context.cancel() => {self.chan.uid}\n'
|
||||||
f'c)=> {self.chan.uid}\n'
|
f'c)=> {self.chan.uid}\n'
|
||||||
# f'{self.chan.uid}\n'
|
# f'{self.chan.uid}\n'
|
||||||
f' |_ @{self.dst_maddr}\n'
|
f' |_ @{self.dst_maddr}\n'
|
||||||
f' >> {self.repr_rpc}\n'
|
f' >> {self.repr_rpc}\n'
|
||||||
# f' >> {self._nsf}() -> {codec}[dict]:\n\n'
|
# f' >> {self._nsf}() -> {codec}[dict]:\n\n'
|
||||||
# TODO: pull msg-type from spec re #320
|
# TODO: pull msg-type from spec re #320
|
||||||
|
@ -1003,7 +1003,8 @@ class Context:
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
log.cancel(
|
log.cancel(
|
||||||
'Timed out on cancel request of remote task?\n'
|
f'Timed out on cancel request of remote task?\n'
|
||||||
|
f'\n'
|
||||||
f'{reminfo}'
|
f'{reminfo}'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -1560,12 +1561,12 @@ class Context:
|
||||||
strict_pld_parity=strict_pld_parity,
|
strict_pld_parity=strict_pld_parity,
|
||||||
hide_tb=hide_tb,
|
hide_tb=hide_tb,
|
||||||
)
|
)
|
||||||
except BaseException as err:
|
except BaseException as _bexc:
|
||||||
|
err = _bexc
|
||||||
if not isinstance(err, MsgTypeError):
|
if not isinstance(err, MsgTypeError):
|
||||||
__tracebackhide__: bool = False
|
__tracebackhide__: bool = False
|
||||||
|
|
||||||
raise
|
raise err
|
||||||
|
|
||||||
|
|
||||||
# TODO: maybe a flag to by-pass encode op if already done
|
# TODO: maybe a flag to by-pass encode op if already done
|
||||||
# here in caller?
|
# here in caller?
|
||||||
|
@ -1982,7 +1983,10 @@ async def open_context_from_portal(
|
||||||
ctxc_from_callee: ContextCancelled|None = None
|
ctxc_from_callee: ContextCancelled|None = None
|
||||||
try:
|
try:
|
||||||
async with (
|
async with (
|
||||||
trio.open_nursery() as tn,
|
trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as tn,
|
||||||
|
|
||||||
msgops.maybe_limit_plds(
|
msgops.maybe_limit_plds(
|
||||||
ctx=ctx,
|
ctx=ctx,
|
||||||
spec=ctx_meta.get('pld_spec'),
|
spec=ctx_meta.get('pld_spec'),
|
||||||
|
|
|
@ -238,7 +238,7 @@ def _trio_main(
|
||||||
nest_from_op(
|
nest_from_op(
|
||||||
input_op='>(', # see syntax ideas above
|
input_op='>(', # see syntax ideas above
|
||||||
tree_str=actor_info,
|
tree_str=actor_info,
|
||||||
back_from_op=1,
|
back_from_op=2, # since "complete"
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
logmeth = log.info
|
logmeth = log.info
|
||||||
|
|
|
@ -22,6 +22,7 @@ from __future__ import annotations
|
||||||
import builtins
|
import builtins
|
||||||
import importlib
|
import importlib
|
||||||
from pprint import pformat
|
from pprint import pformat
|
||||||
|
from pdb import bdb
|
||||||
import sys
|
import sys
|
||||||
from types import (
|
from types import (
|
||||||
TracebackType,
|
TracebackType,
|
||||||
|
@ -82,6 +83,48 @@ class InternalError(RuntimeError):
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
class AsyncioCancelled(Exception):
|
||||||
|
'''
|
||||||
|
Asyncio cancelled translation (non-base) error
|
||||||
|
for use with the ``to_asyncio`` module
|
||||||
|
to be raised in the ``trio`` side task
|
||||||
|
|
||||||
|
NOTE: this should NOT inherit from `asyncio.CancelledError` or
|
||||||
|
tests should break!
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncioTaskExited(Exception):
|
||||||
|
'''
|
||||||
|
asyncio.Task "exited" translation error for use with the
|
||||||
|
`to_asyncio` APIs to be raised in the `trio` side task indicating
|
||||||
|
on `.run_task()`/`.open_channel_from()` exit that the aio side
|
||||||
|
exited early/silently.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
class TrioCancelled(Exception):
|
||||||
|
'''
|
||||||
|
Trio cancelled translation (non-base) error
|
||||||
|
for use with the `to_asyncio` module
|
||||||
|
to be raised in the `asyncio.Task` to indicate
|
||||||
|
that the `trio` side raised `Cancelled` or an error.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
class TrioTaskExited(Exception):
|
||||||
|
'''
|
||||||
|
The `trio`-side task exited without explicitly cancelling the
|
||||||
|
`asyncio.Task` peer.
|
||||||
|
|
||||||
|
This is very similar to how `trio.ClosedResource` acts as
|
||||||
|
a "clean shutdown" signal to the consumer side of a mem-chan,
|
||||||
|
|
||||||
|
https://trio.readthedocs.io/en/stable/reference-core.html#clean-shutdown-with-channels
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
# NOTE: more or less should be close to these:
|
# NOTE: more or less should be close to these:
|
||||||
# 'boxed_type',
|
# 'boxed_type',
|
||||||
|
@ -127,8 +170,8 @@ _body_fields: list[str] = list(
|
||||||
|
|
||||||
def get_err_type(type_name: str) -> BaseException|None:
|
def get_err_type(type_name: str) -> BaseException|None:
|
||||||
'''
|
'''
|
||||||
Look up an exception type by name from the set of locally
|
Look up an exception type by name from the set of locally known
|
||||||
known namespaces:
|
namespaces:
|
||||||
|
|
||||||
- `builtins`
|
- `builtins`
|
||||||
- `tractor._exceptions`
|
- `tractor._exceptions`
|
||||||
|
@ -139,6 +182,7 @@ def get_err_type(type_name: str) -> BaseException|None:
|
||||||
builtins,
|
builtins,
|
||||||
_this_mod,
|
_this_mod,
|
||||||
trio,
|
trio,
|
||||||
|
bdb,
|
||||||
]:
|
]:
|
||||||
if type_ref := getattr(
|
if type_ref := getattr(
|
||||||
ns,
|
ns,
|
||||||
|
@ -358,6 +402,13 @@ class RemoteActorError(Exception):
|
||||||
self._ipc_msg.src_type_str
|
self._ipc_msg.src_type_str
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if not self._src_type:
|
||||||
|
raise TypeError(
|
||||||
|
f'Failed to lookup src error type with '
|
||||||
|
f'`tractor._exceptions.get_err_type()` :\n'
|
||||||
|
f'{self.src_type_str}'
|
||||||
|
)
|
||||||
|
|
||||||
return self._src_type
|
return self._src_type
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
@ -366,6 +417,9 @@ class RemoteActorError(Exception):
|
||||||
String-name of the (last hop's) boxed error type.
|
String-name of the (last hop's) boxed error type.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
# TODO, maybe support also serializing the
|
||||||
|
# `ExceptionGroup.exeptions: list[BaseException]` set under
|
||||||
|
# certain conditions?
|
||||||
bt: Type[BaseException] = self.boxed_type
|
bt: Type[BaseException] = self.boxed_type
|
||||||
if bt:
|
if bt:
|
||||||
return str(bt.__name__)
|
return str(bt.__name__)
|
||||||
|
@ -652,16 +706,10 @@ class RemoteActorError(Exception):
|
||||||
failing actor's remote env.
|
failing actor's remote env.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
src_type_ref: Type[BaseException] = self.src_type
|
|
||||||
if not src_type_ref:
|
|
||||||
raise TypeError(
|
|
||||||
'Failed to lookup src error type:\n'
|
|
||||||
f'{self.src_type_str}'
|
|
||||||
)
|
|
||||||
|
|
||||||
# TODO: better tb insertion and all the fancier dunder
|
# TODO: better tb insertion and all the fancier dunder
|
||||||
# metadata stuff as per `.__context__` etc. and friends:
|
# metadata stuff as per `.__context__` etc. and friends:
|
||||||
# https://github.com/python-trio/trio/issues/611
|
# https://github.com/python-trio/trio/issues/611
|
||||||
|
src_type_ref: Type[BaseException] = self.src_type
|
||||||
return src_type_ref(self.tb_str)
|
return src_type_ref(self.tb_str)
|
||||||
|
|
||||||
# TODO: local recontruction of nested inception for a given
|
# TODO: local recontruction of nested inception for a given
|
||||||
|
@ -787,8 +835,11 @@ class MsgTypeError(
|
||||||
'''
|
'''
|
||||||
if (
|
if (
|
||||||
(_bad_msg := self.msgdata.get('_bad_msg'))
|
(_bad_msg := self.msgdata.get('_bad_msg'))
|
||||||
and
|
and (
|
||||||
isinstance(_bad_msg, PayloadMsg)
|
isinstance(_bad_msg, PayloadMsg)
|
||||||
|
or
|
||||||
|
isinstance(_bad_msg, msgtypes.Start)
|
||||||
|
)
|
||||||
):
|
):
|
||||||
return _bad_msg
|
return _bad_msg
|
||||||
|
|
||||||
|
@ -981,18 +1032,6 @@ class MessagingError(Exception):
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
|
||||||
class AsyncioCancelled(Exception):
|
|
||||||
'''
|
|
||||||
Asyncio cancelled translation (non-base) error
|
|
||||||
for use with the ``to_asyncio`` module
|
|
||||||
to be raised in the ``trio`` side task
|
|
||||||
|
|
||||||
NOTE: this should NOT inherit from `asyncio.CancelledError` or
|
|
||||||
tests should break!
|
|
||||||
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
|
||||||
def pack_error(
|
def pack_error(
|
||||||
exc: BaseException|RemoteActorError,
|
exc: BaseException|RemoteActorError,
|
||||||
|
|
||||||
|
@ -1172,7 +1211,7 @@ def is_multi_cancelled(
|
||||||
trio.Cancelled in ignore_nested
|
trio.Cancelled in ignore_nested
|
||||||
# XXX always count-in `trio`'s native signal
|
# XXX always count-in `trio`'s native signal
|
||||||
):
|
):
|
||||||
ignore_nested |= {trio.Cancelled}
|
ignore_nested.update({trio.Cancelled})
|
||||||
|
|
||||||
if isinstance(exc, BaseExceptionGroup):
|
if isinstance(exc, BaseExceptionGroup):
|
||||||
matched_exc: BaseExceptionGroup|None = exc.subgroup(
|
matched_exc: BaseExceptionGroup|None = exc.subgroup(
|
||||||
|
|
|
@ -255,8 +255,8 @@ class MsgpackTCPStream(MsgTransport):
|
||||||
raise TransportClosed(
|
raise TransportClosed(
|
||||||
message=(
|
message=(
|
||||||
f'IPC transport already closed by peer\n'
|
f'IPC transport already closed by peer\n'
|
||||||
f'x)> {type(trans_err)}\n'
|
f'x]> {type(trans_err)}\n'
|
||||||
f' |_{self}\n'
|
f' |_{self}\n'
|
||||||
),
|
),
|
||||||
loglevel=loglevel,
|
loglevel=loglevel,
|
||||||
) from trans_err
|
) from trans_err
|
||||||
|
@ -273,8 +273,8 @@ class MsgpackTCPStream(MsgTransport):
|
||||||
raise TransportClosed(
|
raise TransportClosed(
|
||||||
message=(
|
message=(
|
||||||
f'IPC transport already manually closed locally?\n'
|
f'IPC transport already manually closed locally?\n'
|
||||||
f'x)> {type(closure_err)} \n'
|
f'x]> {type(closure_err)} \n'
|
||||||
f' |_{self}\n'
|
f' |_{self}\n'
|
||||||
),
|
),
|
||||||
loglevel='error',
|
loglevel='error',
|
||||||
raise_on_report=(
|
raise_on_report=(
|
||||||
|
@ -289,8 +289,8 @@ class MsgpackTCPStream(MsgTransport):
|
||||||
raise TransportClosed(
|
raise TransportClosed(
|
||||||
message=(
|
message=(
|
||||||
f'IPC transport already gracefully closed\n'
|
f'IPC transport already gracefully closed\n'
|
||||||
f')>\n'
|
f']>\n'
|
||||||
f'|_{self}\n'
|
f' |_{self}\n'
|
||||||
),
|
),
|
||||||
loglevel='transport',
|
loglevel='transport',
|
||||||
# cause=??? # handy or no?
|
# cause=??? # handy or no?
|
||||||
|
|
|
@ -533,6 +533,10 @@ async def open_portal(
|
||||||
async with maybe_open_nursery(
|
async with maybe_open_nursery(
|
||||||
tn,
|
tn,
|
||||||
shield=shield,
|
shield=shield,
|
||||||
|
strict_exception_groups=False,
|
||||||
|
# ^XXX^ TODO? soo roll our own then ??
|
||||||
|
# -> since we kinda want the "if only one `.exception` then
|
||||||
|
# just raise that" interface?
|
||||||
) as tn:
|
) as tn:
|
||||||
|
|
||||||
if not channel.connected():
|
if not channel.connected():
|
||||||
|
|
|
@ -111,8 +111,8 @@ async def open_root_actor(
|
||||||
Runtime init entry point for ``tractor``.
|
Runtime init entry point for ``tractor``.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
__tracebackhide__: bool = hide_tb
|
|
||||||
_debug.hide_runtime_frames()
|
_debug.hide_runtime_frames()
|
||||||
|
__tracebackhide__: bool = hide_tb
|
||||||
|
|
||||||
# TODO: stick this in a `@cm` defined in `devx._debug`?
|
# TODO: stick this in a `@cm` defined in `devx._debug`?
|
||||||
#
|
#
|
||||||
|
@ -362,7 +362,10 @@ async def open_root_actor(
|
||||||
)
|
)
|
||||||
|
|
||||||
# start the actor runtime in a new task
|
# start the actor runtime in a new task
|
||||||
async with trio.open_nursery() as nursery:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
|
||||||
|
) as nursery:
|
||||||
|
|
||||||
# ``_runtime.async_main()`` creates an internal nursery
|
# ``_runtime.async_main()`` creates an internal nursery
|
||||||
# and blocks here until any underlying actor(-process)
|
# and blocks here until any underlying actor(-process)
|
||||||
|
@ -387,6 +390,12 @@ async def open_root_actor(
|
||||||
BaseExceptionGroup,
|
BaseExceptionGroup,
|
||||||
) as err:
|
) as err:
|
||||||
|
|
||||||
|
# TODO, in beginning to handle the subsubactor with
|
||||||
|
# crashed grandparent cases..
|
||||||
|
#
|
||||||
|
# was_locked: bool = await _debug.maybe_wait_for_debugger(
|
||||||
|
# child_in_debug=True,
|
||||||
|
# )
|
||||||
# XXX NOTE XXX see equiv note inside
|
# XXX NOTE XXX see equiv note inside
|
||||||
# `._runtime.Actor._stream_handler()` where in the
|
# `._runtime.Actor._stream_handler()` where in the
|
||||||
# non-root or root-that-opened-this-mahually case we
|
# non-root or root-that-opened-this-mahually case we
|
||||||
|
@ -457,12 +466,19 @@ def run_daemon(
|
||||||
|
|
||||||
start_method: str | None = None,
|
start_method: str | None = None,
|
||||||
debug_mode: bool = False,
|
debug_mode: bool = False,
|
||||||
|
|
||||||
|
# TODO, support `infected_aio=True` mode by,
|
||||||
|
# - calling the appropriate entrypoint-func from `.to_asyncio`
|
||||||
|
# - maybe init-ing `greenback` as done above in
|
||||||
|
# `open_root_actor()`.
|
||||||
|
|
||||||
**kwargs
|
**kwargs
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
Spawn daemon actor which will respond to RPC; the main task simply
|
Spawn a root (daemon) actor which will respond to RPC; the main
|
||||||
starts the runtime and then sleeps forever.
|
task simply starts the runtime and then blocks via embedded
|
||||||
|
`trio.sleep_forever()`.
|
||||||
|
|
||||||
This is a very minimal convenience wrapper around starting
|
This is a very minimal convenience wrapper around starting
|
||||||
a "run-until-cancelled" root actor which can be started with a set
|
a "run-until-cancelled" root actor which can be started with a set
|
||||||
|
@ -475,7 +491,6 @@ def run_daemon(
|
||||||
importlib.import_module(path)
|
importlib.import_module(path)
|
||||||
|
|
||||||
async def _main():
|
async def _main():
|
||||||
|
|
||||||
async with open_root_actor(
|
async with open_root_actor(
|
||||||
registry_addrs=registry_addrs,
|
registry_addrs=registry_addrs,
|
||||||
name=name,
|
name=name,
|
||||||
|
|
|
@ -620,7 +620,11 @@ async def _invoke(
|
||||||
tn: trio.Nursery
|
tn: trio.Nursery
|
||||||
rpc_ctx_cs: CancelScope
|
rpc_ctx_cs: CancelScope
|
||||||
async with (
|
async with (
|
||||||
trio.open_nursery() as tn,
|
trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
|
||||||
|
|
||||||
|
) as tn,
|
||||||
msgops.maybe_limit_plds(
|
msgops.maybe_limit_plds(
|
||||||
ctx=ctx,
|
ctx=ctx,
|
||||||
spec=ctx_meta.get('pld_spec'),
|
spec=ctx_meta.get('pld_spec'),
|
||||||
|
@ -733,8 +737,8 @@ async def _invoke(
|
||||||
# XXX: do we ever trigger this block any more?
|
# XXX: do we ever trigger this block any more?
|
||||||
except (
|
except (
|
||||||
BaseExceptionGroup,
|
BaseExceptionGroup,
|
||||||
trio.Cancelled,
|
|
||||||
BaseException,
|
BaseException,
|
||||||
|
trio.Cancelled,
|
||||||
|
|
||||||
) as scope_error:
|
) as scope_error:
|
||||||
if (
|
if (
|
||||||
|
@ -847,8 +851,8 @@ async def try_ship_error_to_remote(
|
||||||
log.critical(
|
log.critical(
|
||||||
'IPC transport failure -> '
|
'IPC transport failure -> '
|
||||||
f'failed to ship error to {remote_descr}!\n\n'
|
f'failed to ship error to {remote_descr}!\n\n'
|
||||||
f'X=> {channel.uid}\n\n'
|
f'{type(msg)!r}[{msg.boxed_type_str}] X=> {channel.uid}\n'
|
||||||
|
f'\n'
|
||||||
# TODO: use `.msg.preetty_struct` for this!
|
# TODO: use `.msg.preetty_struct` for this!
|
||||||
f'{msg}\n'
|
f'{msg}\n'
|
||||||
)
|
)
|
||||||
|
|
|
@ -1283,7 +1283,8 @@ class Actor:
|
||||||
msg: str = (
|
msg: str = (
|
||||||
f'Actor-runtime cancel request from {requester_type}\n\n'
|
f'Actor-runtime cancel request from {requester_type}\n\n'
|
||||||
f'<=c) {requesting_uid}\n'
|
f'<=c) {requesting_uid}\n'
|
||||||
f' |_{self}\n'
|
f' |_{self}\n'
|
||||||
|
f'\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
# TODO: what happens here when we self-cancel tho?
|
# TODO: what happens here when we self-cancel tho?
|
||||||
|
@ -1303,13 +1304,15 @@ class Actor:
|
||||||
lock_req_ctx.has_outcome
|
lock_req_ctx.has_outcome
|
||||||
):
|
):
|
||||||
msg += (
|
msg += (
|
||||||
'-> Cancelling active debugger request..\n'
|
f'\n'
|
||||||
|
f'-> Cancelling active debugger request..\n'
|
||||||
f'|_{_debug.Lock.repr()}\n\n'
|
f'|_{_debug.Lock.repr()}\n\n'
|
||||||
f'|_{lock_req_ctx}\n\n'
|
f'|_{lock_req_ctx}\n\n'
|
||||||
)
|
)
|
||||||
# lock_req_ctx._scope.cancel()
|
# lock_req_ctx._scope.cancel()
|
||||||
# TODO: wrap this in a method-API..
|
# TODO: wrap this in a method-API..
|
||||||
debug_req.req_cs.cancel()
|
debug_req.req_cs.cancel()
|
||||||
|
# if lock_req_ctx:
|
||||||
|
|
||||||
# self-cancel **all** ongoing RPC tasks
|
# self-cancel **all** ongoing RPC tasks
|
||||||
await self.cancel_rpc_tasks(
|
await self.cancel_rpc_tasks(
|
||||||
|
@ -1718,11 +1721,15 @@ async def async_main(
|
||||||
# parent is kept alive as a resilient service until
|
# parent is kept alive as a resilient service until
|
||||||
# cancellation steps have (mostly) occurred in
|
# cancellation steps have (mostly) occurred in
|
||||||
# a deterministic way.
|
# a deterministic way.
|
||||||
async with trio.open_nursery() as root_nursery:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as root_nursery:
|
||||||
actor._root_n = root_nursery
|
actor._root_n = root_nursery
|
||||||
assert actor._root_n
|
assert actor._root_n
|
||||||
|
|
||||||
async with trio.open_nursery() as service_nursery:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as service_nursery:
|
||||||
# This nursery is used to handle all inbound
|
# This nursery is used to handle all inbound
|
||||||
# connections to us such that if the TCP server
|
# connections to us such that if the TCP server
|
||||||
# is killed, connections can continue to process
|
# is killed, connections can continue to process
|
||||||
|
|
|
@ -327,9 +327,10 @@ async def soft_kill(
|
||||||
uid: tuple[str, str] = portal.channel.uid
|
uid: tuple[str, str] = portal.channel.uid
|
||||||
try:
|
try:
|
||||||
log.cancel(
|
log.cancel(
|
||||||
'Soft killing sub-actor via portal request\n'
|
f'Soft killing sub-actor via portal request\n'
|
||||||
f'c)> {portal.chan.uid}\n'
|
f'\n'
|
||||||
f' |_{proc}\n'
|
f'(c=> {portal.chan.uid}\n'
|
||||||
|
f' |_{proc}\n'
|
||||||
)
|
)
|
||||||
# wait on sub-proc to signal termination
|
# wait on sub-proc to signal termination
|
||||||
await wait_func(proc)
|
await wait_func(proc)
|
||||||
|
|
|
@ -108,6 +108,7 @@ def is_main_process() -> bool:
|
||||||
return mp.current_process().name == 'MainProcess'
|
return mp.current_process().name == 'MainProcess'
|
||||||
|
|
||||||
|
|
||||||
|
# TODO, more verby name?
|
||||||
def debug_mode() -> bool:
|
def debug_mode() -> bool:
|
||||||
'''
|
'''
|
||||||
Bool determining if "debug mode" is on which enables
|
Bool determining if "debug mode" is on which enables
|
||||||
|
|
|
@ -376,7 +376,7 @@ class MsgStream(trio.abc.Channel):
|
||||||
f'Stream self-closed by {self._ctx.side!r}-side before EoC\n'
|
f'Stream self-closed by {self._ctx.side!r}-side before EoC\n'
|
||||||
# } bc a stream is a "scope"/msging-phase inside an IPC
|
# } bc a stream is a "scope"/msging-phase inside an IPC
|
||||||
f'x}}>\n'
|
f'x}}>\n'
|
||||||
f'|_{self}\n'
|
f' |_{self}\n'
|
||||||
)
|
)
|
||||||
log.cancel(message)
|
log.cancel(message)
|
||||||
self._eoc = trio.EndOfChannel(message)
|
self._eoc = trio.EndOfChannel(message)
|
||||||
|
|
|
@ -395,17 +395,23 @@ async def _open_and_supervise_one_cancels_all_nursery(
|
||||||
# `ActorNursery.start_actor()`).
|
# `ActorNursery.start_actor()`).
|
||||||
|
|
||||||
# errors from this daemon actor nursery bubble up to caller
|
# errors from this daemon actor nursery bubble up to caller
|
||||||
async with trio.open_nursery() as da_nursery:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
|
||||||
|
) as da_nursery:
|
||||||
try:
|
try:
|
||||||
# This is the inner level "run in actor" nursery. It is
|
# This is the inner level "run in actor" nursery. It is
|
||||||
# awaited first since actors spawned in this way (using
|
# awaited first since actors spawned in this way (using
|
||||||
# ``ActorNusery.run_in_actor()``) are expected to only
|
# `ActorNusery.run_in_actor()`) are expected to only
|
||||||
# return a single result and then complete (i.e. be canclled
|
# return a single result and then complete (i.e. be canclled
|
||||||
# gracefully). Errors collected from these actors are
|
# gracefully). Errors collected from these actors are
|
||||||
# immediately raised for handling by a supervisor strategy.
|
# immediately raised for handling by a supervisor strategy.
|
||||||
# As such if the strategy propagates any error(s) upwards
|
# As such if the strategy propagates any error(s) upwards
|
||||||
# the above "daemon actor" nursery will be notified.
|
# the above "daemon actor" nursery will be notified.
|
||||||
async with trio.open_nursery() as ria_nursery:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
|
||||||
|
) as ria_nursery:
|
||||||
|
|
||||||
an = ActorNursery(
|
an = ActorNursery(
|
||||||
actor,
|
actor,
|
||||||
|
@ -472,8 +478,8 @@ async def _open_and_supervise_one_cancels_all_nursery(
|
||||||
ContextCancelled,
|
ContextCancelled,
|
||||||
}:
|
}:
|
||||||
log.cancel(
|
log.cancel(
|
||||||
'Actor-nursery caught remote cancellation\n\n'
|
'Actor-nursery caught remote cancellation\n'
|
||||||
|
'\n'
|
||||||
f'{inner_err.tb_str}'
|
f'{inner_err.tb_str}'
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
@ -565,7 +571,9 @@ async def _open_and_supervise_one_cancels_all_nursery(
|
||||||
@acm
|
@acm
|
||||||
# @api_frame
|
# @api_frame
|
||||||
async def open_nursery(
|
async def open_nursery(
|
||||||
|
hide_tb: bool = True,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
|
# ^TODO, paramspec for `open_root_actor()`
|
||||||
|
|
||||||
) -> typing.AsyncGenerator[ActorNursery, None]:
|
) -> typing.AsyncGenerator[ActorNursery, None]:
|
||||||
'''
|
'''
|
||||||
|
@ -583,7 +591,7 @@ async def open_nursery(
|
||||||
which cancellation scopes correspond to each spawned subactor set.
|
which cancellation scopes correspond to each spawned subactor set.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
__tracebackhide__: bool = True
|
__tracebackhide__: bool = hide_tb
|
||||||
implicit_runtime: bool = False
|
implicit_runtime: bool = False
|
||||||
actor: Actor = current_actor(err_on_no_runtime=False)
|
actor: Actor = current_actor(err_on_no_runtime=False)
|
||||||
an: ActorNursery|None = None
|
an: ActorNursery|None = None
|
||||||
|
@ -599,7 +607,10 @@ async def open_nursery(
|
||||||
# mark us for teardown on exit
|
# mark us for teardown on exit
|
||||||
implicit_runtime: bool = True
|
implicit_runtime: bool = True
|
||||||
|
|
||||||
async with open_root_actor(**kwargs) as actor:
|
async with open_root_actor(
|
||||||
|
hide_tb=hide_tb,
|
||||||
|
**kwargs,
|
||||||
|
) as actor:
|
||||||
assert actor is current_actor()
|
assert actor is current_actor()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -637,8 +648,10 @@ async def open_nursery(
|
||||||
# show frame on any internal runtime-scope error
|
# show frame on any internal runtime-scope error
|
||||||
if (
|
if (
|
||||||
an
|
an
|
||||||
and not an.cancelled
|
and
|
||||||
and an._scope_error
|
not an.cancelled
|
||||||
|
and
|
||||||
|
an._scope_error
|
||||||
):
|
):
|
||||||
__tracebackhide__: bool = False
|
__tracebackhide__: bool = False
|
||||||
|
|
||||||
|
|
|
@ -19,7 +19,10 @@ Various helpers/utils for auditing your `tractor` app and/or the
|
||||||
core runtime.
|
core runtime.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from contextlib import asynccontextmanager as acm
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
|
|
||||||
import tractor
|
import tractor
|
||||||
|
@ -59,7 +62,12 @@ def mk_cmd(
|
||||||
exs_subpath: str = 'debugging',
|
exs_subpath: str = 'debugging',
|
||||||
) -> str:
|
) -> str:
|
||||||
'''
|
'''
|
||||||
Generate a shell command suitable to pass to ``pexpect.spawn()``.
|
Generate a shell command suitable to pass to `pexpect.spawn()`
|
||||||
|
which runs the script as a python program's entrypoint.
|
||||||
|
|
||||||
|
In particular ensure we disable the new tb coloring via unsetting
|
||||||
|
`$PYTHON_COLORS` so that `pexpect` can pattern match without
|
||||||
|
color-escape-codes.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
script_path: pathlib.Path = (
|
script_path: pathlib.Path = (
|
||||||
|
@ -67,10 +75,15 @@ def mk_cmd(
|
||||||
/ exs_subpath
|
/ exs_subpath
|
||||||
/ f'{ex_name}.py'
|
/ f'{ex_name}.py'
|
||||||
)
|
)
|
||||||
return ' '.join([
|
py_cmd: str = ' '.join([
|
||||||
'python',
|
'python',
|
||||||
str(script_path)
|
str(script_path)
|
||||||
])
|
])
|
||||||
|
# XXX, required for py 3.13+
|
||||||
|
# https://docs.python.org/3/using/cmdline.html#using-on-controlling-color
|
||||||
|
# https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS
|
||||||
|
os.environ['PYTHON_COLORS'] = '0'
|
||||||
|
return py_cmd
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
|
|
|
@ -317,8 +317,6 @@ class Lock:
|
||||||
we_released: bool = False
|
we_released: bool = False
|
||||||
ctx_in_debug: Context|None = cls.ctx_in_debug
|
ctx_in_debug: Context|None = cls.ctx_in_debug
|
||||||
repl_task: Task|Thread|None = DebugStatus.repl_task
|
repl_task: Task|Thread|None = DebugStatus.repl_task
|
||||||
message: str = ''
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if not DebugStatus.is_main_trio_thread():
|
if not DebugStatus.is_main_trio_thread():
|
||||||
thread: threading.Thread = threading.current_thread()
|
thread: threading.Thread = threading.current_thread()
|
||||||
|
@ -333,6 +331,10 @@ class Lock:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
task: Task = current_task()
|
task: Task = current_task()
|
||||||
|
message: str = (
|
||||||
|
'TTY NOT RELEASED on behalf of caller\n'
|
||||||
|
f'|_{task}\n'
|
||||||
|
)
|
||||||
|
|
||||||
# sanity check that if we're the root actor
|
# sanity check that if we're the root actor
|
||||||
# the lock is marked as such.
|
# the lock is marked as such.
|
||||||
|
@ -347,11 +349,6 @@ class Lock:
|
||||||
else:
|
else:
|
||||||
assert DebugStatus.repl_task is not task
|
assert DebugStatus.repl_task is not task
|
||||||
|
|
||||||
message: str = (
|
|
||||||
'TTY lock was NOT released on behalf of caller\n'
|
|
||||||
f'|_{task}\n'
|
|
||||||
)
|
|
||||||
|
|
||||||
lock: trio.StrictFIFOLock = cls._debug_lock
|
lock: trio.StrictFIFOLock = cls._debug_lock
|
||||||
owner: Task = lock.statistics().owner
|
owner: Task = lock.statistics().owner
|
||||||
if (
|
if (
|
||||||
|
@ -366,23 +363,21 @@ class Lock:
|
||||||
# correct task, greenback-spawned-task and/or thread
|
# correct task, greenback-spawned-task and/or thread
|
||||||
# being set to the `.repl_task` such that the above
|
# being set to the `.repl_task` such that the above
|
||||||
# condition matches and we actually release the lock.
|
# condition matches and we actually release the lock.
|
||||||
|
#
|
||||||
# This is particular of note from `.pause_from_sync()`!
|
# This is particular of note from `.pause_from_sync()`!
|
||||||
|
|
||||||
):
|
):
|
||||||
cls._debug_lock.release()
|
cls._debug_lock.release()
|
||||||
we_released: bool = True
|
we_released: bool = True
|
||||||
if repl_task:
|
if repl_task:
|
||||||
message: str = (
|
message: str = (
|
||||||
'Lock released on behalf of root-actor-local REPL owner\n'
|
'TTY released on behalf of root-actor-local REPL owner\n'
|
||||||
f'|_{repl_task}\n'
|
f'|_{repl_task}\n'
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
message: str = (
|
message: str = (
|
||||||
'TTY lock released by us on behalf of remote peer?\n'
|
'TTY released by us on behalf of remote peer?\n'
|
||||||
f'|_ctx_in_debug: {ctx_in_debug}\n\n'
|
f'{ctx_in_debug}\n'
|
||||||
)
|
)
|
||||||
# mk_pdb().set_trace()
|
|
||||||
# elif owner:
|
|
||||||
|
|
||||||
except RuntimeError as rte:
|
except RuntimeError as rte:
|
||||||
log.exception(
|
log.exception(
|
||||||
|
@ -400,7 +395,8 @@ class Lock:
|
||||||
req_handler_finished: trio.Event|None = Lock.req_handler_finished
|
req_handler_finished: trio.Event|None = Lock.req_handler_finished
|
||||||
if (
|
if (
|
||||||
not lock_stats.owner
|
not lock_stats.owner
|
||||||
and req_handler_finished is None
|
and
|
||||||
|
req_handler_finished is None
|
||||||
):
|
):
|
||||||
message += (
|
message += (
|
||||||
'-> No new task holds the TTY lock!\n\n'
|
'-> No new task holds the TTY lock!\n\n'
|
||||||
|
@ -418,8 +414,8 @@ class Lock:
|
||||||
repl_task
|
repl_task
|
||||||
)
|
)
|
||||||
message += (
|
message += (
|
||||||
f'A non-caller task still owns this lock on behalf of '
|
f'A non-caller task still owns this lock on behalf of\n'
|
||||||
f'`{behalf_of_task}`\n'
|
f'{behalf_of_task}\n'
|
||||||
f'lock owner task: {lock_stats.owner}\n'
|
f'lock owner task: {lock_stats.owner}\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -447,8 +443,6 @@ class Lock:
|
||||||
|
|
||||||
if message:
|
if message:
|
||||||
log.devx(message)
|
log.devx(message)
|
||||||
else:
|
|
||||||
import pdbp; pdbp.set_trace()
|
|
||||||
|
|
||||||
return we_released
|
return we_released
|
||||||
|
|
||||||
|
@ -668,10 +662,11 @@ async def lock_stdio_for_peer(
|
||||||
fail_reason: str = (
|
fail_reason: str = (
|
||||||
f'on behalf of peer\n\n'
|
f'on behalf of peer\n\n'
|
||||||
f'x)<=\n'
|
f'x)<=\n'
|
||||||
f' |_{subactor_task_uid!r}@{ctx.chan.uid!r}\n\n'
|
f' |_{subactor_task_uid!r}@{ctx.chan.uid!r}\n'
|
||||||
|
f'\n'
|
||||||
'Forcing `Lock.release()` due to acquire failure!\n\n'
|
'Forcing `Lock.release()` due to acquire failure!\n\n'
|
||||||
f'x)=> {ctx}\n'
|
f'x)=>\n'
|
||||||
|
f' {ctx}'
|
||||||
)
|
)
|
||||||
if isinstance(req_err, trio.Cancelled):
|
if isinstance(req_err, trio.Cancelled):
|
||||||
fail_reason = (
|
fail_reason = (
|
||||||
|
@ -1179,7 +1174,7 @@ async def request_root_stdio_lock(
|
||||||
log.devx(
|
log.devx(
|
||||||
'Initing stdio-lock request task with root actor'
|
'Initing stdio-lock request task with root actor'
|
||||||
)
|
)
|
||||||
# TODO: likely we can implement this mutex more generally as
|
# TODO: can we implement this mutex more generally as
|
||||||
# a `._sync.Lock`?
|
# a `._sync.Lock`?
|
||||||
# -[ ] simply add the wrapping needed for the debugger specifics?
|
# -[ ] simply add the wrapping needed for the debugger specifics?
|
||||||
# - the `__pld_spec__` impl and maybe better APIs for the client
|
# - the `__pld_spec__` impl and maybe better APIs for the client
|
||||||
|
@ -1190,6 +1185,7 @@ async def request_root_stdio_lock(
|
||||||
# - https://docs.python.org/3.8/library/multiprocessing.html#multiprocessing.RLock
|
# - https://docs.python.org/3.8/library/multiprocessing.html#multiprocessing.RLock
|
||||||
DebugStatus.req_finished = trio.Event()
|
DebugStatus.req_finished = trio.Event()
|
||||||
DebugStatus.req_task = current_task()
|
DebugStatus.req_task = current_task()
|
||||||
|
req_err: BaseException|None = None
|
||||||
try:
|
try:
|
||||||
from tractor._discovery import get_root
|
from tractor._discovery import get_root
|
||||||
# NOTE: we need this to ensure that this task exits
|
# NOTE: we need this to ensure that this task exits
|
||||||
|
@ -1212,6 +1208,7 @@ async def request_root_stdio_lock(
|
||||||
# )
|
# )
|
||||||
DebugStatus.req_cs = req_cs
|
DebugStatus.req_cs = req_cs
|
||||||
req_ctx: Context|None = None
|
req_ctx: Context|None = None
|
||||||
|
ctx_eg: BaseExceptionGroup|None = None
|
||||||
try:
|
try:
|
||||||
# TODO: merge into single async with ?
|
# TODO: merge into single async with ?
|
||||||
async with get_root() as portal:
|
async with get_root() as portal:
|
||||||
|
@ -1242,7 +1239,12 @@ async def request_root_stdio_lock(
|
||||||
)
|
)
|
||||||
|
|
||||||
# try:
|
# try:
|
||||||
assert status.subactor_uid == actor_uid
|
if (locker := status.subactor_uid) != actor_uid:
|
||||||
|
raise DebugStateError(
|
||||||
|
f'Root actor locked by another peer !?\n'
|
||||||
|
f'locker: {locker!r}\n'
|
||||||
|
f'actor_uid: {actor_uid}\n'
|
||||||
|
)
|
||||||
assert status.cid
|
assert status.cid
|
||||||
# except AttributeError:
|
# except AttributeError:
|
||||||
# log.exception('failed pldspec asserts!')
|
# log.exception('failed pldspec asserts!')
|
||||||
|
@ -1279,10 +1281,11 @@ async def request_root_stdio_lock(
|
||||||
f'Exitting {req_ctx.side!r}-side of locking req_ctx\n'
|
f'Exitting {req_ctx.side!r}-side of locking req_ctx\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
except (
|
except* (
|
||||||
tractor.ContextCancelled,
|
tractor.ContextCancelled,
|
||||||
trio.Cancelled,
|
trio.Cancelled,
|
||||||
):
|
) as _taskc_eg:
|
||||||
|
ctx_eg = _taskc_eg
|
||||||
log.cancel(
|
log.cancel(
|
||||||
'Debug lock request was CANCELLED?\n\n'
|
'Debug lock request was CANCELLED?\n\n'
|
||||||
f'<=c) {req_ctx}\n'
|
f'<=c) {req_ctx}\n'
|
||||||
|
@ -1291,21 +1294,23 @@ async def request_root_stdio_lock(
|
||||||
)
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
except (
|
except* (
|
||||||
BaseException,
|
BaseException,
|
||||||
) as ctx_err:
|
) as _ctx_eg:
|
||||||
|
ctx_eg = _ctx_eg
|
||||||
message: str = (
|
message: str = (
|
||||||
'Failed during debug request dialog with root actor?\n\n'
|
'Failed during debug request dialog with root actor?\n'
|
||||||
)
|
)
|
||||||
if (req_ctx := DebugStatus.req_ctx):
|
if (req_ctx := DebugStatus.req_ctx):
|
||||||
message += (
|
message += (
|
||||||
f'<=x) {req_ctx}\n\n'
|
f'<=x)\n'
|
||||||
|
f' |_{req_ctx}\n'
|
||||||
f'Cancelling IPC ctx!\n'
|
f'Cancelling IPC ctx!\n'
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
await req_ctx.cancel()
|
await req_ctx.cancel()
|
||||||
except trio.ClosedResourceError as terr:
|
except trio.ClosedResourceError as terr:
|
||||||
ctx_err.add_note(
|
ctx_eg.add_note(
|
||||||
# f'Failed with {type(terr)!r} x)> `req_ctx.cancel()` '
|
# f'Failed with {type(terr)!r} x)> `req_ctx.cancel()` '
|
||||||
f'Failed with `req_ctx.cancel()` <x) {type(terr)!r} '
|
f'Failed with `req_ctx.cancel()` <x) {type(terr)!r} '
|
||||||
)
|
)
|
||||||
|
@ -1314,21 +1319,45 @@ async def request_root_stdio_lock(
|
||||||
message += 'Failed in `Portal.open_context()` call ??\n'
|
message += 'Failed in `Portal.open_context()` call ??\n'
|
||||||
|
|
||||||
log.exception(message)
|
log.exception(message)
|
||||||
ctx_err.add_note(message)
|
ctx_eg.add_note(message)
|
||||||
raise ctx_err
|
raise ctx_eg
|
||||||
|
|
||||||
except (
|
except BaseException as _req_err:
|
||||||
tractor.ContextCancelled,
|
req_err = _req_err
|
||||||
trio.Cancelled,
|
|
||||||
):
|
# XXX NOTE, since new `trio` enforces strict egs by default
|
||||||
log.cancel(
|
# we have to always handle the eg explicitly given the
|
||||||
'Debug lock request CANCELLED?\n'
|
# `Portal.open_context()` call above (which implicitly opens
|
||||||
f'{req_ctx}\n'
|
# a nursery).
|
||||||
)
|
match req_err:
|
||||||
raise
|
case BaseExceptionGroup():
|
||||||
|
# for an eg of just one taskc, just unpack and raise
|
||||||
|
# since we want to propagate a plane ol' `Cancelled`
|
||||||
|
# up from the `.pause()` call.
|
||||||
|
excs: list[BaseException] = req_err.exceptions
|
||||||
|
if (
|
||||||
|
len(excs) == 1
|
||||||
|
and
|
||||||
|
type(exc := excs[0]) in (
|
||||||
|
tractor.ContextCancelled,
|
||||||
|
trio.Cancelled,
|
||||||
|
)
|
||||||
|
):
|
||||||
|
log.cancel(
|
||||||
|
'Debug lock request CANCELLED?\n'
|
||||||
|
f'{req_ctx}\n'
|
||||||
|
)
|
||||||
|
raise exc
|
||||||
|
case (
|
||||||
|
tractor.ContextCancelled(),
|
||||||
|
trio.Cancelled(),
|
||||||
|
):
|
||||||
|
log.cancel(
|
||||||
|
'Debug lock request CANCELLED?\n'
|
||||||
|
f'{req_ctx}\n'
|
||||||
|
)
|
||||||
|
raise exc
|
||||||
|
|
||||||
except BaseException as req_err:
|
|
||||||
# log.error('Failed to request root stdio-lock?')
|
|
||||||
DebugStatus.req_err = req_err
|
DebugStatus.req_err = req_err
|
||||||
DebugStatus.release()
|
DebugStatus.release()
|
||||||
|
|
||||||
|
@ -1343,7 +1372,7 @@ async def request_root_stdio_lock(
|
||||||
'Failed during stdio-locking dialog from root actor\n\n'
|
'Failed during stdio-locking dialog from root actor\n\n'
|
||||||
|
|
||||||
f'<=x)\n'
|
f'<=x)\n'
|
||||||
f'|_{DebugStatus.req_ctx}\n'
|
f' |_{DebugStatus.req_ctx}\n'
|
||||||
) from req_err
|
) from req_err
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
|
@ -1406,7 +1435,7 @@ def any_connected_locker_child() -> bool:
|
||||||
actor: Actor = current_actor()
|
actor: Actor = current_actor()
|
||||||
|
|
||||||
if not is_root_process():
|
if not is_root_process():
|
||||||
raise RuntimeError('This is a root-actor only API!')
|
raise InternalError('This is a root-actor only API!')
|
||||||
|
|
||||||
if (
|
if (
|
||||||
(ctx := Lock.ctx_in_debug)
|
(ctx := Lock.ctx_in_debug)
|
||||||
|
@ -2143,11 +2172,12 @@ async def _pause(
|
||||||
# `_enter_repl_sync()` into a common @cm?
|
# `_enter_repl_sync()` into a common @cm?
|
||||||
except BaseException as _pause_err:
|
except BaseException as _pause_err:
|
||||||
pause_err: BaseException = _pause_err
|
pause_err: BaseException = _pause_err
|
||||||
|
_repl_fail_report: str|None = _repl_fail_msg
|
||||||
if isinstance(pause_err, bdb.BdbQuit):
|
if isinstance(pause_err, bdb.BdbQuit):
|
||||||
log.devx(
|
log.devx(
|
||||||
'REPL for pdb was explicitly quit!\n'
|
'REPL for pdb was explicitly quit!\n'
|
||||||
)
|
)
|
||||||
_repl_fail_msg = None
|
_repl_fail_report = None
|
||||||
|
|
||||||
# when the actor is mid-runtime cancellation the
|
# when the actor is mid-runtime cancellation the
|
||||||
# `Actor._service_n` might get closed before we can spawn
|
# `Actor._service_n` might get closed before we can spawn
|
||||||
|
@ -2167,16 +2197,16 @@ async def _pause(
|
||||||
return
|
return
|
||||||
|
|
||||||
elif isinstance(pause_err, trio.Cancelled):
|
elif isinstance(pause_err, trio.Cancelled):
|
||||||
_repl_fail_msg = (
|
_repl_fail_report += (
|
||||||
'You called `tractor.pause()` from an already cancelled scope!\n\n'
|
'You called `tractor.pause()` from an already cancelled scope!\n\n'
|
||||||
'Consider `await tractor.pause(shield=True)` to make it work B)\n'
|
'Consider `await tractor.pause(shield=True)` to make it work B)\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
_repl_fail_msg += f'on behalf of {repl_task} ??\n'
|
_repl_fail_report += f'on behalf of {repl_task} ??\n'
|
||||||
|
|
||||||
if _repl_fail_msg:
|
if _repl_fail_report:
|
||||||
log.exception(_repl_fail_msg)
|
log.exception(_repl_fail_report)
|
||||||
|
|
||||||
if not actor.is_infected_aio():
|
if not actor.is_infected_aio():
|
||||||
DebugStatus.release(cancel_req_task=True)
|
DebugStatus.release(cancel_req_task=True)
|
||||||
|
@ -2257,6 +2287,13 @@ def _set_trace(
|
||||||
repl.set_trace(frame=caller_frame)
|
repl.set_trace(frame=caller_frame)
|
||||||
|
|
||||||
|
|
||||||
|
# XXX TODO! XXX, ensure `pytest -s` doesn't just
|
||||||
|
# hang on this being called in a test.. XD
|
||||||
|
# -[ ] maybe something in our test suite or is there
|
||||||
|
# some way we can detect output capture is enabled
|
||||||
|
# from the process itself?
|
||||||
|
# |_ronny: ?
|
||||||
|
#
|
||||||
async def pause(
|
async def pause(
|
||||||
*,
|
*,
|
||||||
hide_tb: bool = True,
|
hide_tb: bool = True,
|
||||||
|
@ -3051,7 +3088,8 @@ async def maybe_wait_for_debugger(
|
||||||
|
|
||||||
if (
|
if (
|
||||||
not debug_mode()
|
not debug_mode()
|
||||||
and not child_in_debug
|
and
|
||||||
|
not child_in_debug
|
||||||
):
|
):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -3109,7 +3147,7 @@ async def maybe_wait_for_debugger(
|
||||||
logmeth(
|
logmeth(
|
||||||
msg
|
msg
|
||||||
+
|
+
|
||||||
'\nRoot is waiting on tty lock to release from\n\n'
|
'\n^^ Root is waiting on tty lock release.. ^^\n'
|
||||||
# f'{caller_frame_info}\n'
|
# f'{caller_frame_info}\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -3163,6 +3201,15 @@ async def maybe_wait_for_debugger(
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class BoxedMaybeException(Struct):
|
||||||
|
'''
|
||||||
|
Box a maybe-exception for post-crash introspection usage
|
||||||
|
from the body of a `open_crash_handler()` scope.
|
||||||
|
|
||||||
|
'''
|
||||||
|
value: BaseException|None = None
|
||||||
|
|
||||||
|
|
||||||
# TODO: better naming and what additionals?
|
# TODO: better naming and what additionals?
|
||||||
# - [ ] optional runtime plugging?
|
# - [ ] optional runtime plugging?
|
||||||
# - [ ] detection for sync vs. async code?
|
# - [ ] detection for sync vs. async code?
|
||||||
|
@ -3172,11 +3219,11 @@ async def maybe_wait_for_debugger(
|
||||||
@cm
|
@cm
|
||||||
def open_crash_handler(
|
def open_crash_handler(
|
||||||
catch: set[BaseException] = {
|
catch: set[BaseException] = {
|
||||||
# Exception,
|
|
||||||
BaseException,
|
BaseException,
|
||||||
},
|
},
|
||||||
ignore: set[BaseException] = {
|
ignore: set[BaseException] = {
|
||||||
KeyboardInterrupt,
|
KeyboardInterrupt,
|
||||||
|
trio.Cancelled,
|
||||||
},
|
},
|
||||||
tb_hide: bool = True,
|
tb_hide: bool = True,
|
||||||
):
|
):
|
||||||
|
@ -3193,9 +3240,6 @@ def open_crash_handler(
|
||||||
'''
|
'''
|
||||||
__tracebackhide__: bool = tb_hide
|
__tracebackhide__: bool = tb_hide
|
||||||
|
|
||||||
class BoxedMaybeException(Struct):
|
|
||||||
value: BaseException|None = None
|
|
||||||
|
|
||||||
# TODO, yield a `outcome.Error`-like boxed type?
|
# TODO, yield a `outcome.Error`-like boxed type?
|
||||||
# -[~] use `outcome.Value/Error` X-> frozen!
|
# -[~] use `outcome.Value/Error` X-> frozen!
|
||||||
# -[x] write our own..?
|
# -[x] write our own..?
|
||||||
|
@ -3237,6 +3281,8 @@ def open_crash_handler(
|
||||||
def maybe_open_crash_handler(
|
def maybe_open_crash_handler(
|
||||||
pdb: bool = False,
|
pdb: bool = False,
|
||||||
tb_hide: bool = True,
|
tb_hide: bool = True,
|
||||||
|
|
||||||
|
**kwargs,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
Same as `open_crash_handler()` but with bool input flag
|
Same as `open_crash_handler()` but with bool input flag
|
||||||
|
@ -3247,9 +3293,11 @@ def maybe_open_crash_handler(
|
||||||
'''
|
'''
|
||||||
__tracebackhide__: bool = tb_hide
|
__tracebackhide__: bool = tb_hide
|
||||||
|
|
||||||
rtctx = nullcontext
|
rtctx = nullcontext(
|
||||||
|
enter_result=BoxedMaybeException()
|
||||||
|
)
|
||||||
if pdb:
|
if pdb:
|
||||||
rtctx = open_crash_handler
|
rtctx = open_crash_handler(**kwargs)
|
||||||
|
|
||||||
with rtctx():
|
with rtctx as boxed_maybe_exc:
|
||||||
yield
|
yield boxed_maybe_exc
|
||||||
|
|
|
@ -35,6 +35,7 @@ from signal import (
|
||||||
signal,
|
signal,
|
||||||
getsignal,
|
getsignal,
|
||||||
SIGUSR1,
|
SIGUSR1,
|
||||||
|
SIGINT,
|
||||||
)
|
)
|
||||||
# import traceback
|
# import traceback
|
||||||
from types import ModuleType
|
from types import ModuleType
|
||||||
|
@ -48,6 +49,7 @@ from tractor import (
|
||||||
_state,
|
_state,
|
||||||
log as logmod,
|
log as logmod,
|
||||||
)
|
)
|
||||||
|
from tractor.devx import _debug
|
||||||
|
|
||||||
log = logmod.get_logger(__name__)
|
log = logmod.get_logger(__name__)
|
||||||
|
|
||||||
|
@ -76,22 +78,45 @@ def dump_task_tree() -> None:
|
||||||
)
|
)
|
||||||
actor: Actor = _state.current_actor()
|
actor: Actor = _state.current_actor()
|
||||||
thr: Thread = current_thread()
|
thr: Thread = current_thread()
|
||||||
|
current_sigint_handler: Callable = getsignal(SIGINT)
|
||||||
|
if (
|
||||||
|
current_sigint_handler
|
||||||
|
is not
|
||||||
|
_debug.DebugStatus._trio_handler
|
||||||
|
):
|
||||||
|
sigint_handler_report: str = (
|
||||||
|
'The default `trio` SIGINT handler was replaced?!'
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
sigint_handler_report: str = (
|
||||||
|
'The default `trio` SIGINT handler is in use?!'
|
||||||
|
)
|
||||||
|
|
||||||
|
# sclang symbology
|
||||||
|
# |_<object>
|
||||||
|
# |_(Task/Thread/Process/Actor
|
||||||
|
# |_{Supervisor/Scope
|
||||||
|
# |_[Storage/Memory/IPC-Stream/Data-Struct
|
||||||
|
|
||||||
log.devx(
|
log.devx(
|
||||||
f'Dumping `stackscope` tree for actor\n'
|
f'Dumping `stackscope` tree for actor\n'
|
||||||
f'{actor.uid}:\n'
|
f'(>: {actor.uid!r}\n'
|
||||||
f'|_{mp.current_process()}\n'
|
f' |_{mp.current_process()}\n'
|
||||||
f' |_{thr}\n'
|
f' |_{thr}\n'
|
||||||
f' |_{actor}\n\n'
|
f' |_{actor}\n'
|
||||||
|
|
||||||
# start-of-trace-tree delimiter (mostly for testing)
|
|
||||||
'------ - ------\n'
|
|
||||||
'\n'
|
|
||||||
+
|
|
||||||
f'{tree_str}\n'
|
|
||||||
+
|
|
||||||
# end-of-trace-tree delimiter (mostly for testing)
|
|
||||||
f'\n'
|
f'\n'
|
||||||
f'------ {actor.uid!r} ------\n'
|
f'{sigint_handler_report}\n'
|
||||||
|
f'signal.getsignal(SIGINT) -> {current_sigint_handler!r}\n'
|
||||||
|
# f'\n'
|
||||||
|
# start-of-trace-tree delimiter (mostly for testing)
|
||||||
|
# f'------ {actor.uid!r} ------\n'
|
||||||
|
f'\n'
|
||||||
|
f'------ start-of-{actor.uid!r} ------\n'
|
||||||
|
f'|\n'
|
||||||
|
f'{tree_str}'
|
||||||
|
# end-of-trace-tree delimiter (mostly for testing)
|
||||||
|
f'|\n'
|
||||||
|
f'|_____ end-of-{actor.uid!r} ______\n'
|
||||||
)
|
)
|
||||||
# TODO: can remove this right?
|
# TODO: can remove this right?
|
||||||
# -[ ] was original code from author
|
# -[ ] was original code from author
|
||||||
|
@ -123,11 +148,11 @@ def dump_tree_on_sig(
|
||||||
) -> None:
|
) -> None:
|
||||||
global _tree_dumped, _handler_lock
|
global _tree_dumped, _handler_lock
|
||||||
with _handler_lock:
|
with _handler_lock:
|
||||||
if _tree_dumped:
|
# if _tree_dumped:
|
||||||
log.warning(
|
# log.warning(
|
||||||
'Already dumped for this actor...??'
|
# 'Already dumped for this actor...??'
|
||||||
)
|
# )
|
||||||
return
|
# return
|
||||||
|
|
||||||
_tree_dumped = True
|
_tree_dumped = True
|
||||||
|
|
||||||
|
@ -161,9 +186,9 @@ def dump_tree_on_sig(
|
||||||
)
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
log.devx(
|
# log.devx(
|
||||||
'Supposedly we dumped just fine..?'
|
# 'Supposedly we dumped just fine..?'
|
||||||
)
|
# )
|
||||||
|
|
||||||
if not relay_to_subs:
|
if not relay_to_subs:
|
||||||
return
|
return
|
||||||
|
@ -202,11 +227,11 @@ def enable_stack_on_sig(
|
||||||
(https://www.gnu.org/software/bash/manual/bash.html#Command-Substitution)
|
(https://www.gnu.org/software/bash/manual/bash.html#Command-Substitution)
|
||||||
you could use:
|
you could use:
|
||||||
|
|
||||||
>> kill -SIGUSR1 $(pgrep -f '<cmd>')
|
>> kill -SIGUSR1 $(pgrep -f <part-of-cmd: str>)
|
||||||
|
|
||||||
Or with with `xonsh` (which has diff capture-from-subproc syntax)
|
OR without a sub-shell,
|
||||||
|
|
||||||
>> kill -SIGUSR1 @$(pgrep -f '<cmd>')
|
>> pkill --signal SIGUSR1 -f <part-of-cmd: str>
|
||||||
|
|
||||||
'''
|
'''
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -258,6 +258,9 @@ class PldRx(Struct):
|
||||||
f'|_pld={pld!r}\n'
|
f'|_pld={pld!r}\n'
|
||||||
)
|
)
|
||||||
return pld
|
return pld
|
||||||
|
except TypeError as typerr:
|
||||||
|
__tracebackhide__: bool = False
|
||||||
|
raise typerr
|
||||||
|
|
||||||
# XXX pld-value type failure
|
# XXX pld-value type failure
|
||||||
except ValidationError as valerr:
|
except ValidationError as valerr:
|
||||||
|
@ -796,8 +799,14 @@ def validate_payload_msg(
|
||||||
__tracebackhide__: bool = hide_tb
|
__tracebackhide__: bool = hide_tb
|
||||||
codec: MsgCodec = current_codec()
|
codec: MsgCodec = current_codec()
|
||||||
msg_bytes: bytes = codec.encode(pld_msg)
|
msg_bytes: bytes = codec.encode(pld_msg)
|
||||||
|
roundtripped: Started|None = None
|
||||||
try:
|
try:
|
||||||
roundtripped: Started = codec.decode(msg_bytes)
|
roundtripped: Started = codec.decode(msg_bytes)
|
||||||
|
except TypeError as typerr:
|
||||||
|
__tracebackhide__: bool = False
|
||||||
|
raise typerr
|
||||||
|
|
||||||
|
try:
|
||||||
ctx: Context = getattr(ipc, 'ctx', ipc)
|
ctx: Context = getattr(ipc, 'ctx', ipc)
|
||||||
pld: PayloadT = ctx.pld_rx.decode_pld(
|
pld: PayloadT = ctx.pld_rx.decode_pld(
|
||||||
msg=roundtripped,
|
msg=roundtripped,
|
||||||
|
@ -822,6 +831,11 @@ def validate_payload_msg(
|
||||||
)
|
)
|
||||||
raise ValidationError(complaint)
|
raise ValidationError(complaint)
|
||||||
|
|
||||||
|
# usually due to `.decode()` input type
|
||||||
|
except TypeError as typerr:
|
||||||
|
__tracebackhide__: bool = False
|
||||||
|
raise typerr
|
||||||
|
|
||||||
# raise any msg type error NO MATTER WHAT!
|
# raise any msg type error NO MATTER WHAT!
|
||||||
except ValidationError as verr:
|
except ValidationError as verr:
|
||||||
try:
|
try:
|
||||||
|
@ -832,9 +846,13 @@ def validate_payload_msg(
|
||||||
verb_header='Trying to send ',
|
verb_header='Trying to send ',
|
||||||
is_invalid_payload=True,
|
is_invalid_payload=True,
|
||||||
)
|
)
|
||||||
except BaseException:
|
except BaseException as _be:
|
||||||
|
if not roundtripped:
|
||||||
|
raise verr
|
||||||
|
|
||||||
|
be = _be
|
||||||
__tracebackhide__: bool = False
|
__tracebackhide__: bool = False
|
||||||
raise
|
raise be
|
||||||
|
|
||||||
if not raise_mte:
|
if not raise_mte:
|
||||||
return mte
|
return mte
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -29,3 +29,6 @@ from ._broadcast import (
|
||||||
BroadcastReceiver as BroadcastReceiver,
|
BroadcastReceiver as BroadcastReceiver,
|
||||||
Lagged as Lagged,
|
Lagged as Lagged,
|
||||||
)
|
)
|
||||||
|
from ._beg import (
|
||||||
|
collapse_eg as collapse_eg,
|
||||||
|
)
|
||||||
|
|
|
@ -0,0 +1,58 @@
|
||||||
|
# tractor: structured concurrent "actors".
|
||||||
|
# Copyright 2018-eternity Tyler Goodlet.
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
'''
|
||||||
|
`BaseExceptionGroup` related utils and helpers pertaining to
|
||||||
|
first-class-`trio` from a historical perspective B)
|
||||||
|
|
||||||
|
'''
|
||||||
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def maybe_collapse_eg(
|
||||||
|
beg: BaseExceptionGroup,
|
||||||
|
) -> BaseException:
|
||||||
|
'''
|
||||||
|
If the input beg can collapse to a single non-eg sub-exception,
|
||||||
|
return it instead.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if len(excs := beg.exceptions) == 1:
|
||||||
|
return excs[0]
|
||||||
|
|
||||||
|
return beg
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def collapse_eg():
|
||||||
|
'''
|
||||||
|
If `BaseExceptionGroup` raised in the body scope is
|
||||||
|
"collapse-able" (in the same way that
|
||||||
|
`trio.open_nursery(strict_exception_groups=False)` works) then
|
||||||
|
only raise the lone emedded non-eg in in place.
|
||||||
|
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except* BaseException as beg:
|
||||||
|
if (
|
||||||
|
exc := maybe_collapse_eg(beg)
|
||||||
|
) is not beg:
|
||||||
|
raise exc
|
||||||
|
|
||||||
|
raise beg
|
|
@ -15,7 +15,7 @@
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
``tokio`` style broadcast channel.
|
`tokio` style broadcast channel.
|
||||||
https://docs.rs/tokio/1.11.0/tokio/sync/broadcast/index.html
|
https://docs.rs/tokio/1.11.0/tokio/sync/broadcast/index.html
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
|
|
@ -57,6 +57,8 @@ async def maybe_open_nursery(
|
||||||
shield: bool = False,
|
shield: bool = False,
|
||||||
lib: ModuleType = trio,
|
lib: ModuleType = trio,
|
||||||
|
|
||||||
|
**kwargs, # proxy thru
|
||||||
|
|
||||||
) -> AsyncGenerator[trio.Nursery, Any]:
|
) -> AsyncGenerator[trio.Nursery, Any]:
|
||||||
'''
|
'''
|
||||||
Create a new nursery if None provided.
|
Create a new nursery if None provided.
|
||||||
|
@ -67,7 +69,7 @@ async def maybe_open_nursery(
|
||||||
if nursery is not None:
|
if nursery is not None:
|
||||||
yield nursery
|
yield nursery
|
||||||
else:
|
else:
|
||||||
async with lib.open_nursery() as nursery:
|
async with lib.open_nursery(**kwargs) as nursery:
|
||||||
nursery.cancel_scope.shield = shield
|
nursery.cancel_scope.shield = shield
|
||||||
yield nursery
|
yield nursery
|
||||||
|
|
||||||
|
@ -143,9 +145,14 @@ async def gather_contexts(
|
||||||
'Use a non-lazy iterator or sequence type intead!'
|
'Use a non-lazy iterator or sequence type intead!'
|
||||||
)
|
)
|
||||||
|
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
# ^XXX^ TODO? soo roll our own then ??
|
||||||
|
# -> since we kinda want the "if only one `.exception` then
|
||||||
|
# just raise that" interface?
|
||||||
|
) as tn:
|
||||||
for mngr in mngrs:
|
for mngr in mngrs:
|
||||||
n.start_soon(
|
tn.start_soon(
|
||||||
_enter_and_wait,
|
_enter_and_wait,
|
||||||
mngr,
|
mngr,
|
||||||
unwrapped,
|
unwrapped,
|
||||||
|
|
82
uv.lock
82
uv.lock
|
@ -147,7 +147,31 @@ wheels = [
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "msgspec"
|
name = "msgspec"
|
||||||
version = "0.19.0"
|
version = "0.19.0"
|
||||||
source = { git = "https://github.com/jcrist/msgspec.git#dd965dce22e5278d4935bea923441ecde31b5325" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/cf/9b/95d8ce458462b8b71b8a70fa94563b2498b89933689f3a7b8911edfae3d7/msgspec-0.19.0.tar.gz", hash = "sha256:604037e7cd475345848116e89c553aa9a233259733ab51986ac924ab1b976f8e", size = 216934 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/24/d4/2ec2567ac30dab072cce3e91fb17803c52f0a37aab6b0c24375d2b20a581/msgspec-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa77046904db764b0462036bc63ef71f02b75b8f72e9c9dd4c447d6da1ed8f8e", size = 187939 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2b/c0/18226e4328897f4f19875cb62bb9259fe47e901eade9d9376ab5f251a929/msgspec-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:047cfa8675eb3bad68722cfe95c60e7afabf84d1bd8938979dd2b92e9e4a9551", size = 182202 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/81/25/3a4b24d468203d8af90d1d351b77ea3cffb96b29492855cf83078f16bfe4/msgspec-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e78f46ff39a427e10b4a61614a2777ad69559cc8d603a7c05681f5a595ea98f7", size = 209029 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/85/2e/db7e189b57901955239f7689b5dcd6ae9458637a9c66747326726c650523/msgspec-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c7adf191e4bd3be0e9231c3b6dc20cf1199ada2af523885efc2ed218eafd011", size = 210682 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/03/97/7c8895c9074a97052d7e4a1cc1230b7b6e2ca2486714eb12c3f08bb9d284/msgspec-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f04cad4385e20be7c7176bb8ae3dca54a08e9756cfc97bcdb4f18560c3042063", size = 214003 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/61/61/e892997bcaa289559b4d5869f066a8021b79f4bf8e955f831b095f47a4cd/msgspec-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45c8fb410670b3b7eb884d44a75589377c341ec1392b778311acdbfa55187716", size = 216833 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ce/3d/71b2dffd3a1c743ffe13296ff701ee503feaebc3f04d0e75613b6563c374/msgspec-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:70eaef4934b87193a27d802534dc466778ad8d536e296ae2f9334e182ac27b6c", size = 186184 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b2/5f/a70c24f075e3e7af2fae5414c7048b0e11389685b7f717bb55ba282a34a7/msgspec-0.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f98bd8962ad549c27d63845b50af3f53ec468b6318400c9f1adfe8b092d7b62f", size = 190485 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/89/b0/1b9763938cfae12acf14b682fcf05c92855974d921a5a985ecc197d1c672/msgspec-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43bbb237feab761b815ed9df43b266114203f53596f9b6e6f00ebd79d178cdf2", size = 183910 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/87/81/0c8c93f0b92c97e326b279795f9c5b956c5a97af28ca0fbb9fd86c83737a/msgspec-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cfc033c02c3e0aec52b71710d7f84cb3ca5eb407ab2ad23d75631153fdb1f12", size = 210633 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d0/ef/c5422ce8af73928d194a6606f8ae36e93a52fd5e8df5abd366903a5ca8da/msgspec-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d911c442571605e17658ca2b416fd8579c5050ac9adc5e00c2cb3126c97f73bc", size = 213594 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/19/2b/4137bc2ed45660444842d042be2cf5b18aa06efd2cda107cff18253b9653/msgspec-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:757b501fa57e24896cf40a831442b19a864f56d253679f34f260dcb002524a6c", size = 214053 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9d/e6/8ad51bdc806aac1dc501e8fe43f759f9ed7284043d722b53323ea421c360/msgspec-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f0f65f29b45e2816d8bded36e6b837a4bf5fb60ec4bc3c625fa2c6da4124537", size = 219081 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/ef/27dd35a7049c9a4f4211c6cd6a8c9db0a50647546f003a5867827ec45391/msgspec-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:067f0de1c33cfa0b6a8206562efdf6be5985b988b53dd244a8e06f993f27c8c0", size = 187467 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/cb/2842c312bbe618d8fefc8b9cedce37f773cdc8fa453306546dba2c21fd98/msgspec-0.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f12d30dd6266557aaaf0aa0f9580a9a8fbeadfa83699c487713e355ec5f0bd86", size = 190498 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/58/95/c40b01b93465e1a5f3b6c7d91b10fb574818163740cc3acbe722d1e0e7e4/msgspec-0.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82b2c42c1b9ebc89e822e7e13bbe9d17ede0c23c187469fdd9505afd5a481314", size = 183950 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e8/f0/5b764e066ce9aba4b70d1db8b087ea66098c7c27d59b9dd8a3532774d48f/msgspec-0.19.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19746b50be214a54239aab822964f2ac81e38b0055cca94808359d779338c10e", size = 210647 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9d/87/bc14f49bc95c4cb0dd0a8c56028a67c014ee7e6818ccdce74a4862af259b/msgspec-0.19.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60ef4bdb0ec8e4ad62e5a1f95230c08efb1f64f32e6e8dd2ced685bcc73858b5", size = 213563 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/53/2f/2b1c2b056894fbaa975f68f81e3014bb447516a8b010f1bed3fb0e016ed7/msgspec-0.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac7f7c377c122b649f7545810c6cd1b47586e3aa3059126ce3516ac7ccc6a6a9", size = 213996 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/aa/5a/4cd408d90d1417e8d2ce6a22b98a6853c1b4d7cb7669153e4424d60087f6/msgspec-0.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5bc1472223a643f5ffb5bf46ccdede7f9795078194f14edd69e3aab7020d327", size = 219087 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/23/d8/f15b40611c2d5753d1abb0ca0da0c75348daf1252220e5dda2867bd81062/msgspec-0.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:317050bc0f7739cb30d257ff09152ca309bf5a369854bbf1e57dffc310c1f20f", size = 187432 },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mypy-extensions"
|
name = "mypy-extensions"
|
||||||
|
@ -330,6 +354,7 @@ dependencies = [
|
||||||
{ name = "colorlog" },
|
{ name = "colorlog" },
|
||||||
{ name = "msgspec" },
|
{ name = "msgspec" },
|
||||||
{ name = "pdbp" },
|
{ name = "pdbp" },
|
||||||
|
{ name = "tabcompleter" },
|
||||||
{ name = "tricycle" },
|
{ name = "tricycle" },
|
||||||
{ name = "trio" },
|
{ name = "trio" },
|
||||||
{ name = "trio-typing" },
|
{ name = "trio-typing" },
|
||||||
|
@ -345,17 +370,16 @@ dev = [
|
||||||
{ name = "pytest" },
|
{ name = "pytest" },
|
||||||
{ name = "stackscope" },
|
{ name = "stackscope" },
|
||||||
{ name = "xonsh" },
|
{ name = "xonsh" },
|
||||||
{ name = "xonsh-vox-tabcomplete" },
|
|
||||||
{ name = "xontrib-vox" },
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.metadata]
|
[package.metadata]
|
||||||
requires-dist = [
|
requires-dist = [
|
||||||
{ name = "colorlog", specifier = ">=6.8.2,<7" },
|
{ name = "colorlog", specifier = ">=6.8.2,<7" },
|
||||||
{ name = "msgspec", git = "https://github.com/jcrist/msgspec.git" },
|
{ name = "msgspec", specifier = ">=0.19.0" },
|
||||||
{ name = "pdbp", specifier = ">=1.5.0,<2" },
|
{ name = "pdbp", specifier = ">=1.6,<2" },
|
||||||
|
{ name = "tabcompleter", specifier = ">=1.4.0" },
|
||||||
{ name = "tricycle", specifier = ">=0.4.1,<0.5" },
|
{ name = "tricycle", specifier = ">=0.4.1,<0.5" },
|
||||||
{ name = "trio", specifier = ">=0.24,<0.25" },
|
{ name = "trio", specifier = ">0.27" },
|
||||||
{ name = "trio-typing", specifier = ">=0.10.0,<0.11" },
|
{ name = "trio-typing", specifier = ">=0.10.0,<0.11" },
|
||||||
{ name = "wrapt", specifier = ">=1.16.0,<2" },
|
{ name = "wrapt", specifier = ">=1.16.0,<2" },
|
||||||
]
|
]
|
||||||
|
@ -364,13 +388,11 @@ requires-dist = [
|
||||||
dev = [
|
dev = [
|
||||||
{ name = "greenback", specifier = ">=1.2.1,<2" },
|
{ name = "greenback", specifier = ">=1.2.1,<2" },
|
||||||
{ name = "pexpect", specifier = ">=4.9.0,<5" },
|
{ name = "pexpect", specifier = ">=4.9.0,<5" },
|
||||||
{ name = "prompt-toolkit", specifier = ">=3.0.43,<4" },
|
{ name = "prompt-toolkit", specifier = ">=3.0.50" },
|
||||||
{ name = "pyperclip", specifier = ">=1.9.0" },
|
{ name = "pyperclip", specifier = ">=1.9.0" },
|
||||||
{ name = "pytest", specifier = ">=8.2.0,<9" },
|
{ name = "pytest", specifier = ">=8.2.0,<9" },
|
||||||
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
|
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
|
||||||
{ name = "xonsh", specifier = ">=0.19.1" },
|
{ name = "xonsh", specifier = ">=0.19.2" },
|
||||||
{ name = "xonsh-vox-tabcomplete", specifier = ">=0.5,<0.6" },
|
|
||||||
{ name = "xontrib-vox", specifier = ">=0.0.1,<0.0.2" },
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -387,7 +409,7 @@ wheels = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "trio"
|
name = "trio"
|
||||||
version = "0.24.0"
|
version = "0.29.0"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "attrs" },
|
{ name = "attrs" },
|
||||||
|
@ -397,9 +419,9 @@ dependencies = [
|
||||||
{ name = "sniffio" },
|
{ name = "sniffio" },
|
||||||
{ name = "sortedcontainers" },
|
{ name = "sortedcontainers" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/8a/f3/07c152213222c615fe2391b8e1fea0f5af83599219050a549c20fcbd9ba2/trio-0.24.0.tar.gz", hash = "sha256:ffa09a74a6bf81b84f8613909fb0beaee84757450183a7a2e0b47b455c0cac5d", size = 545131 }
|
sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/14/fb/9299cf74953f473a15accfdbe2c15218e766bae8c796f2567c83bae03e98/trio-0.24.0-py3-none-any.whl", hash = "sha256:c3bd3a4e3e3025cd9a2241eae75637c43fe0b9e88b4c97b9161a55b9e54cd72c", size = 460205 },
|
{ url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920 },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -492,35 +514,15 @@ wheels = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "xonsh"
|
name = "xonsh"
|
||||||
version = "0.19.1"
|
version = "0.19.2"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/98/6e/b54a0b2685535995ee50f655103c463f9d339455c9b08c4bce3e03e7bb17/xonsh-0.19.1.tar.gz", hash = "sha256:5d3de649c909f6d14bc69232219bcbdb8152c830e91ddf17ad169c672397fb97", size = 796468 }
|
sdist = { url = "https://files.pythonhosted.org/packages/68/4e/56e95a5e607eb3b0da37396f87cde70588efc8ef819ab16f02d5b8378dc4/xonsh-0.19.2.tar.gz", hash = "sha256:cfdd0680d954a2c3aefd6caddcc7143a3d06aa417ed18365a08219bb71b960b0", size = 799960 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/8c/e6/db44068c5725af9678e37980ae9503165393d51b80dc8517fa4ec74af1cf/xonsh-0.19.1-py310-none-any.whl", hash = "sha256:83eb6610ed3535f8542abd80af9554fb7e2805b0b3f96e445f98d4b5cf1f7046", size = 640686 },
|
{ url = "https://files.pythonhosted.org/packages/6c/13/281094759df87b23b3c02dc4a16603ab08ea54d7f6acfeb69f3341137c7a/xonsh-0.19.2-py310-none-any.whl", hash = "sha256:ec7f163fd3a4943782aa34069d4e72793328c916a5975949dbec8536cbfc089b", size = 642301 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/77/4e/e487e82349866b245c559433c9ba626026a2e66bd17d7f9ac1045082f146/xonsh-0.19.1-py311-none-any.whl", hash = "sha256:c176e515b0260ab803963d1f0924f1e32f1064aa6fd5d791aa0cf6cda3a924ae", size = 640680 },
|
{ url = "https://files.pythonhosted.org/packages/29/41/a51e4c3918fe9a293b150cb949b1b8c6d45eb17dfed480dcb76ea43df4e7/xonsh-0.19.2-py311-none-any.whl", hash = "sha256:53c45f7a767901f2f518f9b8dd60fc653e0498e56e89825e1710bb0859985049", size = 642286 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/5d/88/09060815548219b8f6953a06c247cb5c92d03cbdf7a02a980bda1b5754db/xonsh-0.19.1-py312-none-any.whl", hash = "sha256:fe1266c86b117aced3bdc4d5972420bda715864435d0bd3722d63451e8001036", size = 640604 },
|
{ url = "https://files.pythonhosted.org/packages/0a/93/9a77b731f492fac27c577dea2afb5a2bcc2a6a1c79be0c86c95498060270/xonsh-0.19.2-py312-none-any.whl", hash = "sha256:b24c619aa52b59eae4d35c4195dba9b19a2c548fb5c42c6f85f2b8ccb96807b5", size = 642386 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/83/ff/7873cb8184cffeafddbf861712831c2baa2e9dbecdbfd33b1228f0db0019/xonsh-0.19.1-py313-none-any.whl", hash = "sha256:3f158b6fc0bba954e0b989004d4261bafc4bd94c68c2abd75b825da23e5a869c", size = 641166 },
|
{ url = "https://files.pythonhosted.org/packages/be/75/070324769c1ff88d971ce040f4f486339be98e0a365c8dd9991eb654265b/xonsh-0.19.2-py313-none-any.whl", hash = "sha256:c53ef6c19f781fbc399ed1b382b5c2aac2125010679a3b61d643978273c27df0", size = 642873 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/cc/03/b9f8dd338df0a330011d104e63d4d0acd8bbbc1e990ff049487b6bdf585d/xonsh-0.19.1-py39-none-any.whl", hash = "sha256:a900a6eb87d881a7ef90b1ac8522ba3699582f0bcb1e9abd863d32f6d63faf04", size = 632912 },
|
{ url = "https://files.pythonhosted.org/packages/fa/cb/2c7ccec54f5b0e73fdf7650e8336582ff0347d9001c5ef8271dc00c034fe/xonsh-0.19.2-py39-none-any.whl", hash = "sha256:bcc0225dc3847f1ed2f175dac6122fbcc54cea67d9c2dc2753d9615e2a5ff284", size = 634602 },
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "xonsh-vox-tabcomplete"
|
|
||||||
version = "0.5"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ab/fd/af0c2ee6c067c2a4dc64ec03598c94de1f6ec5984b3116af917f3add4a16/xonsh_vox_tabcomplete-0.5-py3-none-any.whl", hash = "sha256:9701b198180f167071234e77eab87b7befa97c1873b088d0b3fbbe6d6d8dcaad", size = 14381 },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "xontrib-vox"
|
|
||||||
version = "0.0.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "xonsh" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/6c/ac/a5db68a1f2e4036f7ff4c8546b1cbe29edee2ff40e0ff931836745988b79/xontrib-vox-0.0.1.tar.gz", hash = "sha256:c1f0b155992b4b0ebe6dcfd651084a8707ade7372f7e456c484d2a85339d9907", size = 16504 }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/23/58/dcdf11849c8340033da00669527ce75d8292a4e8d82605c082ed236a081a/xontrib_vox-0.0.1-py3-none-any.whl", hash = "sha256:df2bbb815832db5b04d46684f540eac967ee40ef265add2662a95d6947d04c70", size = 13467 },
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
Loading…
Reference in New Issue