Compare commits

...

9 Commits

Author SHA1 Message Date
Tyler Goodlet 46893c6d15 Another `is` fix.. 2025-02-25 20:15:59 -05:00
Tyler Goodlet 8fa538c40e Unset `$PYTHON_COLORS` for test debugger suite..
Since obvi all our `pexpect` patterns aren't going to match with
a heck-ton of terminal color escape sequences in the output XD
2025-02-25 20:14:40 -05:00
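(A rough sketch of the same idea outside the suite, presuming py3.13+ where tracebacks get ANSI-colored by default; the helper name here is purely illustrative.)

import os
import pexpect

def spawn_no_colors(cmd: str) -> pexpect.spawn:
    # py3.13 colorizes tracebacks with ANSI escape sequences which will
    # never match plain-text `pexpect` patterns, so force them off in
    # the child's env before spawning.
    env = dict(os.environ, PYTHON_COLORS='0')
    return pexpect.spawn(
        cmd,
        env=env,
        encoding='utf-8',
    )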
Tyler Goodlet ca65aea090 Flip to `strict_exception_groups=False` in core tns
Since it'll likely need a bit of detailing to get the test suite running
identically with strict egs (exception groups), i've opted to just flip
the switch on a few core nursery scopes for now until such a time as
i can focus enough to port the matching internals.. Xp
2025-02-25 19:37:30 -05:00
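(For background, a minimal hedged illustration of what the flip changes: with strict egs even a lone child-task error arrives wrapped in an `ExceptionGroup`, so old-style bare `except` handlers around nursery scopes stop matching.)

import trio

async def boom():
    raise ValueError('child task error')

async def main():
    # strict egs (the default on modern `trio`): the single error is
    # wrapped in an `ExceptionGroup`, so `except*` is needed to match.
    try:
        async with trio.open_nursery() as tn:
            tn.start_soon(boom)
    except* ValueError:
        print('caught wrapped error')

    # the temporary switch-flip used here: a lone error is raised bare
    # so existing `except ValueError:` style handlers keep working.
    try:
        async with trio.open_nursery(
            strict_exception_groups=False,
        ) as tn:
            tn.start_soon(boom)
    except ValueError:
        print('caught bare error')

trio.run(main)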
Tyler Goodlet 0d97438228 Clean up some imports in `._clustering` 2025-02-25 11:20:57 -05:00
Tyler Goodlet 2d1f29425d Drop `asyncio`-canc error from `._exceptions` 2025-02-25 11:20:07 -05:00
Tyler Goodlet 4ca5161ec4 Tweak some test asserts to better `is` style 2025-02-25 11:16:01 -05:00
Tyler Goodlet 24755aac5a Bump various (dev) deps and prefer sys python
Since it turns out there's a few gotchas moving to python 3.13,
- we need to pin to new(er) `trio` which now flips to strict exception
  groups (something to be handled in a follow up patch).
- since we're now using `uv` we should (at least for now) prefer the
  system `python` (over astral's distis) since theirs are compiled
  against `libedit`, which is what the (new) `readline.backend: str`
  will report; this will break our tab-completion and vi-mode settings in
  the `pdbp` REPL without a user configuring a `~/.editrc`
  appropriately.
- go back to using latest `pdbp` (not a local dev version) since it
  should work fine presuming the previous bullet is addressed.

Lock bumps,
- for now use latest `trio==0.29.0` (which i gotta feeling might have
  broken some existing attempts at strict-eg handling i've tried..)
- update to latest `xonsh`, `pdbp` and its dep `tabcompleter`

Other cleaning,
- put back in various deps "comments" from `poetry` content.
- drop the `xonsh-vox` and `xontrib-vox` dev deps; no `vox` support with
  `uv` rn anyway..
2025-02-24 20:59:38 -05:00
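(Re the libedit gotcha above, a quick assumed-correct check of which line-editing lib a given interpreter was built against; `readline.backend` is the new 3.13 attr mentioned above, the `__doc__` sniff is the older fallback.)

import readline
import sys

# 3.13+ reports either 'readline' or 'editline' here; astral's builds
# report 'editline' which breaks `pdbp`/`tabcompleter` tab-completion
# and vi-mode unless a `~/.editrc` is configured.
backend: str = getattr(
    readline,
    'backend',
    'editline' if 'libedit' in (readline.__doc__ or '') else 'readline',
)
print(f'{sys.executable} -> readline backend: {backend}')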
Tyler Goodlet ccff698f87 Draft test-doc for "out-of-band" `asyncio.Task`..
Since there's no way to activate `greenback`'s portal in such cases, we
should at least have a test verifying our very loud error about the
inability to support this usage..
2025-02-24 13:08:25 -05:00
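(By "out-of-band" think an `asyncio.Task` scheduled directly by some dep, NOT via `tractor.to_asyncio`, so no `greenback` portal was ever bestowed on it; a contrived sketch of the shape, names purely illustrative.)

import asyncio

async def _lib_callback() -> None:
    # scheduled by some 3rd-party lib via `asyncio.ensure_future()`
    # (not through `tractor.to_asyncio`), so `greenback.ensure_portal()`
    # was never activated for this task; any sync `breakpoint()` here
    # can't sync-wait on the debug/REPL lock and should error loudly.
    breakpoint()

async def _lib_entrypoint() -> None:
    task: asyncio.Task = asyncio.ensure_future(_lib_callback())
    await task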
Tyler Goodlet 63cf709433 Raise "independent" task errors in an eg
The (rare) condition is heavily detailed in new comments in
the `cancel_trio()` callback but, more or less, the idea here is to be
extra pedantic in raising an `ExceptionGroup` of errors from each task
(both `asyncio` and `trio`) whenever the 2 tasks raise "independently"
- in the sense that it's not obviously one side's task causing an error
(or cancellation) in the other. In this case we set the error for each
side on the `LinkedTaskChannel` (via new attrs described later).

As a synopsis, most of this work was refined out of supporting
`infected_aio=True` mode in the **root actor** and in particular as part
of getting that to work inside the `modden` daemon which at the time of
writing was still using the `i3ipc` lib and thus `asyncio`.

Impl deats,
- extend the `LinkedTaskChannel` field/API set (and type it),
  - `._trio_task: trio.Task` for test/user introspection.
- also "stage" some ideas for a more refined interface,
  - `.started()` to deliver the value yielded to the `trio.Task` parent.
   |_ also includes some todos for how to implement this design
      underneath.
  - `._aio_first: Any|None = None` to hold that value ^.
  - `.wait_aio_complete()` for syncing to the asyncio task.
- some detailed logging around "asyncio cancelled trio" case.
- Move `AsyncioCancelled` into this module.

Styling changes,
- generally more explicit var naming.
- some todos for getting modern and fancy with typing..

NB, Let it be known this commit msg was written on a friday with the
help of various "mr. white" solns.
2025-02-24 13:05:01 -05:00
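(For orientation, a rough usage sketch of the channel API this commit extends, presuming it runs inside an infected-`asyncio` actor; the `open_channel_from()` shape and `chan` methods match the diff below, the `.started()` refinement is still only "staged" so the aio side here uses the existing `to_trio.send_nowait()` handshake, and the child-func body is illustrative.)

import asyncio
import trio
from tractor import to_asyncio

async def aio_child(
    to_trio: trio.MemorySendChannel,
    from_trio: asyncio.Queue,
) -> None:
    # first send is the "started" value delivered to the parent
    # `trio.Task` as `first` below.
    to_trio.send_nowait('aio-started')
    msg = await from_trio.get()
    to_trio.send_nowait(msg * 2)

async def trio_parent() -> None:
    async with to_asyncio.open_channel_from(aio_child) as (first, chan):
        assert first == 'aio-started'
        await chan.send(21)
        assert await chan.receive() == 42
        # new in this patch: explicitly sync to the `asyncio.Task`'s
        # completion (previously `.wait_asyncio_complete()`).
        await chan.wait_aio_complete()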
14 changed files with 304 additions and 144 deletions

View File

@ -32,24 +32,21 @@ classifiers = [
"Topic :: System :: Distributed Computing",
]
dependencies = [
# trio runtime and friends
# trio runtime and friends
# (poetry) proper range specs,
# https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5
# TODO, for 3.13 we must go go `0.27` which means we have to
# disable strict egs or port to handling them internally!
# trio='^0.27'
"trio>=0.24,<0.25",
"trio>0.27",
"tricycle>=0.4.1,<0.5",
"trio-typing>=0.10.0,<0.11",
"wrapt>=1.16.0,<2",
"colorlog>=6.8.2,<7",
# built-in multi-actor `pdb` REPL
"pdbp>=1.5.0,<2",
# typed IPC msging
# TODO, get back on release once 3.13 support is out!
# built-in multi-actor `pdb` REPL
"pdbp>=1.6,<2", # windows only (from `pdbp`)
"tabcompleter>=1.4.0",
# typed IPC msging
# TODO, get back on release once 3.13 support is out!
"msgspec",
]
@ -65,13 +62,9 @@ dev = [
# `tractor.devx` tooling
"greenback>=1.2.1,<2",
"stackscope>=0.2.2,<0.3",
# xonsh usage/integration (namely as @goodboy's sh of choice Bp)
"xonsh>=0.19.1",
"xontrib-vox>=0.0.1,<0.0.2",
"prompt-toolkit>=3.0.43,<4",
"xonsh-vox-tabcomplete>=0.5,<0.6",
"pyperclip>=1.9.0",
"prompt-toolkit>=3.0.50",
"xonsh>=0.19.2",
]
# TODO, distributed (multi-host) extensions
@ -132,7 +125,23 @@ log_cli = false
# ------ tool.towncrier ------
[tool.uv]
# XXX NOTE, prefer the sys python bc apparently the distis from
# `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s
# likely due to linking against `libedit` over `readline`..
# |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions
# |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux
#
# https://docs.astral.sh/uv/reference/settings/#python-preference
python-preference = 'system'
# ------ tool.uv ------
[tool.uv.sources]
msgspec = { git = "https://github.com/jcrist/msgspec.git" }
# XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)`
# for the `pp` alias..
# pdbp = { path = "../pdbp", editable = true }
# ------ tool.uv.sources ------

View File

@ -150,6 +150,18 @@ def pytest_generate_tests(metafunc):
metafunc.parametrize("start_method", [spawn_backend], scope='module')
# TODO: a way to let test scripts (like from `examples/`)
# guarantee they won't registry addr collide!
# @pytest.fixture
# def open_test_runtime(
# reg_addr: tuple,
# ) -> AsyncContextManager:
# return partial(
# tractor.open_nursery,
# registry_addrs=[reg_addr],
# )
def sig_prog(proc, sig):
"Kill the actor-process with ``sig``."
proc.send_signal(sig)

View File

@ -30,7 +30,7 @@ from conftest import (
@pytest.fixture
def spawn(
start_method,
testdir: pytest.Testdir,
testdir: pytest.Pytester,
reg_addr: tuple[str, int],
) -> Callable[[str], None]:
@ -44,16 +44,32 @@ def spawn(
'`pexpect` based tests only supported on `trio` backend'
)
def unset_colors():
'''
Python 3.13 introduced colored tracebacks that break patt
matching,
https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS
https://docs.python.org/3/using/cmdline.html#using-on-controlling-color
'''
import os
os.environ['PYTHON_COLORS'] = '0'
def _spawn(
cmd: str,
**mkcmd_kwargs,
):
unset_colors()
return testdir.spawn(
cmd=mk_cmd(
cmd,
**mkcmd_kwargs,
),
expect_timeout=3,
# preexec_fn=unset_colors,
# ^TODO? get `pytest` core to expose underlying
# `pexpect.spawn()` stuff?
)
# such that test-dep can pass input script name.

View File

@ -218,10 +218,9 @@ def expect_any_of(
)
return expected_patts
# yield child
def test_pause_from_asyncio_task(
def test_sync_pause_from_aio_task(
spawn,
ctlc: bool
# ^TODO, fix for `asyncio`!!
@ -327,3 +326,25 @@ def test_pause_from_asyncio_task(
child.sendline('c')
child.expect(EOF)
def test_sync_pause_from_non_greenbacked_aio_task():
'''
Where the `breakpoint()` caller task is NOT spawned by
`tractor.to_asyncio` and thus never activates
a `greenback.ensure_portal()` beforehand, presumably bc the task
was started by some lib/dep, as is often seen in the field.
Ensure sync pausing works when the pause is in,
- the root actor running in infected-mode?
|_ since we don't need any IPC to acquire the debug lock?
|_ is there some way to handle this like the non-main-thread case?
All other cases need to error out appropriately right?
- for any subactor we can't avoid needing the repl lock..
|_ is there a way to hook into `asyncio.ensure_future(obj)`?
'''
pass

View File

@ -130,7 +130,7 @@ def test_multierror(
try:
await portal2.result()
except tractor.RemoteActorError as err:
assert err.boxed_type == AssertionError
assert err.boxed_type is AssertionError
print("Look Maa that first actor failed hard, hehh")
raise
@ -182,7 +182,7 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay):
for exc in exceptions:
assert isinstance(exc, tractor.RemoteActorError)
assert exc.boxed_type == AssertionError
assert exc.boxed_type is AssertionError
async def do_nothing():
@ -504,7 +504,9 @@ def test_cancel_via_SIGINT_other_task(
if is_win(): # smh
timeout += 1
async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED):
async def spawn_and_sleep_forever(
task_status=trio.TASK_STATUS_IGNORED
):
async with tractor.open_nursery() as tn:
for i in range(3):
await tn.run_in_actor(

View File

@ -170,7 +170,7 @@ def test_do_not_swallow_error_before_started_by_remote_contextcancelled(
trio.run(main)
rae = excinfo.value
assert rae.boxed_type == TypeError
assert rae.boxed_type is TypeError
@tractor.context

View File

@ -19,10 +19,13 @@ Actor cluster helpers.
'''
from __future__ import annotations
from contextlib import asynccontextmanager as acm
from contextlib import (
asynccontextmanager as acm,
)
from multiprocessing import cpu_count
from typing import AsyncGenerator, Optional
from typing import (
AsyncGenerator,
)
import trio
import tractor

View File

@ -1985,7 +1985,10 @@ async def open_context_from_portal(
ctxc_from_callee: ContextCancelled|None = None
try:
async with (
trio.open_nursery() as tn,
trio.open_nursery(
strict_exception_groups=False,
) as tn,
msgops.maybe_limit_plds(
ctx=ctx,
spec=ctx_meta.get('pld_spec'),

View File

@ -981,18 +981,6 @@ class MessagingError(Exception):
'''
class AsyncioCancelled(Exception):
'''
Asyncio cancelled translation (non-base) error
for use with the ``to_asyncio`` module
to be raised in the ``trio`` side task
NOTE: this should NOT inherit from `asyncio.CancelledError` or
tests should break!
'''
def pack_error(
exc: BaseException|RemoteActorError,
@ -1172,7 +1160,7 @@ def is_multi_cancelled(
trio.Cancelled in ignore_nested
# XXX always count-in `trio`'s native signal
):
ignore_nested |= {trio.Cancelled}
ignore_nested.update({trio.Cancelled})
if isinstance(exc, BaseExceptionGroup):
matched_exc: BaseExceptionGroup|None = exc.subgroup(

View File

@ -362,7 +362,10 @@ async def open_root_actor(
)
# start the actor runtime in a new task
async with trio.open_nursery() as nursery:
async with trio.open_nursery(
strict_exception_groups=False,
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
) as nursery:
# ``_runtime.async_main()`` creates an internal nursery
# and blocks here until any underlying actor(-process)
@ -457,12 +460,19 @@ def run_daemon(
start_method: str | None = None,
debug_mode: bool = False,
# TODO, support `infected_aio=True` mode by,
# - calling the appropriate entrypoint-func from `.to_asyncio`
# - maybe init-ing `greenback` as done above in
# `open_root_actor()`.
**kwargs
) -> None:
'''
Spawn daemon actor which will respond to RPC; the main task simply
starts the runtime and then sleeps forever.
Spawn a root (daemon) actor which will respond to RPC; the main
task simply starts the runtime and then blocks via embedded
`trio.sleep_forever()`.
This is a very minimal convenience wrapper around starting
a "run-until-cancelled" root actor which can be started with a set
@ -475,7 +485,6 @@ def run_daemon(
importlib.import_module(path)
async def _main():
async with open_root_actor(
registry_addrs=registry_addrs,
name=name,

View File

@ -621,7 +621,11 @@ async def _invoke(
tn: trio.Nursery
rpc_ctx_cs: CancelScope
async with (
trio.open_nursery() as tn,
trio.open_nursery(
strict_exception_groups=False,
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
) as tn,
msgops.maybe_limit_plds(
ctx=ctx,
spec=ctx_meta.get('pld_spec'),
@ -734,8 +738,8 @@ async def _invoke(
# XXX: do we ever trigger this block any more?
except (
BaseExceptionGroup,
trio.Cancelled,
BaseException,
trio.Cancelled,
) as scope_error:
if (

View File

@ -395,7 +395,10 @@ async def _open_and_supervise_one_cancels_all_nursery(
# `ActorNursery.start_actor()`).
# errors from this daemon actor nursery bubble up to caller
async with trio.open_nursery() as da_nursery:
async with trio.open_nursery(
strict_exception_groups=False,
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
) as da_nursery:
try:
# This is the inner level "run in actor" nursery. It is
# awaited first since actors spawned in this way (using
@ -405,7 +408,10 @@ async def _open_and_supervise_one_cancels_all_nursery(
# immediately raised for handling by a supervisor strategy.
# As such if the strategy propagates any error(s) upwards
# the above "daemon actor" nursery will be notified.
async with trio.open_nursery() as ria_nursery:
async with trio.open_nursery(
strict_exception_groups=False,
# ^XXX^ TODO? instead unpack any RAE as per "loose" style?
) as ria_nursery:
an = ActorNursery(
actor,

View File

@ -34,7 +34,6 @@ from typing import (
import tractor
from tractor._exceptions import (
AsyncioCancelled,
is_multi_cancelled,
)
from tractor._state import (
@ -46,6 +45,11 @@ from tractor.log import (
get_logger,
StackLevelAdapter,
)
# TODO, write the equiv of `trio.abc.Channel` but without attrs..
# -[ ] `trionics.chan_types.ChanStruct` maybe?
# from tractor.msg import (
# pretty_struct,
# )
from tractor.trionics._broadcast import (
broadcast_receiver,
BroadcastReceiver,
@ -66,7 +70,12 @@ __all__ = [
@dataclass
class LinkedTaskChannel(trio.abc.Channel):
class LinkedTaskChannel(
trio.abc.Channel,
# XXX LAME! meta-base conflict..
# pretty_struct.Struct,
):
'''
A "linked task channel" which allows for two-way synchronized msg
passing between a ``trio``-in-guest-mode task and an ``asyncio``
@ -77,12 +86,14 @@ class LinkedTaskChannel(trio.abc.Channel):
_from_aio: trio.MemoryReceiveChannel
_to_trio: trio.MemorySendChannel
_trio_cs: trio.CancelScope
_trio_task: trio.Task
_aio_task_complete: trio.Event
_trio_err: BaseException|None = None
_trio_exited: bool = False
# set after ``asyncio.create_task()``
# _aio_first: Any|None = None
_aio_task: asyncio.Task|None = None
_aio_err: BaseException|None = None
_broadcaster: BroadcastReceiver|None = None
@ -90,6 +101,25 @@ class LinkedTaskChannel(trio.abc.Channel):
async def aclose(self) -> None:
await self._from_aio.aclose()
def started(
self,
val: Any = None,
) -> None:
self._aio_started_val = val
return self._to_trio.send_nowait(val)
# TODO, mk this side-agnostic?
#
# -[ ] add private meths for both sides and dynamically
# determine which to use based on task-type read at calltime?
# -[ ] `._recv_trio()`: receive to trio<-asyncio
# -[ ] `._send_trio()`: send from trio->asyncio
# -[ ] `._recv_aio()`: send from asyncio->trio
# -[ ] `._send_aio()`: receive to asyncio<-trio
#
# -[ ] pass the instance to the aio side instead of the separate
# per-side chan types?
#
async def receive(self) -> Any:
'''
Receive a value from the paired `asyncio.Task` with
@ -115,7 +145,16 @@ class LinkedTaskChannel(trio.abc.Channel):
):
raise err
async def wait_asyncio_complete(self) -> None:
async def send(self, item: Any) -> None:
'''
Send a value through to the asyncio task presuming
it defines a ``from_trio`` argument, if it does not
this method will raise an error.
'''
self._to_aio.put_nowait(item)
async def wait_aio_complete(self) -> None:
await self._aio_task_complete.wait()
def cancel_asyncio_task(
@ -126,15 +165,6 @@ class LinkedTaskChannel(trio.abc.Channel):
msg=msg,
)
async def send(self, item: Any) -> None:
'''
Send a value through to the asyncio task presuming
it defines a ``from_trio`` argument, if it does not
this method will raise an error.
'''
self._to_aio.put_nowait(item)
def closed(self) -> bool:
return self._from_aio._closed # type: ignore
@ -218,7 +248,8 @@ def _run_asyncio_task(
coro = func(**kwargs)
cancel_scope = trio.CancelScope()
trio_task: trio.Task = trio.lowlevel.current_task()
trio_cs = trio.CancelScope()
aio_task_complete = trio.Event()
aio_err: BaseException|None = None
@ -226,7 +257,8 @@ def _run_asyncio_task(
_to_aio=aio_q, # asyncio.Queue
_from_aio=from_aio, # recv chan
_to_trio=to_trio, # send chan
_trio_cs=cancel_scope,
_trio_cs=trio_cs,
_trio_task=trio_task,
_aio_task_complete=aio_task_complete,
)
@ -274,6 +306,9 @@ def _run_asyncio_task(
to_trio.send_nowait(result)
finally:
# breakpoint()
# import pdbp; pdbp.set_trace()
# if the task was spawned using `open_channel_from()`
# then we close the channels on exit.
if provide_channels:
@ -281,7 +316,6 @@ def _run_asyncio_task(
# a ``trio.EndOfChannel`` to the trio (consumer) side.
to_trio.close()
# import pdbp; pdbp.set_trace()
aio_task_complete.set()
# await asyncio.sleep(0.1)
log.info(
@ -325,14 +359,17 @@ def _run_asyncio_task(
)
greenback.bestow_portal(task)
def cancel_trio(task: asyncio.Task) -> None:
def cancel_trio(
task: asyncio.Task,
) -> None:
'''
Cancel the calling `trio` task on error.
Cancel the parent `trio` task on any error raised by the
`asyncio` side.
'''
nonlocal chan
aio_err: BaseException|None = chan._aio_err
task_err: BaseException|None = None
relayed_aio_err: BaseException|None = chan._aio_err
aio_err: BaseException|None = None
# only to avoid `asyncio` complaining about uncaptured
# task exceptions
@ -343,20 +380,20 @@ def _run_asyncio_task(
f'|_{res}\n'
)
except BaseException as _aio_err:
task_err: BaseException = _aio_err
aio_err: BaseException = _aio_err
# read again AFTER the `asyncio` side errors in case
# it was cancelled due to an error from `trio` (or
# some other out of band exc).
aio_err: BaseException|None = chan._aio_err
# some other out of band exc) and then set to something
# else?
relayed_aio_err: BaseException|None = chan._aio_err
# always true right?
assert (
type(_aio_err) is type(aio_err)
type(_aio_err) is type(relayed_aio_err)
), (
f'`asyncio`-side task errors mismatch?!?\n\n'
f'caught: {_aio_err}\n'
f'chan._aio_err: {aio_err}\n'
f'(caught) aio_err: {aio_err}\n'
f'chan._aio_err: {relayed_aio_err}\n'
)
msg: str = (
@ -381,12 +418,13 @@ def _run_asyncio_task(
msg.format(etype_str='errored')
)
if aio_err is not None:
trio_err: BaseException|None = chan._trio_err
if (
relayed_aio_err
or
trio_err
):
# import pdbp; pdbp.set_trace()
# XXX: uhh is this true?
# assert task_err, f'Asyncio task {task.get_name()} discrepancy!?'
# NOTE: currently mem chan closure may act as a form
# of error relay (at least in the `asyncio.CancelledError`
# case) since we have no way to directly trigger a `trio`
@ -394,8 +432,6 @@ def _run_asyncio_task(
# We might want to change this in the future though.
from_aio.close()
if task_err is None:
assert aio_err
# wait, wut?
# aio_err.with_traceback(aio_err.__traceback__)
@ -404,7 +440,7 @@ def _run_asyncio_task(
# elif (
# type(aio_err) is CancelledError
# and # trio was the cause?
# cancel_scope.cancel_called
# trio_cs.cancel_called
# ):
# log.cancel(
# 'infected task was cancelled by `trio`-side'
@ -415,26 +451,83 @@ def _run_asyncio_task(
# error in case the trio task is blocking on
# a checkpoint.
if (
not cancel_scope.cancelled_caught
not trio_cs.cancelled_caught
or
not cancel_scope.cancel_called
not trio_cs.cancel_called
):
# import pdbp; pdbp.set_trace()
cancel_scope.cancel()
trio_cs.cancel()
if task_err:
# maybe the `trio` task errored independent from the
# `asyncio` one and likely in between
# a guest-run-sched-tick.
#
# The obvious ex. is where one side errors during
# the current tick and then the other side immediately
# errors before its next checkpoint; i.e. the 2 errors
# are "independent".
#
# "Independent" here means in the sense that neither task
# was the explicit cause of the other side's exception
# according to our `tractor.to_asyncio` SC API's error
# relaying mechanism(s); the error pair is *possibly
# due-to* but **not necessarily** inter-related by some
# (subsys) state between the tasks,
#
# NOTE, also see the `test_trio_prestarted_task_bubbles`
# for reproducing detailed edge cases as per the above
# cases.
#
if (
not trio_cs.cancelled_caught
and
(trio_err := chan._trio_err)
and
type(trio_err) not in {
trio.Cancelled,
}
and (
aio_err
and
type(aio_err) not in {
asyncio.CancelledError
}
)
):
eg = ExceptionGroup(
'Both the `trio` and `asyncio` tasks errored independently!!\n',
(trio_err, aio_err),
)
chan._trio_err = eg
chan._aio_err = eg
raise eg
elif aio_err:
# XXX raise any `asyncio` side error IFF it doesn't
# match the one we just caught from the task above!
# (that would indicate something weird/very-wrong
# going on?)
if aio_err is not task_err:
# import pdbp; pdbp.set_trace()
raise aio_err from task_err
if aio_err is not relayed_aio_err:
raise aio_err from relayed_aio_err
raise aio_err
task.add_done_callback(cancel_trio)
return chan
class AsyncioCancelled(Exception):
'''
Asyncio cancelled translation (non-base) error
for use with the ``to_asyncio`` module
to be raised in the ``trio`` side task
NOTE: this should NOT inherit from `asyncio.CancelledError` or
tests should break!
'''
class TrioTaskExited(AsyncioCancelled):
'''
The `trio`-side task exited without explicitly cancelling the
@ -483,13 +576,12 @@ async def translate_aio_errors(
# import pdbp; pdbp.set_trace() # lolevel-debug
# relay cancel through to called ``asyncio`` task
# relay cancel through to called `asyncio` task
chan._aio_err = AsyncioCancelled(
f'trio`-side cancelled the `asyncio`-side,\n'
f'c)>\n'
f' |_{trio_task}\n\n'
f'{trio_err!r}\n'
)
@ -546,6 +638,7 @@ async def translate_aio_errors(
raise
except BaseException as _trio_err:
# await tractor.pause(shield=True)
trio_err = _trio_err
log.exception(
'`trio`-side task errored?'
@ -619,11 +712,17 @@ async def translate_aio_errors(
# pump the other side's task? needed?
await trio.lowlevel.checkpoint()
# from tractor._state import is_root_process
# if is_root_process():
# breakpoint()
if (
not chan._trio_err
and
(fut := aio_task._fut_waiter)
):
# await trio.lowlevel.checkpoint()
# import pdbp; pdbp.set_trace()
fut.set_exception(
TrioTaskExited(
f'The peer `asyncio` task is still blocking/running?\n'
@ -632,11 +731,6 @@ async def translate_aio_errors(
)
)
else:
# from tractor._state import is_root_process
# if is_root_process():
# breakpoint()
# import pdbp; pdbp.set_trace()
aio_taskc_warn: str = (
f'\n'
f'MANUALLY Cancelling `asyncio`-task: {aio_task.get_name()}!\n\n'
@ -663,7 +757,7 @@ async def translate_aio_errors(
# it errored out up there!
#
if wait_on_aio_task:
# await chan.wait_asyncio_complete()
# await chan.wait_aio_complete()
await chan._aio_task_complete.wait()
log.info(
'asyncio-task is done and unblocked trio-side!\n'
@ -771,11 +865,22 @@ async def open_channel_from(
# sync to a "started()"-like first delivered value from the
# ``asyncio`` task.
try:
with chan._trio_cs:
with (cs := chan._trio_cs):
first = await chan.receive()
# deliver stream handle upward
yield first, chan
except trio.Cancelled as taskc:
# await tractor.pause(shield=True) # ya it worx ;)
if cs.cancel_called:
log.cancel(
f'trio-side was manually cancelled by aio side\n'
f'|_c>}}{cs!r}?\n'
)
# TODO, maybe a special `TrioCancelled`???
raise taskc
finally:
chan._trio_exited = True
chan._to_trio.close()
@ -893,12 +998,12 @@ def run_as_asyncio_guest(
_sigint_loop_pump_delay: float = 0,
) -> None:
# ^-TODO-^ technically whatever `trio_main` returns.. we should
# try to use func-typevar-params at least by 3.13!
# -[ ] https://typing.readthedocs.io/en/latest/spec/callables.html#callback-protocols
# -[ ] https://peps.python.org/pep-0646/#using-type-variable-tuples-in-functions
# -[ ] https://typing.readthedocs.io/en/latest/spec/callables.html#unpack-for-keyword-arguments
# -[ ] https://peps.python.org/pep-0718/
# ^-TODO-^ technically whatever `trio_main` returns.. we should
# try to use func-typevar-params at least by 3.13!
# -[ ] https://typing.readthedocs.io/en/latest/spec/callables.html#callback-protocols
# -[ ] https://peps.python.org/pep-0646/#using-type-variable-tuples-in-functions
# -[ ] https://typing.readthedocs.io/en/latest/spec/callables.html#unpack-for-keyword-arguments
# -[ ] https://peps.python.org/pep-0718/
'''
Entry for an "infected ``asyncio`` actor".
@ -957,15 +1062,15 @@ def run_as_asyncio_guest(
# force_reload=True,
# )
def trio_done_callback(main_outcome):
def trio_done_callback(main_outcome: Outcome):
log.runtime(
f'`trio` guest-run finishing with outcome\n'
f'>) {main_outcome}\n'
f'|_{trio_done_fute}\n'
)
# import pdbp; pdbp.set_trace()
if isinstance(main_outcome, Error):
# import pdbp; pdbp.set_trace()
error: BaseException = main_outcome.error
# show a dedicated `asyncio`-side tb from the error
@ -1165,6 +1270,10 @@ def run_as_asyncio_guest(
)
raise AsyncioRuntimeTranslationError(message) from state_err
# XXX, should never get here ;)
# else:
# import pdbp; pdbp.set_trace()
# might as well if it's installed.
try:
import uvloop

uv.lock (54 changed lines)
View File

@ -330,6 +330,7 @@ dependencies = [
{ name = "colorlog" },
{ name = "msgspec" },
{ name = "pdbp" },
{ name = "tabcompleter" },
{ name = "tricycle" },
{ name = "trio" },
{ name = "trio-typing" },
@ -345,17 +346,16 @@ dev = [
{ name = "pytest" },
{ name = "stackscope" },
{ name = "xonsh" },
{ name = "xonsh-vox-tabcomplete" },
{ name = "xontrib-vox" },
]
[package.metadata]
requires-dist = [
{ name = "colorlog", specifier = ">=6.8.2,<7" },
{ name = "msgspec", git = "https://github.com/jcrist/msgspec.git" },
{ name = "pdbp", specifier = ">=1.5.0,<2" },
{ name = "pdbp", specifier = ">=1.6,<2" },
{ name = "tabcompleter", specifier = ">=1.4.0" },
{ name = "tricycle", specifier = ">=0.4.1,<0.5" },
{ name = "trio", specifier = ">=0.24,<0.25" },
{ name = "trio", specifier = ">0.27" },
{ name = "trio-typing", specifier = ">=0.10.0,<0.11" },
{ name = "wrapt", specifier = ">=1.16.0,<2" },
]
@ -364,13 +364,11 @@ requires-dist = [
dev = [
{ name = "greenback", specifier = ">=1.2.1,<2" },
{ name = "pexpect", specifier = ">=4.9.0,<5" },
{ name = "prompt-toolkit", specifier = ">=3.0.43,<4" },
{ name = "prompt-toolkit", specifier = ">=3.0.50" },
{ name = "pyperclip", specifier = ">=1.9.0" },
{ name = "pytest", specifier = ">=8.2.0,<9" },
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
{ name = "xonsh", specifier = ">=0.19.1" },
{ name = "xonsh-vox-tabcomplete", specifier = ">=0.5,<0.6" },
{ name = "xontrib-vox", specifier = ">=0.0.1,<0.0.2" },
{ name = "xonsh", specifier = ">=0.19.2" },
]
[[package]]
@ -387,7 +385,7 @@ wheels = [
[[package]]
name = "trio"
version = "0.24.0"
version = "0.29.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "attrs" },
@ -397,9 +395,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8a/f3/07c152213222c615fe2391b8e1fea0f5af83599219050a549c20fcbd9ba2/trio-0.24.0.tar.gz", hash = "sha256:ffa09a74a6bf81b84f8613909fb0beaee84757450183a7a2e0b47b455c0cac5d", size = 545131 }
sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/fb/9299cf74953f473a15accfdbe2c15218e766bae8c796f2567c83bae03e98/trio-0.24.0-py3-none-any.whl", hash = "sha256:c3bd3a4e3e3025cd9a2241eae75637c43fe0b9e88b4c97b9161a55b9e54cd72c", size = 460205 },
{ url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920 },
]
[[package]]
@ -492,35 +490,15 @@ wheels = [
[[package]]
name = "xonsh"
version = "0.19.1"
version = "0.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/98/6e/b54a0b2685535995ee50f655103c463f9d339455c9b08c4bce3e03e7bb17/xonsh-0.19.1.tar.gz", hash = "sha256:5d3de649c909f6d14bc69232219bcbdb8152c830e91ddf17ad169c672397fb97", size = 796468 }
sdist = { url = "https://files.pythonhosted.org/packages/68/4e/56e95a5e607eb3b0da37396f87cde70588efc8ef819ab16f02d5b8378dc4/xonsh-0.19.2.tar.gz", hash = "sha256:cfdd0680d954a2c3aefd6caddcc7143a3d06aa417ed18365a08219bb71b960b0", size = 799960 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/e6/db44068c5725af9678e37980ae9503165393d51b80dc8517fa4ec74af1cf/xonsh-0.19.1-py310-none-any.whl", hash = "sha256:83eb6610ed3535f8542abd80af9554fb7e2805b0b3f96e445f98d4b5cf1f7046", size = 640686 },
{ url = "https://files.pythonhosted.org/packages/77/4e/e487e82349866b245c559433c9ba626026a2e66bd17d7f9ac1045082f146/xonsh-0.19.1-py311-none-any.whl", hash = "sha256:c176e515b0260ab803963d1f0924f1e32f1064aa6fd5d791aa0cf6cda3a924ae", size = 640680 },
{ url = "https://files.pythonhosted.org/packages/5d/88/09060815548219b8f6953a06c247cb5c92d03cbdf7a02a980bda1b5754db/xonsh-0.19.1-py312-none-any.whl", hash = "sha256:fe1266c86b117aced3bdc4d5972420bda715864435d0bd3722d63451e8001036", size = 640604 },
{ url = "https://files.pythonhosted.org/packages/83/ff/7873cb8184cffeafddbf861712831c2baa2e9dbecdbfd33b1228f0db0019/xonsh-0.19.1-py313-none-any.whl", hash = "sha256:3f158b6fc0bba954e0b989004d4261bafc4bd94c68c2abd75b825da23e5a869c", size = 641166 },
{ url = "https://files.pythonhosted.org/packages/cc/03/b9f8dd338df0a330011d104e63d4d0acd8bbbc1e990ff049487b6bdf585d/xonsh-0.19.1-py39-none-any.whl", hash = "sha256:a900a6eb87d881a7ef90b1ac8522ba3699582f0bcb1e9abd863d32f6d63faf04", size = 632912 },
]
[[package]]
name = "xonsh-vox-tabcomplete"
version = "0.5"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/fd/af0c2ee6c067c2a4dc64ec03598c94de1f6ec5984b3116af917f3add4a16/xonsh_vox_tabcomplete-0.5-py3-none-any.whl", hash = "sha256:9701b198180f167071234e77eab87b7befa97c1873b088d0b3fbbe6d6d8dcaad", size = 14381 },
]
[[package]]
name = "xontrib-vox"
version = "0.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "xonsh" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6c/ac/a5db68a1f2e4036f7ff4c8546b1cbe29edee2ff40e0ff931836745988b79/xontrib-vox-0.0.1.tar.gz", hash = "sha256:c1f0b155992b4b0ebe6dcfd651084a8707ade7372f7e456c484d2a85339d9907", size = 16504 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/23/58/dcdf11849c8340033da00669527ce75d8292a4e8d82605c082ed236a081a/xontrib_vox-0.0.1-py3-none-any.whl", hash = "sha256:df2bbb815832db5b04d46684f540eac967ee40ef265add2662a95d6947d04c70", size = 13467 },
{ url = "https://files.pythonhosted.org/packages/6c/13/281094759df87b23b3c02dc4a16603ab08ea54d7f6acfeb69f3341137c7a/xonsh-0.19.2-py310-none-any.whl", hash = "sha256:ec7f163fd3a4943782aa34069d4e72793328c916a5975949dbec8536cbfc089b", size = 642301 },
{ url = "https://files.pythonhosted.org/packages/29/41/a51e4c3918fe9a293b150cb949b1b8c6d45eb17dfed480dcb76ea43df4e7/xonsh-0.19.2-py311-none-any.whl", hash = "sha256:53c45f7a767901f2f518f9b8dd60fc653e0498e56e89825e1710bb0859985049", size = 642286 },
{ url = "https://files.pythonhosted.org/packages/0a/93/9a77b731f492fac27c577dea2afb5a2bcc2a6a1c79be0c86c95498060270/xonsh-0.19.2-py312-none-any.whl", hash = "sha256:b24c619aa52b59eae4d35c4195dba9b19a2c548fb5c42c6f85f2b8ccb96807b5", size = 642386 },
{ url = "https://files.pythonhosted.org/packages/be/75/070324769c1ff88d971ce040f4f486339be98e0a365c8dd9991eb654265b/xonsh-0.19.2-py313-none-any.whl", hash = "sha256:c53ef6c19f781fbc399ed1b382b5c2aac2125010679a3b61d643978273c27df0", size = 642873 },
{ url = "https://files.pythonhosted.org/packages/fa/cb/2c7ccec54f5b0e73fdf7650e8336582ff0347d9001c5ef8271dc00c034fe/xonsh-0.19.2-py39-none-any.whl", hash = "sha256:bcc0225dc3847f1ed2f175dac6122fbcc54cea67d9c2dc2753d9615e2a5ff284", size = 634602 },
]
[[package]]