Compare commits

10 Commits: 673aeef4e9 ... 57edf481e8

Author | SHA1 | Date
---|---|---
Tyler Goodlet | 57edf481e8 |
Tyler Goodlet | d6ddc47e58 |
Tyler Goodlet | 86f4f2df6f |
Tyler Goodlet | 2bd5ba76b9 |
Tyler Goodlet | a4859c969c |
Tyler Goodlet | 2dfa12c743 |
Tyler Goodlet | f812c344a7 |
Tyler Goodlet | e161f7bac0 |
Tyler Goodlet | 3fd28ee3a5 |
Tyler Goodlet | 8dba692ef5 |
@@ -254,6 +254,7 @@ async def _hijack_stdin_for_child(
        # assert await stream.receive() == 'pdb_unlock'

    except (
        trio.MultiError,
        trio.BrokenResourceError,
        trio.Cancelled,  # by local cancellation
        trio.ClosedResourceError,  # by self._rx_chan
@@ -343,6 +344,7 @@ async def _breakpoint(
    except tractor.ContextCancelled:
        log.warning('Root actor cancelled debug lock')
        raise

    finally:
        log.debug(f"Exiting debugger for actor {actor}")
@@ -407,7 +409,11 @@ async def _breakpoint(
            'Root actor attempting to shield-acquire active tty lock'
            f' owned by {_global_actor_in_debug}')

        with trio.CancelScope(shield=True):
            stats = _debug_lock.statistics()
            if stats.owner:
                breakpoint()

        # with trio.CancelScope(shield=True):
        # must shield here to avoid hitting a ``Cancelled`` and
        # a child getting stuck bc we clobbered the tty
        await _debug_lock.acquire()
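The comment in the hunk above explains why the tty-lock acquisition is shielded: a cancellation arriving mid-acquire could abandon the lock handshake and leave a child actor stuck waiting on a clobbered tty. A minimal, generic ``trio`` sketch of that shielding idea (not code from this diff; the lock name is a stand-in):

```python
# Minimal trio sketch (not tractor's code): with ``shield=True`` the lock
# acquisition runs to completion even though the surrounding scope's
# deadline has already expired, so the holder can still release cleanly.
import trio


async def main() -> None:
    debug_lock = trio.Lock()  # stand-in for the root actor's tty/debug lock

    with trio.move_on_after(0):  # deadline already expired for this scope
        with trio.CancelScope(shield=True):
            await debug_lock.acquire()  # completes under the shield
        debug_lock.release()
        await trio.sleep(0)  # first unshielded checkpoint gets cancelled here

    assert not debug_lock.locked()


trio.run(main)
```

The shield only masks cancellation for the acquire itself; once the lock is held, the pending cancellation is still delivered at the next unshielded checkpoint.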
@@ -238,7 +238,7 @@ def run(

 def run_daemon(
-    rpc_module_paths: List[str],
+    enable_modules: List[str],
     **kwargs
 ) -> None:
     """Spawn daemon actor which will respond to RPC.
@@ -247,9 +247,9 @@ def run_daemon(
     ``tractor.run(trio.sleep(float('inf')))`` such that the first actor spawned
     is meant to run forever responding to RPC requests.
     """
-    kwargs['rpc_module_paths'] = list(rpc_module_paths)
+    kwargs['enable_modules'] = list(enable_modules)

-    for path in rpc_module_paths:
+    for path in enable_modules:
         importlib.import_module(path)

     return run(partial(trio.sleep, float('inf')), **kwargs)
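Since ``run_daemon()`` now takes ``enable_modules`` in place of ``rpc_module_paths``, a caller would look roughly like the sketch below; ``mypkg.jobs`` is a hypothetical module name and the extra ``name`` kwarg is assumed to pass through ``**kwargs`` to ``run()``.

```python
# Hedged usage sketch for the renamed parameter above.
import tractor

if __name__ == '__main__':
    # blocks forever, servicing RPC requests against the enabled modules
    tractor.run_daemon(
        ['mypkg.jobs'],      # hypothetical module exposing RPC-callable funcs
        name='jobs_daemon',  # assumed to be forwarded to ``run()`` via **kwargs
    )
```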
@@ -331,6 +331,7 @@ async def new_proc(
     bind_addr=bind_addr,
     parent_addr=parent_addr,
     _runtime_vars=_runtime_vars,
+    infect_asyncio=infect_asyncio,
     task_status=task_status,
 )
@@ -346,6 +347,7 @@ async def mp_new_proc(
     parent_addr: Tuple[str, int],
     _runtime_vars: Dict[str, Any],  # serialized and sent to _child
     *,
+    infect_asyncio: bool = False,
     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED

 ) -> None:
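These spawn-layer hunks thread the new ``infect_asyncio`` flag down to ``new_proc()``/``mp_new_proc()``. A hedged sketch of how a caller might request an asyncio-infected child, assuming the actor-nursery spawn API exposes a matching flag (that part is not shown in this diff) and using a hypothetical module name:

```python
# Hedged sketch only: ``mypkg.aio_service`` is hypothetical, and the
# ``infect_asyncio`` kwarg on ``start_actor()`` is an assumption about the
# nursery API sitting above the spawn code patched in this diff.
import tractor


async def main() -> None:
    async with tractor.open_nursery() as nursery:
        portal = await nursery.start_actor(
            'aio_child',
            enable_modules=['mypkg.aio_service'],  # hypothetical module
            infect_asyncio=True,  # child runs trio as a guest on an asyncio loop
        )
        await portal.cancel_actor()


if __name__ == '__main__':
    tractor.run(main)
```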
@@ -1,6 +1,7 @@
-"""
+'''
 Infection apis for ``asyncio`` loops running ``trio`` using guest mode.
-"""
+
+'''
 import asyncio
 import inspect
 from typing import (
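For reference, the "guest mode" the module docstring refers to is trio's stock mechanism for running a trio program on top of a foreign event loop. A minimal, generic sketch of that wiring on a plain ``asyncio`` loop (standard ``trio.lowlevel.start_guest_run`` usage, not tractor's wrapper):

```python
# Generic trio guest-mode wiring on top of asyncio (not tractor's code):
# the asyncio loop "hosts" the trio run by scheduling its internal steps.
import asyncio

import trio


async def trio_main() -> str:
    await trio.sleep(0.1)
    return 'trio ran as a guest'


def main() -> None:
    loop = asyncio.new_event_loop()
    done = loop.create_future()

    trio.lowlevel.start_guest_run(
        trio_main,
        run_sync_soon_threadsafe=loop.call_soon_threadsafe,
        # the callback receives an ``outcome.Outcome``; unwrap() returns the
        # value or re-raises the error from the trio side
        done_callback=lambda outcome: done.set_result(outcome.unwrap()),
    )
    print(loop.run_until_complete(done))


if __name__ == '__main__':
    main()
```

``run_as_asyncio_guest()`` further down in this module wraps essentially this pattern so an actor's trio runtime can share a process with asyncio code.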
@@ -43,15 +44,16 @@ async def consume_asyncgen(
         to_trio.send_nowait(item)


-async def run_task(
+def _run_asyncio_task(
     func: Callable,
     *,
-    qsize: int = 2**10,
+    qsize: int = 1,
     _treat_as_stream: bool = False,
     **kwargs,
 ) -> Any:
     """Run an ``asyncio`` async function or generator in a task, return
     or stream the result back to ``trio``.

     """
     assert current_actor().is_infected_aio()
@@ -59,20 +61,27 @@ async def run_task(
     from_trio = asyncio.Queue(qsize)  # type: ignore
     to_trio, from_aio = trio.open_memory_channel(qsize)  # type: ignore

     from_aio._err = None

     args = tuple(inspect.getfullargspec(func).args)

     if getattr(func, '_tractor_steam_function', None):
         # the assumption is that the target async routine accepts the
         # send channel then it intends to yield more then one return
         # value otherwise it would just return ;P
-        _treat_as_stream = True
+        # _treat_as_stream = True
+        assert qsize > 1

     # allow target func to accept/stream results manually by name
     if 'to_trio' in args:
         kwargs['to_trio'] = to_trio

     if 'from_trio' in args:
         kwargs['from_trio'] = from_trio

     # if 'from_aio' in args:
     #     kwargs['from_aio'] = from_aio

     coro = func(**kwargs)

     cancel_scope = trio.CancelScope()
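The argument-inspection block above defines a small convention: channel endpoints are injected into the target purely by parameter name, and the ``_tractor_steam_function`` attribute marks a target as streaming. A hedged sketch of an ``asyncio``-side target written against those conventions (the function is illustrative, and setting the marker attribute by hand here is purely for demonstration):

```python
# Hedged sketch of an asyncio-side target under the conventions above: it is
# handed a ``to_trio`` memory-channel endpoint by parameter name and pushes
# multiple values back to the trio side.
import asyncio


async def aio_counter(
    to_trio,  # trio.MemorySendChannel injected by parameter name
    limit: int = 3,
) -> None:
    for i in range(limit):
        await asyncio.sleep(0.1)  # pretend asyncio-side work
        to_trio.send_nowait(i)    # stream each value up to trio


# marker checked via ``getattr(func, '_tractor_steam_function', None)``
aio_counter._tractor_steam_function = True
```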
@@ -80,8 +89,10 @@
    # start the asyncio task we submitted from trio
    if inspect.isawaitable(coro):
        task = asyncio.create_task(run_coro(to_trio, coro))

    elif inspect.isasyncgen(coro):
        task = asyncio.create_task(consume_asyncgen(to_trio, coro))

    else:
        raise TypeError(f"No support for invoking {coro}")
@@ -91,59 +102,93 @@
        """Cancel the calling ``trio`` task on error.
        """
        nonlocal aio_err
        try:
            aio_err = task.exception()
        except asyncio.CancelledError as cerr:
            aio_err = cerr

        if aio_err:
            log.exception(f"asyncio task errorred:\n{aio_err}")

        cancel_scope.cancel()
        from_aio._err = aio_err
        from_aio.close()

    task.add_done_callback(cancel_trio)

    # async iterator
    if inspect.isasyncgen(coro) or _treat_as_stream:
        return task, from_aio, to_trio, cancel_scope

        async def stream_results():
            try:
                with cancel_scope:
                    # stream values upward
                    async with from_aio:
                        async for item in from_aio:
                            yield item

                if cancel_scope.cancelled_caught:
                    # always raise from any captured asyncio error
                    if aio_err:
                        raise aio_err

async def run_task(

            except BaseException as err:
                if aio_err is not None:
                    # always raise from any captured asyncio error
                    raise err from aio_err
                else:
                    raise

    func: Callable,
    *,

        return stream_results()

    qsize: int = 2**10,
    _treat_as_stream: bool = False,
    **kwargs,

) -> Any:
    """Run an ``asyncio`` async function or generator in a task, return
    or stream the result back to ``trio``.

    """
    # streaming ``asyncio`` task
    if _treat_as_stream:

        task, from_aio, to_trio, cs = _run_asyncio_task(
            func,
            qsize=2**8,
            **kwargs,
        )

        # naively expect the mem chan api to do the job
        # of handling cross-framework cancellations / errors
        return from_aio

    # simple async func
    try:
        with cancel_scope:
        task, from_aio, to_trio, cs = _run_asyncio_task(
            func,
            qsize=1,
            **kwargs,
        )

        # return single value
        with cs:
            return await from_aio.receive()

        if cancel_scope.cancelled_caught:
            # always raise from any captured asyncio error
            if aio_err:
                raise aio_err

    # Do we need this?
    except BaseException as err:
        if aio_err is not None:
            # always raise from any captured asyncio error
            raise err from aio_err
        else:
    except trio.Cancelled:
        if not task.done():
            task.cancel()
        raise

    finally:
        if from_aio._err:
            raise from_aio._err


# TODO: explicit api for the streaming case where
# we pull from the mem chan in an async generator?
# This ends up looking more like our ``Portal.open_stream_from()``
# NB: code below is untested.

# @asynccontextmanager
# async def stream_from_task(

#     target: Callable[Any],
#     **kwargs,

# ) -> AsyncIterator[Any]:

#     from_aoi = await run_task(target, _treat_as_stream=True, **kwargs)

#     with cancel_scope:
#         # stream values upward
#         async with from_aio:
#             async for item in from_aio:
#                 yield item


def run_as_asyncio_guest(
    trio_main: Callable,
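Taken together, the refactor splits the old ``run_task()`` into ``_run_asyncio_task()`` (queue/channel and task setup) plus a thin ``run_task()`` wrapper that either awaits a single result or, with ``_treat_as_stream=True``, hands back the ``from_aio`` memory channel. A hedged usage sketch from the trio side of an infected actor, assuming the module shown here ships as ``tractor.to_asyncio`` and using a hypothetical target coroutine:

```python
# Hedged usage sketch: ``aio_fetch`` is a hypothetical asyncio-side target,
# and ``trio_side()`` must run inside an ``infect_asyncio`` actor (the helper
# asserts ``current_actor().is_infected_aio()``).
import asyncio

from tractor import to_asyncio


async def aio_fetch(url: str) -> str:
    await asyncio.sleep(0.1)  # pretend network round trip
    return f'payload for {url}'


async def trio_side() -> None:
    # single-shot: schedule the coroutine on the asyncio loop and await its
    # one result back over the memory channel
    result = await to_asyncio.run_task(aio_fetch, url='https://example.com')
    assert result == 'payload for https://example.com'
```

For a streaming target, the same call with ``_treat_as_stream=True`` instead returns the receive channel, which the caller iterates with ``async for``.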