Compare commits
No commits in common. "57edf481e8cf687fb1b0ef7d36d04bf4acd85047" and "673aeef4e9f3fedec61bbca09924e54c08b0da8e" have entirely different histories.
57edf481e8 ... 673aeef4e9
@@ -254,7 +254,6 @@ async def _hijack_stdin_for_child(
         # assert await stream.receive() == 'pdb_unlock'
 
     except (
-        trio.MultiError,
         trio.BrokenResourceError,
         trio.Cancelled,  # by local cancellation
         trio.ClosedResourceError,  # by self._rx_chan
@@ -344,7 +343,6 @@ async def _breakpoint(
 
         except tractor.ContextCancelled:
             log.warning('Root actor cancelled debug lock')
-            raise
 
         finally:
             log.debug(f"Exiting debugger for actor {actor}")
@@ -409,11 +407,7 @@ async def _breakpoint(
             'Root actor attempting to shield-acquire active tty lock'
             f' owned by {_global_actor_in_debug}')
 
-        stats = _debug_lock.statistics()
-        if stats.owner:
-            breakpoint()
-
-        # with trio.CancelScope(shield=True):
+        with trio.CancelScope(shield=True):
             # must shield here to avoid hitting a ``Cancelled`` and
             # a child getting stuck bc we clobbered the tty
             await _debug_lock.acquire()
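Note: the hunk above swaps a lock-statistics/breakpoint block for a shielded acquire of the debug tty lock. As a reference for the pattern only (not tractor's actual module code; the plain trio.Lock below stands in for the module-level _debug_lock), a minimal sketch:

    import trio

    _debug_lock = trio.Lock()  # stand-in for tractor's module-level tty lock

    async def shielded_acquire() -> None:
        # shield the acquire so an in-flight cancellation can't abort it and
        # leave a child actor stuck holding (or waiting on) the tty
        with trio.CancelScope(shield=True):
            await _debug_lock.acquire()

    async def main() -> None:
        await shielded_acquire()
        _debug_lock.release()

    trio.run(main)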
@@ -238,7 +238,7 @@ def run(
 
 
 def run_daemon(
-    enable_modules: List[str],
+    rpc_module_paths: List[str],
     **kwargs
 ) -> None:
     """Spawn daemon actor which will respond to RPC.
@@ -247,9 +247,9 @@ def run_daemon(
     ``tractor.run(trio.sleep(float('inf')))`` such that the first actor spawned
     is meant to run forever responding to RPC requests.
     """
-    kwargs['enable_modules'] = list(enable_modules)
+    kwargs['rpc_module_paths'] = list(rpc_module_paths)
 
-    for path in enable_modules:
+    for path in rpc_module_paths:
         importlib.import_module(path)
 
     return run(partial(trio.sleep, float('inf')), **kwargs)
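The two hunks above amount to the enable_modules / rpc_module_paths keyword rename in run_daemon. A rough usage sketch of each spelling (the module path my_service.rpc is made up for illustration; the call blocks forever serving RPC):

    import tractor

    # spelling on the left side of this compare:
    # tractor.run_daemon(enable_modules=['my_service.rpc'])

    # spelling on the right side of this compare:
    tractor.run_daemon(rpc_module_paths=['my_service.rpc'])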
@@ -331,7 +331,6 @@ async def new_proc(
                 bind_addr=bind_addr,
                 parent_addr=parent_addr,
                 _runtime_vars=_runtime_vars,
-                infect_asyncio=infect_asyncio,
                 task_status=task_status,
             )
 
@@ -347,7 +346,6 @@ async def mp_new_proc(
     parent_addr: Tuple[str, int],
     _runtime_vars: Dict[str, Any],  # serialized and sent to _child
     *,
-    infect_asyncio: bool = False,
     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
 
 ) -> None:
@@ -1,7 +1,6 @@
-'''
+"""
 Infection apis for ``asyncio`` loops running ``trio`` using guest mode.
-
-'''
+"""
 import asyncio
 import inspect
 from typing import (
@@ -44,16 +43,15 @@ async def consume_asyncgen(
         to_trio.send_nowait(item)
 
 
-def _run_asyncio_task(
+async def run_task(
     func: Callable,
     *,
-    qsize: int = 1,
+    qsize: int = 2**10,
     _treat_as_stream: bool = False,
     **kwargs,
 ) -> Any:
     """Run an ``asyncio`` async function or generator in a task, return
     or stream the result back to ``trio``.
 
     """
     assert current_actor().is_infected_aio()
 
@@ -61,27 +59,20 @@ def _run_asyncio_task(
     from_trio = asyncio.Queue(qsize)  # type: ignore
     to_trio, from_aio = trio.open_memory_channel(qsize)  # type: ignore
 
-    from_aio._err = None
 
     args = tuple(inspect.getfullargspec(func).args)
 
     if getattr(func, '_tractor_steam_function', None):
         # the assumption is that the target async routine accepts the
         # send channel then it intends to yield more then one return
         # value otherwise it would just return ;P
-        # _treat_as_stream = True
-        assert qsize > 1
+        _treat_as_stream = True
 
     # allow target func to accept/stream results manually by name
     if 'to_trio' in args:
         kwargs['to_trio'] = to_trio
 
     if 'from_trio' in args:
         kwargs['from_trio'] = from_trio
 
-    # if 'from_aio' in args:
-    #     kwargs['from_aio'] = from_aio
 
     coro = func(**kwargs)
 
     cancel_scope = trio.CancelScope()
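For context on the setup in that hunk: values headed trio -> asyncio go through an asyncio.Queue, while values headed asyncio -> trio go through a trio memory channel. A standalone sketch of just that channel pairing (the helper name is made up):

    import asyncio
    import trio

    def make_channel_pair(qsize: int = 1):
        # trio -> asyncio direction: a plain asyncio.Queue
        from_trio = asyncio.Queue(qsize)
        # asyncio -> trio direction: a trio memory channel pair
        to_trio, from_aio = trio.open_memory_channel(qsize)
        return from_trio, to_trio, from_aio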
@@ -89,10 +80,8 @@ def _run_asyncio_task(
     # start the asyncio task we submitted from trio
     if inspect.isawaitable(coro):
         task = asyncio.create_task(run_coro(to_trio, coro))
 
     elif inspect.isasyncgen(coro):
         task = asyncio.create_task(consume_asyncgen(to_trio, coro))
 
     else:
         raise TypeError(f"No support for invoking {coro}")
 
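The dispatch above distinguishes a plain coroutine from an async generator before scheduling it on the asyncio loop. A self-contained sketch of the same inspect-based dispatch (pure asyncio, none of the trio/tractor plumbing):

    import asyncio
    import inspect

    async def _consume(agen, sink: list) -> None:
        # drain an async generator, forwarding each item to a sink
        async for item in agen:
            sink.append(item)

    async def demo() -> None:
        async def one() -> int:
            return 1

        async def many():
            for i in range(3):
                yield i

        results: list = []
        for obj in (one(), many()):
            if inspect.isawaitable(obj):
                results.append(await obj)
            elif inspect.isasyncgen(obj):
                await _consume(obj, results)
            else:
                raise TypeError(f"No support for invoking {obj}")
        print(results)  # [1, 0, 1, 2]

    asyncio.run(demo())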
@@ -102,93 +91,59 @@ def _run_asyncio_task(
         """Cancel the calling ``trio`` task on error.
         """
         nonlocal aio_err
-        try:
-            aio_err = task.exception()
-        except asyncio.CancelledError as cerr:
-            aio_err = cerr
+        aio_err = task.exception()
 
         if aio_err:
             log.exception(f"asyncio task errorred:\n{aio_err}")
 
         cancel_scope.cancel()
-        from_aio._err = aio_err
-        from_aio.close()
 
     task.add_done_callback(cancel_trio)
 
-    return task, from_aio, to_trio, cancel_scope
-
-
-async def run_task(
-    func: Callable,
-    *,
-    qsize: int = 2**10,
-    _treat_as_stream: bool = False,
-    **kwargs,
-) -> Any:
-    """Run an ``asyncio`` async function or generator in a task, return
-    or stream the result back to ``trio``.
-
-    """
-    # streaming ``asyncio`` task
-    if _treat_as_stream:
-
-        task, from_aio, to_trio, cs = _run_asyncio_task(
-            func,
-            qsize=2**8,
-            **kwargs,
-        )
-
-        # naively expect the mem chan api to do the job
-        # of handling cross-framework cancellations / errors
-        return from_aio
+    # async iterator
+    if inspect.isasyncgen(coro) or _treat_as_stream:
+
+        async def stream_results():
+            try:
+                with cancel_scope:
+                    # stream values upward
+                    async with from_aio:
+                        async for item in from_aio:
+                            yield item
+
+                    if cancel_scope.cancelled_caught:
+                        # always raise from any captured asyncio error
+                        if aio_err:
+                            raise aio_err
+
+            except BaseException as err:
+                if aio_err is not None:
+                    # always raise from any captured asyncio error
+                    raise err from aio_err
+                else:
+                    raise
+
+        return stream_results()
 
     # simple async func
     try:
-        task, from_aio, to_trio, cs = _run_asyncio_task(
-            func,
-            qsize=1,
-            **kwargs,
-        )
-
-        # return single value
-        with cs:
+        with cancel_scope:
+            # return single value
             return await from_aio.receive()
 
-    except trio.Cancelled:
-        if not task.done():
-            task.cancel()
-
-        raise
-
-    finally:
-        if from_aio._err:
-            raise from_aio._err
-
-
-# TODO: explicit api for the streaming case where
-# we pull from the mem chan in an async generator?
-# This ends up looking more like our ``Portal.open_stream_from()``
-# NB: code below is untested.
-
-# @asynccontextmanager
-# async def stream_from_task(
-
-#     target: Callable[Any],
-#     **kwargs,
-
-# ) -> AsyncIterator[Any]:
-
-#     from_aoi = await run_task(target, _treat_as_stream=True, **kwargs)
-
-#     with cancel_scope:
-#         # stream values upward
-#         async with from_aio:
-#             async for item in from_aio:
-#                 yield item
+        if cancel_scope.cancelled_caught:
+            # always raise from any captured asyncio error
+            if aio_err:
+                raise aio_err
+
+    # Do we need this?
+    except BaseException as err:
+        if aio_err is not None:
+            # always raise from any captured asyncio error
+            raise err from aio_err
+        else:
+            raise
 
 
 def run_as_asyncio_guest(
     trio_main: Callable,
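The compare cuts off inside run_as_asyncio_guest(, the entry point that boots trio as a guest on top of asyncio's event loop. For readers new to the mechanism, a minimal standalone example of trio guest mode over asyncio (plain trio/asyncio only, no tractor involved):

    import asyncio
    import trio

    async def trio_main() -> str:
        # ordinary trio code, running as a guest on asyncio's event loop
        await trio.sleep(0.1)
        return 'trio ran as an asyncio guest'

    async def aio_main() -> None:
        loop = asyncio.get_running_loop()
        trio_done = loop.create_future()

        trio.lowlevel.start_guest_run(
            trio_main,
            run_sync_soon_threadsafe=loop.call_soon_threadsafe,
            done_callback=trio_done.set_result,
        )
        # unwrap() re-raises if trio_main errored, else returns its value
        print((await trio_done).unwrap())

    asyncio.run(aio_main())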