Compare commits

...

16 Commits

Author SHA1 Message Date
Tyler Goodlet 22068e95fa Toss in another test with daemon subactors 2020-10-15 23:15:20 -04:00
Tyler Goodlet 46cc0540ef Add explanation to module load error 2020-10-15 22:49:12 -04:00
Tyler Goodlet d191d03179 Set _is_root runtime var in _main() 2020-10-15 22:47:11 -04:00
Tyler Goodlet 285dea04ea Raise from asyncio error; fixes mypy 2020-10-14 14:39:56 -04:00
Tyler Goodlet 8f15f438c7 Tweak log msg 2020-10-14 14:39:56 -04:00
Tyler Goodlet 63a5036e6e Log error 2020-10-14 14:39:56 -04:00
Tyler Goodlet 9db4324796 Drop unneeded parent cs cancel 2020-10-14 14:39:56 -04:00
Tyler Goodlet 47074209a1 Support asyncio actors with the trio spawner backend 2020-10-14 14:39:56 -04:00
Tyler Goodlet fa455f9c24 Revert removal of `infect_asyncio` in nursery start methods 2020-10-14 14:39:56 -04:00
Tyler Goodlet 0f65f9289d Attempt to make mypy happy.. 2020-10-14 14:39:56 -04:00
Tyler Goodlet 13bf19ecfe Add an obnoxious error message on internal failures 2020-10-14 14:39:56 -04:00
Tyler Goodlet fe611a2bc1 Allow marking `asyncio` funcs declaring `to_trio` channel 2020-10-14 14:39:56 -04:00
Tyler Goodlet 544443e9d5 Wow, fix all the broken async func invoking code..
Clearly this wasn't developed against a task that spawned just an async
func in `asyncio`.. Fix all that and remove a bunch of unnecessary func
layers. Add provisional support for the target receiving the `to_trio`
and `from_trio` channels and for the @tractor.stream marker.
2020-10-14 14:39:56 -04:00
Tyler Goodlet a5899d1246 Drop entrypoints from `Actor` 2020-10-14 14:39:56 -04:00
Tyler Goodlet 9fbded6439 Move asyncio guest mode entrypoint to `to_asyncio`
The function is useful if you want to run the "main process" under
`asyncio`. Until `trio` core wraps this better we'll keep our own copy
in the interim (there's a new "inside-out-guest" mode almost on
mainline so hang tight).
2020-10-14 14:39:56 -04:00
Tyler Goodlet cd27f527bb Propagate any spawned `asyncio` task error upwards
This should mostly maintain top level SC principles for any task spawned
using `tractor.to_asyncio.run()`. When the `asyncio` task completes make
sure to cancel the pertaining `trio` cancel scope and raise any error
that may have resulted.

Resolves #120
2020-10-14 14:39:56 -04:00
11 changed files with 315 additions and 15 deletions
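
The two `to_asyncio` commits above (moving the guest mode entrypoint and propagating spawned task errors) boil down to running a `trio` main function on top of a stock `asyncio` loop. Below is a minimal sketch of what that entrypoint enables, assuming the `run_as_asyncio_guest()` function added in this changeset and a `trio` release with guest mode support; the `trio_main` body is illustrative only:

    import trio
    from tractor.to_asyncio import run_as_asyncio_guest

    async def trio_main():
        # ordinary trio code; any error raised here is delivered back
        # through the outcome passed to the guest-run's done callback
        await trio.sleep(0.1)
        print("trio ran as a guest of the asyncio loop")

    if __name__ == '__main__':
        # drives asyncio.run() internally and starts trio in guest mode
        run_as_asyncio_guest(trio_main)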

View File

@@ -0,0 +1,31 @@
import tractor
import trio


async def breakpoint_forever():
    "Indefinitely re-enter debugger in child actor."
    while True:
        yield 'yo'
        await tractor.breakpoint()


async def name_error():
    "Raise a ``NameError``"
    getattr(doggypants)


async def main():
    """Test breakpoint in a streaming actor.
    """
    async with tractor.open_nursery() as n:

        p0 = await n.start_actor('bp_forever', rpc_module_paths=[__name__])
        p1 = await n.start_actor('name_error', rpc_module_paths=[__name__])

        # retreive results
        stream = await p0.run(__name__, 'breakpoint_forever')
        await p1.run(__name__, 'name_error')


if __name__ == '__main__':
    tractor.run(main, debug_mode=True, loglevel='error')

View File

@@ -282,6 +282,34 @@ def test_multi_subactors(spawn):
    assert 'bdb.BdbQuit' in before

+def test_multi_daemon_subactors(spawn):
+    """Multiple daemon subactors, both erroring and breakpointing within a
+    stream.
+    """
+    child = spawn('multi_daemon_subactors')
+
+    child.expect(r"\(Pdb\+\+\)")
+
+    before = str(child.before.decode())
+    assert "Attaching pdb to actor: ('bp_forever'" in before
+
+    child.sendline('c')
+
+    # first name_error failure
+    child.expect(r"\(Pdb\+\+\)")
+    before = str(child.before.decode())
+    assert "NameError" in before
+
+    child.sendline('c')
+
+    child.expect(r"\(Pdb\+\+\)")
+    before = str(child.before.decode())
+    assert "tractor._exceptions.RemoteActorError: ('name_error'" in before
+
+    child.sendline('c')
+    child.expect(pexpect.EOF)
+
+
def test_multi_subactors_root_errors(spawn):
    """Multiple subactors, both erroring and breakpointing as well as
    a nested subactor erroring.

View File

@@ -20,8 +20,9 @@ from ._state import current_actor
from . import _state
from ._exceptions import RemoteActorError, ModuleNotExposed
from ._debug import breakpoint, post_mortem
-from . import msg
from . import _spawn
+from . import msg
+from . import to_asyncio


__all__ = [
@@ -39,6 +40,7 @@ __all__ = [
    'RemoteActorError',
    'ModuleNotExposed',
    'msg'
+    'to_asyncio'
]
@@ -60,16 +62,23 @@ async def _main(
    """
    logger = log.get_logger('tractor')

+    # mark top most level process as root actor
+    _state._runtime_vars['_is_root'] = True
+
    if start_method is not None:
        _spawn.try_set_start_method(start_method)

    if debug_mode and _spawn._spawn_method == 'trio':
        _state._runtime_vars['_debug_mode'] = True

        # expose internal debug module to every actor allowing
        # for use of ``await tractor.breakpoint()``
        kwargs.setdefault('rpc_module_paths', []).append('tractor._debug')

    elif debug_mode:
-        raise RuntimeError("Debug mode is only supported for the `trio` backend!")
+        raise RuntimeError(
+            "Debug mode is only supported for the `trio` backend!"
+        )

    main = partial(async_fn, *args)
@@ -103,7 +112,10 @@ async def _main(
    else:
        # start this local actor as the arbiter
        actor = Arbiter(
-            name or 'arbiter', arbiter_addr=arbiter_addr, **kwargs)
+            name or 'arbiter',
+            arbiter_addr=arbiter_addr,
+            **kwargs
+        )

    # ``Actor._async_main()`` creates an internal nursery if one is not
    # provided and thus blocks here until it's main task completes.
@@ -134,9 +146,6 @@ def run(
    This is tractor's main entry and the start point for any async actor.
    """
-    # mark top most level process as root actor
-    _state._runtime_vars['_is_root'] = True
-
    return trio.run(
        partial(
            # our entry

View File

@@ -182,6 +182,9 @@ class Actor:
    _parent_main_data: Dict[str, str]
    _parent_chan_cs: Optional[trio.CancelScope] = None

+    # if started on ``asycio`` running ``trio`` in guest mode
+    _infected_aio: bool = False
+
    def __init__(
        self,
        name: str,
@@ -301,7 +304,18 @@ class Actor:
        try:
            return getattr(self._mods[ns], funcname)
        except KeyError as err:
-            raise ModuleNotExposed(*err.args)
+            mne = ModuleNotExposed(*err.args)
+
+            if ns == '__main__':
+                msg = (
+                    "\n\nMake sure you exposed the current module using:\n\n"
+                    "ActorNursery.start_actor(<name>, rpc_module_paths="
+                    "[__name__])"
+                )
+
+                mne.msg += msg
+
+            raise mne

    async def _stream_handler(
        self,
@@ -591,7 +605,7 @@ class Actor:
        # Receive runtime state from our parent
        parent_data = await chan.recv()
        log.debug(
-            "Recieved state from parent:\n"
+            "Received state from parent:\n"
            f"{parent_data}"
        )
        accept_addr = (
@@ -599,6 +613,7 @@ class Actor:
            parent_data.pop('bind_port'),
        )
        rvs = parent_data.pop('_runtime_vars')
+        log.debug(f"Runtime vars are: {rvs}")
        rvs['_is_root'] = False
        _state._runtime_vars.update(rvs)
@@ -976,6 +991,9 @@ class Actor:
        log.info(f"Handshake with actor {uid}@{chan.raddr} complete")
        return uid

+    def is_infected_aio(self) -> bool:
+        return self._infected_aio
+

class Arbiter(Actor):
    """A special actor who knows all the other actors and always has

View File

@@ -19,12 +19,15 @@ def parse_ipaddr(arg):
    return (str(host), int(port))

+from ._entry import _trio_main
+

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--uid", type=parse_uid)
    parser.add_argument("--loglevel", type=str)
    parser.add_argument("--parent_addr", type=parse_ipaddr)
+    parser.add_argument("--asyncio", action='store_true')
    args = parser.parse_args()

    subactor = Actor(
@@ -36,5 +39,6 @@ if __name__ == "__main__":
    _trio_main(
        subactor,
-        parent_addr=args.parent_addr
+        parent_addr=args.parent_addr,
+        infect_asyncio=args.asyncio,
    )

View File

@@ -42,6 +42,7 @@ async def get_root(
    **kwargs,
) -> typing.AsyncGenerator[Union[Portal, LocalPortal], None]:
    host, port = _runtime_vars['_root_mailbox']
+    assert host is not None
    async with _connect_chan(host, port) as chan:
        async with open_portal(chan, **kwargs) as portal:
            yield portal

View File

@@ -10,6 +10,7 @@ import trio  # type: ignore
from ._actor import Actor
from .log import get_console_log, get_logger
from . import _state
+from .to_asyncio import run_as_asyncio_guest


log = get_logger(__name__)
@@ -21,6 +22,7 @@ def _mp_main(
    forkserver_info: Tuple[Any, Any, Any, Any, Any],
    start_method: str,
    parent_addr: Tuple[str, int] = None,
+    infect_asyncio: bool = False,
) -> None:
    """The routine called *after fork* which invokes a fresh ``trio.run``
    """
@@ -46,6 +48,10 @@ def _mp_main(
            parent_addr=parent_addr
        )
    try:
-        trio.run(trio_main)
+        if infect_asyncio:
+            actor._infected_aio = True
+            run_as_asyncio_guest(trio_main)
+        else:
+            trio.run(trio_main)
    except KeyboardInterrupt:
        pass  # handle it the same way trio does?
@@ -54,7 +60,9 @@ def _mp_main(
def _trio_main(
    actor: 'Actor',
-    parent_addr: Tuple[str, int] = None
+    *,
+    parent_addr: Tuple[str, int] = None,
+    infect_asyncio: bool = False,
) -> None:
    """Entry point for a `trio_run_in_process` subactor.
    """
@@ -65,6 +73,8 @@ def _trio_main(
    # TODO: make a global func to set this or is it too hacky?
    # os.environ['PYTHONBREAKPOINT'] = 'tractor._debug.breakpoint'

+    log.info(f"Started new trio process for {actor.uid}")
+
    if actor.loglevel is not None:
        log.info(
            f"Setting loglevel for {actor.uid} to {actor.loglevel}")
@@ -82,6 +92,10 @@ def _trio_main(
    )

    try:
-        trio.run(trio_main)
+        if infect_asyncio:
+            actor._infected_aio = True
+            run_as_asyncio_guest(trio_main)
+        else:
+            trio.run(trio_main)
    except KeyboardInterrupt:
        log.warning(f"Actor {actor.uid} received KBI")

View File

@@ -157,6 +157,7 @@ async def cancel_on_completion(
async def spawn_subactor(
    subactor: 'Actor',
    parent_addr: Tuple[str, int],
+    infect_asyncio: bool,
):
    spawn_cmd = [
        sys.executable,
@@ -181,6 +182,10 @@ async def spawn_subactor(
            subactor.loglevel
        ]

+    # Tell child to run in guest mode on top of ``asyncio`` loop
+    if infect_asyncio:
+        spawn_cmd.append("--asyncio")
+
    proc = await trio.open_process(spawn_cmd)
    try:
        yield proc
@@ -217,6 +222,7 @@ async def new_proc(
    _runtime_vars: Dict[str, Any],  # serialized and sent to _child
    *,
    use_trio_run_in_process: bool = False,
+    infect_asyncio: bool = False,
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
) -> None:
    """Create a new ``multiprocessing.Process`` using the
@@ -232,6 +238,7 @@ async def new_proc(
        async with spawn_subactor(
            subactor,
            parent_addr,
+            infect_asyncio=infect_asyncio
        ) as proc:
            log.info(f"Started {proc}")
@@ -321,6 +328,7 @@ async def new_proc(
                fs_info,
                start_method,
                parent_addr,
+                infect_asyncio,
            ),
            # daemon=True,
            name=name,

View File

@@ -41,9 +41,11 @@ def stream(func):
    """
    func._tractor_stream_function = True
    sig = inspect.signature(func)
-    if 'ctx' not in sig.parameters:
+    params = sig.parameters
+    if 'ctx' not in params and 'to_trio' not in params:
        raise TypeError(
            "The first argument to the stream function "
-            f"{func.__name__} must be `ctx: tractor.Context`"
+            f"{func.__name__} must be `ctx: tractor.Context` "
+            "(Or ``to_trio`` if using ``asyncio`` in guest mode)."
        )
    return func
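
With the relaxed check above, an `asyncio`-side function can be marked with `@tractor.stream` by declaring `to_trio` as its first parameter instead of `ctx`. A hedged sketch of such a function follows; the body is illustrative, and how it is ultimately invoked through `to_asyncio.run_task()` is still provisional per the commits above:

    import asyncio
    import tractor

    @tractor.stream
    async def aio_counter(to_trio, limit: int = 3):
        # runs on the asyncio side of an "infected" actor; values are
        # pushed to the trio caller over the memory-send channel bound
        # to ``to_trio``
        for i in range(limit):
            to_trio.send_nowait(i)
            await asyncio.sleep(0)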

View File

@@ -56,6 +56,7 @@ class ActorNursery:
        rpc_module_paths: List[str] = None,
        loglevel: str = None,  # set log level per subactor
        nursery: trio.Nursery = None,
+        infect_asyncio: bool = False,
    ) -> Portal:
        loglevel = loglevel or self._actor.loglevel or get_loglevel()
@@ -89,6 +90,7 @@ class ActorNursery:
                bind_addr,
                parent_addr,
                _rtv,  # run time vars
+                infect_asyncio=infect_asyncio,
            )
        )
@@ -101,6 +103,7 @@ class ActorNursery:
        rpc_module_paths: Optional[List[str]] = None,
        statespace: Dict[str, Any] = None,
        loglevel: str = None,  # set log level per subactor
+        infect_asyncio: bool = False,
        **kwargs,  # explicit args to ``fn``
    ) -> Portal:
        """Spawn a new actor, run a lone task, then terminate the actor and
@@ -119,6 +122,7 @@ class ActorNursery:
            loglevel=loglevel,
            # use the run_in_actor nursery
            nursery=self._ria_nursery,
+            infect_asyncio=infect_asyncio,
        )

        # this marks the actor to be cancelled after its portal result
        # is retreived, see logic in `open_nursery()` below.
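
The new `infect_asyncio` flag threads from the nursery methods above down through `_spawn.new_proc()` and the child entrypoints shown earlier. A minimal sketch of spawning an infected subactor, assuming the flag as wired in this changeset (the actor name is illustrative):

    import tractor

    async def main():
        async with tractor.open_nursery() as n:
            portal = await n.start_actor(
                'aio_actor',
                rpc_module_paths=[__name__],
                # child process runs trio in guest mode atop an asyncio loop
                infect_asyncio=True,
            )
            # ... invoke functions in the infected actor via portal.run()
            await portal.cancel_actor()

    if __name__ == '__main__':
        tractor.run(main)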

View File

@@ -0,0 +1,181 @@
"""
Infection apis for ``asyncio`` loops running ``trio`` using guest mode.
"""
import asyncio
import inspect
from typing import (
    Any,
    Callable,
    AsyncIterator,
    Awaitable,
    Union,
)

import trio

from .log import get_logger
from ._state import current_actor

log = get_logger(__name__)


__all__ = ['run_task', 'run_as_asyncio_guest']


async def run_coro(
    to_trio: trio.MemorySendChannel,
    coro: Awaitable,
) -> None:
    """Await ``coro`` and relay result back to ``trio``.
    """
    to_trio.send_nowait(await coro)


async def consume_asyncgen(
    to_trio: trio.MemorySendChannel,
    coro: AsyncIterator,
) -> None:
    """Stream async generator results back to ``trio``.

    ``from_trio`` might eventually be used here for
    bidirectional streaming.
    """
    async for item in coro:
        to_trio.send_nowait(item)


async def run_task(
    func: Callable,
    *,
    qsize: int = 2**10,
    _treat_as_stream: bool = False,
    **kwargs,
) -> Any:
    """Run an ``asyncio`` async function or generator in a task, return
    or stream the result back to ``trio``.
    """
    assert current_actor().is_infected_aio()

    # ITC (inter task comms)
    from_trio = asyncio.Queue(qsize)  # type: ignore
    to_trio, from_aio = trio.open_memory_channel(qsize)  # type: ignore

    args = tuple(inspect.getfullargspec(func).args)

    if getattr(func, '_tractor_steam_function', None):
        # the assumption is that the target async routine accepts the
        # send channel then it intends to yield more then one return
        # value otherwise it would just return ;P
        _treat_as_stream = True

    # allow target func to accept/stream results manually by name
    if 'to_trio' in args:
        kwargs['to_trio'] = to_trio
    if 'from_trio' in args:
        kwargs['from_trio'] = from_trio

    coro = func(**kwargs)

    cancel_scope = trio.CancelScope()

    # start the asyncio task we submitted from trio
    if inspect.isawaitable(coro):
        task = asyncio.create_task(run_coro(to_trio, coro))

    elif inspect.isasyncgen(coro):
        task = asyncio.create_task(consume_asyncgen(to_trio, coro))

    else:
        raise TypeError(f"No support for invoking {coro}")

    aio_err = None

    def cancel_trio(task):
        """Cancel the calling ``trio`` task on error.
        """
        nonlocal aio_err
        aio_err = task.exception()

        if aio_err:
            log.exception(f"asyncio task errorred:\n{aio_err}")

        cancel_scope.cancel()

    task.add_done_callback(cancel_trio)

    # async iterator
    if inspect.isasyncgen(coro) or _treat_as_stream:

        async def stream_results():
            try:
                with cancel_scope:
                    # stream values upward
                    async with from_aio:
                        async for item in from_aio:
                            yield item
            except BaseException as err:
                if aio_err is not None:
                    # always raise from any captured asyncio error
                    raise err from aio_err
                else:
                    raise

        return stream_results()

    # simple async func
    try:
        with cancel_scope:
            # return single value
            return await from_aio.receive()

    # Do we need this?
    except BaseException as err:
        if aio_err is not None:
            # always raise from any captured asyncio error
            raise err from aio_err
        else:
            raise


def run_as_asyncio_guest(
    trio_main: Callable,
) -> None:
    """Entry for an "infected ``asyncio`` actor".

    Uh, oh. :o

    It looks like your event loop has caught a case of the ``trio``s.

    :()

    Don't worry, we've heard you'll barely notice. You might hallucinate
    a few more propagating errors and feel like your digestion has
    slowed but if anything get's too bad your parents will know about
    it.

    :)
    """
    async def aio_main(trio_main):
        loop = asyncio.get_running_loop()

        trio_done_fut = asyncio.Future()

        def trio_done_callback(main_outcome):
            log.info(f"trio_main finished: {main_outcome!r}")
            trio_done_fut.set_result(main_outcome)

        # start the infection: run trio on the asyncio loop in "guest mode"
        log.info(f"Infecting asyncio process with {trio_main}")
        trio.lowlevel.start_guest_run(
            trio_main,
            run_sync_soon_threadsafe=loop.call_soon_threadsafe,
            done_callback=trio_done_callback,
        )

        (await trio_done_fut).unwrap()

    # might as well if it's installed.
    try:
        import uvloop
        loop = uvloop.new_event_loop()
        asyncio.set_event_loop(loop)
    except ImportError:
        pass

    asyncio.run(aio_main(trio_main))
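
From `trio` code inside an infected actor, `run_task()` above submits a plain `asyncio` coroutine and returns (or streams) its result; an error raised on the `asyncio` side cancels the waiting `trio` scope and is chained onto the error raised to the caller, per the error-propagation commit. A hedged usage sketch; the `fetch` coroutine is illustrative only:

    import asyncio
    from tractor import to_asyncio

    async def fetch(url: str) -> str:
        # a plain asyncio coroutine, executed on the asyncio side
        await asyncio.sleep(0.1)
        return f"got {url}"

    async def trio_side():
        # only valid inside an actor spawned with infect_asyncio=True
        result = await to_asyncio.run_task(fetch, url='https://example.com')
        assert result == "got https://example.com"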