Compare commits

..

10 Commits

Author SHA1 Message Date
Tyler Goodlet 22068e95fa Toss in another tests with daemon subactors 2020-10-15 23:15:20 -04:00
Tyler Goodlet 46cc0540ef Add explanation to module load error 2020-10-15 22:49:12 -04:00
Tyler Goodlet d191d03179 Set _is_root runtime var in _main() 2020-10-15 22:47:11 -04:00
Tyler Goodlet 285dea04ea Raise from asyncio error; fixes mypy 2020-10-14 14:39:56 -04:00
Tyler Goodlet 8f15f438c7 Tweak log msg 2020-10-14 14:39:56 -04:00
Tyler Goodlet 63a5036e6e Log error 2020-10-14 14:39:56 -04:00
Tyler Goodlet 9db4324796 Drop uneeded parent cs cancel 2020-10-14 14:39:56 -04:00
Tyler Goodlet 47074209a1 Support asyncio actors with the trio spawner backend 2020-10-14 14:39:56 -04:00
Tyler Goodlet fa455f9c24 Revert removal of `infect_asyncio` in nursery start methods 2020-10-14 14:39:56 -04:00
Tyler Goodlet 0f65f9289d Attempt to make mypy happy.. 2020-10-14 14:39:56 -04:00
10 changed files with 180 additions and 48 deletions

View File

@@ -0,0 +1,31 @@
+import tractor
+import trio
+
+
+async def breakpoint_forever():
+    "Indefinitely re-enter debugger in child actor."
+    while True:
+        yield 'yo'
+        await tractor.breakpoint()
+
+
+async def name_error():
+    "Raise a ``NameError``"
+    getattr(doggypants)
+
+
+async def main():
+    """Test breakpoint in a streaming actor.
+    """
+    async with tractor.open_nursery() as n:
+
+        p0 = await n.start_actor('bp_forever', rpc_module_paths=[__name__])
+        p1 = await n.start_actor('name_error', rpc_module_paths=[__name__])
+
+        # retreive results
+        stream = await p0.run(__name__, 'breakpoint_forever')
+        await p1.run(__name__, 'name_error')
+
+
+if __name__ == '__main__':
+    tractor.run(main, debug_mode=True, loglevel='error')

View File

@@ -282,6 +282,34 @@ def test_multi_subactors(spawn):
     assert 'bdb.BdbQuit' in before


+def test_multi_daemon_subactors(spawn):
+    """Multiple daemon subactors, both erroring and breakpointing within a
+    stream.
+    """
+    child = spawn('multi_daemon_subactors')
+
+    child.expect(r"\(Pdb\+\+\)")
+
+    before = str(child.before.decode())
+    assert "Attaching pdb to actor: ('bp_forever'" in before
+
+    child.sendline('c')
+
+    # first name_error failure
+    child.expect(r"\(Pdb\+\+\)")
+    before = str(child.before.decode())
+    assert "NameError" in before
+
+    child.sendline('c')
+
+    child.expect(r"\(Pdb\+\+\)")
+    before = str(child.before.decode())
+    assert "tractor._exceptions.RemoteActorError: ('name_error'" in before
+
+    child.sendline('c')
+    child.expect(pexpect.EOF)
+
+
 def test_multi_subactors_root_errors(spawn):
     """Multiple subactors, both erroring and breakpointing as well as
     a nested subactor erroring.
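Note: the ``spawn`` fixture used above comes from the test suite's conftest and drives the matching example script under ``pexpect``. A minimal sketch of such a fixture, assuming the examples live under an ``examples/debugging/`` directory (the path and helper names are illustrative, not the project's actual conftest):

import os
import sys

import pexpect
import pytest


@pytest.fixture
def spawn():
    """Return a factory which launches a debugging example script
    under ``pexpect`` so tests can drive its pdb prompts.
    """
    def _spawn(example_name: str) -> pexpect.spawn:
        # assumed layout: examples/debugging/<example_name>.py
        script = os.path.join('examples', 'debugging', f'{example_name}.py')
        return pexpect.spawn(f'{sys.executable} {script}', timeout=3)

    return _spawn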

View File

@@ -20,8 +20,9 @@ from ._state import current_actor
 from . import _state
 from ._exceptions import RemoteActorError, ModuleNotExposed
 from ._debug import breakpoint, post_mortem
-from . import msg
 from . import _spawn
+from . import msg
+from . import to_asyncio


 __all__ = [
@@ -39,6 +40,7 @@ __all__ = [
     'RemoteActorError',
     'ModuleNotExposed',
     'msg'
+    'to_asyncio'
 ]
@@ -60,16 +62,23 @@ async def _main(
     """
     logger = log.get_logger('tractor')

+    # mark top most level process as root actor
+    _state._runtime_vars['_is_root'] = True
+
     if start_method is not None:
         _spawn.try_set_start_method(start_method)

     if debug_mode and _spawn._spawn_method == 'trio':
         _state._runtime_vars['_debug_mode'] = True
+
         # expose internal debug module to every actor allowing
         # for use of ``await tractor.breakpoint()``
         kwargs.setdefault('rpc_module_paths', []).append('tractor._debug')
+
     elif debug_mode:
-        raise RuntimeError("Debug mode is only supported for the `trio` backend!")
+        raise RuntimeError(
+            "Debug mode is only supported for the `trio` backend!"
+        )

     main = partial(async_fn, *args)
@@ -103,7 +112,10 @@ async def _main(
     else:
         # start this local actor as the arbiter
         actor = Arbiter(
-            name or 'arbiter', arbiter_addr=arbiter_addr, **kwargs)
+            name or 'arbiter',
+            arbiter_addr=arbiter_addr,
+            **kwargs
+        )

     # ``Actor._async_main()`` creates an internal nursery if one is not
     # provided and thus blocks here until it's main task completes.
@@ -134,9 +146,6 @@ def run(
     This is tractor's main entry and the start point for any async actor.
     """

-    # mark top most level process as root actor
-    _state._runtime_vars['_is_root'] = True
-
     return trio.run(
         partial(
             # our entry
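Note that setting ``_is_root`` now happens inside ``_main()`` (i.e. inside the ``trio`` run) rather than in ``run()``; subactors later receive the serialized runtime vars from their parent and flip the flag to ``False`` (see the actor changes below). A rough sketch of how such a flag can be consulted, using a hypothetical helper name:

from tractor import _state


def is_root_process() -> bool:
    # hypothetical convenience helper: ``_main()`` marks the top-most
    # process as the root actor; subactors get ``_is_root = False``
    # from the ``_runtime_vars`` dict their parent sends them.
    return bool(_state._runtime_vars.get('_is_root'))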

View File

@@ -304,7 +304,18 @@ class Actor:
         try:
             return getattr(self._mods[ns], funcname)
         except KeyError as err:
-            raise ModuleNotExposed(*err.args)
+            mne = ModuleNotExposed(*err.args)
+
+            if ns == '__main__':
+                msg = (
+                    "\n\nMake sure you exposed the current module using:\n\n"
+                    "ActorNursery.start_actor(<name>, rpc_module_paths="
+                    "[__name__])"
+                )
+
+                mne.msg += msg
+
+            raise mne

     async def _stream_handler(
         self,
@@ -594,7 +605,7 @@ class Actor:
         # Receive runtime state from our parent
         parent_data = await chan.recv()
         log.debug(
-            "Recieved state from parent:\n"
+            "Received state from parent:\n"
             f"{parent_data}"
         )
         accept_addr = (
@@ -602,6 +613,7 @@
             parent_data.pop('bind_port'),
         )
         rvs = parent_data.pop('_runtime_vars')
+        log.debug(f"Runtime vars are: {rvs}")
         rvs['_is_root'] = False
         _state._runtime_vars.update(rvs)
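The extended ``ModuleNotExposed`` message above targets a common mistake: invoking a function defined in ``__main__`` without exposing that module to the subactor. A small sketch of the pattern the hint suggests (actor and function names are illustrative):

import tractor


async def echo(msg):
    return msg


async def main():
    async with tractor.open_nursery() as n:
        # omitting ``rpc_module_paths=[__name__]`` here would make the
        # child raise ModuleNotExposed when asked to run ``__main__.echo``
        portal = await n.start_actor(
            'echoer',
            rpc_module_paths=[__name__],
        )
        assert await portal.run(__name__, 'echo', msg='hi') == 'hi'
        await portal.cancel_actor()


if __name__ == '__main__':
    tractor.run(main)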

View File

@@ -19,12 +19,15 @@ def parse_ipaddr(arg):
     return (str(host), int(port))

+from ._entry import _trio_main
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
     parser.add_argument("--uid", type=parse_uid)
     parser.add_argument("--loglevel", type=str)
     parser.add_argument("--parent_addr", type=parse_ipaddr)
+    parser.add_argument("--asyncio", action='store_true')
     args = parser.parse_args()

     subactor = Actor(
@@ -36,5 +39,6 @@ if __name__ == "__main__":
     _trio_main(
         subactor,
-        parent_addr=args.parent_addr
+        parent_addr=args.parent_addr,
+        infect_asyncio=args.asyncio,
     )

View File

@@ -42,6 +42,7 @@ async def get_root(
     **kwargs,
 ) -> typing.AsyncGenerator[Union[Portal, LocalPortal], None]:
     host, port = _runtime_vars['_root_mailbox']
+    assert host is not None
     async with _connect_chan(host, port) as chan:
         async with open_portal(chan, **kwargs) as portal:
             yield portal

View File

@@ -22,6 +22,7 @@ def _mp_main(
     forkserver_info: Tuple[Any, Any, Any, Any, Any],
     start_method: str,
     parent_addr: Tuple[str, int] = None,
+    infect_asyncio: bool = False,
 ) -> None:
     """The routine called *after fork* which invokes a fresh ``trio.run``
     """
@@ -59,7 +60,9 @@ def _mp_main(
 def _trio_main(
     actor: 'Actor',
-    parent_addr: Tuple[str, int] = None
+    *,
+    parent_addr: Tuple[str, int] = None,
+    infect_asyncio: bool = False,
 ) -> None:
     """Entry point for a `trio_run_in_process` subactor.
     """
@@ -70,6 +73,8 @@ def _trio_main(
     # TODO: make a global func to set this or is it too hacky?
     # os.environ['PYTHONBREAKPOINT'] = 'tractor._debug.breakpoint'

+    log.info(f"Started new trio process for {actor.uid}")
+
     if actor.loglevel is not None:
         log.info(
             f"Setting loglevel for {actor.uid} to {actor.loglevel}")
@@ -87,6 +92,10 @@ def _trio_main(
     )

     try:
-        trio.run(trio_main)
+        if infect_asyncio:
+            actor._infected_aio = True
+            run_as_asyncio_guest(trio_main)
+        else:
+            trio.run(trio_main)
     except KeyboardInterrupt:
         log.warning(f"Actor {actor.uid} received KBI")

View File

@@ -157,6 +157,7 @@ async def cancel_on_completion(
 async def spawn_subactor(
     subactor: 'Actor',
     parent_addr: Tuple[str, int],
+    infect_asyncio: bool,
 ):
     spawn_cmd = [
         sys.executable,
@@ -181,6 +182,10 @@ async def spawn_subactor(
             subactor.loglevel
         ]

+    # Tell child to run in guest mode on top of ``asyncio`` loop
+    if infect_asyncio:
+        spawn_cmd.append("--asyncio")
+
     proc = await trio.open_process(spawn_cmd)
     try:
         yield proc
@@ -217,6 +222,7 @@ async def new_proc(
     _runtime_vars: Dict[str, Any],  # serialized and sent to _child
     *,
     use_trio_run_in_process: bool = False,
+    infect_asyncio: bool = False,
     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
 ) -> None:
     """Create a new ``multiprocessing.Process`` using the
@@ -232,6 +238,7 @@ async def new_proc(
         async with spawn_subactor(
             subactor,
             parent_addr,
+            infect_asyncio=infect_asyncio
         ) as proc:
             log.info(f"Started {proc}")
@@ -321,6 +328,7 @@ async def new_proc(
                 fs_info,
                 start_method,
                 parent_addr,
+                infect_asyncio,
             ),
             # daemon=True,
             name=name,
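The resulting child command line then carries the new flag alongside the arguments parsed by the ``_child`` entrypoint shown earlier; roughly as follows (the entrypoint invocation and concrete values are made up for illustration, only the flags come from the diff):

import sys

# illustrative argv for an asyncio-infected child process; the
# ``-m tractor._child`` invocation and values are assumptions,
# the flags mirror the ``--uid/--loglevel/--parent_addr/--asyncio``
# parser shown above
spawn_cmd = [
    sys.executable,
    '-m', 'tractor._child',
    '--uid', "('gui', 'a1b2c3')",
    '--parent_addr', '127.0.0.1:61000',
    '--loglevel', 'info',
    '--asyncio',  # only appended when ``infect_asyncio`` is set
]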

View File

@@ -56,6 +56,7 @@ class ActorNursery:
         rpc_module_paths: List[str] = None,
         loglevel: str = None,  # set log level per subactor
         nursery: trio.Nursery = None,
+        infect_asyncio: bool = False,
     ) -> Portal:

         loglevel = loglevel or self._actor.loglevel or get_loglevel()
@@ -89,6 +90,7 @@ class ActorNursery:
                 bind_addr,
                 parent_addr,
                 _rtv,  # run time vars
+                infect_asyncio=infect_asyncio,
             )
         )
@@ -101,6 +103,7 @@ class ActorNursery:
         rpc_module_paths: Optional[List[str]] = None,
         statespace: Dict[str, Any] = None,
         loglevel: str = None,  # set log level per subactor
+        infect_asyncio: bool = False,
         **kwargs,  # explicit args to ``fn``
     ) -> Portal:
         """Spawn a new actor, run a lone task, then terminate the actor and
@@ -119,6 +122,7 @@ class ActorNursery:
             loglevel=loglevel,
             # use the run_in_actor nursery
             nursery=self._ria_nursery,
+            infect_asyncio=infect_asyncio,
        )

        # this marks the actor to be cancelled after its portal result
        # is retreived, see logic in `open_nursery()` below.
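With ``infect_asyncio`` plumbed through both ``start_actor()`` and ``run_in_actor()``, spawning an asyncio-capable subactor from ``trio`` looks roughly like this (function and actor names are invented for illustration):

import asyncio

import tractor


async def aio_sleep_then_report():
    # runs on the child's ``asyncio`` loop
    await asyncio.sleep(0.5)
    return 'done on asyncio'


async def asyncio_actor():
    # runs under ``trio`` inside the infected child actor
    return await tractor.to_asyncio.run_task(aio_sleep_then_report)


async def main():
    async with tractor.open_nursery() as n:
        portal = await n.run_in_actor(
            'aio_child',
            asyncio_actor,
            infect_asyncio=True,  # child runs trio as a guest on asyncio
        )
        assert await portal.result() == 'done on asyncio'


if __name__ == '__main__':
    tractor.run(main)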

View File

@@ -4,8 +4,9 @@ Infection apis for ``asyncio`` loops running ``trio`` using guest mode.
 import asyncio
 import inspect
 from typing import (
+    Any,
     Callable,
-    AsyncGenerator,
+    AsyncIterator,
     Awaitable,
     Union,
 )
@@ -21,21 +22,26 @@ log = get_logger(__name__)
 __all__ = ['run_task', 'run_as_asyncio_guest']


-async def _invoke(
-    from_trio: trio.abc.ReceiveChannel,
-    to_trio: asyncio.Queue,
+async def run_coro(
+    to_trio: trio.MemorySendChannel,
     coro: Awaitable,
 ) -> None:
-    """Await or stream awaiable object based on ``coro`` type into
-    ``trio`` memory channel.
+    """Await ``coro`` and relay result back to ``trio``.
+    """
+    to_trio.send_nowait(await coro)

-    ``from_trio`` might eventually be used here for bidirectional streaming.
+
+async def consume_asyncgen(
+    to_trio: trio.MemorySendChannel,
+    coro: AsyncIterator,
+) -> None:
+    """Stream async generator results back to ``trio``.
+
+    ``from_trio`` might eventually be used here for
+    bidirectional streaming.
     """
-    if inspect.isasyncgen(coro):
-        async for item in coro:
-            to_trio.send_nowait(item)
-    elif inspect.iscoroutine(coro):
-        to_trio.send_nowait(await coro)
+    async for item in coro:
+        to_trio.send_nowait(item)


 async def run_task(
@@ -44,15 +50,15 @@ async def run_task(
     qsize: int = 2**10,
     _treat_as_stream: bool = False,
     **kwargs,
-) -> Union[AsyncGenerator, Awaitable]:
+) -> Any:
     """Run an ``asyncio`` async function or generator in a task, return
     or stream the result back to ``trio``.
     """
     assert current_actor().is_infected_aio()

     # ITC (inter task comms)
-    from_trio = asyncio.Queue(qsize)
-    to_trio, from_aio = trio.open_memory_channel(qsize)
+    from_trio = asyncio.Queue(qsize)  # type: ignore
+    to_trio, from_aio = trio.open_memory_channel(qsize)  # type: ignore

     args = tuple(inspect.getfullargspec(func).args)
@@ -66,49 +72,69 @@ async def run_task(
     if 'to_trio' in args:
         kwargs['to_trio'] = to_trio
     if 'from_trio' in args:
-        kwargs['from_trio'] = to_trio
+        kwargs['from_trio'] = from_trio

     coro = func(**kwargs)

     cancel_scope = trio.CancelScope()

     # start the asyncio task we submitted from trio
-    # TODO: try out ``anyio`` asyncio based tg here
-    task = asyncio.create_task(_invoke(from_trio, to_trio, coro))
-    err = None
+    if inspect.isawaitable(coro):
+        task = asyncio.create_task(run_coro(to_trio, coro))
+    elif inspect.isasyncgen(coro):
+        task = asyncio.create_task(consume_asyncgen(to_trio, coro))
+    else:
+        raise TypeError(f"No support for invoking {coro}")
+
+    aio_err = None

     def cancel_trio(task):
         """Cancel the calling ``trio`` task on error.
         """
         nonlocal err
-        err = task.exception()
+        aio_err = task.exception()
+        if aio_err:
+            log.exception(f"asyncio task errorred:\n{aio_err}")
         cancel_scope.cancel()

     task.add_done_callback(cancel_trio)

-    # asycn gen
+    # async iterator
     if inspect.isasyncgen(coro) or _treat_as_stream:

-        async def result():
-            with cancel_scope:
-                async with from_aio:
-                    async for item in from_aio:
-                        yield item
-            if cancel_scope.cancelled_caught and err:
-                raise err
+        async def stream_results():
+            try:
+                with cancel_scope:
+                    # stream values upward
+                    async with from_aio:
+                        async for item in from_aio:
+                            yield item
+            except BaseException as err:
+                if aio_err is not None:
+                    # always raise from any captured asyncio error
+                    raise err from aio_err
+                else:
+                    raise

-        return result()
+        return stream_results()

     # simple async func
-    elif inspect.iscoroutine(coro):
-        with cancel_scope:
-            result = await from_aio.receive()
-            return result
-        if cancel_scope.cancelled_caught and err:
-            raise err
+    try:
+        with cancel_scope:
+            # return single value
+            return await from_aio.receive()
+
+    # Do we need this?
+    except BaseException as err:
+        if aio_err is not None:
+            # always raise from any captured asyncio error
+            raise err from aio_err
+        else:
+            raise


 def run_as_asyncio_guest(
-    trio_main: Awaitable,
+    trio_main: Callable,
 ) -> None:
     """Entry for an "infected ``asyncio`` actor".