Compare commits


No commits in common. "22068e95fabfa493cc9cb12c57c007e0751f67b2" and "13bf19ecfecb0bfaf0c306306086c97b975149b0" have entirely different histories.

10 changed files with 48 additions and 180 deletions

View File

@ -1,31 +0,0 @@
import tractor
import trio


async def breakpoint_forever():
    "Indefinitely re-enter debugger in child actor."
    while True:
        yield 'yo'
        await tractor.breakpoint()


async def name_error():
    "Raise a ``NameError``"
    getattr(doggypants)


async def main():
    """Test breakpoint in a streaming actor.
    """
    async with tractor.open_nursery() as n:

        p0 = await n.start_actor('bp_forever', rpc_module_paths=[__name__])
        p1 = await n.start_actor('name_error', rpc_module_paths=[__name__])

        # retrieve results
        stream = await p0.run(__name__, 'breakpoint_forever')
        await p1.run(__name__, 'name_error')


if __name__ == '__main__':
    tractor.run(main, debug_mode=True, loglevel='error')
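(For context on the API used above: in this era of tractor, ``Portal.run`` on an async generator function returned a stream the parent could iterate. A minimal, self-contained sketch of that pattern; the ``counter`` function and the ``'streamer'`` actor name are invented for illustration.)

import tractor


async def counter():
    "Toy async generator streamed from a child actor."
    for i in range(3):
        yield i


async def main():
    async with tractor.open_nursery() as n:
        portal = await n.start_actor('streamer', rpc_module_paths=[__name__])
        # hedged: ``Portal.run`` on an async gen func hands back a stream
        stream = await portal.run(__name__, 'counter')
        async for value in stream:
            print(value)
        await portal.cancel_actor()


if __name__ == '__main__':
    tractor.run(main)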

View File

@ -282,34 +282,6 @@ def test_multi_subactors(spawn):
    assert 'bdb.BdbQuit' in before


def test_multi_daemon_subactors(spawn):
    """Multiple daemon subactors, both erroring and breakpointing within a
    stream.
    """
    child = spawn('multi_daemon_subactors')

    child.expect(r"\(Pdb\+\+\)")

    before = str(child.before.decode())
    assert "Attaching pdb to actor: ('bp_forever'" in before

    child.sendline('c')

    # first name_error failure
    child.expect(r"\(Pdb\+\+\)")
    before = str(child.before.decode())
    assert "NameError" in before

    child.sendline('c')

    child.expect(r"\(Pdb\+\+\)")
    before = str(child.before.decode())
    assert "tractor._exceptions.RemoteActorError: ('name_error'" in before

    child.sendline('c')
    child.expect(pexpect.EOF)


def test_multi_subactors_root_errors(spawn):
    """Multiple subactors, both erroring and breakpointing as well as
    a nested subactor erroring.
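(The ``spawn`` argument these tests accept is a pytest fixture that is not part of this diff. A plausible minimal sketch of what such a fixture could look like, assuming it simply launches the debugging example scripts under ``pexpect``; the path and timeout are guesses.)

import pexpect
import pytest


@pytest.fixture
def spawn():
    """Hypothetical stand-in for the real fixture: start one of the
    debugging example scripts and return the ``pexpect`` child so the
    test can drive the interactive pdb prompt."""
    def _spawn(script: str) -> pexpect.spawn:
        return pexpect.spawn(
            f'python examples/debugging/{script}.py',
            timeout=3,
        )
    return _spawn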

View File

@ -20,9 +20,8 @@ from ._state import current_actor
from . import _state
from ._exceptions import RemoteActorError, ModuleNotExposed
from ._debug import breakpoint, post_mortem
from . import _spawn
from . import msg
from . import to_asyncio
from . import _spawn
__all__ = [
@ -40,7 +39,6 @@ __all__ = [
'RemoteActorError',
'ModuleNotExposed',
'msg'
'to_asyncio'
]
@ -62,23 +60,16 @@ async def _main(
"""
logger = log.get_logger('tractor')
# mark top most level process as root actor
_state._runtime_vars['_is_root'] = True
if start_method is not None:
_spawn.try_set_start_method(start_method)
if debug_mode and _spawn._spawn_method == 'trio':
_state._runtime_vars['_debug_mode'] = True
# expose internal debug module to every actor allowing
# for use of ``await tractor.breakpoint()``
kwargs.setdefault('rpc_module_paths', []).append('tractor._debug')
elif debug_mode:
raise RuntimeError(
"Debug mode is only supported for the `trio` backend!"
)
raise RuntimeError("Debug mode is only supported for the `trio` backend!")
main = partial(async_fn, *args)
@ -112,10 +103,7 @@ async def _main(
else:
# start this local actor as the arbiter
actor = Arbiter(
name or 'arbiter',
arbiter_addr=arbiter_addr,
**kwargs
)
name or 'arbiter', arbiter_addr=arbiter_addr, **kwargs)
# ``Actor._async_main()`` creates an internal nursery if one is not
# provided and thus blocks here until its main task completes.
@ -146,6 +134,9 @@ def run(
This is tractor's main entry and the start point for any async actor.
"""
# mark top most level process as root actor
_state._runtime_vars['_is_root'] = True
return trio.run(
partial(
# our entry
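(All of the knobs touched in this hunk, ``start_method``, ``debug_mode`` and the root-actor flag, flow through ``tractor.run``. A hedged invocation sketch; the worker function and actor name are invented.)

import tractor
import trio


async def sleep_then_return() -> str:
    "Invented worker run in a subactor."
    await trio.sleep(0.1)
    return 'done'


async def main():
    async with tractor.open_nursery() as n:
        portal = await n.run_in_actor('worker', sleep_then_return)
        print(await portal.result())


if __name__ == '__main__':
    # ``run`` marks this process as the root actor, optionally selects the
    # spawn backend, and (on the `trio` backend only) enables debug mode.
    tractor.run(main, start_method='trio', debug_mode=True)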

View File

@ -304,18 +304,7 @@ class Actor:
try:
return getattr(self._mods[ns], funcname)
except KeyError as err:
mne = ModuleNotExposed(*err.args)
if ns == '__main__':
msg = (
"\n\nMake sure you exposed the current module using:\n\n"
"ActorNursery.start_actor(<name>, rpc_module_paths="
"[__name__])"
)
mne.msg += msg
raise mne
raise ModuleNotExposed(*err.args)
async def _stream_handler(
self,
@ -605,7 +594,7 @@ class Actor:
# Receive runtime state from our parent
parent_data = await chan.recv()
log.debug(
"Received state from parent:\n"
"Recieved state from parent:\n"
f"{parent_data}"
)
accept_addr = (
@ -613,7 +602,6 @@ class Actor:
parent_data.pop('bind_port'),
)
rvs = parent_data.pop('_runtime_vars')
log.debug(f"Runtime vars are: {rvs}")
rvs['_is_root'] = False
_state._runtime_vars.update(rvs)
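(The hint attached to ``ModuleNotExposed`` in the hunk above targets a common mistake. A hedged sketch of the failure it describes; the actor and function names are invented.)

import tractor


async def echo() -> str:
    return 'hi'


async def main():
    async with tractor.open_nursery() as n:
        # Without ``rpc_module_paths=[__name__]`` the child never loads this
        # module, so the remote lookup of ``echo`` fails and surfaces as a
        # ``ModuleNotExposed`` wrapped in a ``RemoteActorError``.
        portal = await n.start_actor('echoer')
        await portal.run(__name__, 'echo')


if __name__ == '__main__':
    tractor.run(main)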

View File

@ -19,15 +19,12 @@ def parse_ipaddr(arg):
return (str(host), int(port))
from ._entry import _trio_main
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=parse_uid)
parser.add_argument("--loglevel", type=str)
parser.add_argument("--parent_addr", type=parse_ipaddr)
parser.add_argument("--asyncio", action='store_true')
args = parser.parse_args()
subactor = Actor(
@ -39,6 +36,5 @@ if __name__ == "__main__":
_trio_main(
subactor,
parent_addr=args.parent_addr,
infect_asyncio=args.asyncio,
)
parent_addr=args.parent_addr
)
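(This CLI is what the parent process invokes when spawning a subactor; the command is assembled in ``_spawn.spawn_subactor``, shown further below. A hedged sketch of the resulting argv; uid, address and port values are invented.)

import sys

spawn_cmd = [
    sys.executable,
    '-m', 'tractor._child',
    '--uid', "('worker', 'a1b2c3d4')",   # parsed by ``parse_uid``
    '--loglevel', 'info',
    '--parent_addr', '127.0.0.1:61000',  # parsed by ``parse_ipaddr`` above
    '--asyncio',                         # only present when infect_asyncio=True
]
print(' '.join(spawn_cmd))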

View File

@ -42,7 +42,6 @@ async def get_root(
**kwargs,
) -> typing.AsyncGenerator[Union[Portal, LocalPortal], None]:
host, port = _runtime_vars['_root_mailbox']
assert host is not None
async with _connect_chan(host, port) as chan:
async with open_portal(chan, **kwargs) as portal:
yield portal

View File

@ -22,7 +22,6 @@ def _mp_main(
forkserver_info: Tuple[Any, Any, Any, Any, Any],
start_method: str,
parent_addr: Tuple[str, int] = None,
infect_asyncio: bool = False,
) -> None:
"""The routine called *after fork* which invokes a fresh ``trio.run``
"""
@ -60,9 +59,7 @@ def _mp_main(
def _trio_main(
actor: 'Actor',
*,
parent_addr: Tuple[str, int] = None,
infect_asyncio: bool = False,
parent_addr: Tuple[str, int] = None
) -> None:
"""Entry point for a `trio_run_in_process` subactor.
"""
@ -73,8 +70,6 @@ def _trio_main(
# TODO: make a global func to set this or is it too hacky?
# os.environ['PYTHONBREAKPOINT'] = 'tractor._debug.breakpoint'
log.info(f"Started new trio process for {actor.uid}")
if actor.loglevel is not None:
log.info(
f"Setting loglevel for {actor.uid} to {actor.loglevel}")
@ -92,11 +87,7 @@ def _trio_main(
)
try:
if infect_asyncio:
actor._infected_aio = True
run_as_asyncio_guest(trio_main)
else:
trio.run(trio_main)
trio.run(trio_main)
except KeyboardInterrupt:
log.warning(f"Actor {actor.uid} received KBI")

View File

@ -157,7 +157,6 @@ async def cancel_on_completion(
async def spawn_subactor(
subactor: 'Actor',
parent_addr: Tuple[str, int],
infect_asyncio: bool,
):
spawn_cmd = [
sys.executable,
@ -182,10 +181,6 @@ async def spawn_subactor(
subactor.loglevel
]
# Tell child to run in guest mode on top of ``asyncio`` loop
if infect_asyncio:
spawn_cmd.append("--asyncio")
proc = await trio.open_process(spawn_cmd)
try:
yield proc
@ -222,7 +217,6 @@ async def new_proc(
_runtime_vars: Dict[str, Any], # serialized and sent to _child
*,
use_trio_run_in_process: bool = False,
infect_asyncio: bool = False,
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
) -> None:
"""Create a new ``multiprocessing.Process`` using the
@ -238,7 +232,6 @@ async def new_proc(
async with spawn_subactor(
subactor,
parent_addr,
infect_asyncio=infect_asyncio
) as proc:
log.info(f"Started {proc}")
@ -328,7 +321,6 @@ async def new_proc(
fs_info,
start_method,
parent_addr,
infect_asyncio,
),
# daemon=True,
name=name,

View File

@ -56,7 +56,6 @@ class ActorNursery:
rpc_module_paths: List[str] = None,
loglevel: str = None, # set log level per subactor
nursery: trio.Nursery = None,
infect_asyncio: bool = False,
) -> Portal:
loglevel = loglevel or self._actor.loglevel or get_loglevel()
@ -90,7 +89,6 @@ class ActorNursery:
bind_addr,
parent_addr,
_rtv, # run time vars
infect_asyncio=infect_asyncio,
)
)
@ -103,7 +101,6 @@ class ActorNursery:
rpc_module_paths: Optional[List[str]] = None,
statespace: Dict[str, Any] = None,
loglevel: str = None, # set log level per subactor
infect_asyncio: bool = False,
**kwargs, # explicit args to ``fn``
) -> Portal:
"""Spawn a new actor, run a lone task, then terminate the actor and
@ -122,7 +119,6 @@ class ActorNursery:
loglevel=loglevel,
# use the run_in_actor nursery
nursery=self._ria_nursery,
infect_asyncio=infect_asyncio,
)
# this marks the actor to be cancelled after its portal result
# is retrieved, see logic in `open_nursery()` below.
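(Tying the ``infect_asyncio`` plumbing together: a hedged usage sketch against this older API, where a subactor started with the flag runs its ``trio`` code as a guest of an ``asyncio`` loop and can delegate work to it via ``tractor.to_asyncio.run_task``, shown in the next file. The actor name and functions are invented.)

import asyncio
import tractor
from tractor import to_asyncio


async def aio_hello() -> str:
    "Invented example: runs on the subactor's asyncio loop."
    await asyncio.sleep(0.1)
    return 'hello from asyncio'


async def trio_side() -> str:
    "Runs under trio inside the infected subactor."
    return await to_asyncio.run_task(aio_hello)


async def main():
    async with tractor.open_nursery() as n:
        portal = await n.run_in_actor(
            'aio_actor',
            trio_side,
            infect_asyncio=True,  # the flag threaded through this diff
        )
        print(await portal.result())


if __name__ == '__main__':
    tractor.run(main)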

View File

@ -4,9 +4,8 @@ Infection apis for ``asyncio`` loops running ``trio`` using guest mode.
import asyncio
import inspect
from typing import (
Any,
Callable,
AsyncIterator,
AsyncGenerator,
Awaitable,
Union,
)
@ -22,26 +21,21 @@ log = get_logger(__name__)
__all__ = ['run_task', 'run_as_asyncio_guest']
async def run_coro(
to_trio: trio.MemorySendChannel,
async def _invoke(
from_trio: trio.abc.ReceiveChannel,
to_trio: asyncio.Queue,
coro: Awaitable,
) -> None:
"""Await ``coro`` and relay result back to ``trio``.
"""Await or stream awaiable object based on ``coro`` type into
``trio`` memory channel.
``from_trio`` might eventually be used here for bidirectional streaming.
"""
to_trio.send_nowait(await coro)
async def consume_asyncgen(
to_trio: trio.MemorySendChannel,
coro: AsyncIterator,
) -> None:
"""Stream async generator results back to ``trio``.
``from_trio`` might eventually be used here for
bidirectional streaming.
"""
async for item in coro:
to_trio.send_nowait(item)
if inspect.isasyncgen(coro):
async for item in coro:
to_trio.send_nowait(item)
elif inspect.iscoroutine(coro):
to_trio.send_nowait(await coro)
async def run_task(
@ -50,15 +44,15 @@ async def run_task(
qsize: int = 2**10,
_treat_as_stream: bool = False,
**kwargs,
) -> Any:
) -> Union[AsyncGenerator, Awaitable]:
"""Run an ``asyncio`` async function or generator in a task, return
or stream the result back to ``trio``.
"""
assert current_actor().is_infected_aio()
# ITC (inter task comms)
from_trio = asyncio.Queue(qsize) # type: ignore
to_trio, from_aio = trio.open_memory_channel(qsize) # type: ignore
from_trio = asyncio.Queue(qsize)
to_trio, from_aio = trio.open_memory_channel(qsize)
args = tuple(inspect.getfullargspec(func).args)
@ -72,69 +66,49 @@ async def run_task(
if 'to_trio' in args:
kwargs['to_trio'] = to_trio
if 'from_trio' in args:
kwargs['from_trio'] = from_trio
kwargs['from_trio'] = to_trio
coro = func(**kwargs)
cancel_scope = trio.CancelScope()
# start the asyncio task we submitted from trio
if inspect.isawaitable(coro):
task = asyncio.create_task(run_coro(to_trio, coro))
elif inspect.isasyncgen(coro):
task = asyncio.create_task(consume_asyncgen(to_trio, coro))
else:
raise TypeError(f"No support for invoking {coro}")
aio_err = None
# TODO: try out ``anyio`` asyncio based tg here
task = asyncio.create_task(_invoke(from_trio, to_trio, coro))
err = None
def cancel_trio(task):
"""Cancel the calling ``trio`` task on error.
"""
nonlocal err
aio_err = task.exception()
if aio_err:
log.exception(f"asyncio task errorred:\n{aio_err}")
err = task.exception()
cancel_scope.cancel()
task.add_done_callback(cancel_trio)
# async iterator
# async gen
if inspect.isasyncgen(coro) or _treat_as_stream:
async def result():
with cancel_scope:
async with from_aio:
async for item in from_aio:
yield item
if cancel_scope.cancelled_caught and err:
raise err
async def stream_results():
try:
with cancel_scope:
# stream values upward
async with from_aio:
async for item in from_aio:
yield item
except BaseException as err:
if aio_err is not None:
# always raise from any captured asyncio error
raise err from aio_err
else:
raise
return stream_results()
return result()
# simple async func
try:
elif inspect.iscoroutine(coro):
with cancel_scope:
# return single value
return await from_aio.receive()
# Do we need this?
except BaseException as err:
if aio_err is not None:
# always raise from any captured asyncio error
raise err from aio_err
else:
raise
result = await from_aio.receive()
return result
if cancel_scope.cancelled_caught and err:
raise err
def run_as_asyncio_guest(
trio_main: Callable,
trio_main: Awaitable,
) -> None:
"""Entry for an "infected ``asyncio`` actor".