Compare commits

...

12 Commits

Author SHA1 Message Date
Guillermo Rodriguez 2b09818ed0
Merge pull request #1 from goodboy/drop-trip-update-trio
Drop trip update trio
2020-07-20 21:04:05 -03:00
Tyler Goodlet 409ceefd6e Add logging to some cancel tests 2020-07-20 19:51:07 -04:00
Tyler Goodlet 86ed8111d8 Make sure to wait on trio processes on teardown 2020-07-20 19:50:47 -04:00
Tyler Goodlet 1459abe568 Change spawn method name in `Actor.load_modules()` 2020-07-20 19:50:19 -04:00
Tyler Goodlet 660f310737 Add back subactor logging 2020-07-20 19:49:39 -04:00
Guillermo Rodriguez 772f9c3ac3
First attempt at removing trip & updating hazmat -> lowlevel 2020-07-20 16:18:38 -03:00
Tyler Goodlet 6bf5148ffc Allow marking `asyncio` funcs declaring `to_trio` channel 2020-07-03 17:40:37 -04:00
Tyler Goodlet 6d5ebb9aa7 Wow, fix all the broken async func invoking code..
Clearly this wasn't developed against a task that spawned just an async
func in `asyncio`.. Fix all that and remove a bunch of unnecessary func
layers. Add provisional support for the target receiving the `to_trio`
and `from_trio` channels and for the @tractor.stream marker.
2020-07-03 17:33:46 -04:00
Tyler Goodlet fcd1566834 Drop entrypoints from `Actor` 2020-07-03 17:05:38 -04:00
Tyler Goodlet d19c0f9b1f Move asyncio guest mode entrypoint to `to_asyncio`
The function is useful if you want to run the "main process" under
`asyncio`. Until `trio` core wraps this better we'll keep our own copy
in the interim (there's a new "inside-out-guest" mode almost on
mainline so hang tight).
2020-07-01 13:38:40 -04:00
Tyler Goodlet ebaf129283 Propagate any spawned `asyncio` task error upwards
This should mostly maintain top level SC principles for any task spawned
using `tractor.to_asyncio.run()`. When the `asyncio` task completes make
sure to cancel the pertaining `trio` cancel scope and raise any error
that may have resulted.

Resolves #120
2020-06-28 23:13:56 -04:00
Tyler Goodlet 8434c76451 Support "infected asyncio" actors
This is an initial solution for #120.

Allow spawning `asyncio` based actors which run `trio` in guest
mode. This enables spawning `tractor` actors on top of the `asyncio`
event loop whilst still leveraging the SC focused internal actor
supervision machinery. Add a `tractor.to_asyncio.run()` api to allow
spawning tasks on the `asyncio` loop from an embedded (remote) `trio`
task and return or stream results all the way back through the `tractor`
IPC system using a very similar api to portals.

One outstanding problem is getting SC around calls to
`asyncio.create_task()`. Currently a task that crashes isn't able to
easily relay the error to the embedded `trio` task without us fully
enforcing the portals based message protocol (which seems superfluous
given the error ref is in process). Further experiments using `anyio`
task groups may alleviate this.
2020-06-28 23:13:50 -04:00
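
For orientation, here is a minimal sketch (not part of this diff) of the usage the last commit message describes; the function names and `main()` wiring are illustrative, while `infect_asyncio` and `tractor.to_asyncio.run_task()` come from the hunks below (note that in this changeset only the `multiprocessing` entry point, `_mp_main()`, threads `infect_asyncio` through to the child).

import asyncio
import tractor

async def aio_add(x, y):
    # plain ``asyncio`` coroutine run on the host loop of the infected child
    await asyncio.sleep(0.1)
    return x + y

async def trio_side():
    # runs as a ``trio`` guest-mode task inside the infected subactor and
    # submits work to the surrounding ``asyncio`` loop
    return await tractor.to_asyncio.run_task(aio_add, x=1, y=2)

async def main():
    async with tractor.open_nursery() as n:
        portal = await n.run_in_actor(
            'aio_actor',
            trio_side,
            infect_asyncio=True,  # host ``trio`` under ``asyncio`` in the child
        )
        print(await portal.result())  # -> 3

if __name__ == '__main__':
    tractor.run(main)
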
14 changed files with 336 additions and 96 deletions

View File

@ -25,16 +25,16 @@ matrix:
- name: "Python 3.7: multiprocessing"
python: 3.7 # this works for Linux but is ignored on macOS or Windows
env: SPAWN_BACKEND="mp"
- name: "Python 3.7: trio-run-in-process"
- name: "Python 3.7: trio"
python: 3.7 # this works for Linux but is ignored on macOS or Windows
env: SPAWN_BACKEND="trio_run_in_process"
env: SPAWN_BACKEND="trio"
- name: "Python 3.8: multiprocessing"
python: 3.8 # this works for Linux but is ignored on macOS or Windows
env: SPAWN_BACKEND="mp"
- name: "Python 3.8: trio-run-in-process"
- name: "Python 3.8: trio"
python: 3.8 # this works for Linux but is ignored on macOS or Windows
env: SPAWN_BACKEND="trio_run_in_process"
env: SPAWN_BACKEND="trio"
install:
- cd $TRAVIS_BUILD_DIR

View File

@ -39,7 +39,7 @@ setup(
],
install_requires=[
'msgpack', 'trio>0.8', 'async_generator', 'colorlog', 'wrapt',
'trio_typing', 'trio-run-in-process',
'trio_typing', 'cloudpickle',
],
tests_require=['pytest'],
python_requires=">=3.7",

View File

@ -21,7 +21,7 @@ def pytest_addoption(parser):
parser.addoption(
"--spawn-backend", action="store", dest='spawn_backend',
default='trio_run_in_process',
default='trio',
help="Processing spawning backend to use for test run",
)
@ -34,7 +34,7 @@ def pytest_configure(config):
if backend == 'mp':
tractor._spawn.try_set_start_method('spawn')
elif backend == 'trio_run_in_process':
elif backend == 'trio':
tractor._spawn.try_set_start_method(backend)
@ -56,7 +56,7 @@ def pytest_generate_tests(metafunc):
if not spawn_backend:
# XXX some weird windows bug with `pytest`?
spawn_backend = 'mp'
assert spawn_backend in ('mp', 'trio_run_in_process')
assert spawn_backend in ('mp', 'trio')
if 'start_method' in metafunc.fixturenames:
if spawn_backend == 'mp':
@ -67,11 +67,11 @@ def pytest_generate_tests(metafunc):
# removing XXX: the fork method is in general
# incompatible with trio's global scheduler state
methods.remove('fork')
elif spawn_backend == 'trio_run_in_process':
elif spawn_backend == 'trio':
if platform.system() == "Windows":
pytest.fail(
"Only `--spawn-backend=mp` is supported on Windows")
methods = ['trio_run_in_process']
methods = ['trio']
metafunc.parametrize("start_method", methods, scope='module')

View File

@ -197,7 +197,7 @@ async def test_cancel_infinite_streamer(start_method):
],
)
@tractor_test
async def test_some_cancels_all(num_actors_and_errs, start_method):
async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel):
"""Verify a subset of failed subactors causes all others in
the nursery to be cancelled just like the strategy in trio.

View File

@ -20,6 +20,7 @@ from ._state import current_actor
from ._exceptions import RemoteActorError, ModuleNotExposed
from . import msg
from . import _spawn
from . import to_asyncio
__all__ = [
@ -35,6 +36,7 @@ __all__ = [
'RemoteActorError',
'ModuleNotExposed',
'msg',
'to_asyncio'
]

View File

@ -179,6 +179,9 @@ class Actor:
# Information about `__main__` from parent
_parent_main_data: Dict[str, str]
# if started on ``asyncio`` running ``trio`` in guest mode
_infected_aio: bool = False
def __init__(
self,
name: str,
@ -256,7 +259,7 @@ class Actor:
code (if it exists).
"""
try:
if self._spawn_method == 'trio_run_in_process':
if self._spawn_method == 'trio':
parent_data = self._parent_main_data
if 'init_main_from_name' in parent_data:
_mp_fixup_main._fixup_main_from_name(
@ -539,58 +542,6 @@ class Actor:
f"Exiting msg loop for {chan} from {chan.uid} "
f"with last msg:\n{msg}")
def _mp_main(
self,
accept_addr: Tuple[str, int],
forkserver_info: Tuple[Any, Any, Any, Any, Any],
start_method: str,
parent_addr: Tuple[str, int] = None
) -> None:
"""The routine called *after fork* which invokes a fresh ``trio.run``
"""
self._forkserver_info = forkserver_info
from ._spawn import try_set_start_method
spawn_ctx = try_set_start_method(start_method)
if self.loglevel is not None:
log.info(
f"Setting loglevel for {self.uid} to {self.loglevel}")
get_console_log(self.loglevel)
assert spawn_ctx
log.info(
f"Started new {spawn_ctx.current_process()} for {self.uid}")
_state._current_actor = self
log.debug(f"parent_addr is {parent_addr}")
try:
trio.run(partial(
self._async_main, accept_addr, parent_addr=parent_addr))
except KeyboardInterrupt:
pass # handle it the same way trio does?
log.info(f"Actor {self.uid} terminated")
async def _trip_main(
self,
accept_addr: Tuple[str, int],
parent_addr: Tuple[str, int] = None
) -> None:
"""Entry point for a `trio_run_in_process` subactor.
Here we don't need to call `trio.run()` since trip does that as
part of its subprocess startup sequence.
"""
if self.loglevel is not None:
log.info(
f"Setting loglevel for {self.uid} to {self.loglevel}")
get_console_log(self.loglevel)
log.info(f"Started new TRIP process for {self.uid}")
_state._current_actor = self
await self._async_main(accept_addr, parent_addr=parent_addr)
log.info(f"Actor {self.uid} terminated")
async def _async_main(
self,
accept_addr: Tuple[str, int],
@ -846,6 +797,8 @@ class Actor:
log.info(f"Handshake with actor {uid}@{chan.raddr} complete")
return uid
def is_infected_aio(self) -> bool:
return self._infected_aio
class Arbiter(Actor):
"""A special actor who knows all the other actors and always has

13
tractor/_child.py 100644
View File

@ -0,0 +1,13 @@
import sys
import trio
import cloudpickle
if __name__ == "__main__":
job = cloudpickle.load(sys.stdin.detach())
try:
result = trio.run(job)
cloudpickle.dump(result, sys.stdout.detach())
except BaseException as err:
cloudpickle.dump(err, sys.stdout.detach())
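
For reference, a hand-rolled parent-side driver for this entry point might look like the following sketch (purely illustrative, not part of the change); it mirrors what `run_in_process()` in `_spawn.py` does further down and assumes the child pickles its result back over stdout as in the `try` block above.

import subprocess
import sys
from functools import partial

import cloudpickle

async def add(x, y):
    return x + y

proc = subprocess.Popen(
    [sys.executable, '-m', 'tractor._child'],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
)
# ship the pickled async job over stdin; the child runs it under ``trio.run()``
proc.stdin.write(cloudpickle.dumps(partial(add, 1, 2)))
proc.stdin.close()
# read back the pickled result (or exception) from the child's stdout
result = cloudpickle.load(proc.stdout)
proc.wait()
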

80
tractor/_entry.py 100644
View File

@ -0,0 +1,80 @@
"""
Process entry points.
"""
from functools import partial
from typing import Tuple, Any
import trio # type: ignore
from ._actor import Actor
from .log import get_console_log, get_logger
from . import _state
from .to_asyncio import run_as_asyncio_guest
log = get_logger(__name__)
def _mp_main(
actor: 'Actor',
accept_addr: Tuple[str, int],
forkserver_info: Tuple[Any, Any, Any, Any, Any],
start_method: str,
parent_addr: Tuple[str, int] = None,
infect_asyncio: bool = False,
) -> None:
"""The routine called *after fork* which invokes a fresh ``trio.run``
"""
actor._forkserver_info = forkserver_info
from ._spawn import try_set_start_method
spawn_ctx = try_set_start_method(start_method)
if actor.loglevel is not None:
log.info(
f"Setting loglevel for {actor.uid} to {actor.loglevel}")
get_console_log(actor.loglevel)
assert spawn_ctx
log.info(
f"Started new {spawn_ctx.current_process()} for {actor.uid}")
_state._current_actor = actor
log.debug(f"parent_addr is {parent_addr}")
trio_main = partial(
actor._async_main,
accept_addr,
parent_addr=parent_addr
)
try:
if infect_asyncio:
actor._infected_aio = True
run_as_asyncio_guest(trio_main)
else:
trio.run(trio_main)
except KeyboardInterrupt:
pass # handle it the same way trio does?
log.info(f"Actor {actor.uid} terminated")
async def _trio_main(
actor: 'Actor',
accept_addr: Tuple[str, int],
parent_addr: Tuple[str, int] = None
) -> None:
"""Entry point for a `trio_run_in_process` subactor.
Here we don't need to call `trio.run()` since trip does that as
part of its subprocess startup sequence.
"""
if actor.loglevel is not None:
log.info(
f"Setting loglevel for {actor.uid} to {actor.loglevel}")
get_console_log(actor.loglevel)
log.info(f"Started new trio process for {actor.uid}")
_state._current_actor = actor
await actor._async_main(accept_addr, parent_addr=parent_addr)
log.info(f"Actor {actor.uid} terminated")

View File

@ -1,14 +1,18 @@
"""
Machinery for actor process spawning using multiple backends.
"""
import sys
import inspect
import subprocess
import multiprocessing as mp
import platform
from typing import Any, Dict, Optional
from functools import partial
import trio
import cloudpickle
from trio_typing import TaskStatus
from async_generator import aclosing
from async_generator import aclosing, asynccontextmanager
try:
from multiprocessing import semaphore_tracker # type: ignore
@ -21,11 +25,12 @@ except ImportError:
from multiprocessing import forkserver # type: ignore
from typing import Tuple
from . import _forkserver_override
from . import _forkserver_override, _child
from ._state import current_actor
from .log import get_logger
from ._portal import Portal
from ._actor import Actor, ActorFailure
from ._entry import _mp_main, _trio_main
log = get_logger('tractor')
@ -40,14 +45,13 @@ if platform.system() == 'Windows':
_ctx = mp.get_context("spawn")
async def proc_waiter(proc: mp.Process) -> None:
await trio.hazmat.WaitForSingleObject(proc.sentinel)
await trio.lowlevel.WaitForSingleObject(proc.sentinel)
else:
# *NIX systems use ``trio_run_in_process` as our default (for now)
import trio_run_in_process
_spawn_method = "trio_run_in_process"
# *NIX systems use ``trio`` primitives as our default
_spawn_method = "trio"
async def proc_waiter(proc: mp.Process) -> None:
await trio.hazmat.wait_readable(proc.sentinel)
await trio.lowlevel.wait_readable(proc.sentinel)
def try_set_start_method(name: str) -> Optional[mp.context.BaseContext]:
@ -56,7 +60,7 @@ def try_set_start_method(name: str) -> Optional[mp.context.BaseContext]:
If the desired method is not supported this function will error. On
Windows the only supported option is the ``multiprocessing`` "spawn"
method. The default on *nix systems is ``trio_run_in_process``.
method. The default on *nix systems is ``trio``.
"""
global _ctx
global _spawn_method
@ -68,7 +72,7 @@ def try_set_start_method(name: str) -> Optional[mp.context.BaseContext]:
# no Windows support for trip yet
if platform.system() != 'Windows':
methods += ['trio_run_in_process']
methods += ['trio']
if name not in methods:
raise ValueError(
@ -77,7 +81,7 @@ def try_set_start_method(name: str) -> Optional[mp.context.BaseContext]:
elif name == 'forkserver':
_forkserver_override.override_stdlib()
_ctx = mp.get_context(name)
elif name == 'trio_run_in_process':
elif name == 'trio':
_ctx = None
else:
_ctx = mp.get_context(name)
@ -152,6 +156,27 @@ async def cancel_on_completion(
await portal.cancel_actor()
@asynccontextmanager
async def run_in_process(async_fn, *args, **kwargs):
encoded_job = cloudpickle.dumps(partial(async_fn, *args, **kwargs))
p = await trio.open_process(
[
sys.executable,
"-m",
_child.__name__
],
stdin=subprocess.PIPE
)
# send over func to call
await p.stdin.send_all(encoded_job)
yield p
# wait for termination
await p.wait()
async def new_proc(
name: str,
actor_nursery: 'ActorNursery', # type: ignore
@ -161,6 +186,7 @@ async def new_proc(
bind_addr: Tuple[str, int],
parent_addr: Tuple[str, int],
use_trio_run_in_process: bool = False,
infect_asyncio: bool = False,
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
) -> None:
"""Create a new ``multiprocessing.Process`` using the
@ -172,10 +198,10 @@ async def new_proc(
subactor._spawn_method = _spawn_method
async with trio.open_nursery() as nursery:
if use_trio_run_in_process or _spawn_method == 'trio_run_in_process':
# trio_run_in_process
async with trio_run_in_process.open_in_process(
subactor._trip_main,
if use_trio_run_in_process or _spawn_method == 'trio':
async with run_in_process(
_trio_main,
subactor,
bind_addr,
parent_addr,
) as proc:
@ -198,7 +224,7 @@ async def new_proc(
cancel_scope = await nursery.start(
cancel_on_completion, portal, subactor, errors)
# TRIP blocks here until process is complete
# run_in_process blocks here until process is complete
else:
# `multiprocessing`
assert _ctx
@ -235,12 +261,14 @@ async def new_proc(
fs_info = (None, None, None, None, None)
proc = _ctx.Process( # type: ignore
target=subactor._mp_main,
target=_mp_main,
args=(
subactor,
bind_addr,
fs_info,
start_method,
parent_addr
parent_addr,
infect_asyncio,
),
# daemon=True,
name=name,
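
For completeness, the renamed backend can also be selected explicitly; a minimal sketch mirroring what the test harness does in `conftest.py`:

import tractor

# the 'trio' branch above sets no ``multiprocessing`` context (``_ctx = None``)
tractor._spawn.try_set_start_method('trio')
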

View File

@ -30,7 +30,7 @@ class ActorContextInfo(Mapping):
def __getitem__(self, key: str):
try:
return {
'task': trio.hazmat.current_task,
'task': trio.lowlevel.current_task,
'actor': current_actor
}[key]().name
except RuntimeError:

View File

@ -41,9 +41,11 @@ def stream(func):
"""
func._tractor_stream_function = True
sig = inspect.signature(func)
if 'ctx' not in sig.parameters:
params = sig.parameters
if 'ctx' not in params and 'to_trio' not in params:
raise TypeError(
"The first argument to the stream function "
f"{func.__name__} must be `ctx: tractor.Context` "
"(Or ``to_trio`` if using ``asyncio`` in guest mode)."
)
return func
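
To illustrate why the check is relaxed, here is a hedged sketch (not from this diff) of an `asyncio`-side function that now passes validation; it assumes invocation via `to_asyncio.run_task()` inside an "infected" actor as defined in `to_asyncio.py` below.

import asyncio
import tractor

@tractor.stream
async def aio_counter(to_trio, limit: int = 3):
    # ``to_trio`` is the ``trio`` memory-channel send side handed in by name;
    # ``send_nowait()`` is synchronous so it can be called from the asyncio loop
    for i in range(limit):
        to_trio.send_nowait(i)
        await asyncio.sleep(0)
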

View File

@ -1,6 +1,7 @@
"""
``trio`` inspired apis and helpers
"""
from functools import partial
import multiprocessing as mp
from typing import Tuple, List, Dict, Optional, Any
import typing
@ -10,7 +11,7 @@ from async_generator import asynccontextmanager
from ._state import current_actor
from .log import get_logger, get_loglevel
from ._actor import Actor # , ActorFailure
from ._actor import Actor
from ._portal import Portal
from . import _spawn
@ -51,6 +52,7 @@ class ActorNursery:
rpc_module_paths: List[str] = None,
loglevel: str = None, # set log level per subactor
nursery: trio.Nursery = None,
infect_asyncio: bool = False,
) -> Portal:
loglevel = loglevel or self._actor.loglevel or get_loglevel()
@ -71,6 +73,7 @@ class ActorNursery:
# XXX: the type ignore is actually due to a `mypy` bug
return await nursery.start( # type: ignore
partial(
_spawn.new_proc,
name,
self,
@ -78,6 +81,8 @@ class ActorNursery:
self.errors,
bind_addr,
parent_addr,
infect_asyncio=infect_asyncio,
)
)
async def run_in_actor(
@ -88,6 +93,7 @@ class ActorNursery:
rpc_module_paths: Optional[List[str]] = None,
statespace: Dict[str, Any] = None,
loglevel: str = None, # set log level per subactor
infect_asyncio: bool = False,
**kwargs, # explicit args to ``fn``
) -> Portal:
"""Spawn a new actor, run a lone task, then terminate the actor and
@ -106,6 +112,7 @@ class ActorNursery:
loglevel=loglevel,
# use the run_in_actor nursery
nursery=self._ria_nursery,
infect_asyncio=infect_asyncio,
)
# this marks the actor to be cancelled after its portal result
# is retrieved, see logic in `open_nursery()` below.
@ -131,7 +138,7 @@ class ActorNursery:
# send KeyBoardInterrupt (trio abort signal) to sub-actors
# os.kill(proc.pid, signal.SIGINT)
log.debug(f"Cancelling nursery")
log.debug("Cancelling nursery")
with trio.move_on_after(3) as cs:
async with trio.open_nursery() as nursery:
for subactor, proc, portal in self._children.values():
@ -260,7 +267,7 @@ async def open_nursery() -> typing.AsyncGenerator[ActorNursery, None]:
# Last bit before first nursery block ends in the case
# where we didn't error in the caller's scope
log.debug(f"Waiting on all subactors to complete")
log.debug("Waiting on all subactors to complete")
anursery._join_procs.set()
# ria_nursery scope end
@ -293,4 +300,4 @@ async def open_nursery() -> typing.AsyncGenerator[ActorNursery, None]:
# ria_nursery scope end
log.debug(f"Nursery teardown complete")
log.debug("Nursery teardown complete")

View File

@ -47,7 +47,7 @@ def tractor_test(fn):
if platform.system() == "Windows":
start_method = 'spawn'
else:
start_method = 'trio_run_in_process'
start_method = 'trio'
if 'start_method' in inspect.signature(fn).parameters:
# set of subprocess spawning backends

View File

@ -0,0 +1,155 @@
"""
Infection apis for ``asyncio`` loops running ``trio`` using guest mode.
"""
import asyncio
import inspect
from typing import (
Callable,
AsyncGenerator,
Awaitable,
Union,
)
import trio
from .log import get_logger
from ._state import current_actor
log = get_logger(__name__)
__all__ = ['run_task', 'run_as_asyncio_guest']
async def _invoke(
from_trio: trio.abc.ReceiveChannel,
to_trio: asyncio.Queue,
coro: Awaitable,
) -> None:
"""Await or stream awaiable object based on ``coro`` type into
``trio`` memory channel.
``from_trio`` might eventually be used here for bidirectional streaming.
"""
if inspect.isasyncgen(coro):
async for item in coro:
to_trio.send_nowait(item)
elif inspect.iscoroutine(coro):
to_trio.send_nowait(await coro)
async def run_task(
func: Callable,
*,
qsize: int = 2**10,
_treat_as_stream: bool = False,
**kwargs,
) -> Union[AsyncGenerator, Awaitable]:
"""Run an ``asyncio`` async function or generator in a task, return
or stream the result back to ``trio``.
"""
assert current_actor().is_infected_aio()
# ITC (inter task comms)
from_trio = asyncio.Queue(qsize)
to_trio, from_aio = trio.open_memory_channel(qsize)
args = tuple(inspect.getfullargspec(func).args)
if getattr(func, '_tractor_stream_function', None):
# the assumption is that if the target async routine accepts the
# send channel then it intends to yield more than one return
# value; otherwise it would just return ;P
_treat_as_stream = True
# allow target func to accept/stream results manually by name
if 'to_trio' in args:
kwargs['to_trio'] = to_trio
if 'from_trio' in args:
kwargs['from_trio'] = from_trio
coro = func(**kwargs)
cancel_scope = trio.CancelScope()
# start the asyncio task we submitted from trio
# TODO: try out ``anyio`` asyncio based tg here
task = asyncio.create_task(_invoke(from_trio, to_trio, coro))
err = None
def cancel_trio(task):
"""Cancel the calling ``trio`` task on error.
"""
nonlocal err
err = task.exception()
cancel_scope.cancel()
task.add_done_callback(cancel_trio)
# async gen
if inspect.isasyncgen(coro) or _treat_as_stream:
async def result():
with cancel_scope:
async with from_aio:
async for item in from_aio:
yield item
if cancel_scope.cancelled_caught and err:
raise err
return result()
# simple async func
elif inspect.iscoroutine(coro):
with cancel_scope:
result = await from_aio.receive()
return result
if cancel_scope.cancelled_caught and err:
raise err
def run_as_asyncio_guest(
trio_main: Awaitable,
) -> None:
"""Entry for an "infected ``asyncio`` actor".
Uh, oh. :o
It looks like your event loop has caught a case of the ``trio``s.
:()
Don't worry, we've heard you'll barely notice. You might hallucinate
a few more propagating errors and feel like your digestion has
slowed, but if anything gets too bad your parents will know about
it.
:)
"""
async def aio_main(trio_main):
loop = asyncio.get_running_loop()
trio_done_fut = asyncio.Future()
def trio_done_callback(main_outcome):
log.info(f"trio_main finished: {main_outcome!r}")
trio_done_fut.set_result(main_outcome)
# start the infection: run trio on the asyncio loop in "guest mode"
log.info(f"Infecting asyncio process with {trio_main}")
trio.lowlevel.start_guest_run(
trio_main,
run_sync_soon_threadsafe=loop.call_soon_threadsafe,
done_callback=trio_done_callback,
)
(await trio_done_fut).unwrap()
# might as well if it's installed.
try:
import uvloop
loop = uvloop.new_event_loop()
asyncio.set_event_loop(loop)
except ImportError:
pass
asyncio.run(aio_main(trio_main))
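
A hedged usage sketch for the streaming path of `run_task()` above; it assumes the calling `trio` task lives inside an actor spawned with `infect_asyncio=True`, and the `aio_ticker` generator is illustrative only.

import asyncio

import tractor

async def aio_ticker(n: int = 3):
    # an ``asyncio`` async generator; ``_invoke()`` relays each item into
    # the ``trio`` memory channel
    for i in range(n):
        await asyncio.sleep(0)
        yield i

async def consume_in_trio():
    received = []
    # for async generators ``run_task()`` hands back an async iterator
    stream = await tractor.to_asyncio.run_task(aio_ticker, n=3)
    async for item in stream:
        received.append(item)
    return received
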