Compare commits
No commits in common. "051ea3f99dee0466ce152b38c9bf2bd833db6646" and "adeee08d037cb986d5991224d1cea8897cc298bd" have entirely different histories.
@@ -322,7 +322,6 @@ channel`_!
.. _async sandwich: https://trio.readthedocs.io/en/latest/tutorial.html#async-sandwich
.. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228
.. _3 axioms: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=162s
.. .. _3 axioms: https://en.wikipedia.org/wiki/Actor_model#Fundamental_concepts
.. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s
.. _trio gitter channel: https://gitter.im/python-trio/general
.. _matrix channel: https://matrix.to/#/!tractor:matrix.org

@@ -331,7 +330,7 @@ channel`_!
.. _messages: https://en.wikipedia.org/wiki/Message_passing
.. _trio docs: https://trio.readthedocs.io/en/latest/
.. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _structured concurrency: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/
.. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony
.. _async generators: https://www.python.org/dev/peps/pep-0525/
.. _trio-parallel: https://github.com/richardsheridan/trio-parallel

@@ -23,7 +23,6 @@ from ._exceptions import (
from ._debug import breakpoint, post_mortem
from . import msg
from ._root import run, run_daemon, open_root_actor
from ._portal import Portal


__all__ = [

@@ -41,7 +40,6 @@ __all__ = [
    'msg',
    'open_nursery',
    'open_root_actor',
    'Portal',
    'post_mortem',
    'run',
    'run_daemon',

@@ -574,7 +574,7 @@ class Actor:
        try:
            send_chan, recv_chan = self._cids2qs[(actorid, cid)]
        except KeyError:
            send_chan, recv_chan = trio.open_memory_channel(2*10)
            send_chan, recv_chan = trio.open_memory_channel(1000)
            send_chan.cid = cid  # type: ignore
            recv_chan.cid = cid  # type: ignore
            self._cids2qs[(actorid, cid)] = send_chan, recv_chan

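For context on the buffer-size values shown above (``2*10`` vs ``1000``): a minimal sketch, not part of this changeset, of how ``trio.open_memory_channel`` buffering behaves; the integer bounds how many sends may complete before a receiver drains the channel::

    import trio

    async def main():
        # a buffer of 2 lets two un-received sends succeed...
        send_chan, recv_chan = trio.open_memory_channel(2)
        send_chan.send_nowait('a')
        send_chan.send_nowait('b')
        try:
            send_chan.send_nowait('c')  # ...but a third overflows the buffer
        except trio.WouldBlock:
            print("buffer full; a real sender would `await send_chan.send(...)`")
        print(await recv_chan.receive())  # -> 'a'
        print(await recv_chan.receive())  # -> 'b'

    trio.run(main)
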
@@ -69,8 +69,6 @@ def _trio_main(
    """
    log.info(f"Started new trio process for {actor.uid}")

    log.info(f"Started new trio process for {actor.uid}")

    if actor.loglevel is not None:
        log.info(
            f"Setting loglevel for {actor.uid} to {actor.loglevel}")

@@ -229,7 +229,7 @@ def run(


def run_daemon(
    enable_modules: List[str],
    rpc_module_paths: List[str],
    **kwargs
) -> None:
    """Spawn daemon actor which will respond to RPC.

@@ -238,9 +238,9 @@ def run_daemon(
    ``tractor.run(trio.sleep(float('inf')))`` such that the first actor spawned
    is meant to run forever responding to RPC requests.
    """
    kwargs['enable_modules'] = list(enable_modules)
    kwargs['rpc_module_paths'] = list(rpc_module_paths)

    for path in enable_modules:
    for path in rpc_module_paths:
        importlib.import_module(path)

    return run(partial(trio.sleep, float('inf')), **kwargs)

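A hedged usage sketch (not from this diff) of the ``run_daemon`` entry point documented above; ``'mypkg.rpc_mod'`` is a hypothetical module exposing RPC-callable functions, and the keyword is spelled ``enable_modules`` or ``rpc_module_paths`` depending on which side of this change is installed::

    import tractor

    if __name__ == '__main__':
        # blocks forever, serving RPC requests for the listed modules
        tractor.run_daemon(enable_modules=['mypkg.rpc_mod'])
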
@@ -366,7 +366,6 @@ async def new_proc(
        bind_addr=bind_addr,
        parent_addr=parent_addr,
        _runtime_vars=_runtime_vars,
        infect_asyncio=infect_asyncio,
        task_status=task_status,
    )

@@ -382,7 +381,6 @@ async def mp_new_proc(
    parent_addr: Tuple[str, int],
    _runtime_vars: Dict[str, Any],  # serialized and sent to _child
    *,
    infect_asyncio: bool = False,
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED

) -> None:

@@ -62,7 +62,6 @@ class ActorNursery:
        loglevel: str = None,  # set log level per subactor
        nursery: trio.Nursery = None,
        infect_asyncio: bool = False,
        debug_mode: Optional[bool] = None,
    ) -> Portal:
        loglevel = loglevel or self._actor.loglevel or get_loglevel()

@@ -70,10 +69,6 @@ class ActorNursery:
        _rtv = _state._runtime_vars.copy()
        _rtv['_is_root'] = False

        # allow setting debug policy per actor
        if debug_mode is not None:
            _rtv['_debug_mode'] = debug_mode

        enable_modules = enable_modules or []

        if rpc_module_paths:

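For orientation, a hedged sketch (not part of the diff) of how the per-actor ``loglevel``/``debug_mode`` parameters above might be used, assuming they belong to ``ActorNursery.start_actor()``; the ``'worker'`` name is hypothetical::

    import tractor

    async def main():
        async with tractor.open_nursery() as n:
            portal = await n.start_actor(
                'worker',                   # hypothetical subactor name
                enable_modules=[__name__],  # or `rpc_module_paths=` on the older side
                loglevel='info',
                debug_mode=True,            # per-actor debug policy per this change
            )
            # ... interact with the subactor via `portal` ...
            await portal.cancel_actor()

    if __name__ == '__main__':
        tractor.run(main)
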
@@ -43,81 +43,6 @@ async def consume_asyncgen(
        to_trio.send_nowait(item)


def _run_asyncio_task(
    func: Callable,
    *,
    qsize: int = 1,
    _treat_as_stream: bool = False,
    **kwargs,
) -> Any:
    """Run an ``asyncio`` async function or generator in a task, return
    or stream the result back to ``trio``.

    """
    assert current_actor().is_infected_aio()

    # ITC (inter task comms)
    from_trio = asyncio.Queue(qsize)  # type: ignore
    to_trio, from_aio = trio.open_memory_channel(qsize)  # type: ignore

    from_aio._err = None

    args = tuple(inspect.getfullargspec(func).args)

    if getattr(func, '_tractor_steam_function', None):
        # the assumption is that the target async routine accepts the
        # send channel then it intends to yield more then one return
        # value otherwise it would just return ;P
        # _treat_as_stream = True
        assert qsize > 1

    # allow target func to accept/stream results manually by name
    if 'to_trio' in args:
        kwargs['to_trio'] = to_trio

    if 'from_trio' in args:
        kwargs['from_trio'] = from_trio

    # if 'from_aio' in args:
    #     kwargs['from_aio'] = from_aio

    coro = func(**kwargs)

    # cancel_scope = trio.CancelScope()

    # start the asyncio task we submitted from trio
    if inspect.isawaitable(coro):
        task = asyncio.create_task(run_coro(to_trio, coro))

    elif inspect.isasyncgen(coro):
        task = asyncio.create_task(consume_asyncgen(to_trio, coro))

    else:
        raise TypeError(f"No support for invoking {coro}")

    aio_err = None

    def cancel_trio(task):
        """Cancel the calling ``trio`` task on error.
        """
        nonlocal aio_err
        try:
            aio_err = task.exception()
        except asyncio.CancelledError as cerr:
            aio_err = cerr

        if aio_err:
            log.exception(f"asyncio task errorred:\n{aio_err}")

        # cancel_scope.cancel()
        from_aio._err = aio_err
        to_trio.close()

    task.add_done_callback(cancel_trio)

    return task, from_aio, to_trio


async def run_task(
    func: Callable,
    *,

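A hedged sketch (not from the changeset) of driving ``run_task`` from the ``trio`` side, per its docstring above; the module path ``tractor.to_asyncio`` and the target coroutine are assumptions, and the calling actor must already be running on an ``asyncio`` loop (note the ``is_infected_aio()`` assert)::

    import asyncio
    from tractor import to_asyncio  # assumed module housing run_task

    async def aio_add(x, y):
        # plain asyncio coroutine executed on the asyncio side
        await asyncio.sleep(0.1)
        return x + y

    async def trio_side():
        # extra kwargs are forwarded to the target (`coro = func(**kwargs)`)
        result = await to_asyncio.run_task(aio_add, x=1, y=2)
        assert result == 3
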
@@ -127,138 +52,97 @@ async def run_task(
) -> Any:
"""Run an ``asyncio`` async function or generator in a task, return
or stream the result back to ``trio``.

"""
# assert current_actor().is_infected_aio()
assert current_actor().is_infected_aio()

# # ITC (inter task comms)
# from_trio = asyncio.Queue(qsize)  # type: ignore
# to_trio, from_aio = trio.open_memory_channel(qsize)  # type: ignore
# ITC (inter task comms)
from_trio = asyncio.Queue(qsize)  # type: ignore
to_trio, from_aio = trio.open_memory_channel(qsize)  # type: ignore

# args = tuple(inspect.getfullargspec(func).args)
args = tuple(inspect.getfullargspec(func).args)

# if getattr(func, '_tractor_steam_function', None):
# # the assumption is that the target async routine accepts the
# # send channel then it intends to yield more then one return
# # value otherwise it would just return ;P
# _treat_as_stream = True
if getattr(func, '_tractor_steam_function', None):
# the assumption is that the target async routine accepts the
# send channel then it intends to yield more then one return
# value otherwise it would just return ;P
_treat_as_stream = True

# # allow target func to accept/stream results manually by name
# if 'to_trio' in args:
# kwargs['to_trio'] = to_trio
# if 'from_trio' in args:
# kwargs['from_trio'] = from_trio
# allow target func to accept/stream results manually by name
if 'to_trio' in args:
kwargs['to_trio'] = to_trio
if 'from_trio' in args:
kwargs['from_trio'] = from_trio

# coro = func(**kwargs)
coro = func(**kwargs)

# cancel_scope = trio.CancelScope()
cancel_scope = trio.CancelScope()

# # start the asyncio task we submitted from trio
# if inspect.isawaitable(coro):
# task = asyncio.create_task(run_coro(to_trio, coro))
# start the asyncio task we submitted from trio
if inspect.isawaitable(coro):
task = asyncio.create_task(run_coro(to_trio, coro))
elif inspect.isasyncgen(coro):
task = asyncio.create_task(consume_asyncgen(to_trio, coro))
else:
raise TypeError(f"No support for invoking {coro}")

# elif inspect.isasyncgen(coro):
# task = asyncio.create_task(consume_asyncgen(to_trio, coro))
aio_err = None

# else:
# raise TypeError(f"No support for invoking {coro}")
def cancel_trio(task):
"""Cancel the calling ``trio`` task on error.
"""
nonlocal aio_err
aio_err = task.exception()

# aio_err = None
if aio_err:
log.exception(f"asyncio task errorred:\n{aio_err}")

# def cancel_trio(task):
# """Cancel the calling ``trio`` task on error.
# """
# nonlocal aio_err
# aio_err = task.exception()
cancel_scope.cancel()

# if aio_err:
# log.exception(f"asyncio task errorred:\n{aio_err}")

# cancel_scope.cancel()

# task.add_done_callback(cancel_trio)
task.add_done_callback(cancel_trio)

# async iterator
# if inspect.isasyncgen(coro) or _treat_as_stream:
if inspect.isasyncgen(coro) or _treat_as_stream:

# if inspect.isasyncgenfunction(meth) or :
if _treat_as_stream:
async def stream_results():
try:
with cancel_scope:
# stream values upward
async with from_aio:
async for item in from_aio:
yield item

task, from_aio, to_trio = _run_asyncio_task(
func,
qsize=2**8,
**kwargs,
)
if cancel_scope.cancelled_caught:
# always raise from any captured asyncio error
if aio_err:
raise aio_err

return from_aio
except BaseException as err:
if aio_err is not None:
# always raise from any captured asyncio error
raise err from aio_err
else:
raise

# async def stream_results():
# try:
# with cancel_scope:
# # stream values upward
# async with from_aio:
# async for item in from_aio:
# yield item

# if cancel_scope.cancelled_caught:
# # always raise from any captured asyncio error
# if aio_err:
# raise aio_err

# except BaseException as err:
# if aio_err is not None:
# # always raise from any captured asyncio error
# raise err from aio_err
# else:
# raise
# finally:
# # breakpoint()
# task.cancel()

# return stream_results()
return stream_results()

# simple async func
try:
task, from_aio, to_trio = _run_asyncio_task(
func,
qsize=1,
**kwargs,
)

# with cancel_scope:
# async with from_aio:
with cancel_scope:
# return single value
return await from_aio.receive()
return await from_aio.receive()

# if cancel_scope.cancelled_caught:
# # always raise from any captured asyncio error
# if aio_err:
# raise aio_err
if cancel_scope.cancelled_caught:
# always raise from any captured asyncio error
if aio_err:
raise aio_err

# Do we need this?
except Exception as err:
# await tractor.breakpoint()
aio_err = from_aio._err

# try:
except BaseException as err:
if aio_err is not None:
# always raise from any captured asyncio error
raise err from aio_err
else:
raise
# finally:
#     if not task.done():
#         task.cancel()

except trio.Cancelled:
if not task.done():
task.cancel()

raise


# async def stream_from_task
# pass


def run_as_asyncio_guest(