forked from goodboy/tractor
Use built-ins for all data-structure-type annotations
parent a113e22bb9
commit 10eeda2d2b
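This change swaps the ``typing`` module's generic aliases (``Dict``, ``List``, ``Set``, ``Tuple``) for the subscriptable built-in containers standardized by PEP 585 (Python 3.9+, or on earlier interpreters via ``from __future__ import annotations``). A minimal before/after sketch of the pattern; the function and argument names here are illustrative and not taken from the diff:

    from __future__ import annotations  # only needed on Python < 3.9

    # before: typing-module aliases
    # from typing import Dict, List, Tuple
    # def tally(scores: Dict[str, int], order: List[str]) -> Tuple[str, int]: ...

    # after: the built-ins themselves are used as generic annotations
    def tally(scores: dict[str, int], order: list[str]) -> tuple[str, int]:
        # return the first-ranked name along with its score
        name = order[0]
        return name, scores[name]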
@@ -396,7 +396,7 @@ tasks spawned via multiple RPC calls to an actor can modify
 # a per process cache
-_actor_cache: Dict[str, bool] = {}
+_actor_cache: dict[str, bool] = {}


 def ping_endpoints(endpoints: List[str]):
@@ -9,7 +9,7 @@ is ``tractor``'s channels.
 """
 from contextlib import asynccontextmanager
-from typing import List, Callable
+from typing import Callable
 import itertools
 import math
 import time
@@ -71,8 +71,8 @@ async def worker_pool(workers=4):
 async def _map(
 worker_func: Callable[[int], bool],
-sequence: List[int]
-) -> List[bool]:
+sequence: list[int]
+) -> list[bool]:

 # define an async (local) task to collect results from workers
 async def send_result(func, value, portal):
@@ -5,7 +5,6 @@ Advanced streaming patterns using bidirectional streams and contexts.
 from collections import Counter
 import itertools
 import platform
-from typing import Set, Dict, List

 import trio
 import tractor
@@ -15,7 +14,7 @@ def is_win():
 return platform.system() == 'Windows'


-_registry: Dict[str, Set[tractor.ReceiveMsgStream]] = {
+_registry: dict[str, set[tractor.ReceiveMsgStream]] = {
 'even': set(),
 'odd': set(),
 }
@@ -77,7 +76,7 @@ async def subscribe(
 async def consumer(

-subs: List[str],
+subs: list[str],

 ) -> None:
@@ -1,7 +1,7 @@
 """
 Spawning basics
 """
-from typing import Dict, Tuple, Optional
+from typing import Optional

 import pytest
 import trio
@@ -14,8 +14,8 @@ data_to_pass_down = {'doggy': 10, 'kitty': 4}
 async def spawn(
 is_arbiter: bool,
-data: Dict,
-arb_addr: Tuple[str, int],
+data: dict,
+arb_addr: tuple[str, int],
 ):
 namespaces = [__name__]
@@ -6,7 +6,7 @@ from contextlib import asynccontextmanager
 from functools import partial
 from itertools import cycle
 import time
-from typing import Optional, List, Tuple
+from typing import Optional

 import pytest
 import trio
@@ -62,8 +62,8 @@ async def ensure_sequence(
 @asynccontextmanager
 async def open_sequence_streamer(

-sequence: List[int],
-arb_addr: Tuple[str, int],
+sequence: list[int],
+arb_addr: tuple[str, int],
 start_method: str,

 ) -> tractor.MsgStream:
@@ -25,7 +25,6 @@ import signal
 from functools import partial
 from contextlib import asynccontextmanager as acm
 from typing import (
-    Tuple,
     Optional,
     Callable,
     AsyncIterator,
@@ -74,7 +73,7 @@ class Lock:
 local_task_in_debug: Optional[str] = None

 # actor tree-wide actor uid that supposedly has the tty lock
-global_actor_in_debug: Optional[Tuple[str, str]] = None
+global_actor_in_debug: Optional[tuple[str, str]] = None

 local_pdb_complete: Optional[trio.Event] = None
 no_remote_has_tty: Optional[trio.Event] = None
@@ -172,7 +171,7 @@ class MultiActorPdb(pdbpp.Pdb):
 @acm
 async def _acquire_debug_lock_from_root_task(
-uid: Tuple[str, str]
+uid: tuple[str, str]

 ) -> AsyncIterator[trio.StrictFIFOLock]:
 '''
@@ -252,7 +251,7 @@ async def _acquire_debug_lock_from_root_task(
 async def lock_tty_for_child(

 ctx: tractor.Context,
-subactor_uid: Tuple[str, str]
+subactor_uid: tuple[str, str]

 ) -> str:
 '''
@@ -302,7 +301,7 @@ async def lock_tty_for_child(


 async def wait_for_parent_stdin_hijack(
-actor_uid: Tuple[str, str],
+actor_uid: tuple[str, str],
 task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED
 ):
 '''
@@ -732,7 +731,7 @@ async def _maybe_enter_pm(err):
 @acm
 async def acquire_debug_lock(
-subactor_uid: Tuple[str, str],
+subactor_uid: tuple[str, str],
 ) -> AsyncGenerator[None, tuple]:
 '''
 Grab root's debug lock on entry, release on exit.
@@ -18,7 +18,11 @@
 Actor discovery API.

 """
-from typing import Tuple, Optional, Union, AsyncGenerator
+from typing import (
+    Optional,
+    Union,
+    AsyncGenerator,
+)
 from contextlib import asynccontextmanager as acm

 from ._ipc import _connect_chan, Channel
@@ -104,7 +108,7 @@ async def query_actor(
 @acm
 async def find_actor(
 name: str,
-arbiter_sockaddr: Tuple[str, int] = None
+arbiter_sockaddr: tuple[str, int] = None

 ) -> AsyncGenerator[Optional[Portal], None]:
 '''
@@ -130,7 +134,7 @@ async def find_actor(
 @acm
 async def wait_for_actor(
 name: str,
-arbiter_sockaddr: Tuple[str, int] = None
+arbiter_sockaddr: tuple[str, int] = None
 ) -> AsyncGenerator[Portal, None]:
 """Wait on an actor to register with the arbiter.
@@ -19,7 +19,7 @@ Sub-process entry points.

 """
 from functools import partial
-from typing import Tuple, Any
+from typing import Any

 import trio  # type: ignore
@@ -35,10 +35,10 @@ log = get_logger(__name__)
 def _mp_main(

 actor: 'Actor',  # type: ignore
-accept_addr: Tuple[str, int],
-forkserver_info: Tuple[Any, Any, Any, Any, Any],
+accept_addr: tuple[str, int],
+forkserver_info: tuple[Any, Any, Any, Any, Any],
 start_method: str,
-parent_addr: Tuple[str, int] = None,
+parent_addr: tuple[str, int] = None,
 infect_asyncio: bool = False,

 ) -> None:
@@ -85,7 +85,7 @@ def _trio_main(

 actor: Actor,  # type: ignore
 *,
-parent_addr: Tuple[str, int] = None,
+parent_addr: tuple[str, int] = None,
 infect_asyncio: bool = False,

 ) -> None:
@@ -18,7 +18,11 @@
 Our classy exception set.

 """
-from typing import Dict, Any, Optional, Type
+from typing import (
+    Any,
+    Optional,
+    Type,
+)
 import importlib
 import builtins
 import traceback
@@ -95,7 +99,7 @@ def pack_error(
 exc: BaseException,
 tb=None,

-) -> Dict[str, Any]:
+) -> dict[str, Any]:
 """Create an "error message" for tranmission over
 a channel (aka the wire).
 """
@@ -114,7 +118,7 @@ def pack_error(
 def unpack_error(

-msg: Dict[str, Any],
+msg: dict[str, Any],
 chan=None,
 err_type=RemoteActorError

@@ -18,7 +18,10 @@
 Per process state

 """
-from typing import Optional, Dict, Any
+from typing import (
+    Optional,
+    Any,
+)
 from collections.abc import Mapping

 import trio
@@ -27,7 +30,7 @@ from ._exceptions import NoRuntime


 _current_actor: Optional['Actor'] = None  # type: ignore # noqa
-_runtime_vars: Dict[str, Any] = {
+_runtime_vars: dict[str, Any] = {
 '_debug_mode': False,
 '_is_root': False,
 '_root_mailbox': (None, None)
@@ -23,8 +23,10 @@ import inspect
 from contextlib import asynccontextmanager
 from dataclasses import dataclass
 from typing import (
-    Any, Optional, Callable,
-    AsyncGenerator, Dict,
+    Any,
+    Optional,
+    Callable,
+    AsyncGenerator,
     AsyncIterator
 )

@@ -393,7 +395,7 @@ class Context:
 async def _maybe_raise_from_remote_msg(
 self,
-msg: Dict[str, Any],
+msg: dict[str, Any],

 ) -> None:
 '''
@@ -20,7 +20,10 @@
 """
 from functools import partial
 import inspect
-from typing import Tuple, List, Dict, Optional, TYPE_CHECKING
+from typing import (
+    Optional,
+    TYPE_CHECKING,
+)
 import typing
 import warnings

@@ -43,7 +46,7 @@ if TYPE_CHECKING:
 log = get_logger(__name__)

-_default_bind_addr: Tuple[str, int] = ('127.0.0.1', 0)
+_default_bind_addr: tuple[str, int] = ('127.0.0.1', 0)


 class ActorNursery:
@@ -79,15 +82,15 @@ class ActorNursery:
 actor: Actor,
 ria_nursery: trio.Nursery,
 da_nursery: trio.Nursery,
-errors: Dict[Tuple[str, str], Exception],
+errors: dict[tuple[str, str], Exception],
 ) -> None:
 # self.supervisor = supervisor  # TODO
 self._actor: Actor = actor
 self._ria_nursery = ria_nursery
 self._da_nursery = da_nursery
-self._children: Dict[
-    Tuple[str, str],
-    Tuple[Actor, mp.Process, Optional[Portal]]
+self._children: dict[
+    tuple[str, str],
+    tuple[Actor, mp.Process, Optional[Portal]]
 ] = {}
 # portals spawned with ``run_in_actor()`` are
 # cancelled when their "main" result arrives
@@ -102,9 +105,9 @@ class ActorNursery:
 self,
 name: str,
 *,
-bind_addr: Tuple[str, int] = _default_bind_addr,
-rpc_module_paths: List[str] = None,
-enable_modules: List[str] = None,
+bind_addr: tuple[str, int] = _default_bind_addr,
+rpc_module_paths: list[str] = None,
+enable_modules: list[str] = None,
 loglevel: str = None,  # set log level per subactor
 nursery: trio.Nursery = None,
 debug_mode: Optional[bool] = None,
@@ -173,9 +176,9 @@ class ActorNursery:
 *,

 name: Optional[str] = None,
-bind_addr: Tuple[str, int] = _default_bind_addr,
-rpc_module_paths: Optional[List[str]] = None,
-enable_modules: List[str] = None,
+bind_addr: tuple[str, int] = _default_bind_addr,
+rpc_module_paths: Optional[list[str]] = None,
+enable_modules: list[str] = None,
 loglevel: str = None,  # set log level per subactor
 infect_asyncio: bool = False,
@@ -293,7 +296,7 @@ async def _open_and_supervise_one_cancels_all_nursery(
 ) -> typing.AsyncGenerator[ActorNursery, None]:

 # the collection of errors retreived from spawned sub-actors
-errors: Dict[Tuple[str, str], Exception] = {}
+errors: dict[tuple[str, str], Exception] = {}

 # This is the outermost level "deamon actor" nursery. It is awaited
 # **after** the below inner "run in actor nursery". This allows for
@@ -26,7 +26,10 @@ support provided by ``tractor.Context.open_stream()`` and friends.
 from __future__ import annotations
 import inspect
 import typing
-from typing import Dict, Any, Set, Callable, List, Tuple
+from typing import (
+    Any,
+    Callable,
+)
 from functools import partial
 from async_generator import aclosing

@@ -44,7 +47,7 @@ log = get_logger('messaging')
 async def fan_out_to_ctxs(
 pub_async_gen_func: typing.Callable,  # it's an async gen ... gd mypy
-topics2ctxs: Dict[str, list],
+topics2ctxs: dict[str, list],
 packetizer: typing.Callable = None,
 ) -> None:
 '''
@@ -61,7 +64,7 @@ async def fan_out_to_ctxs(

 async for published in pub_gen:

-ctx_payloads: List[Tuple[Context, Any]] = []
+ctx_payloads: list[tuple[Context, Any]] = []

 for topic, data in published.items():
 log.debug(f"publishing {topic, data}")
@@ -103,8 +106,8 @@ async def fan_out_to_ctxs(
 def modify_subs(

-topics2ctxs: Dict[str, List[Context]],
-topics: Set[str],
+topics2ctxs: dict[str, list[Context]],
+topics: set[str],
 ctx: Context,

 ) -> None:
|
|||
topics2ctxs.pop(topic)
|
||||
|
||||
|
||||
_pub_state: Dict[str, dict] = {}
|
||||
_pubtask2lock: Dict[str, trio.StrictFIFOLock] = {}
|
||||
_pub_state: dict[str, dict] = {}
|
||||
_pubtask2lock: dict[str, trio.StrictFIFOLock] = {}
|
||||
|
||||
|
||||
def pub(
|
||||
wrapped: typing.Callable = None,
|
||||
*,
|
||||
tasks: Set[str] = set(),
|
||||
tasks: set[str] = set(),
|
||||
):
|
||||
"""Publisher async generator decorator.
|
||||
|
||||
A publisher can be called multiple times from different actors but
|
||||
will only spawn a finite set of internal tasks to stream values to
|
||||
each caller. The ``tasks: Set[str]`` argument to the decorator
|
||||
each caller. The ``tasks: set[str]`` argument to the decorator
|
||||
specifies the names of the mutex set of publisher tasks. When the
|
||||
publisher function is called, an argument ``task_name`` must be
|
||||
passed to specify which task (of the set named in ``tasks``) should
|
||||
|
@@ -158,9 +161,9 @@ def pub(
 necessary.

 Values yielded from the decorated async generator must be
-``Dict[str, Dict[str, Any]]`` where the fist level key is the topic
+``dict[str, dict[str, Any]]`` where the fist level key is the topic
 string and determines which subscription the packet will be
-delivered to and the value is a packet ``Dict[str, Any]`` by default
+delivered to and the value is a packet ``dict[str, Any]`` by default
 of the form:

 .. ::python
@@ -186,7 +189,7 @@ def pub(

 The publisher must be called passing in the following arguments:
-- ``topics: Set[str]`` the topic sequence or "subscriptions"
+- ``topics: set[str]`` the topic sequence or "subscriptions"
 - ``task_name: str`` the task to use (if ``tasks`` was passed)
 - ``ctx: Context`` the tractor context (only needed if calling the
 pub func without a nursery, otherwise this is provided implicitly)
@@ -231,7 +234,7 @@ def pub(
 if wrapped is None:
 return partial(pub, tasks=tasks)

-task2lock: Dict[str, trio.StrictFIFOLock] = {}
+task2lock: dict[str, trio.StrictFIFOLock] = {}

 for name in tasks:
 task2lock[name] = trio.StrictFIFOLock()
|
|||
# `wrapt` docs
|
||||
async def _execute(
|
||||
ctx: Context,
|
||||
topics: Set[str],
|
||||
topics: set[str],
|
||||
*args,
|
||||
# *,
|
||||
task_name: str = None, # default: only one task allocated
|
||||
|
|
|
@@ -102,6 +102,8 @@ async def gather_contexts(
 # deliver control once all managers have started up
 await all_entered.wait()

+# NOTE: order *should* be preserved in the output values
+# since ``dict``s are now implicitly ordered.
 yield tuple(unwrapped.values())

 # we don't need a try/finally since cancellation will be triggered
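The two comment lines added to ``gather_contexts()`` lean on ``dict`` preserving insertion order (guaranteed by the language since Python 3.7), so the yielded tuple of values keeps the order in which the managers were entered. A tiny illustrative check with hypothetical values, not taken from the library:

    # entries come back from .values() in insertion order
    unwrapped = {}
    unwrapped['first'] = 1
    unwrapped['second'] = 2
    unwrapped['third'] = 3
    assert tuple(unwrapped.values()) == (1, 2, 3)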