Compare commits
No commits in common. "main" and "py313_support" have entirely different histories.
@@ -8,70 +8,46 @@ on:
   workflow_dispatch:

 jobs:

-  # ------ sdist ------
-
+  mypy:
+    name: 'MyPy'
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Setup python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.11'
+
+      - name: Install dependencies
+        run: pip install -U . --upgrade-strategy eager -r requirements-test.txt
+
+      - name: Run MyPy check
+        run: mypy tractor/ --ignore-missing-imports --show-traceback
+
   # test that we can generate a software distribution and install it
   # thus avoid missing file issues after packaging.
-  #
-  # -[x] produce sdist with uv
-  # ------ - ------
   sdist-linux:
     name: 'sdist'
     runs-on: ubuntu-latest

     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2

-      - name: Install latest uv
-        uses: astral-sh/setup-uv@v6
+      - name: Setup python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.11'

-      - name: Build sdist as tar.gz
-        run: uv build --sdist --python=3.13
+      - name: Build sdist
+        run: python setup.py sdist --formats=zip

-      - name: Install sdist from .tar.gz
-        run: python -m pip install dist/*.tar.gz
-
-  # ------ type-check ------
-  # mypy:
-  #   name: 'MyPy'
-  #   runs-on: ubuntu-latest
-
-  #   steps:
-  #     - name: Checkout
-  #       uses: actions/checkout@v4
-
-  #     - name: Install latest uv
-  #       uses: astral-sh/setup-uv@v6
-
-  #     # faster due to server caching?
-  #     # https://docs.astral.sh/uv/guides/integration/github/#setting-up-python
-  #     - name: "Set up Python"
-  #       uses: actions/setup-python@v6
-  #       with:
-  #         python-version-file: "pyproject.toml"
-
-  #     # w uv
-  #     # - name: Set up Python
-  #     #   run: uv python install
-
-  #     - name: Setup uv venv
-  #       run: uv venv .venv --python=3.13
-
-  #     - name: Install
-  #       run: uv sync --dev
-
-  #     # TODO, ty cmd over repo
-  #     # - name: type check with ty
-  #     #   run: ty ./tractor/
-
-  #     # - uses: actions/cache@v3
-  #     #   name: Cache uv virtenv as default .venv
-  #     #   with:
-  #     #     path: ./.venv
-  #     #     key: venv-${{ hashFiles('uv.lock') }}
-
-  #     - name: Run MyPy check
-  #       run: mypy tractor/ --ignore-missing-imports --show-traceback
+      - name: Install sdist from .zips
+        run: python -m pip install dist/*.zip


   testing-linux:

@@ -83,45 +59,32 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.13']
+        python: ['3.11']
         spawn_backend: [
           'trio',
-          # 'mp_spawn',
-          # 'mp_forkserver',
+          'mp_spawn',
+          'mp_forkserver',
         ]

     steps:

-      - uses: actions/checkout@v4
+      - name: Checkout
+        uses: actions/checkout@v2

-      - name: 'Install uv + py-${{ matrix.python-version }}'
-        uses: astral-sh/setup-uv@v6
+      - name: Setup python
+        uses: actions/setup-python@v2
         with:
-          python-version: ${{ matrix.python-version }}
+          python-version: '${{ matrix.python }}'

-      # GH way.. faster?
-      # - name: setup-python@v6
-      #   uses: actions/setup-python@v6
-      #   with:
-      #     python-version: '${{ matrix.python-version }}'
+      - name: Install dependencies
+        run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager

-      # consider caching for speedups?
-      # https://docs.astral.sh/uv/guides/integration/github/#caching
-
-      - name: Install the project w uv
-        run: uv sync --all-extras --dev
-
-      # - name: Install dependencies
-      #   run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager
-
-      - name: List deps tree
-        run: uv tree
+      - name: List dependencies
+        run: pip list

       - name: Run tests
-        run: uv run pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx
+        run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx

 # XXX legacy NOTE XXX
 #
 # We skip 3.10 on windows for now due to not having any collabs to
 # debug the CI failures. Anyone wanting to hack and solve them is very
 # welcome, but our primary user base is not using that OS.
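For reference, the MyPy step this branch (re)introduces can be reproduced outside CI; a hedged sketch using mypy's programmatic API, assuming `mypy` is installed in the active environment:

    # run the same check as the workflow's `Run MyPy check` step
    from mypy import api

    # api.run() returns (stdout_report, stderr_report, exit_status)
    report, errors, status = api.run([
        'tractor/',
        '--ignore-missing-imports',
        '--show-traceback',
    ])
    print(report or errors)
    assert status == 0, 'mypy found type errors'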
default.nix

@@ -1,19 +0,0 @@
-{ pkgs ? import <nixpkgs> {} }:
-let
-  nativeBuildInputs = with pkgs; [
-    stdenv.cc.cc.lib
-    uv
-  ];
-
-in
-pkgs.mkShell {
-  inherit nativeBuildInputs;
-
-  LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath nativeBuildInputs;
-  TMPDIR = "/tmp";
-
-  shellHook = ''
-    set -e
-    uv venv .venv --python=3.12
-  '';
-}
@@ -1,5 +1,8 @@
 |logo| ``tractor``: distributed structured concurrency

+|gh_actions|
+|docs|
+
 ``tractor`` is a `structured concurrency`_ (SC), multi-processing_ runtime built on trio_.

 Fundamentally, ``tractor`` provides parallelism via

@@ -63,13 +66,6 @@ Features
 - (WIP) a ``TaskMngr``: one-cancels-one style nursery supervisor.


-Status of `main` / infra
-------------------------
-
-- |gh_actions|
-- |docs|
-

 Install
 -------
 ``tractor`` is still in an *alpha-near-beta-stage* for many

@@ -693,11 +689,9 @@ channel`_!
 .. _msgspec: https://jcristharif.com/msgspec/
 .. _guest: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops

-..
-    NOTE, on generating badge links from the UI
-    https://docs.github.com/en/actions/how-tos/monitoring-and-troubleshooting-workflows/monitoring-workflows/adding-a-workflow-status-badge?ref=gitguardian-blog-automated-secrets-detection#using-the-ui
-.. |gh_actions| image:: https://github.com/goodboy/tractor/actions/workflows/ci.yml/badge.svg?branch=main
-    :target: https://github.com/goodboy/tractor/actions/workflows/ci.yml
+.. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fgoodboy%2Ftractor%2Fbadge&style=popout-square
+    :target: https://actions-badge.atrox.dev/goodboy/tractor/goto

 .. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest
     :target: https://tractor.readthedocs.io/en/latest/?badge=latest
@@ -120,7 +120,6 @@ async def main(
     break_parent_ipc_after: int|bool = False,
     break_child_ipc_after: int|bool = False,
     pre_close: bool = False,
-    tpt_proto: str = 'tcp',

 ) -> None:

@@ -132,7 +131,6 @@ async def main(
             # a hang since it never engages due to broken IPC
             debug_mode=debug_mode,
             loglevel=loglevel,
-            enable_transports=[tpt_proto],

         ) as an,
     ):

@@ -147,8 +145,7 @@ async def main(
         _testing.expect_ctxc(
             yay=(
                 break_parent_ipc_after
-                or
-                break_child_ipc_after
+                or break_child_ipc_after
             ),
             # TODO: we CAN'T remove this right?
             # since we need the ctxc to bubble up from either
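The kwargs trimmed above (`break_parent_ipc_after`, `break_child_ipc_after`, `pre_close`) are driven by parametrized tests; a hedged sketch of that combo-style parametrization (hypothetical values and ids, mirroring how `test_advanced_faults` feeds this example):

    import pytest

    # hypothetical combos mirroring the example's break-after kwargs
    @pytest.mark.parametrize(
        'ipc_break',
        [
            dict(break_parent_ipc_after=False, break_child_ipc_after=False),
            dict(break_parent_ipc_after=500, break_child_ipc_after=False),
            dict(break_parent_ipc_after=False, break_child_ipc_after=500),
        ],
        ids=lambda d: f"parent={d['break_parent_ipc_after']},child={d['break_child_ipc_after']}",
    )
    def test_break_combo_shape(ipc_break: dict):
        # sanity: each combo breaks at most one side of the IPC channel
        assert not (
            ipc_break['break_parent_ipc_after']
            and ipc_break['break_child_ipc_after']
        )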
@@ -4,11 +4,6 @@ import sys
 import trio
 import tractor

-# ensure mod-path is correct!
-from tractor.devx._debug import (
-    _sync_pause_from_builtin as _sync_pause_from_builtin,
-)
-

 async def main() -> None:

@@ -18,7 +13,6 @@ async def main() -> None:

     async with tractor.open_nursery(
         debug_mode=True,
-        loglevel='devx',
     ) as an:
         assert an
         assert (
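For context, a minimal runnable distillation of this debugging example (hedged: the real script asserts more internals); with `debug_mode=True` engaged, the builtin `breakpoint()` is expected to be forwarded to `tractor`'s multi-process debugger REPL rather than plain `pdb`:

    import trio
    import tractor


    async def main() -> None:
        async with tractor.open_nursery(
            debug_mode=True,
        ) as an:
            assert an
            # should drop into tractor's process-tree-aware REPL
            breakpoint()


    if __name__ == '__main__':
        trio.run(main)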
@@ -24,9 +24,10 @@ async def spawn_until(depth=0):


 async def main():
-    '''
-    The process tree should look as approximately as follows when the
-    debugger first engages:
+    """The main ``tractor`` routine.
+
+    The process tree should look as approximately as follows when the debugger
+    first engages:

     python examples/debugging/multi_nested_subactors_bp_forever.py
     ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...)

@@ -36,11 +37,10 @@ async def main():
        └─ python -m tractor._child --uid ('spawner0', '1d42012b ...)
           └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...)

-    '''
+    """
     async with tractor.open_nursery(
         debug_mode=True,
-        loglevel='devx',
-        enable_transports=['uds'],
+        loglevel='warning'
     ) as n:

         # spawn both actors
@@ -37,7 +37,6 @@ async def main(
         enable_stack_on_sig=True,
         # maybe_enable_greenback=False,
         loglevel='devx',
-        enable_transports=['uds'],
     ) as an,
 ):
     ptl: tractor.Portal = await an.start_actor(
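A hedged, stripped-down sketch of this example's runtime entry (assuming `enable_stack_on_sig` is an `open_nursery()` kwarg here, as the fragment's layout suggests; the `'sleeper'` actor name is hypothetical):

    import trio
    import tractor


    async def main() -> None:
        async with tractor.open_nursery(
            debug_mode=True,
            enable_stack_on_sig=True,  # SIGUSR1 stack dumps via `stackscope`
            loglevel='devx',
        ) as an:
            ptl: tractor.Portal = await an.start_actor(
                'sleeper',
                enable_modules=[__name__],
            )
            await ptl.cancel_actor()


    trio.run(main)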
@@ -9,7 +9,7 @@ async def main(service_name):

     async with tractor.open_nursery() as an:
         await an.start_actor(service_name)

-        async with tractor.get_registry() as portal:
+        async with tractor.get_registry('127.0.0.1', 1616) as portal:
             print(f"Arbiter is listening on {portal.channel}")

         async with tractor.wait_for_actor(service_name) as sockaddr:
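A hedged completion of this discovery example (assuming `wait_for_actor()` yields a portal to the registered actor, per the snippet's own usage):

    import trio
    import tractor


    async def main(service_name: str = 'service-demo') -> None:
        async with tractor.open_nursery() as an:
            await an.start_actor(service_name)

            async with tractor.get_registry() as portal:
                print(f'Registry is listening on {portal.channel}')

            async with tractor.wait_for_actor(service_name) as sockaddr:
                print(f'Found {service_name} @ {sockaddr.channel}')

            await an.cancel()


    trio.run(main)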
@@ -45,8 +45,6 @@ dependencies = [
     "pdbp>=1.6,<2",  # windows only (from `pdbp`)
     # typed IPC msging
     "msgspec>=0.19.0",
-    "cffi>=1.17.1",
-    "bidict>=0.23.1",
 ]

 # ------ project ------

@@ -61,12 +59,9 @@ dev = [
     # `tractor.devx` tooling
     "greenback>=1.2.1,<2",
     "stackscope>=0.2.2,<0.3",
-    # ^ requires this?
-    "typing-extensions>=4.14.1",
-    "pyperclip>=1.9.0",
     "prompt-toolkit>=3.0.50",
     "xonsh>=0.19.2",
     "psutil>=7.0.0",
 ]
 # TODO, add these with sane versions; were originally in
 # `requirements-docs.txt`..
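A quick runtime spot-check of the pins touched above (hedged: distribution names are taken from this diff, not verified against any lock file):

    from importlib.metadata import PackageNotFoundError, version

    # print what the active env actually resolved for a few pins
    for dist in ('msgspec', 'cffi', 'bidict', 'greenback', 'stackscope'):
        try:
            print(f'{dist}=={version(dist)}')
        except PackageNotFoundError:
            print(f'{dist}: not installed')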
@@ -1,27 +1,24 @@
 """
-Top level of the testing suites!
-
+``tractor`` testing!!
 """
 from __future__ import annotations
 import sys
 import subprocess
 import os
 import random
 import signal
 import platform
 import time

 import pytest
 import tractor
 from tractor._testing import (
     examples_dir as examples_dir,
     tractor_test as tractor_test,
     expect_ctxc as expect_ctxc,
 )

-pytest_plugins: list[str] = [
-    'pytester',
-    'tractor._testing.pytest',
-]
+# TODO: include wtv plugin(s) we build in `._testing.pytest`?
+pytest_plugins = ['pytester']

 # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives
 if platform.system() == 'Windows':

@@ -33,11 +30,7 @@ else:
     _KILL_SIGNAL = signal.SIGKILL
     _INT_SIGNAL = signal.SIGINT
     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value
-    _PROC_SPAWN_WAIT = (
-        0.6
-        if sys.version_info < (3, 7)
-        else 0.4
-    )
+    _PROC_SPAWN_WAIT = 0.6 if sys.version_info < (3, 7) else 0.4


 no_windows = pytest.mark.skipif(

@@ -46,12 +39,7 @@ no_windows = pytest.mark.skipif(
 )


-def pytest_addoption(
-    parser: pytest.Parser,
-):
-    # ?TODO? should this be exposed from our `._testing.pytest`
-    # plugin or should we make it more explicit with `--tl` for
-    # tractor logging like we do in other client projects?
+def pytest_addoption(parser):
     parser.addoption(
         "--ll",
         action="store",

@@ -59,10 +47,42 @@ def pytest_addoption(
         default='ERROR', help="logging level to set when testing"
     )

+    parser.addoption(
+        "--spawn-backend",
+        action="store",
+        dest='spawn_backend',
+        default='trio',
+        help="Processing spawning backend to use for test run",
+    )
+
+    parser.addoption(
+        "--tpdb", "--debug-mode",
+        action="store_true",
+        dest='tractor_debug_mode',
+        # default=False,
+        help=(
+            'Enable a flag that can be used by tests to set the '
+            '`debug_mode: bool` for engaging the internal '
+            'multi-proc debugger sys.'
+        ),
+    )
+
+
+def pytest_configure(config):
+    backend = config.option.spawn_backend
+    tractor._spawn.try_set_start_method(backend)
+
+
+@pytest.fixture(scope='session')
+def debug_mode(request):
+    debug_mode: bool = request.config.option.tractor_debug_mode
+    # if debug_mode:
+    #     breakpoint()
+    return debug_mode
+
+
 @pytest.fixture(scope='session', autouse=True)
 def loglevel(request):
     import tractor
     orig = tractor.log._default_loglevel
     level = tractor.log._default_loglevel = request.config.option.loglevel
     tractor.log.get_console_log(level)

@@ -70,44 +90,106 @@ def loglevel(request):
     tractor.log._default_loglevel = orig


 @pytest.fixture(scope='session')
 def spawn_backend(request) -> str:
     return request.config.option.spawn_backend


 # @pytest.fixture(scope='function', autouse=True)
 # def debug_enabled(request) -> str:
 #     from tractor import _state
 #     if _state._runtime_vars['_debug_mode']:
 #         breakpoint()

 _ci_env: bool = os.environ.get('CI', False)


 @pytest.fixture(scope='session')
 def ci_env() -> bool:
     '''
-    Detect CI environment.
+    Detect CI envoirment.

     '''
     return _ci_env


-def sig_prog(
-    proc: subprocess.Popen,
-    sig: int,
-    canc_timeout: float = 0.1,
-) -> int:
+# TODO: also move this to `._testing` for now?
+# -[ ] possibly generalize and re-use for multi-tree spawning
+#    along with the new stuff for multi-addrs in distribute_dis
+#    branch?
+#
+# choose randomly at import time
+_reg_addr: tuple[str, int] = (
+    '127.0.0.1',
+    random.randint(1000, 9999),
+)
+
+
+@pytest.fixture(scope='session')
+def reg_addr() -> tuple[str, int]:
+
+    # globally override the runtime to the per-test-session-dynamic
+    # addr so that all tests never conflict with any other actor
+    # tree using the default.
+    from tractor import _root
+    _root._default_lo_addrs = [_reg_addr]
+
+    return _reg_addr
+
+
+def pytest_generate_tests(metafunc):
+    spawn_backend = metafunc.config.option.spawn_backend
+
+    if not spawn_backend:
+        # XXX some weird windows bug with `pytest`?
+        spawn_backend = 'trio'
+
+    # TODO: maybe just use the literal `._spawn.SpawnMethodKey`?
+    assert spawn_backend in (
+        'mp_spawn',
+        'mp_forkserver',
+        'trio',
+    )
+
+    # NOTE: used to be used to dynamically parametrize tests for when
+    # you just passed --spawn-backend=`mp` on the cli, but now we expect
+    # that cli input to be manually specified, BUT, maybe we'll do
+    # something like this again in the future?
+    if 'start_method' in metafunc.fixturenames:
+        metafunc.parametrize("start_method", [spawn_backend], scope='module')
+
+
+# TODO: a way to let test scripts (like from `examples/`)
+# guarantee they won't registry addr collide!
+# @pytest.fixture
+# def open_test_runtime(
+#     reg_addr: tuple,
+# ) -> AsyncContextManager:
+#     return partial(
+#         tractor.open_nursery,
+#         registry_addrs=[reg_addr],
+#     )
+
+
+def sig_prog(proc, sig):
     "Kill the actor-process with ``sig``."
     proc.send_signal(sig)
-    time.sleep(canc_timeout)
+    time.sleep(0.1)
     if not proc.poll():
         # TODO: why sometimes does SIGINT not work on teardown?
         # seems to happen only when trace logging enabled?
         proc.send_signal(_KILL_SIGNAL)
-    ret: int = proc.wait()
+    ret = proc.wait()
     assert ret


 # TODO: factor into @cm and move to `._testing`?
 @pytest.fixture
 def daemon(
-    debug_mode: bool,
     loglevel: str,
     testdir,
     reg_addr: tuple[str, int],
-    tpt_proto: str,
-
-) -> subprocess.Popen:
+):
     '''
     Run a daemon root actor as a separate actor-process tree and
     "remote registrar" for discovery-protocol related tests.

@@ -118,100 +200,28 @@ def daemon(
     loglevel: str = 'info'

     code: str = (
-        "import tractor; "
-        "tractor.run_daemon([], "
-        "registry_addrs={reg_addrs}, "
-        "debug_mode={debug_mode}, "
-        "loglevel={ll})"
+        "import tractor; "
+        "tractor.run_daemon([], registry_addrs={reg_addrs}, loglevel={ll})"
     ).format(
         reg_addrs=str([reg_addr]),
         ll="'{}'".format(loglevel) if loglevel else None,
-        debug_mode=debug_mode,
     )
     cmd: list[str] = [
         sys.executable,
         '-c', code,
     ]
     # breakpoint()
     kwargs = {}
     if platform.system() == 'Windows':
         # without this, tests hang on windows forever
         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP

-    proc: subprocess.Popen = testdir.popen(
+    proc = testdir.popen(
         cmd,
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
         **kwargs,
     )

-    # UDS sockets are **really** fast to bind()/listen()/connect()
-    # so it's often required that we delay a bit more starting
-    # the first actor-tree..
-    if tpt_proto == 'uds':
-        global _PROC_SPAWN_WAIT
-        _PROC_SPAWN_WAIT = 0.6
-
-    time.sleep(_PROC_SPAWN_WAIT)
-
     assert not proc.returncode
+    time.sleep(_PROC_SPAWN_WAIT)
     yield proc
     sig_prog(proc, _INT_SIGNAL)
-
-    # XXX! yeah.. just be reaaal careful with this bc sometimes it
-    # can lock up on the `_io.BufferedReader` and hang..
-    stderr: str = proc.stderr.read().decode()
-    if stderr:
-        print(
-            f'Daemon actor tree produced STDERR:\n'
-            f'{proc.args}\n'
-            f'\n'
-            f'{stderr}\n'
-        )
-    if proc.returncode != -2:
-        raise RuntimeError(
-            'Daemon actor tree failed !?\n'
-            f'{proc.args}\n'
-        )


 # @pytest.fixture(autouse=True)
 # def shared_last_failed(pytestconfig):
 #     val = pytestconfig.cache.get("example/value", None)
 #     breakpoint()
 #     if val is None:
 #         pytestconfig.cache.set("example/value", val)
 #     return val
-
-
-# TODO: a way to let test scripts (like from `examples/`)
-# guarantee they won't `registry_addrs` collide!
-# -[ ] maybe use some kinda standard `def main()` arg-spec that
-#   we can introspect from a fixture that is called from the test
-#   body?
-# -[ ] test and figure out typing for below prototype! Bp
-#
-# @pytest.fixture
-# def set_script_runtime_args(
-#     reg_addr: tuple,
-# ) -> Callable[[...], None]:
-
-#     def import_n_partial_in_args_n_triorun(
-#         script: Path,  # under examples?
-#         **runtime_args,
-#     ) -> Callable[[], Any]:  # a `partial`-ed equiv of `trio.run()`
-
-#         # NOTE, below is taken from
-#         # `.test_advanced_faults.test_ipc_channel_break_during_stream`
-#         mod: ModuleType = import_path(
-#             examples_dir() / 'advanced_faults'
-#             / 'ipc_failure_during_stream.py',
-#             root=examples_dir(),
-#             consider_namespace_packages=False,
-#         )
-#         return partial(
-#             trio.run,
-#             partial(
-#                 mod.main,
-#                 **runtime_args,
-#             )
-#         )
-#     return import_n_partial_in_args_n_triorun
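How the session fixtures above get consumed; a hedged sketch (the test itself is hypothetical, but the fixture names and the `registry_addrs` kwarg appear verbatim in this diff):

    import trio
    import tractor


    def test_uses_conftest_fixtures(
        reg_addr: tuple[str, int],
        debug_mode: bool,
        spawn_backend: str,
    ):
        assert spawn_backend in ('trio', 'mp_spawn', 'mp_forkserver')

        async def main():
            # point the root actor at the per-session registry addr
            async with tractor.open_nursery(
                registry_addrs=[reg_addr],
                debug_mode=debug_mode,
            ) as an:
                assert an

        trio.run(main)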
@@ -1,4 +0,0 @@
-'''
-`tractor.ipc` subsystem(s)/unit testing suites.
-
-'''
@@ -1,95 +0,0 @@
-'''
-Verify the `enable_transports` param drives various
-per-root/sub-actor IPC endpoint/server settings.
-
-'''
-from __future__ import annotations
-
-import pytest
-import trio
-import tractor
-from tractor import (
-    Actor,
-    Portal,
-    ipc,
-    msg,
-    _state,
-    _addr,
-)
-
-
-@tractor.context
-async def chk_tpts(
-    ctx: tractor.Context,
-    tpt_proto_key: str,
-):
-    rtvars = _state._runtime_vars
-    assert (
-        tpt_proto_key
-        in
-        rtvars['_enable_tpts']
-    )
-    actor: Actor = tractor.current_actor()
-    spec: msg.types.SpawnSpec = actor._spawn_spec
-    assert spec._runtime_vars == rtvars
-
-    # ensure individual IPC ep-addr types
-    serv: ipc._server.Server = actor.ipc_server
-    addr: ipc._types.Address
-    for addr in serv.addrs:
-        assert addr.proto_key == tpt_proto_key
-
-    # Actor delegate-props enforcement
-    assert (
-        actor.accept_addrs
-        ==
-        serv.accept_addrs
-    )
-
-    await ctx.started(serv.accept_addrs)
-
-
-# TODO, parametrize over mis-matched-proto-typed `registry_addrs`
-# since it seems to work in `piker` but not exactly sure if both tcp
-# & uds are being deployed then?
-#
-@pytest.mark.parametrize(
-    'tpt_proto_key',
-    ['tcp', 'uds'],
-    ids=lambda item: f'ipc_tpt={item!r}'
-)
-def test_root_passes_tpt_to_sub(
-    tpt_proto_key: str,
-    reg_addr: tuple,
-    debug_mode: bool,
-):
-    async def main():
-        async with tractor.open_nursery(
-            enable_transports=[tpt_proto_key],
-            registry_addrs=[reg_addr],
-            debug_mode=debug_mode,
-        ) as an:
-
-            assert (
-                tpt_proto_key
-                in
-                _state._runtime_vars['_enable_tpts']
-            )
-
-            ptl: Portal = await an.start_actor(
-                name='sub',
-                enable_modules=[__name__],
-            )
-            async with ptl.open_context(
-                chk_tpts,
-                tpt_proto_key=tpt_proto_key,
-            ) as (ctx, accept_addrs):
-
-                uw_addr: tuple
-                for uw_addr in accept_addrs:
-                    addr = _addr.wrap_address(uw_addr)
-                    assert addr.is_valid
-
-            # shutdown sub-actor(s)
-            await an.cancel()
-
-    trio.run(main)
@@ -1,72 +0,0 @@
-'''
-High-level `.ipc._server` unit tests.
-
-'''
-from __future__ import annotations
-
-import pytest
-import trio
-from tractor import (
-    devx,
-    ipc,
-    log,
-)
-from tractor._testing.addr import (
-    get_rando_addr,
-)
-# TODO, use/check-roundtripping with some of these wrapper types?
-#
-# from .._addr import Address
-# from ._chan import Channel
-# from ._transport import MsgTransport
-# from ._uds import UDSAddress
-# from ._tcp import TCPAddress
-
-
-@pytest.mark.parametrize(
-    '_tpt_proto',
-    ['uds', 'tcp']
-)
-def test_basic_ipc_server(
-    _tpt_proto: str,
-    debug_mode: bool,
-    loglevel: str,
-):
-
-    # so we see the socket-listener reporting on console
-    log.get_console_log("INFO")
-
-    rando_addr: tuple = get_rando_addr(
-        tpt_proto=_tpt_proto,
-    )
-    async def main():
-        async with ipc._server.open_ipc_server() as server:
-
-            assert (
-                server._parent_tn
-                and
-                server._parent_tn is server._stream_handler_tn
-            )
-            assert server._no_more_peers.is_set()
-
-            eps: list[ipc._server.Endpoint] = await server.listen_on(
-                accept_addrs=[rando_addr],
-                stream_handler_nursery=None,
-            )
-            assert (
-                len(eps) == 1
-                and
-                (ep := eps[0])._listener
-                and
-                not ep.peer_tpts
-            )
-
-            server._parent_tn.cancel_scope.cancel()
-
-    # !TODO! actually make a bg-task connection from a client
-    # using `ipc._chan._connect_chan()`
-
-    with devx.maybe_open_crash_handler(
-        pdb=debug_mode,
-    ):
-        trio.run(main)
@@ -10,9 +10,6 @@ import pytest
 from _pytest.pathlib import import_path
 import trio
 import tractor
-from tractor import (
-    TransportClosed,
-)
 from tractor._testing import (
     examples_dir,
     break_ipc,

@@ -77,7 +74,6 @@ def test_ipc_channel_break_during_stream(
     spawn_backend: str,
     ipc_break: dict|None,
     pre_aclose_msgstream: bool,
-    tpt_proto: str,
 ):
     '''
     Ensure we can have an IPC channel break its connection during

@@ -95,7 +91,7 @@ def test_ipc_channel_break_during_stream(
         # non-`trio` spawners should never hit the hang condition that
         # requires the user to do ctl-c to cancel the actor tree.
         # expect_final_exc = trio.ClosedResourceError
-        expect_final_exc = TransportClosed
+        expect_final_exc = tractor.TransportClosed

     mod: ModuleType = import_path(
         examples_dir() / 'advanced_faults'

@@ -108,8 +104,6 @@ def test_ipc_channel_break_during_stream(
     # period" wherein the user eventually hits ctl-c to kill the
     # root-actor tree.
     expect_final_exc: BaseException = KeyboardInterrupt
-    expect_final_cause: BaseException|None = None
-
     if (
         # only expect EoC if trans is broken on the child side,
         ipc_break['break_child_ipc_after'] is not False

@@ -144,9 +138,6 @@ def test_ipc_channel_break_during_stream(
         # a user sending ctl-c by raising a KBI.
         if pre_aclose_msgstream:
             expect_final_exc = KeyboardInterrupt
-            if tpt_proto == 'uds':
-                expect_final_exc = TransportClosed
-                expect_final_cause = trio.BrokenResourceError

         # XXX OLD XXX
         # if child calls `MsgStream.aclose()` then expect EoC.

@@ -166,10 +157,6 @@ def test_ipc_channel_break_during_stream(
         if pre_aclose_msgstream:
             expect_final_exc = KeyboardInterrupt

-            if tpt_proto == 'uds':
-                expect_final_exc = TransportClosed
-                expect_final_cause = trio.BrokenResourceError
-
     # NOTE when the parent IPC side dies (even if the child does as well
     # but the child fails BEFORE the parent) we always expect the
     # IPC layer to raise a closed-resource, NEVER do we expect

@@ -182,8 +169,8 @@ def test_ipc_channel_break_during_stream(
         and
         ipc_break['break_child_ipc_after'] is False
     ):
+        # expect_final_exc = trio.ClosedResourceError
         expect_final_exc = tractor.TransportClosed
-        expect_final_cause = trio.ClosedResourceError

     # BOTH but, PARENT breaks FIRST
     elif (

@@ -194,8 +181,8 @@ def test_ipc_channel_break_during_stream(
         ipc_break['break_parent_ipc_after']
         )
     ):
+        # expect_final_exc = trio.ClosedResourceError
         expect_final_exc = tractor.TransportClosed
-        expect_final_cause = trio.ClosedResourceError

     with pytest.raises(
         expected_exception=(

@@ -211,7 +198,6 @@ def test_ipc_channel_break_during_stream(
             start_method=spawn_backend,
             loglevel=loglevel,
             pre_close=pre_aclose_msgstream,
-            tpt_proto=tpt_proto,
             **ipc_break,
         )
     )

@@ -234,15 +220,10 @@ def test_ipc_channel_break_during_stream(
         )
         cause: Exception = tc.__cause__
         assert (
-            # type(cause) is trio.ClosedResourceError
-            type(cause) is expect_final_cause
-
-            # TODO, should we expect a certain exc-message (per
-            # tpt) as well??
-            # and
-            # cause.args[0] == 'another task closed this fd'
+            type(cause) is trio.ClosedResourceError
+            and
+            cause.args[0] == 'another task closed this fd'
         )

         raise

     # get raw instance from pytest wrapper
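The exception-auditing pattern exercised above, distilled (hedged sketch; the real test also branches on spawn-backend and which IPC side breaks): both the boxed outer error type and its low-level `trio` cause are checked.

    import pytest
    import trio
    import tractor


    def check_transport_closed_cause(run_test) -> None:
        # expect the transport error AND verify its `.__cause__`,
        # exactly as the test's `except` block does above.
        with pytest.raises(tractor.TransportClosed) as excinfo:
            run_test()

        tc = excinfo.value
        cause = tc.__cause__
        assert type(cause) is trio.ClosedResourceError
        assert cause.args[0] == 'another task closed this fd'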
@@ -0,0 +1,917 @@
+'''
+Low-level functional audits for our
+"capability based messaging"-spec feats.
+
+B~)
+
+'''
+import typing
+from typing import (
+    Any,
+    Type,
+    Union,
+)
+
+from msgspec import (
+    structs,
+    msgpack,
+    Struct,
+    ValidationError,
+)
+import pytest
+
+import tractor
+from tractor import (
+    _state,
+    MsgTypeError,
+    Context,
+)
+from tractor.msg import (
+    _codec,
+    _ctxvar_MsgCodec,
+
+    NamespacePath,
+    MsgCodec,
+    mk_codec,
+    apply_codec,
+    current_codec,
+)
+from tractor.msg.types import (
+    _payload_msgs,
+    log,
+    PayloadMsg,
+    Started,
+    mk_msg_spec,
+)
+import trio
+
+
+def mk_custom_codec(
+    pld_spec: Union[Type]|Any,
+    add_hooks: bool,
+
+) -> MsgCodec:
+    '''
+    Create custom `msgpack` enc/dec-hooks and set a `Decoder`
+    which only loads `pld_spec` (like `NamespacePath`) types.
+
+    '''
+    uid: tuple[str, str] = tractor.current_actor().uid
+
+    # XXX NOTE XXX: despite defining `NamespacePath` as a type
+    # field on our `PayloadMsg.pld`, we still need an enc/dec_hook() pair
+    # to cast to/from that type on the wire. See the docs:
+    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
+
+    def enc_nsp(obj: Any) -> Any:
+        print(f'{uid} ENC HOOK')
+        match obj:
+            case NamespacePath():
+                print(
+                    f'{uid}: `NamespacePath`-Only ENCODE?\n'
+                    f'obj-> `{obj}`: {type(obj)}\n'
+                )
+                # if type(obj) != NamespacePath:
+                #     breakpoint()
+                return str(obj)
+
+        print(
+            f'{uid}\n'
+            'CUSTOM ENCODE\n'
+            f'obj-arg-> `{obj}`: {type(obj)}\n'
+        )
+        logmsg: str = (
+            f'{uid}\n'
+            'FAILED ENCODE\n'
+            f'obj-> `{obj}: {type(obj)}`\n'
+        )
+        raise NotImplementedError(logmsg)
+
+    def dec_nsp(
+        obj_type: Type,
+        obj: Any,
+
+    ) -> Any:
+        print(
+            f'{uid}\n'
+            'CUSTOM DECODE\n'
+            f'type-arg-> {obj_type}\n'
+            f'obj-arg-> `{obj}`: {type(obj)}\n'
+        )
+        nsp = None
+
+        if (
+            obj_type is NamespacePath
+            and isinstance(obj, str)
+            and ':' in obj
+        ):
+            nsp = NamespacePath(obj)
+            # TODO: we could build a generic handler using
+            # JUST matching the obj_type part?
+            # nsp = obj_type(obj)
+
+        if nsp:
+            print(f'Returning NSP instance: {nsp}')
+            return nsp
+
+        logmsg: str = (
+            f'{uid}\n'
+            'FAILED DECODE\n'
+            f'type-> {obj_type}\n'
+            f'obj-arg-> `{obj}`: {type(obj)}\n\n'
+            f'current codec:\n'
+            f'{current_codec()}\n'
+        )
+        # TODO: figure out the ignore subsys for this!
+        # -[ ] option whether to defense-relay back the msg
+        #   inside an `Invalid`/`Ignore`
+        # -[ ] how to make this handling pluggable such that a
+        #   `Channel`/`MsgTransport` can intercept and process
+        #   back msgs either via exception handling or some other
+        #   signal?
+        log.warning(logmsg)
+        # NOTE: this delivers the invalid
+        # value up to `msgspec`'s decoding
+        # machinery for error raising.
+        return obj
+        # raise NotImplementedError(logmsg)
+
+    nsp_codec: MsgCodec = mk_codec(
+        ipc_pld_spec=pld_spec,
+
+        # NOTE XXX: the encode hook MUST be used no matter what since
+        # our `NamespacePath` is not any of a `Any` native type nor
+        # a `msgspec.Struct` subtype - so `msgspec` has no way to know
+        # how to encode it unless we provide the custom hook.
+        #
+        # AGAIN that is, regardless of whether we spec an
+        # `Any`-decoded-pld the enc has no knowledge (by default)
+        # how to enc `NamespacePath` (nsp), so we add a custom
+        # hook to do that ALWAYS.
+        enc_hook=enc_nsp if add_hooks else None,
+
+        # XXX NOTE: pretty sure this is mutex with the `type=` to
+        # `Decoder`? so it won't work in tandem with the
+        # `ipc_pld_spec` passed above?
+        dec_hook=dec_nsp if add_hooks else None,
+    )
+    return nsp_codec
+
+
+def chk_codec_applied(
+    expect_codec: MsgCodec,
+    enter_value: MsgCodec|None = None,
+
+) -> MsgCodec:
+    '''
+    buncha sanity checks ensuring that the IPC channel's
+    context-vars are set to the expected codec and that our
+    ctx-var wrapper APIs match the same.
+
+    '''
+    # TODO: play with tricycle again, bc this is supposed to work
+    # the way we want?
+    #
+    # TreeVar
+    # task: trio.Task = trio.lowlevel.current_task()
+    # curr_codec = _ctxvar_MsgCodec.get_in(task)
+
+    # ContextVar
+    # task_ctx: Context = task.context
+    # assert _ctxvar_MsgCodec in task_ctx
+    # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec]
+
+    # NOTE: currently we use this!
+    # RunVar
+    curr_codec: MsgCodec = current_codec()
+    last_read_codec = _ctxvar_MsgCodec.get()
+    # assert curr_codec is last_read_codec
+
+    assert (
+        (same_codec := expect_codec) is
+        # returned from `mk_codec()`
+
+        # yielded value from `apply_codec()`
+
+        # read from current task's `contextvars.Context`
+        curr_codec is
+        last_read_codec
+
+        # the default `msgspec` settings
+        is not _codec._def_msgspec_codec
+        is not _codec._def_tractor_codec
+    )
+
+    if enter_value:
+        enter_value is same_codec
+
+
+def iter_maybe_sends(
+    send_items: dict[Union[Type], Any] | list[tuple],
+    ipc_pld_spec: Union[Type] | Any,
+    add_codec_hooks: bool,
+
+    codec: MsgCodec|None = None,
+
+) -> tuple[Any, bool]:
+
+    if isinstance(send_items, dict):
+        send_items = send_items.items()
+
+    for (
+        send_type_spec,
+        send_value,
+    ) in send_items:
+
+        expect_roundtrip: bool = False
+
+        # values-to-typespec sanity
+        send_type = type(send_value)
+        assert send_type == send_type_spec or (
+            (subtypes := getattr(send_type_spec, '__args__', None))
+            and send_type in subtypes
+        )
+
+        spec_subtypes: set[Union[Type]] = (
+            getattr(
+                ipc_pld_spec,
+                '__args__',
+                {ipc_pld_spec,},
+            )
+        )
+        send_in_spec: bool = (
+            send_type == ipc_pld_spec
+            or (
+                ipc_pld_spec != Any
+                and  # presume `Union` of types
+                send_type in spec_subtypes
+            )
+            or (
+                ipc_pld_spec == Any
+                and
+                send_type != NamespacePath
+            )
+        )
+        expect_roundtrip = (
+            send_in_spec
+            # any spec should support all other
+            # builtin py values that we send
+            # except our custom nsp type which
+            # we should be able to send as long
+            # as we provide the custom codec hooks.
+            or (
+                ipc_pld_spec == Any
+                and
+                send_type == NamespacePath
+                and
+                add_codec_hooks
+            )
+        )
+
+        if codec is not None:
+            # XXX FIRST XXX ensure roundtripping works
+            # before touching any IPC primitives/APIs.
+            wire_bytes: bytes = codec.encode(
+                Started(
+                    cid='blahblah',
+                    pld=send_value,
+                )
+            )
+            # NOTE: demonstrates the decoder loading
+            # to via our native SCIPP msg-spec
+            # (structured-conc-inter-proc-protocol)
+            # implemented as per,
+            try:
+                msg: Started = codec.decode(wire_bytes)
+                if not expect_roundtrip:
+                    pytest.fail(
+                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
+                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
+                        f'value -> {send_value}: {send_type}\n'
+                    )
+
+                pld = msg.pld
+                assert pld == send_value
+
+            except ValidationError:
+                if expect_roundtrip:
+                    pytest.fail(
+                        f'EXPECTED to roundtrip value given spec:\n'
+                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
+                        f'value -> {send_value}: {send_type}\n'
+                    )
+
+        yield (
+            str(send_type),
+            send_value,
+            expect_roundtrip,
+        )
+
+
+def dec_type_union(
+    type_names: list[str],
+) -> Type:
+    '''
+    Look up types by name, compile into a list and then create and
+    return a `typing.Union` from the full set.
+
+    '''
+    import importlib
+    types: list[Type] = []
+    for type_name in type_names:
+        for mod in [
+            typing,
+            importlib.import_module(__name__),
+        ]:
+            if type_ref := getattr(
+                mod,
+                type_name,
+                False,
+            ):
+                types.append(type_ref)
+
+    # special case handling only..
+    # ipc_pld_spec: Union[Type] = eval(
+    #     pld_spec_str,
+    #     {},  # globals
+    #     {'typing': typing},  # locals
+    # )
+
+    return Union[*types]
+
+
+def enc_type_union(
+    union_or_type: Union[Type]|Type,
+) -> list[str]:
+    '''
+    Encode a type-union or single type to a list of type-name-strings
+    ready for IPC interchange.
+
+    '''
+    type_strs: list[str] = []
+    for typ in getattr(
+        union_or_type,
+        '__args__',
+        {union_or_type,},
+    ):
+        type_strs.append(typ.__qualname__)
+
+    return type_strs
+
+
+@tractor.context
+async def send_back_values(
+    ctx: Context,
+    expect_debug: bool,
+    pld_spec_type_strs: list[str],
+    add_hooks: bool,
+    started_msg_bytes: bytes,
+    expect_ipc_send: dict[str, tuple[Any, bool]],
+
+) -> None:
+    '''
+    Set up a custom codec to load instances of `NamespacePath`
+    and ensure we can round trip a func ref with our parent.
+
+    '''
+    uid: tuple = tractor.current_actor().uid
+
+    # debug mode sanity check (prolly superfluous but, meh)
+    assert expect_debug == _state.debug_mode()
+
+    # init state in sub-actor should be default
+    chk_codec_applied(
+        expect_codec=_codec._def_tractor_codec,
+    )
+
+    # load pld spec from input str
+    ipc_pld_spec = dec_type_union(
+        pld_spec_type_strs,
+    )
+    pld_spec_str = str(ipc_pld_spec)
+
+    # same as on parent side config.
+    nsp_codec: MsgCodec = mk_custom_codec(
+        pld_spec=ipc_pld_spec,
+        add_hooks=add_hooks,
+    )
+    with (
+        apply_codec(nsp_codec) as codec,
+    ):
+        chk_codec_applied(
+            expect_codec=nsp_codec,
+            enter_value=codec,
+        )
+
+        print(
+            f'{uid}: attempting `Started`-bytes DECODE..\n'
+        )
+        try:
+            msg: Started = nsp_codec.decode(started_msg_bytes)
+            expected_pld_spec_str: str = msg.pld
+            assert pld_spec_str == expected_pld_spec_str
+
+        # TODO: maybe we should add our own wrapper error so as to
+        # be interchange-lib agnostic?
+        # -[ ] the error type is wtv is raised from the hook so we
+        #   could also require a type-class of errors for
+        #   indicating whether the hook-failure can be handled by
+        #   a nasty-dialog-unprot sub-sys?
+        except ValidationError:
+
+            # NOTE: only in the `Any` spec case do we expect this to
+            # work since otherwise no spec covers a plain-ol'
+            # `.pld: str`
+            if pld_spec_str == 'Any':
+                raise
+            else:
+                print(
+                    f'{uid}: (correctly) unable to DECODE `Started`-bytes\n'
+                    f'{started_msg_bytes}\n'
+                )
+
+        iter_send_val_items = iter(expect_ipc_send.values())
+        sent: list[Any] = []
+        for send_value, expect_send in iter_send_val_items:
+            try:
+                print(
+                    f'{uid}: attempting to `.started({send_value})`\n'
+                    f'=> expect_send: {expect_send}\n'
+                    f'SINCE, ipc_pld_spec: {ipc_pld_spec}\n'
+                    f'AND, codec: {codec}\n'
+                )
+                await ctx.started(send_value)
+                sent.append(send_value)
+                if not expect_send:
+
+                    # XXX NOTE XXX THIS WON'T WORK WITHOUT SPECIAL
+                    # `str` handling! or special debug mode IPC
+                    # msgs!
+                    await tractor.pause()
+
+                    raise RuntimeError(
+                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
+                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
+                        f'value -> {send_value}: {type(send_value)}\n'
+                    )
+
+                break  # move on to streaming block..
+
+            except tractor.MsgTypeError:
+                await tractor.pause()
+
+                if expect_send:
+                    raise RuntimeError(
+                        f'EXPECTED to `.started()` value given spec:\n'
+                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
+                        f'value -> {send_value}: {type(send_value)}\n'
+                    )
+
+        async with ctx.open_stream() as ipc:
+            print(
+                f'{uid}: Entering streaming block to send remaining values..'
+            )
+
+            for send_value, expect_send in iter_send_val_items:
+                send_type: Type = type(send_value)
+                print(
+                    '------ - ------\n'
+                    f'{uid}: SENDING NEXT VALUE\n'
+                    f'ipc_pld_spec: {ipc_pld_spec}\n'
+                    f'expect_send: {expect_send}\n'
+                    f'val: {send_value}\n'
+                    '------ - ------\n'
+                )
+                try:
+                    await ipc.send(send_value)
+                    print(f'***\n{uid}-CHILD sent {send_value!r}\n***\n')
+                    sent.append(send_value)
+
+                    # NOTE: should only raise above on
+                    # `.started()` or a `Return`
+                    # if not expect_send:
+                    #     raise RuntimeError(
+                    #         f'NOT-EXPECTED able to roundtrip value given spec:\n'
+                    #         f'ipc_pld_spec -> {ipc_pld_spec}\n'
+                    #         f'value -> {send_value}: {send_type}\n'
+                    #     )
+
+                except ValidationError:
+                    print(f'{uid} FAILED TO SEND {send_value}!')
+
+                    # await tractor.pause()
+                    if expect_send:
+                        raise RuntimeError(
+                            f'EXPECTED to roundtrip value given spec:\n'
+                            f'ipc_pld_spec -> {ipc_pld_spec}\n'
+                            f'value -> {send_value}: {send_type}\n'
+                        )
+                    # continue
+
+            else:
+                print(
+                    f'{uid}: finished sending all values\n'
+                    'Should be exiting stream block!\n'
+                )
+
+        print(f'{uid}: exited streaming block!')
+
+        # TODO: this won't be true bc in streaming phase we DO NOT
+        # msgspec check outbound msgs!
+        # -[ ] once we implement the receiver side `InvalidMsg`
+        #   then we can expect it here?
+        # assert (
+        #     len(sent)
+        #     ==
+        #     len([val
+        #          for val, expect in
+        #          expect_ipc_send.values()
+        #          if expect is True])
+        # )
+
+
+def ex_func(*args):
+    print(f'ex_func({args})')
+
+
+@pytest.mark.parametrize(
+    'ipc_pld_spec',
+    [
+        Any,
+        NamespacePath,
+        NamespacePath|None,  # the "maybe" spec Bo
+    ],
+    ids=[
+        'any_type',
+        'nsp_type',
+        'maybe_nsp_type',
+    ]
+)
+@pytest.mark.parametrize(
+    'add_codec_hooks',
+    [
+        True,
+        False,
+    ],
+    ids=['use_codec_hooks', 'no_codec_hooks'],
+)
+def test_codec_hooks_mod(
+    debug_mode: bool,
+    ipc_pld_spec: Union[Type]|Any,
+    # send_value: None|str|NamespacePath,
+    add_codec_hooks: bool,
+):
+    '''
+    Audit the `.msg.MsgCodec` override apis details given our impl
+    uses `contextvars` to accomplish per `trio` task codec
+    application around an inter-proc-task-comms context.
+
+    '''
+    async def main():
+        nsp = NamespacePath.from_ref(ex_func)
+        send_items: dict[Union, Any] = {
+            Union[None]: None,
+            Union[NamespacePath]: nsp,
+            Union[str]: str(nsp),
+        }
+
+        # init default state for actor
+        chk_codec_applied(
+            expect_codec=_codec._def_tractor_codec,
+        )
+
+        async with tractor.open_nursery(
+            debug_mode=debug_mode,
+        ) as an:
+            p: tractor.Portal = await an.start_actor(
+                'sub',
+                enable_modules=[__name__],
+            )
+
+            # TODO: 2 cases:
+            # - codec not modified -> decode nsp as `str`
+            # - codec modified with hooks -> decode nsp as
+            #   `NamespacePath`
+            nsp_codec: MsgCodec = mk_custom_codec(
+                pld_spec=ipc_pld_spec,
+                add_hooks=add_codec_hooks,
+            )
+            with apply_codec(nsp_codec) as codec:
+                chk_codec_applied(
+                    expect_codec=nsp_codec,
+                    enter_value=codec,
+                )
+
+                expect_ipc_send: dict[str, tuple[Any, bool]] = {}
+
+                report: str = (
+                    'Parent report on send values with\n'
+                    f'ipc_pld_spec: {ipc_pld_spec}\n'
+                    '  ------ - ------\n'
+                )
+                for val_type_str, val, expect_send in iter_maybe_sends(
+                    send_items,
+                    ipc_pld_spec,
+                    add_codec_hooks=add_codec_hooks,
+                ):
+                    report += (
+                        f'send_value: {val}: {type(val)} '
+                        f'=> expect_send: {expect_send}\n'
+                    )
+                    expect_ipc_send[val_type_str] = (val, expect_send)
+
+                print(
+                    report +
+                    '  ------ - ------\n'
+                )
+                assert len(expect_ipc_send) == len(send_items)
+                # now try over real IPC with the subactor
+                # expect_ipc_roundtrip: bool = True
+                expected_started = Started(
+                    cid='cid',
+                    pld=str(ipc_pld_spec),
+                )
+                # build list of values we expect to receive from
+                # the subactor.
+                expect_to_send: list[Any] = [
+                    val
+                    for val, expect_send in expect_ipc_send.values()
+                    if expect_send
+                ]
+
+                pld_spec_type_strs: list[str] = enc_type_union(ipc_pld_spec)
+
+                # XXX should raise an mte (`MsgTypeError`)
+                # when `add_codec_hooks == False` bc the input
+                # `expect_ipc_send` kwarg has a nsp which can't be
+                # serialized!
+                #
+                # TODO: can we ensure this happens from the
+                # `Return`-side (aka the sub) as well?
+                if not add_codec_hooks:
+                    try:
+                        async with p.open_context(
+                            send_back_values,
+                            expect_debug=debug_mode,
+                            pld_spec_type_strs=pld_spec_type_strs,
+                            add_hooks=add_codec_hooks,
+                            started_msg_bytes=nsp_codec.encode(expected_started),
+
+                            # XXX NOTE bc we send a `NamespacePath` in this kwarg
+                            expect_ipc_send=expect_ipc_send,
+
+                        ) as (ctx, first):
+                            pytest.fail('ctx should fail to open without custom enc_hook!?')
+
+                    # this test passes bc we can go no further!
+                    except MsgTypeError:
+                        # teardown nursery
+                        await p.cancel_actor()
+                        return
+
+                # TODO: send the original nsp here and
+                # test with `limit_msg_spec()` above?
+                # await tractor.pause()
+                print('PARENT opening IPC ctx!\n')
+                async with (
+
+                    # XXX should raise an mte (`MsgTypeError`)
+                    # when `add_codec_hooks == False`..
+                    p.open_context(
+                        send_back_values,
+                        expect_debug=debug_mode,
+                        pld_spec_type_strs=pld_spec_type_strs,
+                        add_hooks=add_codec_hooks,
+                        started_msg_bytes=nsp_codec.encode(expected_started),
+                        expect_ipc_send=expect_ipc_send,
+                    ) as (ctx, first),
+
+                    ctx.open_stream() as ipc,
+                ):
+                    # ensure codec is still applied across
+                    # `tractor.Context` + its embedded nursery.
+                    chk_codec_applied(
+                        expect_codec=nsp_codec,
+                        enter_value=codec,
+                    )
+                    print(
+                        'root: ENTERING CONTEXT BLOCK\n'
+                        f'type(first): {type(first)}\n'
+                        f'first: {first}\n'
+                    )
+                    expect_to_send.remove(first)
+
+                    # TODO: explicit values we expect depending on
+                    # codec config!
+                    # assert first == first_val
+                    # assert first == f'{__name__}:ex_func'
+
+                    async for next_sent in ipc:
+                        print(
+                            'Parent: child sent next value\n'
+                            f'{next_sent}: {type(next_sent)}\n'
+                        )
+                        if expect_to_send:
+                            expect_to_send.remove(next_sent)
+                        else:
+                            print('PARENT should terminate stream loop + block!')
+
+                    # all sent values should have arrived!
+                    assert not expect_to_send
+
+            await p.cancel_actor()
+
+    trio.run(main)
+
+
+def chk_pld_type(
+    payload_spec: Type[Struct]|Any,
+    pld: Any,
+
+    expect_roundtrip: bool|None = None,
+
+) -> bool:
+
+    pld_val_type: Type = type(pld)
+
+    # TODO: verify that the overridden subtypes
+    # DO NOT have modified type-annots from original!
+    # 'Start',  .pld: FuncSpec
+    # 'StartAck',  .pld: IpcCtxSpec
+    # 'Stop',  .pld: UNSET
+    # 'Error',  .pld: ErrorData
+
+    codec: MsgCodec = mk_codec(
+        # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified
+        # type union.
+        ipc_pld_spec=payload_spec,
+    )
+
+    # make a one-off dec to compare with our `MsgCodec` instance
+    # which does the below `mk_msg_spec()` call internally
+    ipc_msg_spec: Union[Type[Struct]]
+    msg_types: list[PayloadMsg[payload_spec]]
+    (
+        ipc_msg_spec,
+        msg_types,
+    ) = mk_msg_spec(
+        payload_type_union=payload_spec,
+    )
+    _enc = msgpack.Encoder()
+    _dec = msgpack.Decoder(
+        type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]`
+    )
+
+    assert (
+        payload_spec
+        ==
+        codec.pld_spec
+    )
+
+    # assert codec.dec == dec
+    #
+    # ^-XXX-^ not sure why these aren't "equal" but when cast
+    # to `str` they seem to match ?? .. kk
+
+    assert (
+        str(ipc_msg_spec)
+        ==
+        str(codec.msg_spec)
+        ==
+        str(_dec.type)
+        ==
+        str(codec.dec.type)
+    )
+
+    # verify the boxed-type for all variable payload-type msgs.
+    if not msg_types:
+        breakpoint()
+
+    roundtrip: bool|None = None
+    pld_spec_msg_names: list[str] = [
+        td.__name__ for td in _payload_msgs
+    ]
+    for typedef in msg_types:
+
+        skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names
+        if skip_runtime_msg:
+            continue
+
+        pld_field = structs.fields(typedef)[1]
+        assert pld_field.type is payload_spec  # TODO-^ does this need to work to get all subtypes to adhere?
+
+        kwargs: dict[str, Any] = {
+            'cid': '666',
+            'pld': pld,
+        }
+        enc_msg: PayloadMsg = typedef(**kwargs)
+
+        _wire_bytes: bytes = _enc.encode(enc_msg)
+        wire_bytes: bytes = codec.enc.encode(enc_msg)
+        assert _wire_bytes == wire_bytes
+
+        ve: ValidationError|None = None
+        try:
+            dec_msg = codec.dec.decode(wire_bytes)
+            _dec_msg = _dec.decode(wire_bytes)
+
+            # decoded msg and thus payload should be exactly same!
+            assert (roundtrip := (
+                _dec_msg
+                ==
+                dec_msg
+                ==
+                enc_msg
+            ))
+
+            if (
+                expect_roundtrip is not None
+                and expect_roundtrip != roundtrip
+            ):
+                breakpoint()
+
+            assert (
+                pld
+                ==
+                dec_msg.pld
+                ==
+                enc_msg.pld
+            )
+            # assert (roundtrip := (_dec_msg == enc_msg))
+
+        except ValidationError as _ve:
+            ve = _ve
+            roundtrip: bool = False
+            if pld_val_type is payload_spec:
+                raise ValueError(
+                    'Got `ValidationError` despite type-var match!?\n'
+                    f'pld_val_type: {pld_val_type}\n'
+                    f'payload_type: {payload_spec}\n'
+                ) from ve
+
+            else:
+                # ow we good cuz the pld spec mismatched.
+                print(
+                    'Got expected `ValidationError` since,\n'
+                    f'{pld_val_type} is not {payload_spec}\n'
+                )
+        else:
+            if (
+                payload_spec is not Any
+                and
+                pld_val_type is not payload_spec
+            ):
+                raise ValueError(
+                    'DID NOT `ValidationError` despite expected type match!?\n'
+                    f'pld_val_type: {pld_val_type}\n'
+                    f'payload_type: {payload_spec}\n'
+                )
+
+    # full code decode should always be attempted!
+    if roundtrip is None:
+        breakpoint()
+
+    return roundtrip
+
+
+def test_limit_msgspec(
+    debug_mode: bool,
+):
+    async def main():
+        async with tractor.open_root_actor(
+            debug_mode=debug_mode,
+        ):
+            # ensure we can round-trip a boxing `PayloadMsg`
+            assert chk_pld_type(
+                payload_spec=Any,
+                pld=None,
+                expect_roundtrip=True,
+            )
+
+            # verify that a mis-typed payload value won't decode
+            assert not chk_pld_type(
+                payload_spec=int,
+                pld='doggy',
+            )
+
+            # parametrize the boxed `.pld` type as a custom-struct
+            # and ensure that parametrization propagates
+            # to all payload-msg-spec-able subtypes!
+            class CustomPayload(Struct):
+                name: str
+                value: Any
+
+            assert not chk_pld_type(
+                payload_spec=CustomPayload,
+                pld='doggy',
+            )
+
+            assert chk_pld_type(
+                payload_spec=CustomPayload,
+                pld=CustomPayload(name='doggy', value='urmom')
+            )
+
+            # yah, we can `.pause_from_sync()` now!
+            # breakpoint()
+
+    trio.run(main)
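A self-contained distillation (hedged) of the codec machinery audited in the new file above: build a `NamespacePath`-only payload-spec codec with enc/dec hooks, apply it, and round-trip a `Started` msg through wire-bytes. All names below are the same APIs the test file imports; behavior is assumed to match how the test exercises them.

    import trio
    from tractor.msg import (
        NamespacePath,
        MsgCodec,
        mk_codec,
        apply_codec,
    )
    from tractor.msg.types import Started


    def enc_nsp(obj):
        # cast `NamespacePath` -> `str` on the way out
        if isinstance(obj, NamespacePath):
            return str(obj)
        raise NotImplementedError(type(obj))


    def dec_nsp(obj_type, obj):
        # cast matching `str`s back to `NamespacePath` on the way in
        if (
            obj_type is NamespacePath
            and isinstance(obj, str)
            and ':' in obj
        ):
            return NamespacePath(obj)
        return obj


    async def main():
        codec: MsgCodec = mk_codec(
            ipc_pld_spec=NamespacePath,
            enc_hook=enc_nsp,
            dec_hook=dec_nsp,
        )
        with apply_codec(codec):
            nsp = NamespacePath.from_ref(main)
            wire: bytes = codec.encode(Started(cid='demo', pld=nsp))
            msg: Started = codec.decode(wire)
            assert msg.pld == nsp


    trio.run(main)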
@ -38,9 +38,9 @@ from tractor._testing import (
|
|||
# - standard setup/teardown:
|
||||
# ``Portal.open_context()`` starts a new
|
||||
# remote task context in another actor. The target actor's task must
|
||||
# call ``Context.started()`` to unblock this entry on the parent side.
|
||||
# the child task executes until complete and returns a final value
|
||||
# which is delivered to the parent side and retreived via
|
||||
# call ``Context.started()`` to unblock this entry on the caller side.
|
||||
# the callee task executes until complete and returns a final value
|
||||
# which is delivered to the caller side and retreived via
|
||||
# ``Context.result()``.
|
||||
|
||||
# - cancel termination:
|
||||

@ -170,9 +170,9 @@ async def assert_state(value: bool):
    [False, ValueError, KeyboardInterrupt],
)
@pytest.mark.parametrize(
    'child_blocks_forever',
    'callee_blocks_forever',
    [False, True],
    ids=lambda item: f'child_blocks_forever={item}'
    ids=lambda item: f'callee_blocks_forever={item}'
)
@pytest.mark.parametrize(
    'pointlessly_open_stream',

@ -181,7 +181,7 @@ async def assert_state(value: bool):
)
def test_simple_context(
    error_parent,
    child_blocks_forever,
    callee_blocks_forever,
    pointlessly_open_stream,
    debug_mode: bool,
):

@ -204,13 +204,13 @@ def test_simple_context(
                portal.open_context(
                    simple_setup_teardown,
                    data=10,
                    block_forever=child_blocks_forever,
                    block_forever=callee_blocks_forever,
                ) as (ctx, sent),
            ):
                assert current_ipc_ctx() is ctx
                assert sent == 11

                if child_blocks_forever:
                if callee_blocks_forever:
                    await portal.run(assert_state, value=True)
                else:
                    assert await ctx.result() == 'yo'

@ -220,7 +220,7 @@ def test_simple_context(
                if error_parent:
                    raise error_parent

                if child_blocks_forever:
                if callee_blocks_forever:
                    await ctx.cancel()
                else:
                    # in this case the stream will send a

@ -259,9 +259,9 @@ def test_simple_context(


@pytest.mark.parametrize(
    'child_returns_early',
    'callee_returns_early',
    [True, False],
    ids=lambda item: f'child_returns_early={item}'
    ids=lambda item: f'callee_returns_early={item}'
)
@pytest.mark.parametrize(
    'cancel_method',

@ -273,14 +273,14 @@ def test_simple_context(
    [True, False],
    ids=lambda item: f'chk_ctx_result_before_exit={item}'
)
def test_parent_cancels(
def test_caller_cancels(
    cancel_method: str,
    chk_ctx_result_before_exit: bool,
    child_returns_early: bool,
    callee_returns_early: bool,
    debug_mode: bool,
):
    '''
    Verify that when the opening side of a context (aka the parent)
    Verify that when the opening side of a context (aka the caller)
    cancels that context, the ctx does not raise a cancelled when
    either calling `.result()` or on context exit.


@ -294,7 +294,7 @@ def test_parent_cancels(

        if (
            cancel_method == 'portal'
            and not child_returns_early
            and not callee_returns_early
        ):
            try:
                res = await ctx.result()

@ -318,7 +318,7 @@ def test_parent_cancels(
                pytest.fail(f'should not have raised ctxc\n{ctxc}')

        # we actually get a result
        if child_returns_early:
        if callee_returns_early:
            assert res == 'yo'
            assert ctx.outcome is res
            assert ctx.maybe_error is None

@ -362,14 +362,14 @@ def test_parent_cancels(
        )
        timeout: float = (
            0.5
            if not child_returns_early
            if not callee_returns_early
            else 2
        )
        with trio.fail_after(timeout):
            async with (
                expect_ctxc(
                    yay=(
                        not child_returns_early
                        not callee_returns_early
                        and cancel_method == 'portal'
                    )
                ),

@ -377,13 +377,13 @@ def test_parent_cancels(
                portal.open_context(
                    simple_setup_teardown,
                    data=10,
                    block_forever=not child_returns_early,
                    block_forever=not callee_returns_early,
                ) as (ctx, sent),
            ):

                if child_returns_early:
                if callee_returns_early:
                    # ensure we block long enough before sending
                    # a cancel such that the child has already
                    # a cancel such that the callee has already
                    # returned its result.
                    await trio.sleep(0.5)


@ -421,7 +421,7 @@ def test_parent_cancels(
                # which should in turn cause `ctx._scope` to
                # catch any cancellation?
                if (
                    not child_returns_early
                    not callee_returns_early
                    and cancel_method != 'portal'
                ):
                    assert not ctx._scope.cancelled_caught

@ -430,11 +430,11 @@ def test_parent_cancels(


# basic stream terminations:
# - child context closes without using stream
# - parent context closes without using stream
# - parent context calls `Context.cancel()` while streaming
#   is ongoing resulting in child being cancelled
# - child calls `Context.cancel()` while streaming and parent
# - callee context closes without using stream
# - caller context closes without using stream
# - caller context calls `Context.cancel()` while streaming
#   is ongoing resulting in callee being cancelled
# - callee calls `Context.cancel()` while streaming and caller
#   sees stream terminated in `RemoteActorError`

# TODO: future possible features

@ -443,6 +443,7 @@ def test_parent_cancels(

@tractor.context
async def close_ctx_immediately(

    ctx: Context,

) -> None:

@ -453,24 +454,13 @@ async def close_ctx_immediately(
    async with ctx.open_stream():
        pass

    print('child returning!')


@pytest.mark.parametrize(
    'parent_send_before_receive',
    [
        False,
        True,
    ],
    ids=lambda item: f'child_send_before_receive={item}'
)
@tractor_test
async def test_child_exits_ctx_after_stream_open(
async def test_callee_closes_ctx_after_stream_open(
    debug_mode: bool,
    parent_send_before_receive: bool,
):
    '''
    child context closes without using stream.
    callee context closes without using stream.

    This should result in a msg sequence
    |_<root>_

@ -484,9 +474,6 @@ async def test_child_exits_ctx_after_stream_open(
           => {'stop': True, 'cid': <str>}

    '''
    timeout: float = (
        0.5 if not debug_mode else 999
    )
    async with tractor.open_nursery(
        debug_mode=debug_mode,
    ) as an:

@ -495,7 +482,7 @@ async def test_child_exits_ctx_after_stream_open(
            enable_modules=[__name__],
        )

        with trio.fail_after(timeout):
        with trio.fail_after(0.5):
            async with portal.open_context(
                close_ctx_immediately,


@ -507,56 +494,41 @@ async def test_child_exits_ctx_after_stream_open(

                with trio.fail_after(0.4):
                    async with ctx.open_stream() as stream:
                        if parent_send_before_receive:
                            print('sending first msg from parent!')
                            await stream.send('yo')

                        # should fall through since ``StopAsyncIteration``
                        # should be raised through translation of
                        # a ``trio.EndOfChannel`` by
                        # ``trio.abc.ReceiveChannel.__anext__()``
                        msg = 10
                        async for msg in stream:
                        async for _ in stream:
                            # trigger failure if we DO NOT
                            # get an EOC!
                            assert 0
                        else:
                            # never should get anything new from
                            # the underlying stream
                            assert msg == 10

                # verify stream is now closed
                try:
                    with trio.fail_after(0.3):
                        print('parent trying to `.receive()` on EoC stream!')
                        await stream.receive()
                        assert 0, 'should have raised eoc!?'
                except trio.EndOfChannel:
                    print('parent got EoC as expected!')
                    pass
                    # raise

            # TODO: should we just raise the closed resource err
            # directly here to enforce not allowing a re-open
            # of a stream to the context (at least until a time of
            # if/when we decide that's a good idea?)
            try:
                with trio.fail_after(timeout):
                with trio.fail_after(0.5):
                    async with ctx.open_stream() as stream:
                        pass
            except trio.ClosedResourceError:
                pass

            # if ctx._rx_chan._state.data:
            #     await tractor.pause()

        await portal.cancel_actor()


@tractor.context
async def expect_cancelled(
    ctx: Context,
    send_before_receive: bool = False,

) -> None:
    global _state
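The `trio.EndOfChannel` -> `StopAsyncIteration` translation the comments above rely on can be seen in isolation with a plain memory channel; a standalone sketch:

    import trio

    async def demo() -> None:
        send, recv = trio.open_memory_channel(1)
        await send.send('yo')
        await send.aclose()  # sender done -> EoC for the receiver

        async for msg in recv:  # exits cleanly on `EndOfChannel`
            assert msg == 'yo'

        try:
            await recv.receive()  # explicit receive after EoC re-raises
        except trio.EndOfChannel:
            print('got EoC as expected')

    trio.run(demo)
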

@ -566,10 +538,6 @@ async def expect_cancelled(

    try:
        async with ctx.open_stream() as stream:

            if send_before_receive:
                await stream.send('yo')

            async for msg in stream:
                await stream.send(msg)  # echo server


@ -596,49 +564,26 @@ async def expect_cancelled(
        raise

    else:
        assert 0, "child wasn't cancelled !?"
        assert 0, "callee wasn't cancelled !?"


@pytest.mark.parametrize(
    'child_send_before_receive',
    [
        False,
        True,
    ],
    ids=lambda item: f'child_send_before_receive={item}'
)
@pytest.mark.parametrize(
    'rent_wait_for_msg',
    [
        False,
        True,
    ],
    ids=lambda item: f'rent_wait_for_msg={item}'
)
@pytest.mark.parametrize(
    'use_ctx_cancel_method',
    [
        False,
        'pre_stream',
        'post_stream_open',
        'post_stream_close',
    ],
    ids=lambda item: f'use_ctx_cancel_method={item}'
    [False, True],
)
@tractor_test
async def test_parent_exits_ctx_after_child_enters_stream(
    use_ctx_cancel_method: bool|str,
async def test_caller_closes_ctx_after_callee_opens_stream(
    use_ctx_cancel_method: bool,
    debug_mode: bool,
    rent_wait_for_msg: bool,
    child_send_before_receive: bool,
):
    '''
    Parent-side of IPC context closes without sending on `MsgStream`.
    caller context closes without using/opening stream

    '''
    async with tractor.open_nursery(
        debug_mode=debug_mode,
    ) as an:

        root: Actor = current_actor()
        portal = await an.start_actor(
            'ctx_cancelled',

@ -647,52 +592,41 @@ async def test_parent_exits_ctx_after_child_enters_stream(

        async with portal.open_context(
            expect_cancelled,
            send_before_receive=child_send_before_receive,
        ) as (ctx, sent):
            assert sent is None

            await portal.run(assert_state, value=True)

            # call `ctx.cancel()` explicitly
            if use_ctx_cancel_method == 'pre_stream':
            if use_ctx_cancel_method:
                await ctx.cancel()

                # NOTE: means the local side `ctx._scope` will
                # have been cancelled by an ctxc ack and thus
                # `._scope.cancelled_caught` should be set.
                async with (
                    expect_ctxc(
                        # XXX: the cause is US since we call
                        # `Context.cancel()` just above!
                        yay=True,

                        # XXX: must be propagated to __aexit__
                        # and should be silently absorbed there
                        # since we called `.cancel()` just above ;)
                        reraise=True,
                    ) as maybe_ctxc,
                ):
                    try:
                        async with ctx.open_stream() as stream:
                            async for msg in stream:
                                pass

                            if rent_wait_for_msg:
                                async for msg in stream:
                                    print(f'PARENT rx: {msg!r}\n')
                                    break
                    except tractor.ContextCancelled as ctxc:
                        # XXX: the cause is US since we call
                        # `Context.cancel()` just above!
                        assert (
                            ctxc.canceller
                            ==
                            current_actor().uid
                            ==
                            root.uid
                        )

                            if use_ctx_cancel_method == 'post_stream_open':
                                await ctx.cancel()
                        # XXX: must be propagated to __aexit__
                        # and should be silently absorbed there
                        # since we called `.cancel()` just above ;)
                        raise

                    if use_ctx_cancel_method == 'post_stream_close':
                        await ctx.cancel()

                ctxc: tractor.ContextCancelled = maybe_ctxc.value
                assert (
                    ctxc.canceller
                    ==
                    current_actor().uid
                    ==
                    root.uid
                )
                    else:
                        assert 0, "Should have context cancelled?"

            # channel should still be up
            assert portal.channel.connected()
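The `expect_ctxc()` helper used above is imported from `tractor._testing` (per the earlier hunk header); a rough sketch of the general shape such a maybe-expect-exception acm can take, matching how it's driven here with `yay`/`reraise` and an `as maybe_ctxc` value holder (this is the pattern, not tractor's exact implementation):

    from contextlib import asynccontextmanager as acm
    import tractor

    @acm
    async def expect_ctxc(
        yay: bool,
        reraise: bool = False,
    ):
        # tiny mutable holder so callers can inspect
        # `maybe_ctxc.value` after the block exits
        class Holder:
            value: tractor.ContextCancelled|None = None

        holder = Holder()
        if not yay:
            yield holder
            return

        try:
            yield holder
            raise RuntimeError('Never raised ctxc?')
        except tractor.ContextCancelled as ctxc:
            holder.value = ctxc
            if reraise:
                raise
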

@ -703,20 +637,13 @@ async def test_parent_exits_ctx_after_child_enters_stream(
                value=False,
            )

        # XXX CHILD-BLOCKS case, we SHOULD NOT exit from the
        # `.open_context()` before the child has returned,
        # errored or been cancelled!
        else:
            try:
                with trio.fail_after(
                    0.5  # if not debug_mode else 999
                ):
                    res = await ctx.wait_for_result()
                    assert res is not tractor._context.Unresolved
                with trio.fail_after(0.2):
                    await ctx.result()
                    assert 0, "Callee should have blocked!?"
            except trio.TooSlowError:
                # NO-OP -> since already triggered by
                # `trio.fail_after()` above!
                # NO-OP -> since already called above
                await ctx.cancel()

        # NOTE: local scope should have absorbed the cancellation since

@ -756,7 +683,7 @@ async def test_parent_exits_ctx_after_child_enters_stream(

@tractor_test
async def test_multitask_parent_cancels_from_nonroot_task(
async def test_multitask_caller_cancels_from_nonroot_task(
    debug_mode: bool,
):
    async with tractor.open_nursery(

@ -808,6 +735,7 @@ async def test_multitask_parent_cancels_from_nonroot_task(

@tractor.context
async def cancel_self(

    ctx: Context,

) -> None:

@ -847,11 +775,11 @@ async def cancel_self(

@tractor_test
async def test_child_cancels_before_started(
async def test_callee_cancels_before_started(
    debug_mode: bool,
):
    '''
    Callee calls `Context.cancel()` while streaming and parent
    Callee calls `Context.cancel()` while streaming and caller
    sees stream terminated in `ContextCancelled`.

    '''

@ -898,13 +826,14 @@ async def never_open_stream(

@tractor.context
async def keep_sending_from_child(
async def keep_sending_from_callee(

    ctx: Context,
    msg_buffer_size: int|None = None,

) -> None:
    '''
    Send endlessly on the child stream.
    Send endlessly on the callee stream.

    '''
    await ctx.started()

@ -912,7 +841,7 @@ async def keep_sending_from_child(
        msg_buffer_size=msg_buffer_size,
    ) as stream:
        for msg in count():
            print(f'child sending {msg}')
            print(f'callee sending {msg}')
            await stream.send(msg)
            await trio.sleep(0.01)


@ -920,12 +849,12 @@ async def keep_sending_from_child(
@pytest.mark.parametrize(
    'overrun_by',
    [
        ('parent', 1, never_open_stream),
        ('child', 0, keep_sending_from_child),
        ('caller', 1, never_open_stream),
        ('callee', 0, keep_sending_from_callee),
    ],
    ids=[
        ('parent_1buf_never_open_stream'),
        ('child_0buf_keep_sending_from_child'),
        ('caller_1buf_never_open_stream'),
        ('callee_0buf_keep_sending_from_callee'),
    ]
)
def test_one_end_stream_not_opened(

@ -956,7 +885,8 @@ def test_one_end_stream_not_opened(
        ) as (ctx, sent):
            assert sent is None

            if 'parent' in overrunner:
            if 'caller' in overrunner:

                async with ctx.open_stream() as stream:

                    # itersend +1 msg more than the buffer size

@ -971,7 +901,7 @@ def test_one_end_stream_not_opened(
                    await trio.sleep_forever()

            else:
                # child overruns parent case so we do nothing here
                # callee overruns caller case so we do nothing here
                await trio.sleep_forever()

        await portal.cancel_actor()

@ -979,19 +909,19 @@ def test_one_end_stream_not_opened(
    # 2 overrun cases and the no overrun case (which pushes right up to
    # the msg limit)
    if (
        overrunner == 'parent'
        overrunner == 'caller'
    ):
        with pytest.raises(tractor.RemoteActorError) as excinfo:
            trio.run(main)

        assert excinfo.value.boxed_type == StreamOverrun

    elif overrunner == 'child':
    elif overrunner == 'callee':
        with pytest.raises(tractor.RemoteActorError) as excinfo:
            trio.run(main)

        # TODO: embedded remote errors so that we can verify the source
        # error? the child delivers an error which is an overrun
        # error? the callee delivers an error which is an overrun
        # wrapped in a remote actor error.
        assert excinfo.value.boxed_type == tractor.RemoteActorError

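The overrun cases above hinge on one side pushing more msgs than the receiver's buffer allows; the same core idea shows up in plain `trio` terms, where a bounded channel refuses a send past its buffer size:

    import trio

    async def demo() -> None:
        send, recv = trio.open_memory_channel(max_buffer_size=1)
        send.send_nowait('first')       # fills the single buffer slot
        try:
            send.send_nowait('second')  # one-more-than-buffer -> overrun
        except trio.WouldBlock:
            print('buffer full: the sender would block (or error) here')

    trio.run(demo)
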

@ -1001,7 +931,8 @@ def test_one_end_stream_not_opened(

@tractor.context
async def echo_back_sequence(
    ctx: Context,

    ctx: Context,
    seq: list[int],
    wait_for_cancel: bool,
    allow_overruns_side: str,

@ -1010,12 +941,12 @@ async def echo_back_sequence(

) -> None:
    '''
    Send endlessly on the child stream using a small buffer size
    Send endlessly on the callee stream using a small buffer size
    setting on the context to simulate backlogging that would normally
    cause overruns.

    '''
    # NOTE: ensure that if the parent is expecting to cancel this task
    # NOTE: ensure that if the caller is expecting to cancel this task
    # that we stay echoing much longer than they are so we don't
    # return early instead of receive the cancel msg.
    total_batches: int = (

@ -1065,18 +996,18 @@ async def echo_back_sequence(
                if be_slow:
                    await trio.sleep(0.05)

                print('child waiting on next')
                print('callee waiting on next')

            print(f'child echoing back latest batch\n{batch}')
            print(f'callee echoing back latest batch\n{batch}')
            for msg in batch:
                print(f'child sending msg\n{msg}')
                print(f'callee sending msg\n{msg}')
                await stream.send(msg)

    try:
        return 'yo'
    finally:
        print(
            'exiting child with context:\n'
            'exiting callee with context:\n'
            f'{pformat(ctx)}\n'
        )


@ -1130,7 +1061,7 @@ def test_maybe_allow_overruns_stream(
        debug_mode=debug_mode,
    ) as an:
        portal = await an.start_actor(
            'child_sends_forever',
            'callee_sends_forever',
            enable_modules=[__name__],
            loglevel=loglevel,
            debug_mode=debug_mode,


@ -7,9 +7,7 @@ import platform
from functools import partial
import itertools

import psutil
import pytest
import subprocess
import tractor
from tractor._testing import tractor_test
import trio

@ -28,7 +26,7 @@ async def test_reg_then_unreg(reg_addr):
    portal = await n.start_actor('actor', enable_modules=[__name__])
    uid = portal.channel.uid

    async with tractor.get_registry(reg_addr) as aportal:
    async with tractor.get_registry(*reg_addr) as aportal:
        # this local actor should be the arbiter
        assert actor is aportal.actor

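All the `get_registry()` edits in this file are the same call-convention change; side by side (the address value here is an example only, not the repo's configured default):

    reg_addr: tuple[str, int] = ('127.0.0.1', 1616)  # example addr only

    # py313_support side: host/port unpacked as separate args
    #   async with tractor.get_registry(*reg_addr) as portal: ...

    # main side: the address tuple is passed through whole
    #   async with tractor.get_registry(reg_addr) as portal: ...
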
@ -154,25 +152,15 @@ async def unpack_reg(actor_or_portal):
async def spawn_and_check_registry(
    reg_addr: tuple,
    use_signal: bool,
    debug_mode: bool = False,
    remote_arbiter: bool = False,
    with_streaming: bool = False,
    maybe_daemon: tuple[
        subprocess.Popen,
        psutil.Process,
    ]|None = None,

) -> None:

    if maybe_daemon:
        popen, proc = maybe_daemon
        # breakpoint()

    async with tractor.open_root_actor(
        registry_addrs=[reg_addr],
        debug_mode=debug_mode,
    ):
        async with tractor.get_registry(reg_addr) as portal:
        async with tractor.get_registry(*reg_addr) as portal:
            # runtime needs to be up to call this
            actor = tractor.current_actor()


@ -188,11 +176,11 @@ async def spawn_and_check_registry(
            extra = 2  # local root actor + remote arbiter

        # ensure current actor is registered
        registry: dict = await get_reg()
        registry = await get_reg()
        assert actor.uid in registry

        try:
            async with tractor.open_nursery() as an:
            async with tractor.open_nursery() as n:
                async with trio.open_nursery(
                    strict_exception_groups=False,
                ) as trion:

@ -201,17 +189,17 @@ async def spawn_and_check_registry(
                    for i in range(3):
                        name = f'a{i}'
                        if with_streaming:
                            portals[name] = await an.start_actor(
                            portals[name] = await n.start_actor(
                                name=name, enable_modules=[__name__])

                        else:  # no streaming
                            portals[name] = await an.run_in_actor(
                            portals[name] = await n.run_in_actor(
                                trio.sleep_forever, name=name)

                    # wait on last actor to come up
                    async with tractor.wait_for_actor(name):
                        registry = await get_reg()
                        for uid in an._children:
                        for uid in n._children:
                            assert uid in registry

                    assert len(portals) + extra == len(registry)


@ -244,7 +232,6 @@ async def spawn_and_check_registry(
@pytest.mark.parametrize('use_signal', [False, True])
@pytest.mark.parametrize('with_streaming', [False, True])
def test_subactors_unregister_on_cancel(
    debug_mode: bool,
    start_method,
    use_signal,
    reg_addr,

@ -261,7 +248,6 @@ def test_subactors_unregister_on_cancel(
            spawn_and_check_registry,
            reg_addr,
            use_signal,
            debug_mode=debug_mode,
            remote_arbiter=False,
            with_streaming=with_streaming,
        ),

@ -271,8 +257,7 @@ def test_subactors_unregister_on_cancel(
@pytest.mark.parametrize('use_signal', [False, True])
@pytest.mark.parametrize('with_streaming', [False, True])
def test_subactors_unregister_on_cancel_remote_daemon(
    daemon: subprocess.Popen,
    debug_mode: bool,
    daemon,
    start_method,
    use_signal,
    reg_addr,

@ -288,13 +273,8 @@ def test_subactors_unregister_on_cancel_remote_daemon(
            spawn_and_check_registry,
            reg_addr,
            use_signal,
            debug_mode=debug_mode,
            remote_arbiter=True,
            with_streaming=with_streaming,
            maybe_daemon=(
                daemon,
                psutil.Process(daemon.pid)
            ),
        ),
    )


@ -320,7 +300,7 @@ async def close_chans_before_nursery(
    async with tractor.open_root_actor(
        registry_addrs=[reg_addr],
    ):
        async with tractor.get_registry(reg_addr) as aportal:
        async with tractor.get_registry(*reg_addr) as aportal:
            try:
                get_reg = partial(unpack_reg, aportal)


@ -393,7 +373,7 @@ def test_close_channel_explicit(

@pytest.mark.parametrize('use_signal', [False, True])
def test_close_channel_explicit_remote_arbiter(
    daemon: subprocess.Popen,
    daemon,
    start_method,
    use_signal,
    reg_addr,


@ -66,9 +66,6 @@ def run_example_in_subproc(
    # due to backpressure!!!
    proc = testdir.popen(
        cmdargs,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        **kwargs,
    )
    assert not proc.returncode

@ -122,14 +119,10 @@ def test_example(
        code = ex.read()

        with run_example_in_subproc(code) as proc:
            err = None
            try:
                if not proc.poll():
                    _, err = proc.communicate(timeout=15)

            except subprocess.TimeoutExpired as e:
                proc.kill()
                err = e.stderr
            proc.wait()
            err, _ = proc.stderr.read(), proc.stdout.read()
            # print(f'STDERR: {err}')
            # print(f'STDOUT: {out}')

        # if we get some gnarly output let's aggregate and raise
        if err:


@ -1,946 +0,0 @@
'''
Low-level functional audits for our
"capability based messaging"-spec feats.

B~)

'''
from contextlib import (
    contextmanager as cm,
    # nullcontext,
)
import importlib
from typing import (
    Any,
    Type,
    Union,
)

from msgspec import (
    # structs,
    # msgpack,
    Raw,
    # Struct,
    ValidationError,
)
import pytest
import trio

import tractor
from tractor import (
    Actor,
    # _state,
    MsgTypeError,
    Context,
)
from tractor.msg import (
    _codec,
    _ctxvar_MsgCodec,
    _exts,

    NamespacePath,
    MsgCodec,
    MsgDec,
    mk_codec,
    mk_dec,
    apply_codec,
    current_codec,
)
from tractor.msg.types import (
    log,
    Started,
    # _payload_msgs,
    # PayloadMsg,
    # mk_msg_spec,
)
from tractor.msg._ops import (
    limit_plds,
)


def enc_nsp(obj: Any) -> Any:
    actor: Actor = tractor.current_actor(
        err_on_no_runtime=False,
    )
    uid: tuple[str, str]|None = None if not actor else actor.uid
    print(f'{uid} ENC HOOK')

    match obj:
        # case NamespacePath()|str():
        case NamespacePath():
            encoded: str = str(obj)
            print(
                f'----- ENCODING `NamespacePath` as `str` ------\n'
                f'|_obj:{type(obj)!r} = {obj!r}\n'
                f'|_encoded: str = {encoded!r}\n'
            )
            # if type(obj) != NamespacePath:
            #     breakpoint()
            return encoded
        case _:
            logmsg: str = (
                f'{uid}\n'
                'FAILED ENCODE\n'
                f'obj-> `{obj}: {type(obj)}`\n'
            )
            raise NotImplementedError(logmsg)


def dec_nsp(
    obj_type: Type,
    obj: Any,

) -> Any:
    # breakpoint()
    actor: Actor = tractor.current_actor(
        err_on_no_runtime=False,
    )
    uid: tuple[str, str]|None = None if not actor else actor.uid
    print(
        f'{uid}\n'
        'CUSTOM DECODE\n'
        f'type-arg-> {obj_type}\n'
        f'obj-arg-> `{obj}`: {type(obj)}\n'
    )
    nsp = None
    # XXX, never happens right?
    if obj_type is Raw:
        breakpoint()

    if (
        obj_type is NamespacePath
        and isinstance(obj, str)
        and ':' in obj
    ):
        nsp = NamespacePath(obj)
        # TODO: we could build a generic handler using
        # JUST matching the obj_type part?
        # nsp = obj_type(obj)

    if nsp:
        print(f'Returning NSP instance: {nsp}')
        return nsp

    logmsg: str = (
        f'{uid}\n'
        'FAILED DECODE\n'
        f'type-> {obj_type}\n'
        f'obj-arg-> `{obj}`: {type(obj)}\n\n'
        f'current codec:\n'
        f'{current_codec()}\n'
    )
    # TODO: figure out the ignore subsys for this!
    # -[ ] option whether to defense-relay back the msg
    #   inside an `Invalid`/`Ignore`
    # -[ ] how to make this handling pluggable such that a
    #   `Channel`/`MsgTransport` can intercept and process
    #   back msgs either via exception handling or some other
    #   signal?
    log.warning(logmsg)
    # NOTE: this delivers the invalid
    # value up to `msgspec`'s decoding
    # machinery for error raising.
    return obj
    # raise NotImplementedError(logmsg)


def ex_func(*args):
    '''
    A mod level func we can ref and load via our `NamespacePath`
    python-object pointer `str` subtype.

    '''
    print(f'ex_func({args})')

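The `enc_nsp()`/`dec_nsp()` funcs above are exactly the shape `msgspec` expects for its `enc_hook`/`dec_hook` extension points; a minimal standalone round trip wiring them into a plain `msgspec` codec pair (per the doc's own claim that `NamespacePath` needs both hooks since it's neither a native type nor a `Struct`):

    import msgspec
    from tractor.msg import NamespacePath

    enc = msgspec.msgpack.Encoder(enc_hook=enc_nsp)
    dec = msgspec.msgpack.Decoder(
        type=NamespacePath,
        dec_hook=dec_nsp,
    )

    nsp = NamespacePath.from_ref(ex_func)
    assert dec.decode(enc.encode(nsp)) == nsp
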
@pytest.mark.parametrize(
    'add_codec_hooks',
    [
        True,
        False,
    ],
    ids=['use_codec_hooks', 'no_codec_hooks'],
)
def test_custom_extension_types(
    debug_mode: bool,
    add_codec_hooks: bool
):
    '''
    Verify that a `MsgCodec` (used for encoding all outbound IPC msgs
    and decoding all inbound `PayloadMsg`s) and a paired `MsgDec`
    (used for decoding the `PayloadMsg.pld: Raw` received within a given
    task's ipc `Context` scope) can both send and receive "extension types"
    as supported via custom converter hooks passed to `msgspec`.

    '''
    nsp_pld_dec: MsgDec = mk_dec(
        spec=None,  # ONLY support the ext type
        dec_hook=dec_nsp if add_codec_hooks else None,
        ext_types=[NamespacePath],
    )
    nsp_codec: MsgCodec = mk_codec(
        # ipc_pld_spec=Raw,  # default!

        # NOTE XXX: the encode hook MUST be used no matter what since
        # our `NamespacePath` is not any of a `Any` native type nor
        # a `msgspec.Struct` subtype - so `msgspec` has no way to know
        # how to encode it unless we provide the custom hook.
        #
        # AGAIN that is, regardless of whether we spec an
        # `Any`-decoded-pld the enc has no knowledge (by default)
        # how to enc `NamespacePath` (nsp), so we add a custom
        # hook to do that ALWAYS.
        enc_hook=enc_nsp if add_codec_hooks else None,

        # XXX NOTE: pretty sure this is mutex with the `type=` to
        # `Decoder`? so it won't work in tandem with the
        # `ipc_pld_spec` passed above?
        ext_types=[NamespacePath],

        # TODO? is it useful to have the `.pld` decoded *prior* to
        # the `PldRx`?? like perf or mem related?
        # ext_dec=nsp_pld_dec,
    )
    if add_codec_hooks:
        assert nsp_codec.dec.dec_hook is None

        # TODO? if we pass `ext_dec` above?
        # assert nsp_codec.dec.dec_hook is dec_nsp

        assert nsp_codec.enc.enc_hook is enc_nsp

    nsp = NamespacePath.from_ref(ex_func)

    try:
        nsp_bytes: bytes = nsp_codec.encode(nsp)
        nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes)
        nsp_rt_sin_msg.load_ref() is ex_func
    except TypeError:
        if not add_codec_hooks:
            pass

    try:
        msg_bytes: bytes = nsp_codec.encode(
            Started(
                cid='cid',
                pld=nsp,
            )
        )
        # since the ext-type obj should also be set as the msg.pld
        assert nsp_bytes in msg_bytes
        started_rt: Started = nsp_codec.decode(msg_bytes)
        pld: Raw = started_rt.pld
        assert isinstance(pld, Raw)
        nsp_rt: NamespacePath = nsp_pld_dec.decode(pld)
        assert isinstance(nsp_rt, NamespacePath)
        # in obj comparison terms they should be the same
        assert nsp_rt == nsp
        # ensure we've decoded to ext type!
        assert nsp_rt.load_ref() is ex_func

    except TypeError:
        if not add_codec_hooks:
            pass

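The round trip above leans on `NamespacePath` being a `str` newtype carrying a `module:qualname` pointer; conceptually (hypothetical helper names, not tractor's real implementation):

    import importlib

    def to_ref_str(func) -> str:
        # e.g. 'tests.test_caps_based_msging:ex_func'
        return f'{func.__module__}:{func.__qualname__}'

    def load_ref(ref: str):
        modpath, qualname = ref.rsplit(':', 1)
        return getattr(importlib.import_module(modpath), qualname)

    ref: str = to_ref_str(ex_func)
    assert load_ref(ref) is ex_func
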
@tractor.context
async def sleep_forever_in_sub(
    ctx: Context,
) -> None:
    await trio.sleep_forever()


def mk_custom_codec(
    add_hooks: bool,

) -> tuple[
    MsgCodec,  # encode to send
    MsgDec,    # pld receive-n-decode
]:
    '''
    Create custom `msgpack` enc/dec-hooks and set a `Decoder`
    which only loads `pld_spec` (like `NamespacePath`) types.

    '''

    # XXX NOTE XXX: despite defining `NamespacePath` as a type
    # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair
    # to cast to/from that type on the wire. See the docs:
    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types

    # if pld_spec is Any:
    #     pld_spec = Raw

    nsp_codec: MsgCodec = mk_codec(
        # ipc_pld_spec=Raw,  # default!

        # NOTE XXX: the encode hook MUST be used no matter what since
        # our `NamespacePath` is not any of a `Any` native type nor
        # a `msgspec.Struct` subtype - so `msgspec` has no way to know
        # how to encode it unless we provide the custom hook.
        #
        # AGAIN that is, regardless of whether we spec an
        # `Any`-decoded-pld the enc has no knowledge (by default)
        # how to enc `NamespacePath` (nsp), so we add a custom
        # hook to do that ALWAYS.
        enc_hook=enc_nsp if add_hooks else None,

        # XXX NOTE: pretty sure this is mutex with the `type=` to
        # `Decoder`? so it won't work in tandem with the
        # `ipc_pld_spec` passed above?
        ext_types=[NamespacePath],
    )
    # dec_hook=dec_nsp if add_hooks else None,
    return nsp_codec


@pytest.mark.parametrize(
    'limit_plds_args',
    [
        (
            {'dec_hook': None, 'ext_types': None},
            None,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': None},
            TypeError,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': [NamespacePath]},
            None,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]},
            None,
        ),
    ],
    ids=[
        'no_hook_no_ext_types',
        'only_hook',
        'hook_and_ext_types',
        'hook_and_ext_types_w_null',
    ]
)
def test_pld_limiting_usage(
    limit_plds_args: tuple[dict, Exception|None],
):
    '''
    Verify `dec_hook()` and `ext_types` need to either both be
    provided or we raise an explanatory type-error.

    '''
    kwargs, maybe_err = limit_plds_args
    async def main():
        async with tractor.open_nursery() as an:  # just to open runtime

            # XXX SHOULD NEVER WORK outside an ipc ctx scope!
            try:
                with limit_plds(**kwargs):
                    pass
            except RuntimeError:
                pass

            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )
            async with (
                p.open_context(
                    sleep_forever_in_sub
                ) as (ctx, first),
            ):
                try:
                    with limit_plds(**kwargs):
                        pass
                except maybe_err as exc:
                    assert type(exc) is maybe_err
                    pass


def chk_codec_applied(
    expect_codec: MsgCodec|None,
    enter_value: MsgCodec|None = None,

) -> MsgCodec:
    '''
    Buncha sanity checks ensuring that the IPC channel's
    context-vars are set to the expected codec and that our
    ctx-var wrapper APIs match the same.

    '''
    # TODO: play with tricycle again, bc this is supposed to work
    # the way we want?
    #
    # TreeVar
    # task: trio.Task = trio.lowlevel.current_task()
    # curr_codec = _ctxvar_MsgCodec.get_in(task)

    # ContextVar
    # task_ctx: Context = task.context
    # assert _ctxvar_MsgCodec in task_ctx
    # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec]
    if expect_codec is None:
        assert enter_value is None
        return

    # NOTE: currently we use this!
    # RunVar
    curr_codec: MsgCodec = current_codec()
    last_read_codec = _ctxvar_MsgCodec.get()
    # assert curr_codec is last_read_codec

    assert (
        (same_codec := expect_codec) is
        # returned from `mk_codec()`

        # yielded value from `apply_codec()`

        # read from current task's `contextvars.Context`
        curr_codec is
        last_read_codec

        # the default `msgspec` settings
        is not _codec._def_msgspec_codec
        is not _codec._def_tractor_codec
    )

    if enter_value:
        assert enter_value is same_codec


@tractor.context
async def send_back_values(
    ctx: Context,
    rent_pld_spec_type_strs: list[str],
    add_hooks: bool,

) -> None:
    '''
    Set up a custom codec to load instances of `NamespacePath`
    and ensure we can round trip a func ref with our parent.

    '''
    uid: tuple = tractor.current_actor().uid

    # init state in sub-actor should be default
    chk_codec_applied(
        expect_codec=_codec._def_tractor_codec,
    )

    # load pld spec from input str
    rent_pld_spec = _exts.dec_type_union(
        rent_pld_spec_type_strs,
        mods=[
            importlib.import_module(__name__),
        ],
    )
    rent_pld_spec_types: set[Type] = _codec.unpack_spec_types(
        rent_pld_spec,
    )

    # ONLY add ext-hooks if the rent specified a non-std type!
    add_hooks: bool = (
        NamespacePath in rent_pld_spec_types
        and
        add_hooks
    )

    # same as on parent side config.
    nsp_codec: MsgCodec|None = None
    if add_hooks:
        nsp_codec = mk_codec(
            enc_hook=enc_nsp,
            ext_types=[NamespacePath],
        )

    with (
        maybe_apply_codec(nsp_codec) as codec,
        limit_plds(
            rent_pld_spec,
            dec_hook=dec_nsp if add_hooks else None,
            ext_types=[NamespacePath] if add_hooks else None,
        ) as pld_dec,
    ):
        # ?XXX? SHOULD WE NOT be swapping the global codec since it
        # breaks `Context.started()` roundtripping checks??
        chk_codec_applied(
            expect_codec=nsp_codec,
            enter_value=codec,
        )

        # ?TODO, mismatch case(s)?
        #
        # ensure pld spec matches on both sides
        ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
        assert pld_dec is ctx_pld_dec
        child_pld_spec: Type = pld_dec.spec
        child_pld_spec_types: set[Type] = _codec.unpack_spec_types(
            child_pld_spec,
        )
        assert (
            child_pld_spec_types.issuperset(
                rent_pld_spec_types
            )
        )

        # ?TODO, try loop for each of the types in pld-superset?
        #
        # for send_value in [
        #     nsp,
        #     str(nsp),
        #     None,
        # ]:
        nsp = NamespacePath.from_ref(ex_func)
        try:
            print(
                f'{uid}: attempting to `.started({nsp})`\n'
                f'\n'
                f'rent_pld_spec: {rent_pld_spec}\n'
                f'child_pld_spec: {child_pld_spec}\n'
                f'codec: {codec}\n'
            )
            # await tractor.pause()
            await ctx.started(nsp)

        except tractor.MsgTypeError as _mte:
            mte = _mte

            # false -ve case
            if add_hooks:
                raise RuntimeError(
                    f'EXPECTED to `.started()` value given spec ??\n\n'
                    f'child_pld_spec -> {child_pld_spec}\n'
                    f'value = {nsp}: {type(nsp)}\n'
                )

            # true -ve case
            raise mte

        # TODO: maybe we should add our own wrapper error so as to
        # be interchange-lib agnostic?
        # -[ ] the error type is wtv is raised from the hook so we
        #   could also require a type-class of errors for
        #   indicating whether the hook-failure can be handled by
        #   a nasty-dialog-unprot sub-sys?
        except TypeError as typerr:
            # false -ve
            if add_hooks:
                raise RuntimeError('Should have been able to send `nsp`??')

            # true -ve
            print('Failed to send `nsp` due to no ext hooks set!')
            raise typerr

        # now try sending a set of valid and invalid plds to ensure
        # the pld spec is respected.
        sent: list[Any] = []
        async with ctx.open_stream() as ipc:
            print(
                f'{uid}: streaming all pld types to rent..'
            )

            # for send_value, expect_send in iter_send_val_items:
            for send_value in [
                nsp,
                str(nsp),
                None,
            ]:
                send_type: Type = type(send_value)
                print(
                    f'{uid}: SENDING NEXT pld\n'
                    f'send_type: {send_type}\n'
                    f'send_value: {send_value}\n'
                )
                try:
                    await ipc.send(send_value)
                    sent.append(send_value)

                except ValidationError as valerr:
                    print(f'{uid} FAILED TO SEND {send_value}!')

                    # false -ve
                    if add_hooks:
                        raise RuntimeError(
                            f'EXPECTED to roundtrip value given spec:\n'
                            f'rent_pld_spec -> {rent_pld_spec}\n'
                            f'child_pld_spec -> {child_pld_spec}\n'
                            f'value = {send_value}: {send_type}\n'
                        )

                    # true -ve
                    raise valerr
                    # continue

            else:
                print(
                    f'{uid}: finished sending all values\n'
                    'Should be exiting stream block!\n'
                )

        print(f'{uid}: exited streaming block!')


@cm
def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None:
    if codec is None:
        yield None
        return

    with apply_codec(codec) as codec:
        yield codec

@pytest.mark.parametrize(
    'pld_spec',
    [
        Any,
        NamespacePath,
        NamespacePath|None,  # the "maybe" spec Bo
    ],
    ids=[
        'any_type',
        'only_nsp_ext',
        'maybe_nsp_ext',
    ]
)
@pytest.mark.parametrize(
    'add_hooks',
    [
        True,
        False,
    ],
    ids=[
        'use_codec_hooks',
        'no_codec_hooks',
    ],
)
def test_ext_types_over_ipc(
    debug_mode: bool,
    pld_spec: Union[Type],
    add_hooks: bool,
):
    '''
    Ensure we can support extension types converted using
    `enc/dec_hook()`s passed to the `.msg.limit_plds()` API
    and that sane errors happen when we try do the same without
    the codec hooks.

    '''
    pld_types: set[Type] = _codec.unpack_spec_types(pld_spec)

    async def main():

        # sanity check the default pld-spec beforehand
        chk_codec_applied(
            expect_codec=_codec._def_tractor_codec,
        )

        # extension type we want to send as msg payload
        nsp = NamespacePath.from_ref(ex_func)

        # ^NOTE, 2 cases:
        # - codec hooks not added -> decode nsp as `str`
        # - codec with hooks -> decode nsp as `NamespacePath`
        nsp_codec: MsgCodec|None = None
        if (
            NamespacePath in pld_types
            and
            add_hooks
        ):
            nsp_codec = mk_codec(
                enc_hook=enc_nsp,
                ext_types=[NamespacePath],
            )

        async with tractor.open_nursery(
            debug_mode=debug_mode,
        ) as an:
            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )
            with (
                maybe_apply_codec(nsp_codec) as codec,
            ):
                chk_codec_applied(
                    expect_codec=nsp_codec,
                    enter_value=codec,
                )
                rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec)

                # XXX should raise an mte (`MsgTypeError`)
                # when `add_hooks == False` bc the input
                # `expect_ipc_send` kwarg has a nsp which can't be
                # serialized!
                #
                # TODO: can we ensure this happens from the
                # `Return`-side (aka the sub) as well?
                try:
                    ctx: tractor.Context
                    ipc: tractor.MsgStream
                    async with (

                        # XXX should raise an mte (`MsgTypeError`)
                        # when `add_hooks == False`..
                        p.open_context(
                            send_back_values,
                            # expect_debug=debug_mode,
                            rent_pld_spec_type_strs=rent_pld_spec_type_strs,
                            add_hooks=add_hooks,
                            # expect_ipc_send=expect_ipc_send,
                        ) as (ctx, first),

                        ctx.open_stream() as ipc,
                    ):
                        with (
                            limit_plds(
                                pld_spec,
                                dec_hook=dec_nsp if add_hooks else None,
                                ext_types=[NamespacePath] if add_hooks else None,
                            ) as pld_dec,
                        ):
                            ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
                            assert pld_dec is ctx_pld_dec

                            # if (
                            #     not add_hooks
                            #     and
                            #     NamespacePath in
                            # ):
                            #     pytest.fail('ctx should fail to open without custom enc_hook!?')

                            await ipc.send(nsp)
                            nsp_rt = await ipc.receive()

                            assert nsp_rt == nsp
                            assert nsp_rt.load_ref() is ex_func

                # this test passes bc we can go no further!
                except MsgTypeError as mte:
                    # if not add_hooks:
                    #     # teardown nursery
                    #     await p.cancel_actor()
                    #     return

                    raise mte

            await p.cancel_actor()

    if (
        NamespacePath in pld_types
        and
        add_hooks
    ):
        trio.run(main)

    else:
        with pytest.raises(
            expected_exception=tractor.RemoteActorError,
        ) as excinfo:
            trio.run(main)

        exc = excinfo.value
        # bc `.started(nsp: NamespacePath)` will raise
        assert exc.boxed_type is TypeError

# def chk_pld_type(
#     payload_spec: Type[Struct]|Any,
#     pld: Any,

#     expect_roundtrip: bool|None = None,

# ) -> bool:

#     pld_val_type: Type = type(pld)

#     # TODO: verify that the overridden subtypes
#     # DO NOT have modified type-annots from original!
#     # 'Start',  .pld: FuncSpec
#     # 'StartAck',  .pld: IpcCtxSpec
#     # 'Stop',  .pld: UNSET
#     # 'Error',  .pld: ErrorData

#     codec: MsgCodec = mk_codec(
#         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified
#         # type union.
#         ipc_pld_spec=payload_spec,
#     )

#     # make a one-off dec to compare with our `MsgCodec` instance
#     # which does the below `mk_msg_spec()` call internally
#     ipc_msg_spec: Union[Type[Struct]]
#     msg_types: list[PayloadMsg[payload_spec]]
#     (
#         ipc_msg_spec,
#         msg_types,
#     ) = mk_msg_spec(
#         payload_type_union=payload_spec,
#     )
#     _enc = msgpack.Encoder()
#     _dec = msgpack.Decoder(
#         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]`
#     )

#     assert (
#         payload_spec
#         ==
#         codec.pld_spec
#     )

#     # assert codec.dec == dec
#     #
#     # ^-XXX-^ not sure why these aren't "equal" but when cast
#     # to `str` they seem to match ?? .. kk

#     assert (
#         str(ipc_msg_spec)
#         ==
#         str(codec.msg_spec)
#         ==
#         str(_dec.type)
#         ==
#         str(codec.dec.type)
#     )

#     # verify the boxed-type for all variable payload-type msgs.
#     if not msg_types:
#         breakpoint()

#     roundtrip: bool|None = None
#     pld_spec_msg_names: list[str] = [
#         td.__name__ for td in _payload_msgs
#     ]
#     for typedef in msg_types:

#         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names
#         if skip_runtime_msg:
#             continue

#         pld_field = structs.fields(typedef)[1]
#         assert pld_field.type is payload_spec  # TODO-^ does this need to work to get all subtypes to adhere?

#         kwargs: dict[str, Any] = {
#             'cid': '666',
#             'pld': pld,
#         }
#         enc_msg: PayloadMsg = typedef(**kwargs)

#         _wire_bytes: bytes = _enc.encode(enc_msg)
#         wire_bytes: bytes = codec.enc.encode(enc_msg)
#         assert _wire_bytes == wire_bytes

#         ve: ValidationError|None = None
#         try:
#             dec_msg = codec.dec.decode(wire_bytes)
#             _dec_msg = _dec.decode(wire_bytes)

#             # decoded msg and thus payload should be exactly same!
#             assert (roundtrip := (
#                 _dec_msg
#                 ==
#                 dec_msg
#                 ==
#                 enc_msg
#             ))

#             if (
#                 expect_roundtrip is not None
#                 and expect_roundtrip != roundtrip
#             ):
#                 breakpoint()

#             assert (
#                 pld
#                 ==
#                 dec_msg.pld
#                 ==
#                 enc_msg.pld
#             )
#             # assert (roundtrip := (_dec_msg == enc_msg))

#         except ValidationError as _ve:
#             ve = _ve
#             roundtrip: bool = False
#             if pld_val_type is payload_spec:
#                 raise ValueError(
#                     'Got `ValidationError` despite type-var match!?\n'
#                     f'pld_val_type: {pld_val_type}\n'
#                     f'payload_type: {payload_spec}\n'
#                 ) from ve

#             else:
#                 # ow we good cuz the pld spec mismatched.
#                 print(
#                     'Got expected `ValidationError` since,\n'
#                     f'{pld_val_type} is not {payload_spec}\n'
#                 )
#         else:
#             if (
#                 payload_spec is not Any
#                 and
#                 pld_val_type is not payload_spec
#             ):
#                 raise ValueError(
#                     'DID NOT `ValidationError` despite expected type match!?\n'
#                     f'pld_val_type: {pld_val_type}\n'
#                     f'payload_type: {payload_spec}\n'
#                 )

#     # full codec decode should always be attempted!
#     if roundtrip is None:
#         breakpoint()

#     return roundtrip


# ?TODO? maybe remove since covered in the newer `test_pldrx_limiting`
# via end-2-end testing of all this?
# -[ ] IOW do we really NEED this lowlevel unit testing?
#
# def test_limit_msgspec(
#     debug_mode: bool,
# ):
#     '''
#     Internals unit testing to verify that type-limiting an IPC ctx's
#     msg spec with `Pldrx.limit_plds()` results in various
#     encapsulated `msgspec` object settings and state.

#     '''
#     async def main():
#         async with tractor.open_root_actor(
#             debug_mode=debug_mode,
#         ):
#             # ensure we can round-trip a boxing `PayloadMsg`
#             assert chk_pld_type(
#                 payload_spec=Any,
#                 pld=None,
#                 expect_roundtrip=True,
#             )

#             # verify that a mis-typed payload value won't decode
#             assert not chk_pld_type(
#                 payload_spec=int,
#                 pld='doggy',
#             )

#             # parametrize the boxed `.pld` type as a custom-struct
#             # and ensure that parametrization propagates
#             # to all payload-msg-spec-able subtypes!
#             class CustomPayload(Struct):
#                 name: str
#                 value: Any

#             assert not chk_pld_type(
#                 payload_spec=CustomPayload,
#                 pld='doggy',
#             )

#             assert chk_pld_type(
#                 payload_spec=CustomPayload,
#                 pld=CustomPayload(name='doggy', value='urmom')
#             )

#             # yah, we can `.pause_from_sync()` now!
#             # breakpoint()

#     trio.run(main)

@ -871,7 +871,7 @@ async def serve_subactors(
            )
            await ipc.send((
                peer.chan.uid,
                peer.chan.raddr.unwrap(),
                peer.chan.raddr,
            ))

    print('Spawner exiting spawn serve loop!')


@ -38,7 +38,7 @@ async def test_self_is_registered_localportal(reg_addr):
    "Verify waiting on the arbiter to register itself using a local portal."
    actor = tractor.current_actor()
    assert actor.is_arbiter
    async with tractor.get_registry(reg_addr) as portal:
    async with tractor.get_registry(*reg_addr) as portal:
        assert isinstance(portal, tractor._portal.LocalPortal)

        with trio.fail_after(0.2):


@ -32,7 +32,7 @@ def test_abort_on_sigint(daemon):
@tractor_test
async def test_cancel_remote_arbiter(daemon, reg_addr):
    assert not tractor.current_actor().is_arbiter
    async with tractor.get_registry(reg_addr) as portal:
    async with tractor.get_registry(*reg_addr) as portal:
        await portal.cancel_actor()

    time.sleep(0.1)

@ -41,7 +41,7 @@ async def test_cancel_remote_arbiter(daemon, reg_addr):

    # no arbiter socket should exist
    with pytest.raises(OSError):
        async with tractor.get_registry(reg_addr) as portal:
        async with tractor.get_registry(*reg_addr) as portal:
            pass


|
@ -100,29 +100,16 @@ async def streamer(
|
|||
@acm
|
||||
async def open_stream() -> Awaitable[tractor.MsgStream]:
|
||||
|
||||
try:
|
||||
async with tractor.open_nursery() as an:
|
||||
portal = await an.start_actor(
|
||||
'streamer',
|
||||
enable_modules=[__name__],
|
||||
)
|
||||
async with (
|
||||
portal.open_context(streamer) as (ctx, first),
|
||||
ctx.open_stream() as stream,
|
||||
):
|
||||
yield stream
|
||||
async with tractor.open_nursery() as tn:
|
||||
portal = await tn.start_actor('streamer', enable_modules=[__name__])
|
||||
async with (
|
||||
portal.open_context(streamer) as (ctx, first),
|
||||
ctx.open_stream() as stream,
|
||||
):
|
||||
yield stream
|
||||
|
||||
print('Cancelling streamer')
|
||||
await portal.cancel_actor()
|
||||
print('Cancelled streamer')
|
||||
|
||||
except Exception as err:
|
||||
print(
|
||||
f'`open_stream()` errored?\n'
|
||||
f'{err!r}\n'
|
||||
)
|
||||
await tractor.pause(shield=True)
|
||||
raise err
|
||||
await portal.cancel_actor()
|
||||
print('CANCELLED STREAMER')
|
||||
|
||||
|
||||
@acm
|
||||
|
@ -145,28 +132,19 @@ async def maybe_open_stream(taskname: str):
|
|||
yield stream
|
||||
|
||||
|
||||
def test_open_local_sub_to_stream(
|
||||
debug_mode: bool,
|
||||
):
|
||||
def test_open_local_sub_to_stream():
|
||||
'''
|
||||
Verify a single inter-actor stream can can be fanned-out shared to
|
||||
N local tasks using `trionics.maybe_open_context()`.
|
||||
N local tasks using ``trionics.maybe_open_context():``.
|
||||
|
||||
'''
|
||||
timeout: float = 3.6
|
||||
if platform.system() == "Windows":
|
||||
timeout: float = 10
|
||||
|
||||
if debug_mode:
|
||||
timeout = 999
|
||||
timeout: float = 3.6 if platform.system() != "Windows" else 10
|
||||
|
||||
async def main():
|
||||
|
||||
full = list(range(1000))
|
||||
|
||||
async def get_sub_and_pull(taskname: str):
|
||||
|
||||
stream: tractor.MsgStream
|
||||
async with (
|
||||
maybe_open_stream(taskname) as stream,
|
||||
):
|
||||
|
@ -187,27 +165,17 @@ def test_open_local_sub_to_stream(
|
|||
assert set(seq).issubset(set(full))
|
||||
print(f'{taskname} finished')
|
||||
|
||||
with trio.fail_after(timeout) as cs:
|
||||
with trio.fail_after(timeout):
|
||||
# TODO: turns out this isn't multi-task entrant XD
|
||||
# We probably need an indepotent entry semantic?
|
||||
async with tractor.open_root_actor(
|
||||
debug_mode=debug_mode,
|
||||
):
|
||||
async with tractor.open_root_actor():
|
||||
async with (
|
||||
trio.open_nursery() as tn,
|
||||
trio.open_nursery() as nurse,
|
||||
):
|
||||
for i in range(10):
|
||||
tn.start_soon(
|
||||
get_sub_and_pull,
|
||||
f'task_{i}',
|
||||
)
|
||||
nurse.start_soon(get_sub_and_pull, f'task_{i}')
|
||||
await trio.sleep(0.001)
|
||||
|
||||
print('all consumer tasks finished')
|
||||
|
||||
if cs.cancelled_caught:
|
||||
pytest.fail(
|
||||
'Should NOT time out in `open_root_actor()` ?'
|
||||
)
|
||||
|
||||
trio.run(main)
|
||||
|
|
|
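The `maybe_open_stream()` helper whose tail appears above shares one actor-backed stream across all N `get_sub_and_pull()` tasks via call-site caching; roughly how that wiring reads (signature per my reading of `tractor.trionics`, so treat the details as a sketch rather than the file's exact body):

    from contextlib import asynccontextmanager as acm
    import tractor

    @acm
    async def maybe_open_stream(taskname: str):
        async with tractor.trionics.maybe_open_context(
            # tasks passing the same acm func (and cache key) share
            # one actually-opened instance; later entrants get a
            # cache hit instead of spawning a second remote stream.
            acm_func=open_stream,
        ) as (cache_hit, stream):
            if cache_hit:
                print(f'{taskname} got cached stream!')
            yield stream
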
@ -1,211 +0,0 @@
|
|||
import time
|
||||
|
||||
import trio
|
||||
import pytest
|
||||
|
||||
import tractor
|
||||
from tractor.ipc._ringbuf import (
|
||||
open_ringbuf,
|
||||
RBToken,
|
||||
RingBuffSender,
|
||||
RingBuffReceiver
|
||||
)
|
||||
from tractor._testing.samples import (
|
||||
generate_sample_messages,
|
||||
)
|
||||
|
||||
# in case you don't want to melt your cores, uncomment dis!
|
||||
pytestmark = pytest.mark.skip
|
||||
|
||||
|
||||
@tractor.context
|
||||
async def child_read_shm(
|
||||
ctx: tractor.Context,
|
||||
msg_amount: int,
|
||||
token: RBToken,
|
||||
total_bytes: int,
|
||||
) -> None:
|
||||
recvd_bytes = 0
|
||||
await ctx.started()
|
||||
start_ts = time.time()
|
||||
async with RingBuffReceiver(token) as receiver:
|
||||
while recvd_bytes < total_bytes:
|
||||
msg = await receiver.receive_some()
|
            recvd_bytes += len(msg)

        # make sure we dont hold any memoryviews
        # before the ctx manager aclose()
        msg = None

    end_ts = time.time()
    elapsed = end_ts - start_ts
    elapsed_ms = int(elapsed * 1000)

    print(f'\n\telapsed ms: {elapsed_ms}')
    print(f'\tmsg/sec: {int(msg_amount / elapsed):,}')
    print(f'\tbytes/sec: {int(recvd_bytes / elapsed):,}')


@tractor.context
async def child_write_shm(
    ctx: tractor.Context,
    msg_amount: int,
    rand_min: int,
    rand_max: int,
    token: RBToken,
) -> None:
    msgs, total_bytes = generate_sample_messages(
        msg_amount,
        rand_min=rand_min,
        rand_max=rand_max,
    )
    await ctx.started(total_bytes)
    async with RingBuffSender(token) as sender:
        for msg in msgs:
            await sender.send_all(msg)


@pytest.mark.parametrize(
    'msg_amount,rand_min,rand_max,buf_size',
    [
        # simple case, fixed payloads, large buffer
        (100_000, 0, 0, 10 * 1024),

        # guaranteed wrap around on every write
        (100, 10 * 1024, 20 * 1024, 10 * 1024),

        # large payload size, but large buffer
        (10_000, 256 * 1024, 512 * 1024, 10 * 1024 * 1024)
    ],
    ids=[
        'fixed_payloads_large_buffer',
        'wrap_around_every_write',
        'large_payloads_large_buffer',
    ]
)
def test_ringbuf(
    msg_amount: int,
    rand_min: int,
    rand_max: int,
    buf_size: int
):
    async def main():
        with open_ringbuf(
            'test_ringbuf',
            buf_size=buf_size
        ) as token:
            proc_kwargs = {
                'pass_fds': (token.write_eventfd, token.wrap_eventfd)
            }

            common_kwargs = {
                'msg_amount': msg_amount,
                'token': token,
            }
            async with tractor.open_nursery() as an:
                send_p = await an.start_actor(
                    'ring_sender',
                    enable_modules=[__name__],
                    proc_kwargs=proc_kwargs
                )
                recv_p = await an.start_actor(
                    'ring_receiver',
                    enable_modules=[__name__],
                    proc_kwargs=proc_kwargs
                )
                async with (
                    send_p.open_context(
                        child_write_shm,
                        rand_min=rand_min,
                        rand_max=rand_max,
                        **common_kwargs
                    ) as (sctx, total_bytes),
                    recv_p.open_context(
                        child_read_shm,
                        **common_kwargs,
                        total_bytes=total_bytes,
                    ) as (sctx, _sent),
                ):
                    await recv_p.result()

                await send_p.cancel_actor()
                await recv_p.cancel_actor()

    trio.run(main)


@tractor.context
async def child_blocked_receiver(
    ctx: tractor.Context,
    token: RBToken
):
    async with RingBuffReceiver(token) as receiver:
        await ctx.started()
        await receiver.receive_some()


def test_ring_reader_cancel():
    async def main():
        with open_ringbuf('test_ring_cancel_reader') as token:
            async with (
                tractor.open_nursery() as an,
                RingBuffSender(token) as _sender,
            ):
                recv_p = await an.start_actor(
                    'ring_blocked_receiver',
                    enable_modules=[__name__],
                    proc_kwargs={
                        'pass_fds': (token.write_eventfd, token.wrap_eventfd)
                    }
                )
                async with (
                    recv_p.open_context(
                        child_blocked_receiver,
                        token=token
                    ) as (sctx, _sent),
                ):
                    await trio.sleep(1)
                    await an.cancel()

    with pytest.raises(tractor._exceptions.ContextCancelled):
        trio.run(main)


@tractor.context
async def child_blocked_sender(
    ctx: tractor.Context,
    token: RBToken
):
    async with RingBuffSender(token) as sender:
        await ctx.started()
        await sender.send_all(b'this will wrap')


def test_ring_sender_cancel():
    async def main():
        with open_ringbuf(
            'test_ring_cancel_sender',
            buf_size=1
        ) as token:
            async with tractor.open_nursery() as an:
                recv_p = await an.start_actor(
                    'ring_blocked_sender',
                    enable_modules=[__name__],
                    proc_kwargs={
                        'pass_fds': (token.write_eventfd, token.wrap_eventfd)
                    }
                )
                async with (
                    recv_p.open_context(
                        child_blocked_sender,
                        token=token
                    ) as (sctx, _sent),
                ):
                    await trio.sleep(1)
                    await an.cancel()

    with pytest.raises(tractor._exceptions.ContextCancelled):
        trio.run(main)

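One detail in the tests above is easy to miss: an `RBToken` only carries raw eventfd *numbers*, so every subactor process that uses the ring must explicitly inherit those descriptors. A minimal sketch of just that wiring, using the same `open_ringbuf` API (the import path and actor name here are assumptions, not from the diff):

import trio
import tractor
# NOTE: assumed import path; the test module above pulls this name
# from tractor's ringbuf IPC subpkg.
from tractor.ipc._ringbuf import open_ringbuf

async def main():
    # without `pass_fds` the child would see dangling fd numbers
    with open_ringbuf('demo_ring', buf_size=8 * 1024) as token:
        async with tractor.open_nursery() as an:
            await an.start_actor(
                'ring_user',  # hypothetical actor name
                enable_modules=[__name__],
                proc_kwargs={
                    'pass_fds': (token.write_eventfd, token.wrap_eventfd),
                },
            )
            await an.cancel()

trio.run(main)
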
@ -1,85 +0,0 @@
'''
Runtime boot/init sanity.

'''

import pytest
import trio

import tractor
from tractor._exceptions import RuntimeFailure


@tractor.context
async def open_new_root_in_sub(
    ctx: tractor.Context,
) -> None:

    async with tractor.open_root_actor():
        pass


@pytest.mark.parametrize(
    'open_root_in',
    ['root', 'sub'],
    ids='open_2nd_root_in={}'.format,
)
def test_only_one_root_actor(
    open_root_in: str,
    reg_addr: tuple,
    debug_mode: bool
):
    '''
    Verify we specially fail whenever more then one root actor
    is attempted to be opened within an already opened tree.

    '''
    async def main():
        async with tractor.open_nursery() as an:

            if open_root_in == 'root':
                async with tractor.open_root_actor(
                    registry_addrs=[reg_addr],
                ):
                    pass

            ptl: tractor.Portal = await an.start_actor(
                name='bad_rooty_boi',
                enable_modules=[__name__],
            )

            async with ptl.open_context(
                open_new_root_in_sub,
            ) as (ctx, first):
                pass

    if open_root_in == 'root':
        with pytest.raises(
            RuntimeFailure
        ) as excinfo:
            trio.run(main)

    else:
        with pytest.raises(
            tractor.RemoteActorError,
        ) as excinfo:
            trio.run(main)

        assert excinfo.value.boxed_type is RuntimeFailure


def test_implicit_root_via_first_nursery(
    reg_addr: tuple,
    debug_mode: bool
):
    '''
    The first `ActorNursery` open should implicitly call
    `_root.open_root_actor()`.

    '''
    async def main():
        async with tractor.open_nursery() as an:
            assert an._implicit_runtime_started
            assert tractor.current_actor().aid.name == 'root'

    trio.run(main)

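The invariant the removed suite checks reduces to: only one `open_root_actor()` may be live per tree. A condensed sketch using the same APIs as the test above (error-handling shape is illustrative):

import trio
import tractor
from tractor._exceptions import RuntimeFailure

async def main():
    async with tractor.open_nursery():  # implicitly boots the root actor
        async with tractor.open_root_actor():  # 2nd in-tree root
            pass

try:
    trio.run(main)
except RuntimeFailure:
    pass  # expected: one root actor per tree
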
@ -1,167 +0,0 @@
"""
Shared mem primitives and APIs.

"""
import uuid

# import numpy
import pytest
import trio
import tractor
from tractor.ipc._shm import (
    open_shm_list,
    attach_shm_list,
)


@tractor.context
async def child_attach_shml_alot(
    ctx: tractor.Context,
    shm_key: str,
) -> None:

    await ctx.started(shm_key)

    # now try to attach a boatload of times in a loop..
    for _ in range(1000):
        shml = attach_shm_list(
            key=shm_key,
            readonly=False,
        )
        assert shml.shm.name == shm_key
        await trio.sleep(0.001)


def test_child_attaches_alot():
    async def main():
        async with tractor.open_nursery() as an:

            # allocate writeable list in parent
            key = f'shml_{uuid.uuid4()}'
            shml = open_shm_list(
                key=key,
            )

            portal = await an.start_actor(
                'shm_attacher',
                enable_modules=[__name__],
            )

            async with (
                portal.open_context(
                    child_attach_shml_alot,
                    shm_key=shml.key,
                ) as (ctx, start_val),
            ):
                assert start_val == key
                await ctx.result()

            await portal.cancel_actor()

    trio.run(main)


@tractor.context
async def child_read_shm_list(
    ctx: tractor.Context,
    shm_key: str,
    use_str: bool,
    frame_size: int,
) -> None:

    # attach in child
    shml = attach_shm_list(
        key=shm_key,
        # dtype=str if use_str else float,
    )
    await ctx.started(shml.key)

    async with ctx.open_stream() as stream:
        async for i in stream:
            print(f'(child): reading shm list index: {i}')

            if use_str:
                expect = str(float(i))
            else:
                expect = float(i)

            if frame_size == 1:
                val = shml[i]
                assert expect == val
                print(f'(child): reading value: {val}')
            else:
                frame = shml[i - frame_size:i]
                print(f'(child): reading frame: {frame}')


@pytest.mark.parametrize(
    'use_str',
    [False, True],
    ids=lambda i: f'use_str_values={i}',
)
@pytest.mark.parametrize(
    'frame_size',
    [1, 2**6, 2**10],
    ids=lambda i: f'frame_size={i}',
)
def test_parent_writer_child_reader(
    use_str: bool,
    frame_size: int,
):

    async def main():
        async with tractor.open_nursery(
            # debug_mode=True,
        ) as an:

            portal = await an.start_actor(
                'shm_reader',
                enable_modules=[__name__],
                debug_mode=True,
            )

            # allocate writeable list in parent
            key = 'shm_list'
            seq_size = int(2 * 2 ** 10)
            shml = open_shm_list(
                key=key,
                size=seq_size,
                dtype=str if use_str else float,
                readonly=False,
            )

            async with (
                portal.open_context(
                    child_read_shm_list,
                    shm_key=key,
                    use_str=use_str,
                    frame_size=frame_size,
                ) as (ctx, sent),

                ctx.open_stream() as stream,
            ):

                assert sent == key

                for i in range(seq_size):

                    val = float(i)
                    if use_str:
                        val = str(val)

                    # print(f'(parent): writing {val}')
                    shml[i] = val

                    # only on frame fills do we
                    # signal to the child that a frame's
                    # worth is ready.
                    if (i % frame_size) == 0:
                        print(f'(parent): signalling frame full on {val}')
                        await stream.send(i)
                else:
                    print(f'(parent): signalling final frame on {val}')
                    await stream.send(i)

            await portal.cancel_actor()

    trio.run(main)

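The removed suite boils down to one ownership split which the sketch below restates in a single process, assuming the same `open_shm_list`/`attach_shm_list` signatures imported above (the key name is invented):

from tractor.ipc._shm import (
    open_shm_list,
    attach_shm_list,
)

# writer side allocates the segment and writes..
shml = open_shm_list(
    key='demo_shml',  # invented key
    size=16,
    readonly=False,
)
shml[0] = 0.5

# ..readers attach purely by key, as the child actors above do.
reader = attach_shm_list(key='demo_shml')
assert reader[0] == 0.5
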
@ -2,7 +2,6 @@
Spawning basics

"""
from functools import partial
from typing import (
    Any,
)

@ -13,99 +12,74 @@ import tractor

from tractor._testing import tractor_test

data_to_pass_down = {
    'doggy': 10,
    'kitty': 4,
}
data_to_pass_down = {'doggy': 10, 'kitty': 4}


async def spawn(
    should_be_root: bool,
    is_arbiter: bool,
    data: dict,
    reg_addr: tuple[str, int],

    debug_mode: bool = False,
):
    namespaces = [__name__]

    await trio.sleep(0.1)
    actor = tractor.current_actor(err_on_no_runtime=False)

    if should_be_root:
        assert actor is None  # no runtime yet
        async with (
            tractor.open_root_actor(
                arbiter_addr=reg_addr,
            ),
            tractor.open_nursery() as an,
        ):
            # now runtime exists
            actor: tractor.Actor = tractor.current_actor()
            assert actor.is_arbiter == should_be_root
    async with tractor.open_root_actor(
        arbiter_addr=reg_addr,
    ):
        actor = tractor.current_actor()
        assert actor.is_arbiter == is_arbiter
        data = data_to_pass_down

            # spawns subproc here
            portal: tractor.Portal = await an.run_in_actor(
                fn=spawn,
        if actor.is_arbiter:
            async with tractor.open_nursery() as nursery:

                # spawning args
                name='sub-actor',
                enable_modules=[__name__],
                # forks here
                portal = await nursery.run_in_actor(
                    spawn,
                    is_arbiter=False,
                    name='sub-actor',
                    data=data,
                    reg_addr=reg_addr,
                    enable_modules=namespaces,
                )

                # passed to a subactor-recursive RPC invoke
                # of this same `spawn()` fn.
                should_be_root=False,
                data=data_to_pass_down,
                reg_addr=reg_addr,
            )

            assert len(an._children) == 1
            assert (
                portal.channel.uid
                in
                tractor.current_actor().ipc_server._peers
            )

            # get result from child subactor
            result = await portal.result()
            assert result == 10
            return result
    else:
        assert actor.is_arbiter == should_be_root
        return 10
                assert len(nursery._children) == 1
                assert portal.channel.uid in tractor.current_actor()._peers
                # be sure we can still get the result
                result = await portal.result()
                assert result == 10
                return result
        else:
            return 10


def test_run_in_actor_same_func_in_child(
    reg_addr: tuple,
    debug_mode: bool,
def test_local_arbiter_subactor_global_state(
    reg_addr,
):
    result = trio.run(
        partial(
            spawn,
            should_be_root=True,
            data=data_to_pass_down,
            reg_addr=reg_addr,
            debug_mode=debug_mode,
        )
        spawn,
        True,
        data_to_pass_down,
        reg_addr,
    )
    assert result == 10


async def movie_theatre_question():
    '''
    A question asked in a dark theatre, in a tangent
    """A question asked in a dark theatre, in a tangent
    (errr, I mean different) process.

    '''
    """
    return 'have you ever seen a portal?'


@tractor_test
async def test_movie_theatre_convo(start_method):
    '''
    The main ``tractor`` routine.
    """The main ``tractor`` routine.
    """
    async with tractor.open_nursery() as n:

    '''
    async with tractor.open_nursery(debug_mode=True) as an:

        portal = await an.start_actor(
        portal = await n.start_actor(
            'frank',
            # enable the actor to run funcs from this current module
            enable_modules=[__name__],

@ -144,8 +118,8 @@ async def test_most_beautiful_word(
    with trio.fail_after(1):
        async with tractor.open_nursery(
            debug_mode=debug_mode,
        ) as an:
            portal = await an.run_in_actor(
        ) as n:
            portal = await n.run_in_actor(
                cellar_door,
                return_value=return_value,
                name='some_linguist',

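Both branches of `spawn()` above rely on the same portal round-trip; stripped to its core it is just this (a sketch, with a hypothetical worker fn):

import trio
import tractor

async def ten() -> int:
    return 10

async def main() -> None:
    async with tractor.open_nursery() as an:
        portal = await an.run_in_actor(
            ten,
            name='sub-actor',
        )
        # one-shot: the subactor exits after delivering its result
        assert await portal.result() == 10

trio.run(main)
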
@ -180,8 +180,7 @@ def test_acm_embedded_nursery_propagates_enter_err(
    with tractor.devx.maybe_open_crash_handler(
        pdb=debug_mode,
    ) as bxerr:
        if bxerr:
            assert not bxerr.value
        assert not bxerr.value

        async with (
            wraps_tn_that_always_cancels() as tn,

@ -64,7 +64,7 @@ from ._root import (
    run_daemon as run_daemon,
    open_root_actor as open_root_actor,
)
from .ipc import Channel as Channel
from ._ipc import Channel as Channel
from ._portal import Portal as Portal
from ._runtime import Actor as Actor
# from . import hilevel as hilevel
from . import hilevel as hilevel

282 tractor/_addr.py
@ -1,282 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from __future__ import annotations
from uuid import uuid4
from typing import (
    Protocol,
    ClassVar,
    Type,
    TYPE_CHECKING,
)

from bidict import bidict
from trio import (
    SocketListener,
)

from .log import get_logger
from ._state import (
    _def_tpt_proto,
)
from .ipc._tcp import TCPAddress
from .ipc._uds import UDSAddress

if TYPE_CHECKING:
    from ._runtime import Actor

log = get_logger(__name__)


# TODO, maybe breakout the netns key to a struct?
# class NetNs(Struct)[str, int]:
#     ...

# TODO, can't we just use a type alias
# for this? namely just some `tuple[str, int, str, str]`?
#
# -[ ] would also just be simpler to keep this as SockAddr[tuple]
#    or something, implying it's just a simple pair of values which can
#    presumably be mapped to all transports?
# -[ ] `pydoc socket.socket.getsockname()` delivers a 4-tuple for
#    ipv6 `(hostaddr, port, flowinfo, scope_id)`.. so how should we
#    handle that?
# -[ ] as a further alternative to this wrap()/unwrap() approach we
#    could just implement `enc/dec_hook()`s for the `Address`-types
#    and just deal with our internal objs directly and always and
#    leave it to the codec layer to figure out marshalling?
#    |_ would mean only one spot to do the `.unwrap()` (which we may
#      end up needing to call from the hook()s anyway?)
# -[x] rename to `UnwrappedAddress[Descriptor]` ??
#    seems like the right name as per,
#    https://www.geeksforgeeks.org/introduction-to-address-descriptor/
#
UnwrappedAddress = (
    # tcp/udp/uds
    tuple[
        str,  # host/domain(tcp), filesys-dir(uds)
        int|str,  # port/path(uds)
    ]
    # ?TODO? should we also include another 2 fields from
    # our `Aid` msg such that we include the runtime `Actor.uid`
    # of `.name` and `.uuid`?
    # - would ensure uniqueness across entire net?
    # - allows for easier runtime-level filtering of "actors by
    #   service name"
)


# TODO, maybe rename to `SocketAddress`?
class Address(Protocol):
    proto_key: ClassVar[str]
    unwrapped_type: ClassVar[UnwrappedAddress]

    # TODO, i feel like an `.is_bound()` is a better thing to
    # support?
    # Lke, what use does this have besides a noop and if it's not
    # valid why aren't we erroring on creation/use?
    @property
    def is_valid(self) -> bool:
        ...

    # TODO, maybe `.netns` is a better name?
    @property
    def namespace(self) -> tuple[str, int]|None:
        '''
        The if-available, OS-specific "network namespace" key.

        '''
        ...

    @property
    def bindspace(self) -> str:
        '''
        Deliver the socket address' "bindable space" from
        a `socket.socket.bind()` and thus from the perspective of
        specific transport protocol domain.

        I.e. for most (layer-4) network-socket protocols this is
        normally the ipv4/6 address, for UDS this is normally
        a filesystem (sub-directory).

        For (distributed) network protocols this is normally the routing
        layer's domain/(ip-)address, though it might also include a "network namespace"
        key different then the default.

        For local-host-only transports this is either an explicit
        namespace (with types defined by the OS: netns, Cgroup, IPC,
        pid, etc. on linux) or failing that the sub-directory in the
        filesys in which socket/shm files are located *under*.

        '''
        ...

    @classmethod
    def from_addr(cls, addr: UnwrappedAddress) -> Address:
        ...

    def unwrap(self) -> UnwrappedAddress:
        '''
        Deliver the underying minimum field set in
        a primitive python data type-structure.
        '''
        ...

    @classmethod
    def get_random(
        cls,
        current_actor: Actor,
        bindspace: str|None = None,
    ) -> Address:
        ...

    # TODO, this should be something like a `.get_def_registar_addr()`
    # or similar since,
    # - it should be a **host singleton** (not root/tree singleton)
    # - we **only need this value** when one isn't provided to the
    #   runtime at boot and we want to implicitly provide a host-wide
    #   registrar.
    # - each rooted-actor-tree should likely have its own
    #   micro-registry (likely the root being it), also see
    @classmethod
    def get_root(cls) -> Address:
        ...

    def __repr__(self) -> str:
        ...

    def __eq__(self, other) -> bool:
        ...

    async def open_listener(
        self,
        **kwargs,
    ) -> SocketListener:
        ...

    async def close_listener(self):
        ...


_address_types: bidict[str, Type[Address]] = {
    'tcp': TCPAddress,
    'uds': UDSAddress
}


# TODO! really these are discovery sys default addrs ONLY useful for
# when none is provided to a root actor on first boot.
_default_lo_addrs: dict[
    str,
    UnwrappedAddress
] = {
    'tcp': TCPAddress.get_root().unwrap(),
    'uds': UDSAddress.get_root().unwrap(),
}


def get_address_cls(name: str) -> Type[Address]:
    return _address_types[name]


def is_wrapped_addr(addr: any) -> bool:
    return type(addr) in _address_types.values()


def mk_uuid() -> str:
    '''
    Encapsulate creation of a uuid4 as `str` as used
    for creating `Actor.uid: tuple[str, str]` and/or
    `.msg.types.Aid`.

    '''
    return str(uuid4())


def wrap_address(
    addr: UnwrappedAddress
) -> Address:
    '''
    Wrap an `UnwrappedAddress` as an `Address`-type based
    on matching builtin python data-structures which we adhoc
    use for each.

    XXX NOTE, careful care must be placed to ensure
    `UnwrappedAddress` cases are **definitely unique** otherwise the
    wrong transport backend may be loaded and will break many
    low-level things in our runtime in a not-fun-to-debug way!

    XD

    '''
    if is_wrapped_addr(addr):
        return addr

    cls: Type|None = None
    # if 'sock' in addr[0]:
    #     import pdbp; pdbp.set_trace()
    match addr:

        # classic network socket-address as tuple/list
        case (
            (str(), int())
            |
            [str(), int()]
        ):
            cls = TCPAddress

        case (
            # (str()|Path(), str()|Path()),
            # ^TODO? uhh why doesn't this work!?

            (_, filename)
        ) if type(filename) is str:
            cls = UDSAddress

        # likely an unset UDS or TCP reg address as defaulted in
        # `_state._runtime_vars['_root_mailbox']`
        #
        # TODO? figure out when/if we even need this?
        case (
            None
            |
            [None, None]
        ):
            cls: Type[Address] = get_address_cls(_def_tpt_proto)
            addr: UnwrappedAddress = cls.get_root().unwrap()

        case _:
            # import pdbp; pdbp.set_trace()
            raise TypeError(
                f'Can not wrap unwrapped-address ??\n'
                f'type(addr): {type(addr)!r}\n'
                f'addr: {addr!r}\n'
            )

    return cls.from_addr(addr)


def default_lo_addrs(
    transports: list[str],
) -> list[Type[Address]]:
    '''
    Return the default, host-singleton, registry address
    for an input transport key set.

    '''
    return [
        _default_lo_addrs[transport]
        for transport in transports
    ]

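The shape-based dispatch in `wrap_address()` above summarizes as: a `(str, int)` pair selects TCP, a pair whose second field is a `str` selects UDS, and `None`-ish values fall back to the default transport's root address. A sketch (the concrete port/path values are invented):

from tractor._addr import wrap_address
from tractor.ipc._tcp import TCPAddress
from tractor.ipc._uds import UDSAddress

# (str, int) -> TCP; (_, str) -> UDS, per the `match` block above.
assert isinstance(wrap_address(('127.0.0.1', 1616)), TCPAddress)
assert isinstance(wrap_address(('/tmp/demo', 'actor.sock')), UDSAddress)
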
@ -31,12 +31,8 @@ def parse_uid(arg):
    return str(name), str(uuid)  # ensures str encoding

def parse_ipaddr(arg):
    try:
        return literal_eval(arg)

    except (ValueError, SyntaxError):
        # UDS: try to interpret as a straight up str
        return arg
    host, port = literal_eval(arg)
    return (str(host), int(port))


if __name__ == "__main__":

@ -50,8 +46,8 @@ if __name__ == "__main__":
    args = parser.parse_args()

    subactor = Actor(
        name=args.uid[0],
        uuid=args.uid[1],
        args.uid[0],
        uid=args.uid[1],
        loglevel=args.loglevel,
        spawn_method="trio"
    )

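The two `parse_ipaddr()` variants differ exactly in the UDS fallback; the main-branch behavior stands alone like so (test values invented):

from ast import literal_eval

def parse_ipaddr(arg):
    try:
        return literal_eval(arg)
    except (ValueError, SyntaxError):
        # UDS: fall back to the raw path string
        return arg

assert parse_ipaddr("('127.0.0.1', 1616)") == ('127.0.0.1', 1616)
assert parse_ipaddr('/tmp/actor.sock') == '/tmp/actor.sock'
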
@ -47,9 +47,6 @@ from functools import partial
import inspect
from pprint import pformat
import textwrap
from types import (
    UnionType,
)
from typing import (
    Any,
    AsyncGenerator,

@ -82,14 +79,13 @@ from .msg import (
    MsgType,
    NamespacePath,
    PayloadT,
    Return,
    Started,
    Stop,
    Yield,
    pretty_struct,
    _ops as msgops,
)
from .ipc import (
from ._ipc import (
    Channel,
)
from ._streaming import (

@ -105,7 +101,7 @@ from ._state import (
if TYPE_CHECKING:
    from ._portal import Portal
    from ._runtime import Actor
    from .ipc._transport import MsgTransport
    from ._ipc import MsgTransport
    from .devx._frame_stack import (
        CallerInfo,
    )

@ -246,13 +242,11 @@ class Context:
    # a drain loop?
    # _res_scope: trio.CancelScope|None = None

    _outcome_msg: Return|Error|ContextCancelled = Unresolved

    # on a clean exit there should be a final value
    # delivered from the far end "callee" task, so
    # this value is only set on one side.
    # _result: Any | int = None
    _result: PayloadT|Unresolved = Unresolved
    _result: Any|Unresolved = Unresolved

    # if the local "caller" task errors this value is always set
    # to the error that was captured in the

@ -366,7 +360,7 @@ class Context:
        # f' ---\n'
        f' |_ipc: {self.dst_maddr}\n'
        # f'   dst_maddr{ds}{self.dst_maddr}\n'
        f"   uid{ds}'{self.chan.aid}'\n"
        f"   uid{ds}'{self.chan.uid}'\n"
        f"   cid{ds}'{self.cid}'\n"
        # f' ---\n'
        f'\n'

@ -859,10 +853,19 @@ class Context:
    @property
    def dst_maddr(self) -> str:
        chan: Channel = self.chan
        dst_addr, dst_port = chan.raddr
        trans: MsgTransport = chan.transport
        # cid: str = self.cid
        # cid_head, cid_tail = cid[:6], cid[-6:]
        return trans.maddr
        return (
            f'/ipv4/{dst_addr}'
            f'/{trans.name_key}/{dst_port}'
            # f'/{self.chan.uid[0]}'
            # f'/{self.cid}'

            # f'/cid={cid_head}..{cid_tail}'
            # TODO: ? not use this ^ right ?
        )

    dmaddr = dst_maddr


@ -945,10 +948,10 @@ class Context:
        reminfo: str = (
            # ' =>\n'
            # f'Context.cancel() => {self.chan.uid}\n'
            f'\n'
            f'c)=> {self.chan.uid}\n'
            f'  |_[{self.dst_maddr}\n'
            f'    >>{self.repr_rpc}\n'
            # f'{self.chan.uid}\n'
            f'  |_ @{self.dst_maddr}\n'
            f'    >> {self.repr_rpc}\n'
            # f'    >> {self._nsf}() -> {codec}[dict]:\n\n'
            # TODO: pull msg-type from spec re #320
        )

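For a TCP peer both `dst_maddr` variants above render the same multiaddr-style string; the old branch just builds it by hand (example values only, with `name_key` matching `MsgpackTCPStream.name_key` further below):

dst_addr, dst_port = '127.0.0.1', 1616  # example peer
name_key = 'tcp'
maddr = f'/ipv4/{dst_addr}/{name_key}/{dst_port}'
assert maddr == '/ipv4/127.0.0.1/tcp/1616'
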
@ -1069,25 +1072,9 @@ class Context:
        |RemoteActorError  # stream overrun caused and ignored by us
    ):
        '''
        Maybe raise a remote error depending on the type of error and
        *who*, i.e. which side of the task pair across actors,
        requested a cancellation (if any).

        Depending on the input config-params suppress raising
        certain remote excs:

        - if `remote_error: ContextCancelled` (ctxc) AND this side's
          task is the "requester", it at somem point called
          `Context.cancel()`, then the peer's ctxc is treated
          as a "cancel ack".

         |_ this behaves exactly like how `trio.Nursery.cancel_scope`
            absorbs any `BaseExceptionGroup[trio.Cancelled]` wherein the
            owning parent task never will raise a `trio.Cancelled`
            if `CancelScope.cancel_called == True`.

        - `remote_error: StreamOverrrun` (overrun) AND
           `raise_overrun_from_self` is set.
        Maybe raise a remote error depending on the type of error
        and *who* (i.e. which task from which actor) requested
        a cancellation (if any).

        '''
        __tracebackhide__: bool = hide_tb

@ -1129,19 +1116,18 @@ class Context:
            # for this ^, NO right?

        ) or (
            # NOTE: whenever this side is the cause of an
            # overrun on the peer side, i.e. we sent msgs too
            # fast and the peer task was overrun according
            # to `MsgStream` buffer settings, AND this was
            # called with `raise_overrun_from_self=True` (the
            # default), silently absorb any `StreamOverrun`.
            #
            # XXX, this is namely useful for supressing such faults
            # during cancellation/error/final-result handling inside
            # `.msg._ops.drain_to_final_msg()` such that we do not
            # raise during a cancellation-request, i.e. when
            # NOTE: whenever this context is the cause of an
            # overrun on the remote side (aka we sent msgs too
            # fast that the remote task was overrun according
            # to `MsgStream` buffer settings) AND the caller
            # has requested to not raise overruns this side
            # caused, we also silently absorb any remotely
            # boxed `StreamOverrun`. This is mostly useful for
            # supressing such faults during
            # cancellation/error/final-result handling inside
            # `msg._ops.drain_to_final_msg()` such that we do not
            # raise such errors particularly in the case where
            # `._cancel_called == True`.
            #
            not raise_overrun_from_self
            and isinstance(remote_error, RemoteActorError)
            and remote_error.boxed_type is StreamOverrun

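Both comment styles above describe the same guard; as a standalone predicate (a restatement, not code from either branch) it reads:

from tractor._exceptions import (
    RemoteActorError,
    StreamOverrun,
)

def should_absorb_overrun(
    remote_error: BaseException,
    raise_overrun_from_self: bool,
) -> bool:
    # mirrors the overrun branch of the `or`-expression above
    return (
        not raise_overrun_from_self
        and isinstance(remote_error, RemoteActorError)
        and remote_error.boxed_type is StreamOverrun
    )
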
@ -1210,11 +1196,9 @@ class Context:

        '''
        __tracebackhide__: bool = hide_tb
        if not self._portal:
            raise RuntimeError(
                'Invalid usage of `Context.wait_for_result()`!\n'
                'Not valid on child-side IPC ctx!\n'
            )
        assert self._portal, (
            '`Context.wait_for_result()` can not be called from callee side!'
        )
        if self._final_result_is_set():
            return self._result


@ -1235,8 +1219,6 @@ class Context:
        # since every message should be delivered via the normal
        # `._deliver_msg()` route which will appropriately set
        # any `.maybe_error`.
        outcome_msg: Return|Error|ContextCancelled
        drained_msgs: list[MsgType]
        (
            outcome_msg,
            drained_msgs,

@ -1244,19 +1226,11 @@ class Context:
            ctx=self,
            hide_tb=hide_tb,
        )

        drained_status: str = (
            'Ctx drained to final outcome msg\n\n'
            f'{outcome_msg}\n'
        )

        # ?XXX, should already be set in `._deliver_msg()` right?
        if self._outcome_msg is not Unresolved:
            # from .devx import _debug
            # await _debug.pause()
            assert self._outcome_msg is outcome_msg
        else:
            self._outcome_msg = outcome_msg

        if drained_msgs:
            drained_status += (
                '\n'

@ -1764,6 +1738,7 @@ class Context:

                f'{structfmt(msg)}\n'
            )

            # NOTE: if an error is deteced we should always still
            # send it through the feeder-mem-chan and expect
            # it to be raised by any context (stream) consumer

@ -1775,21 +1750,6 @@ class Context:
            # normally the task that should get cancelled/error
            # from some remote fault!
            send_chan.send_nowait(msg)
            match msg:
                case Stop():
                    if (stream := self._stream):
                        stream._stop_msg = msg

                case Return():
                    if not self._outcome_msg:
                        log.warning(
                            f'Setting final outcome msg AFTER '
                            f'`._rx_chan.send()`??\n'
                            f'\n'
                            f'{msg}'
                        )
                        self._outcome_msg = msg

            return True

        except trio.BrokenResourceError:

@ -2046,7 +2006,7 @@ async def open_context_from_portal(
    # the dialog, the `Error` msg should be raised from the `msg`
    # handling block below.
    try:
        started_msg, first = await ctx._pld_rx.recv_msg(
        started_msg, first = await ctx._pld_rx.recv_msg_w_pld(
            ipc=ctx,
            expect_msg=Started,
            passthrough_non_pld_msgs=False,

@ -2411,8 +2371,7 @@ async def open_context_from_portal(
            # displaying `ContextCancelled` traces where the
            # cause of crash/exit IS due to something in
            # user/app code on either end of the context.
            and
            not rxchan._closed
            and not rxchan._closed
        ):
            # XXX NOTE XXX: and again as per above, we mask any
            # `trio.Cancelled` raised here so as to NOT mask

@ -2471,7 +2430,6 @@ async def open_context_from_portal(
        # FINALLY, remove the context from runtime tracking and
        # exit!
        log.runtime(
        # log.cancel(
            f'De-allocating IPC ctx opened with {ctx.side!r} peer \n'
            f'uid: {uid}\n'
            f'cid: {ctx.cid}\n'

@ -2527,6 +2485,7 @@ def mk_context(
        _caller_info=caller_info,
        **kwargs,
    )
    pld_rx._ctx = ctx
    ctx._result = Unresolved
    return ctx


@ -2589,14 +2548,7 @@ def context(
    name: str
    param: Type
    for name, param in annots.items():
        if (
            param is Context
            or (
                isinstance(param, UnionType)
                and
                Context in param.__args__
            )
        ):
        if param is Context:
            ctx_var_name: str = name
            break
    else:

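The annotation scan in `context()` above is what lets endpoints name their ctx arg freely; the main branch additionally accepts a union annotation containing `Context`. A minimal endpoint both branches accept (the same pattern used throughout the test files earlier in this diff):

import tractor

@tractor.context
async def echo(
    ctx: tractor.Context,  # found by the `param is Context` check
    msg: str,
) -> None:
    await ctx.started(msg)
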
@ -29,12 +29,7 @@ from contextlib import asynccontextmanager as acm

from tractor.log import get_logger
from .trionics import gather_contexts
from .ipc import _connect_chan, Channel
from ._addr import (
    UnwrappedAddress,
    Address,
    wrap_address
)
from ._ipc import _connect_chan, Channel
from ._portal import (
    Portal,
    open_portal,

@ -43,7 +38,6 @@ from ._portal import (
from ._state import (
    current_actor,
    _runtime_vars,
    _def_tpt_proto,
)

if TYPE_CHECKING:

@ -55,7 +49,9 @@ log = get_logger(__name__)

@acm
async def get_registry(
    addr: UnwrappedAddress|None = None,
    host: str,
    port: int,

) -> AsyncGenerator[
    Portal | LocalPortal | None,
    None,

@ -73,15 +69,13 @@ async def get_registry(
        # (likely a re-entrant call from the arbiter actor)
        yield LocalPortal(
            actor,
            Channel(transport=None)
            # ^XXX, we DO NOT actually provide nor connect an
            # underlying transport since this is merely an API shim.
            Channel((host, port))
        )
    else:
        # TODO: try to look pre-existing connection from
        # `Server._peers` and use it instead?
        # `Actor._peers` and use it instead?
        async with (
            _connect_chan(addr) as chan,
            _connect_chan(host, port) as chan,
            open_portal(chan) as regstr_ptl,
        ):
            yield regstr_ptl

@ -95,10 +89,11 @@ async def get_root(

    # TODO: rename mailbox to `_root_maddr` when we finally
    # add and impl libp2p multi-addrs?
    addr = _runtime_vars['_root_mailbox']
    host, port = _runtime_vars['_root_mailbox']
    assert host is not None

    async with (
        _connect_chan(addr) as chan,
        _connect_chan(host, port) as chan,
        open_portal(chan, **kwargs) as portal,
    ):
        yield portal

@ -111,23 +106,17 @@ def get_peer_by_name(
) -> list[Channel]|None:  # at least 1
    '''
    Scan for an existing connection (set) to a named actor
    and return any channels from `Server._peers: dict`.
    and return any channels from `Actor._peers`.

    This is an optimization method over querying the registrar for
    the same info.

    '''
    actor: Actor = current_actor()
    to_scan: dict[tuple, list[Channel]] = actor.ipc_server._peers.copy()

    # TODO: is this ever needed? creates a duplicate channel on actor._peers
    # when multiple find_actor calls are made to same actor from a single ctx
    # which causes actor exit to hang waiting forever on
    # `actor._no_more_peers.wait()` in `_runtime.async_main`

    # pchan: Channel|None = actor._parent_chan
    # if pchan and pchan.uid not in to_scan:
    #     to_scan[pchan.uid].append(pchan)
    to_scan: dict[tuple, list[Channel]] = actor._peers.copy()
    pchan: Channel|None = actor._parent_chan
    if pchan:
        to_scan[pchan.uid].append(pchan)

    for aid, chans in to_scan.items():
        _, peer_name = aid

@ -145,10 +134,10 @@ def get_peer_by_name(
@acm
async def query_actor(
    name: str,
    regaddr: UnwrappedAddress|None = None,
    regaddr: tuple[str, int]|None = None,

) -> AsyncGenerator[
    UnwrappedAddress|None,
    tuple[str, int]|None,
    None,
]:
    '''

@ -174,31 +163,31 @@ async def query_actor(
        return

    reg_portal: Portal
    regaddr: Address = wrap_address(regaddr) or actor.reg_addrs[0]
    async with get_registry(regaddr) as reg_portal:
    regaddr: tuple[str, int] = regaddr or actor.reg_addrs[0]
    async with get_registry(*regaddr) as reg_portal:
        # TODO: return portals to all available actors - for now
        # just the last one that registered
        addr: UnwrappedAddress = await reg_portal.run_from_ns(
        sockaddr: tuple[str, int] = await reg_portal.run_from_ns(
            'self',
            'find_actor',
            name=name,
        )
        yield addr
        yield sockaddr


@acm
async def maybe_open_portal(
    addr: UnwrappedAddress,
    addr: tuple[str, int],
    name: str,
):
    async with query_actor(
        name=name,
        regaddr=addr,
    ) as addr:
    ) as sockaddr:
        pass

    if addr:
        async with _connect_chan(addr) as chan:
    if sockaddr:
        async with _connect_chan(*sockaddr) as chan:
            async with open_portal(chan) as portal:
                yield portal
    else:

@ -208,8 +197,7 @@ async def maybe_open_portal(
@acm
async def find_actor(
    name: str,
    registry_addrs: list[UnwrappedAddress]|None = None,
    enable_transports: list[str] = [_def_tpt_proto],
    registry_addrs: list[tuple[str, int]]|None = None,

    only_first: bool = True,
    raise_on_none: bool = False,

@ -236,15 +224,15 @@ async def find_actor(
        # XXX NOTE: make sure to dynamically read the value on
        # every call since something may change it globally (eg.
        # like in our discovery test suite)!
        from ._addr import default_lo_addrs
        from . import _root
        registry_addrs = (
            _runtime_vars['_registry_addrs']
            or
            default_lo_addrs(enable_transports)
            _root._default_lo_addrs
        )

    maybe_portals: list[
        AsyncContextManager[UnwrappedAddress]
        AsyncContextManager[tuple[str, int]]
    ] = list(
        maybe_open_portal(
            addr=addr,

@ -286,7 +274,7 @@ async def find_actor(
@acm
async def wait_for_actor(
    name: str,
    registry_addr: UnwrappedAddress | None = None,
    registry_addr: tuple[str, int] | None = None,

) -> AsyncGenerator[Portal, None]:
    '''

@ -303,7 +291,7 @@ async def wait_for_actor(
        yield peer_portal
        return

    regaddr: UnwrappedAddress = (
    regaddr: tuple[str, int] = (
        registry_addr
        or
        actor.reg_addrs[0]

@ -311,8 +299,8 @@ async def wait_for_actor(
    # TODO: use `.trionics.gather_contexts()` like
    # above in `find_actor()` as well?
    reg_portal: Portal
    async with get_registry(regaddr) as reg_portal:
        addrs = await reg_portal.run_from_ns(
    async with get_registry(*regaddr) as reg_portal:
        sockaddrs = await reg_portal.run_from_ns(
            'self',
            'wait_for_actor',
            name=name,

@ -320,8 +308,8 @@ async def wait_for_actor(

    # get latest registered addr by default?
    # TODO: offer multi-portal yields in multi-homed case?
    addr: UnwrappedAddress = addrs[-1]
    sockaddr: tuple[str, int] = sockaddrs[-1]

    async with _connect_chan(addr) as chan:
    async with _connect_chan(*sockaddr) as chan:
        async with open_portal(chan) as portal:
            yield portal

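Typical call-side usage of the discovery helpers diffed above, which is the same against either branch's address flavor (the actor name is invented):

import trio
import tractor

async def main():
    # yields `None` when no actor by that name is registered
    async with tractor.find_actor('some_service') as portal:
        print(portal)

trio.run(main)
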
@ -37,7 +37,6 @@ from .log import (
from . import _state
from .devx import _debug
from .to_asyncio import run_as_asyncio_guest
from ._addr import UnwrappedAddress
from ._runtime import (
    async_main,
    Actor,

@ -53,10 +52,10 @@ log = get_logger(__name__)
def _mp_main(

    actor: Actor,
    accept_addrs: list[UnwrappedAddress],
    accept_addrs: list[tuple[str, int]],
    forkserver_info: tuple[Any, Any, Any, Any, Any],
    start_method: SpawnMethodKey,
    parent_addr: UnwrappedAddress | None = None,
    parent_addr: tuple[str, int] | None = None,
    infect_asyncio: bool = False,

) -> None:

@ -207,7 +206,7 @@ def nest_from_op(
def _trio_main(
    actor: Actor,
    *,
    parent_addr: UnwrappedAddress|None = None,
    parent_addr: tuple[str, int] | None = None,
    infect_asyncio: bool = False,

) -> None:

@ -23,6 +23,7 @@ import builtins
import importlib
from pprint import pformat
from pdb import bdb
import sys
from types import (
    TracebackType,
)

@ -64,29 +65,15 @@ if TYPE_CHECKING:
    from ._context import Context
    from .log import StackLevelAdapter
    from ._stream import MsgStream
    from .ipc import Channel
    from ._ipc import Channel

log = get_logger('tractor')

_this_mod = importlib.import_module(__name__)


class RuntimeFailure(RuntimeError):
    '''
    General `Actor`-runtime failure due to,

    - a bad runtime-env,
    - falied spawning (bad input to process),
    - API usage.

    '''


class ActorFailure(RuntimeFailure):
    '''
    `Actor` failed to boot before/after spawn

    '''
class ActorFailure(Exception):
    "General actor failure"


class InternalError(RuntimeError):

@ -139,12 +126,6 @@ class TrioTaskExited(Exception):
    '''


class DebugRequestError(RuntimeError):
    '''
    Failed to request stdio lock from root actor!

    '''

# NOTE: more or less should be close to these:
#   'boxed_type',
#   'src_type',

@ -210,8 +191,6 @@ def get_err_type(type_name: str) -> BaseException|None:
    ):
        return type_ref

    return None


def pack_from_raise(
    local_err: (

@ -453,13 +432,9 @@ class RemoteActorError(Exception):
        Error type boxed by last actor IPC hop.

        '''
        if (
            self._boxed_type is None
            and
            (ipc_msg := self._ipc_msg)
        ):
        if self._boxed_type is None:
            self._boxed_type = get_err_type(
                ipc_msg.boxed_type_str
                self._ipc_msg.boxed_type_str
            )

        return self._boxed_type

@ -542,6 +517,7 @@ class RemoteActorError(Exception):
            if val:
                _repr += f'{key}={val_str}{end_char}'


        return _repr

    def reprol(self) -> str:

@ -620,9 +596,56 @@ class RemoteActorError(Exception):
        the type name is already implicitly shown by python).

        '''
        header: str = ''
        body: str = ''
        message: str = ''

        # XXX when the currently raised exception is this instance,
        # we do not ever use the "type header" style repr.
        is_being_raised: bool = False
        if (
            (exc := sys.exception())
            and
            exc is self
        ):
            is_being_raised: bool = True

        with_type_header: bool = (
            with_type_header
            and
            not is_being_raised
        )

        # <RemoteActorError( .. )> style
        if with_type_header:
            header: str = f'<{type(self).__name__}('

        if message := self._message:

            # split off the first line so, if needed, it isn't
            # indented the same like the "boxed content" which
            # since there is no `.tb_str` is just the `.message`.
            lines: list[str] = message.splitlines()
            first: str = lines[0]
            message: str = message.removeprefix(first)

            # with a type-style header we,
            # - have no special message "first line" extraction/handling
            # - place the message a space in from the header:
            #  `MsgTypeError( <message> ..`
            #                 ^-here
            # - indent the `.message` inside the type body.
            if with_type_header:
                first = f' {first} )>'

            message: str = textwrap.indent(
                message,
                prefix=' '*2,
            )
            message: str = first + message

        # IFF there is an embedded traceback-str we always
        # draw the ascii-box around it.
        body: str = ''
        if tb_str := self.tb_str:
            fields: str = self._mk_fields_str(
                _body_fields

@ -643,15 +666,21 @@ class RemoteActorError(Exception):
                boxer_header=self.relay_uid,
            )

        # !TODO, it'd be nice to import these top level without
        # cycles!
        from tractor.devx.pformat import (
            pformat_exc,
        )
        return pformat_exc(
            exc=self,
            with_type_header=with_type_header,
            body=body,
        tail = ''
        if (
            with_type_header
            and not message
        ):
            tail: str = '>'

        return (
            header
            +
            message
            +
            f'{body}'
            +
            tail
        )

    __repr__ = pformat

@ -929,7 +958,7 @@ class StreamOverrun(
    '''


class TransportClosed(Exception):
class TransportClosed(trio.BrokenResourceError):
    '''
    IPC transport (protocol) connection was closed or broke and
    indicates that the wrapping communication `Channel` can no longer

@ -940,39 +969,24 @@ class TransportClosed(
        self,
        message: str,
        loglevel: str = 'transport',
        src_exc: Exception|None = None,
        cause: BaseException|None = None,
        raise_on_report: bool = False,

    ) -> None:
        self.message: str = message
        self._loglevel: str = loglevel
        self._loglevel = loglevel
        super().__init__(message)

        self._src_exc = src_exc
        # set the cause manually if not already set by python
        if (
            src_exc is not None
            and
            not self.__cause__
        ):
            self.__cause__ = src_exc
        if cause is not None:
            self.__cause__ = cause

        # flag to toggle whether the msg loop should raise
        # the exc in its `TransportClosed` handler block.
        self._raise_on_report = raise_on_report

    @property
    def src_exc(self) -> Exception:
        return (
            self.__cause__
            or
            self._src_exc
        )

    def report_n_maybe_raise(
        self,
        message: str|None = None,
        hide_tb: bool = True,

    ) -> None:
        '''

@ -980,10 +994,9 @@ class TransportClosed(
        for this error.

        '''
        __tracebackhide__: bool = hide_tb
        message: str = message or self.message
        # when a cause is set, slap it onto the log emission.
        if cause := self.src_exc:
        if cause := self.__cause__:
            cause_tb_str: str = ''.join(
                traceback.format_tb(cause.__traceback__)
            )

@ -992,86 +1005,13 @@ class TransportClosed(
                f'  {cause}\n'  # exc repr
            )

        getattr(
            log,
            self._loglevel
        )(message)
        getattr(log, self._loglevel)(message)

        # some errors we want to blow up from
        # inside the RPC msg loop
        if self._raise_on_report:
            raise self from cause

    @classmethod
    def repr_src_exc(
        self,
        src_exc: Exception|None = None,
    ) -> str:

        if src_exc is None:
            return '<unknown>'

        src_msg: tuple[str] = src_exc.args
        src_exc_repr: str = (
            f'{type(src_exc).__name__}[ {src_msg} ]'
        )
        return src_exc_repr

    def pformat(self) -> str:
        from tractor.devx.pformat import (
            pformat_exc,
        )
        return pformat_exc(
            exc=self,
        )

    # delegate to `str`-ified pformat
    __repr__ = pformat

    @classmethod
    def from_src_exc(
        cls,
        src_exc: (
            Exception|
            trio.ClosedResource|
            trio.BrokenResourceError
        ),
        message: str,
        body: str = '',
        **init_kws,
    ) -> TransportClosed:
        '''
        Convenience constructor for creation from an underlying
        `trio`-sourced async-resource/chan/stream error.

        Embeds the original `src_exc`'s repr within the
        `Exception.args` via a first-line-in-`.message`-put-in-header
        pre-processing and allows inserting additional content beyond
        the main message via a `body: str`.

        '''
        repr_src_exc: str = cls.repr_src_exc(
            src_exc,
        )
        next_line: str = f'  src_exc: {repr_src_exc}\n'
        if body:
            body: str = textwrap.indent(
                body,
                prefix=' '*2,
            )

        return TransportClosed(
            message=(
                message
                +
                next_line
                +
                body
            ),
            src_exc=src_exc,
            **init_kws,
        )


class NoResult(RuntimeError):
    "No final result is expected for this actor"

@ -1203,8 +1143,6 @@ def unpack_error(
    which is the responsibilitiy of the caller.

    '''
    # XXX, apparently we pass all sorts of msgs here?
    # kinda odd but seems like maybe they shouldn't be?
    if not isinstance(msg, Error):
        return None

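Per its docstring, the main branch's `TransportClosed.from_src_exc()` packs the source exception's repr into the message and chains the cause; a rough usage sketch (behavioral details inferred from the `__init__()` above, so treat as an assumption):

import trio
from tractor._exceptions import TransportClosed

try:
    raise trio.BrokenResourceError('Connection reset by peer')
except trio.BrokenResourceError as src:
    tc = TransportClosed.from_src_exc(
        src_exc=src,
        message='IPC transport already closed by peer\n',
    )
    # `.src_exc` resolves through the `__cause__` set in `__init__()`
    assert tc.src_exc is src
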
@ -0,0 +1,820 @@
|
|||
# tractor: structured concurrent "actors".
|
||||
# Copyright 2018-eternity Tyler Goodlet.
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Inter-process comms abstractions
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from collections.abc import (
|
||||
AsyncGenerator,
|
||||
AsyncIterator,
|
||||
)
|
||||
from contextlib import (
|
||||
asynccontextmanager as acm,
|
||||
contextmanager as cm,
|
||||
)
|
||||
import platform
|
||||
from pprint import pformat
|
||||
import struct
|
||||
import typing
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
runtime_checkable,
|
||||
Protocol,
|
||||
Type,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
import msgspec
|
||||
from tricycle import BufferedReceiveStream
|
||||
import trio
|
||||
|
||||
from tractor.log import get_logger
|
||||
from tractor._exceptions import (
|
||||
MsgTypeError,
|
||||
pack_from_raise,
|
||||
TransportClosed,
|
||||
_mk_send_mte,
|
||||
_mk_recv_mte,
|
||||
)
|
||||
from tractor.msg import (
|
||||
_ctxvar_MsgCodec,
|
||||
# _codec, XXX see `self._codec` sanity/debug checks
|
||||
MsgCodec,
|
||||
types as msgtypes,
|
||||
pretty_struct,
|
||||
)
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
_is_windows = platform.system() == 'Windows'
|
||||
|
||||
|
||||
def get_stream_addrs(
|
||||
stream: trio.SocketStream
|
||||
) -> tuple[
|
||||
tuple[str, int], # local
|
||||
tuple[str, int], # remote
|
||||
]:
|
||||
'''
|
||||
Return the `trio` streaming transport prot's socket-addrs for
|
||||
both the local and remote sides as a pair.
|
||||
|
||||
'''
|
||||
# rn, should both be IP sockets
|
||||
lsockname = stream.socket.getsockname()
|
||||
rsockname = stream.socket.getpeername()
|
||||
return (
|
||||
tuple(lsockname[:2]),
|
||||
tuple(rsockname[:2]),
|
||||
)
|
||||
|
||||
|
||||
# from tractor.msg.types import MsgType
|
||||
# ?TODO? this should be our `Union[*msgtypes.__spec__]` alias now right..?
|
||||
# => BLEH, except can't bc prots must inherit typevar or param-spec
|
||||
# vars..
|
||||
MsgType = TypeVar('MsgType')
|
||||
|
||||
|
||||
# TODO: break up this mod into a subpkg so we can start adding new
|
||||
# backends and move this type stuff into a dedicated file.. Bo
|
||||
#
|
||||
@runtime_checkable
|
||||
class MsgTransport(Protocol[MsgType]):
|
||||
#
|
||||
# ^-TODO-^ consider using a generic def and indexing with our
|
||||
# eventual msg definition/types?
|
||||
# - https://docs.python.org/3/library/typing.html#typing.Protocol
|
||||
|
||||
stream: trio.SocketStream
|
||||
drained: list[MsgType]
|
||||
|
||||
def __init__(self, stream: trio.SocketStream) -> None:
|
||||
...
|
||||
|
||||
# XXX: should this instead be called `.sendall()`?
|
||||
async def send(self, msg: MsgType) -> None:
|
||||
...
|
||||
|
||||
async def recv(self) -> MsgType:
|
||||
...
|
||||
|
||||
def __aiter__(self) -> MsgType:
|
||||
...
|
||||
|
||||
def connected(self) -> bool:
|
||||
...
|
||||
|
||||
# defining this sync otherwise it causes a mypy error because it
|
||||
# can't figure out it's a generator i guess?..?
|
||||
def drain(self) -> AsyncIterator[dict]:
|
||||
...
|
||||
|
||||
@property
|
||||
def laddr(self) -> tuple[str, int]:
|
||||
...
|
||||
|
||||
@property
|
||||
def raddr(self) -> tuple[str, int]:
|
||||
...
|
||||
|
||||
|
||||
# TODO: typing oddity.. not sure why we have to inherit here, but it
|
||||
# seems to be an issue with `get_msg_transport()` returning
|
||||
# a `Type[Protocol]`; probably should make a `mypy` issue?
|
||||
class MsgpackTCPStream(MsgTransport):
|
||||
'''
|
||||
A ``trio.SocketStream`` delivering ``msgpack`` formatted data
|
||||
using the ``msgspec`` codec lib.
|
||||
|
||||
'''
|
||||
layer_key: int = 4
|
||||
name_key: str = 'tcp'
|
||||
|
||||
# TODO: better naming for this?
|
||||
# -[ ] check how libp2p does naming for such things?
|
||||
codec_key: str = 'msgpack'
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
stream: trio.SocketStream,
|
||||
prefix_size: int = 4,
|
||||
|
||||
# XXX optionally provided codec pair for `msgspec`:
|
||||
# https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
|
||||
#
|
||||
# TODO: define this as a `Codec` struct which can be
|
||||
# overriden dynamically by the application/runtime?
|
||||
codec: tuple[
|
||||
Callable[[Any], Any]|None, # coder
|
||||
Callable[[type, Any], Any]|None, # decoder
|
||||
]|None = None,
|
||||
|
||||
) -> None:
|
||||
|
||||
self.stream = stream
|
||||
assert self.stream.socket
|
||||
|
||||
# should both be IP sockets
|
||||
self._laddr, self._raddr = get_stream_addrs(stream)
|
||||
|
||||
# create read loop instance
|
||||
self._aiter_pkts = self._iter_packets()
|
||||
        self._send_lock = trio.StrictFIFOLock()

        # public i guess?
        self.drained: list[dict] = []

        self.recv_stream = BufferedReceiveStream(
            transport_stream=stream
        )
        self.prefix_size = prefix_size

        # allow for custom IPC msg interchange format
        # dynamic override Bo
        self._task = trio.lowlevel.current_task()

        # XXX for ctxvar debug only!
        # self._codec: MsgCodec = (
        #     codec
        #     or
        #     _codec._ctxvar_MsgCodec.get()
        # )

    async def _iter_packets(self) -> AsyncGenerator[dict, None]:
        '''
        Yield `bytes`-blob decoded packets from the underlying TCP
        stream using the current task's `MsgCodec`.

        This is a streaming routine implemented as an async generator
        func (which was the original design, but could be changed?)
        and is allocated by a `.__call__()` inside `.__init__()` where
        it is assigned to the `._aiter_pkts` attr.

        '''
        decodes_failed: int = 0

        while True:
            try:
                header: bytes = await self.recv_stream.receive_exactly(4)
            except (
                ValueError,
                ConnectionResetError,

                # not sure entirely why we need this but without it we
                # seem to be getting racy failures here on
                # arbiter/registry name subs..
                trio.BrokenResourceError,

            ) as trans_err:

                loglevel = 'transport'
                match trans_err:
                    # case (
                    #     ConnectionResetError()
                    # ):
                    #     loglevel = 'transport'

                    # peer actor (graceful??) TCP EOF but `tricycle`
                    # seems to raise a 0-bytes-read?
                    case ValueError() if (
                        'unclean EOF' in trans_err.args[0]
                    ):
                        pass

                    # peer actor (task) prolly shutdown quickly due
                    # to cancellation
                    case trio.BrokenResourceError() if (
                        'Connection reset by peer' in trans_err.args[0]
                    ):
                        pass

                    # unless the disconnect condition falls under "a
                    # normal operation breakage" we usually console warn
                    # about it.
                    case _:
                        loglevel: str = 'warning'

                raise TransportClosed(
                    message=(
                        f'IPC transport already closed by peer\n'
                        f'x]> {type(trans_err)}\n'
                        f' |_{self}\n'
                    ),
                    loglevel=loglevel,
                ) from trans_err

            # XXX definitely can happen if transport is closed
            # manually by another `trio.lowlevel.Task` in the
            # same actor; we use this in some simulated fault
            # testing for ex, but generally should never happen
            # under normal operation!
            #
            # NOTE: as such we always re-raise this error from the
            # RPC msg loop!
            except trio.ClosedResourceError as closure_err:
                raise TransportClosed(
                    message=(
                        f'IPC transport already manually closed locally?\n'
                        f'x]> {type(closure_err)} \n'
                        f' |_{self}\n'
                    ),
                    loglevel='error',
                    raise_on_report=(
                        closure_err.args[0] == 'another task closed this fd'
                        or
                        closure_err.args[0] in ['another task closed this fd']
                    ),
                ) from closure_err

            # graceful TCP EOF disconnect
            if header == b'':
                raise TransportClosed(
                    message=(
                        f'IPC transport already gracefully closed\n'
                        f']>\n'
                        f' |_{self}\n'
                    ),
                    loglevel='transport',
                    # cause=???  # handy or no?
                )

            size: int
            size, = struct.unpack("<I", header)

            log.transport(f'received header {size}')  # type: ignore
            msg_bytes: bytes = await self.recv_stream.receive_exactly(size)

            log.transport(f"received {msg_bytes}")  # type: ignore
            try:
                # NOTE: lookup the `trio.Task.context`'s var for
                # the current `MsgCodec`.
                codec: MsgCodec = _ctxvar_MsgCodec.get()

                # XXX for ctxvar debug only!
                # if self._codec.pld_spec != codec.pld_spec:
                #     assert (
                #         task := trio.lowlevel.current_task()
                #     ) is not self._task
                #     self._task = task
                #     self._codec = codec
                #     log.runtime(
                #         f'Using new codec in {self}.recv()\n'
                #         f'codec: {self._codec}\n\n'
                #         f'msg_bytes: {msg_bytes}\n'
                #     )
                yield codec.decode(msg_bytes)

            # XXX NOTE: since the below error derives from
            # `DecodeError` we need to catch it specially
            # and always raise such that spec violations
            # are never allowed to be caught silently!
            except msgspec.ValidationError as verr:
                msgtyperr: MsgTypeError = _mk_recv_mte(
                    msg=msg_bytes,
                    codec=codec,
                    src_validation_error=verr,
                )
                # XXX deliver up to `Channel.recv()` where
                # a re-raise and `Error`-pack can inject the far
                # end actor `.uid`.
                yield msgtyperr

            except (
                msgspec.DecodeError,
                UnicodeDecodeError,
            ):
                if decodes_failed < 4:
                    # ignore decoding errors for now and assume they have to
                    # do with a channel drop - hope that receiving from the
                    # channel will raise an expected error and bubble up.
                    try:
                        msg_str: str|bytes = msg_bytes.decode()
                    except UnicodeDecodeError:
                        msg_str = msg_bytes

                    log.exception(
                        'Failed to decode msg?\n'
                        f'{codec}\n\n'
                        'Rxed bytes from wire:\n\n'
                        f'{msg_str!r}\n'
                    )
                    decodes_failed += 1
                else:
                    raise
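
    # A minimal illustrative sketch (not part of the runtime) of the
    # wire framing `._iter_packets()` decodes above: a 4-byte
    # little-endian length prefix followed by a msgpack-encoded body;
    # `payload` is just a stand-in name for any encoded msg-blob.
    #
    # >>> import struct, msgspec.msgpack
    # >>> payload: bytes = msgspec.msgpack.encode({'cmd': 'ping'})
    # >>> frame: bytes = struct.pack("<I", len(payload)) + payload
    # >>> size, = struct.unpack("<I", frame[:4])
    # >>> assert msgspec.msgpack.decode(frame[4:4 + size]) == {'cmd': 'ping'}
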
    async def send(
        self,
        msg: msgtypes.MsgType,

        strict_types: bool = True,
        hide_tb: bool = False,

    ) -> None:
        '''
        Send a msgpack encoded py-object-blob-as-msg over TCP.

        If `strict_types == True` then a `MsgTypeError` will be raised on any
        invalid msg type.

        '''
        __tracebackhide__: bool = hide_tb

        # XXX see `trio._sync.AsyncContextManagerMixin` for details
        # on the `.acquire()`/`.release()` sequencing..
        async with self._send_lock:

            # NOTE: lookup the `trio.Task.context`'s var for
            # the current `MsgCodec`.
            codec: MsgCodec = _ctxvar_MsgCodec.get()

            # XXX for ctxvar debug only!
            # if self._codec.pld_spec != codec.pld_spec:
            #     self._codec = codec
            #     log.runtime(
            #         f'Using new codec in {self}.send()\n'
            #         f'codec: {self._codec}\n\n'
            #         f'msg: {msg}\n'
            #     )

            if type(msg) not in msgtypes.__msg_types__:
                if strict_types:
                    raise _mk_send_mte(
                        msg,
                        codec=codec,
                    )
                else:
                    log.warning(
                        'Sending non-`Msg`-spec msg?\n\n'
                        f'{msg}\n'
                    )

            try:
                bytes_data: bytes = codec.encode(msg)
            except TypeError as _err:
                typerr = _err
                msgtyperr: MsgTypeError = _mk_send_mte(
                    msg,
                    codec=codec,
                    message=(
                        f'IPC-msg-spec violation in\n\n'
                        f'{pretty_struct.Struct.pformat(msg)}'
                    ),
                    src_type_error=typerr,
                )
                raise msgtyperr from typerr

            # supposedly the fastest says,
            # https://stackoverflow.com/a/54027962
            size: bytes = struct.pack("<I", len(bytes_data))
            return await self.stream.send_all(size + bytes_data)

        # ?TODO? does it help ever to dynamically show this
        # frame?
        # try:
        #     <the-above_code>
        # except BaseException as _err:
        #     err = _err
        #     if not isinstance(err, MsgTypeError):
        #         __tracebackhide__: bool = False
        #     raise

    @property
    def laddr(self) -> tuple[str, int]:
        return self._laddr

    @property
    def raddr(self) -> tuple[str, int]:
        return self._raddr

    async def recv(self) -> Any:
        return await self._aiter_pkts.asend(None)

    async def drain(self) -> AsyncIterator[dict]:
        '''
        Drain the stream's remaining messages sent from
        the far end until the connection is closed by
        the peer.

        '''
        try:
            async for msg in self._iter_packets():
                self.drained.append(msg)
        except TransportClosed:
            for msg in self.drained:
                yield msg

    def __aiter__(self):
        return self._aiter_pkts

    def connected(self) -> bool:
        return self.stream.socket.fileno() != -1

def get_msg_transport(

    key: tuple[str, str],

) -> Type[MsgTransport]:

    return {
        ('msgpack', 'tcp'): MsgpackTCPStream,
    }[key]

class Channel:
    '''
    An inter-process channel for communication between (remote) actors.

    Wraps a ``MsgStream``: transport + encoding IPC connection.

    Currently we only support ``trio.SocketStream`` for transport
    (aka TCP) and the ``msgpack`` interchange format via the ``msgspec``
    codec library.

    '''
    def __init__(

        self,
        destaddr: tuple[str, int]|None,

        msg_transport_type_key: tuple[str, str] = ('msgpack', 'tcp'),

        # TODO: optional reconnection support?
        # auto_reconnect: bool = False,
        # on_reconnect: typing.Callable[..., typing.Awaitable] = None,

    ) -> None:

        # self._recon_seq = on_reconnect
        # self._autorecon = auto_reconnect

        self._destaddr = destaddr
        self._transport_key = msg_transport_type_key

        # Either created in ``.connect()`` or passed in by
        # user in ``.from_stream()``.
        self._stream: trio.SocketStream|None = None
        self._transport: MsgTransport|None = None

        # set after handshake - always uid of far end
        self.uid: tuple[str, str]|None = None

        self._aiter_msgs = self._iter_msgs()
        self._exc: Exception|None = None  # set if far end actor errors
        self._closed: bool = False

        # flag set by ``Portal.cancel_actor()`` indicating remote
        # (possibly peer) cancellation of the far end actor
        # runtime.
        self._cancel_called: bool = False

    @property
    def msgstream(self) -> MsgTransport:
        log.info(
            '`Channel.msgstream` is an old name, use `._transport`'
        )
        return self._transport

    @property
    def transport(self) -> MsgTransport:
        return self._transport

    @classmethod
    def from_stream(
        cls,
        stream: trio.SocketStream,
        **kwargs,

    ) -> Channel:

        src, dst = get_stream_addrs(stream)
        chan = Channel(
            destaddr=dst,
            **kwargs,
        )

        # set immediately here from provided instance
        chan._stream: trio.SocketStream = stream
        chan.set_msg_transport(stream)
        return chan

    def set_msg_transport(
        self,
        stream: trio.SocketStream,
        type_key: tuple[str, str]|None = None,

        # XXX optionally provided codec pair for `msgspec`:
        # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
        codec: MsgCodec|None = None,

    ) -> MsgTransport:
        type_key = (
            type_key
            or
            self._transport_key
        )
        # get transport type, then
        self._transport = get_msg_transport(
            type_key
        # instantiate an instance of the msg-transport
        )(
            stream,
            codec=codec,
        )
        return self._transport

    @cm
    def apply_codec(
        self,
        codec: MsgCodec,

    ) -> None:
        '''
        Temporarily override the underlying IPC msg codec for
        dynamic enforcement of messaging schema.

        '''
        orig: MsgCodec = self._transport.codec
        try:
            self._transport.codec = codec
            yield
        finally:
            self._transport.codec = orig
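
    # An illustrative usage sketch of the above cm; `my_codec` is a
    # hypothetical stand-in for any constructed `MsgCodec` instance:
    #
    # >>> with chan.apply_codec(my_codec):
    # ...     await chan.send(msg)    # sent using the override
    # >>> # original codec is restored on exit
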
    # TODO: do a .src/.dst: str for maddrs?
    def __repr__(self) -> str:
        if not self._transport:
            return '<Channel with inactive transport?>'

        return repr(
            self._transport.stream.socket._sock
        ).replace(  # type: ignore
            "socket.socket",
            "Channel",
        )

    @property
    def laddr(self) -> tuple[str, int]|None:
        return self._transport.laddr if self._transport else None

    @property
    def raddr(self) -> tuple[str, int]|None:
        return self._transport.raddr if self._transport else None

    async def connect(
        self,
        destaddr: tuple[Any, ...] | None = None,
        **kwargs

    ) -> MsgTransport:

        if self.connected():
            raise RuntimeError("channel is already connected?")

        destaddr = destaddr or self._destaddr
        assert isinstance(destaddr, tuple)

        stream = await trio.open_tcp_stream(
            *destaddr,
            **kwargs
        )
        transport = self.set_msg_transport(stream)

        log.transport(
            f'Opened channel[{type(transport)}]: {self.laddr} -> {self.raddr}'
        )
        return transport
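
    # An illustrative usage sketch (hypothetical localhost addr);
    # entering the channel as an async cm calls `.connect()`:
    #
    # >>> chan = Channel(destaddr=('127.0.0.1', 1616))
    # >>> async with chan:
    # ...     await chan.send(msg)
    # ...     reply = await chan.recv()
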
    # TODO: something like,
    # `pdbp.hideframe_on(errors=[MsgTypeError])`
    # instead of the `try/except` hack we have rn..
    # seems like a pretty useful thing to have in general
    # along with being able to filter certain stack frame(s / sets)
    # possibly based on the current log-level?
    async def send(
        self,
        payload: Any,

        hide_tb: bool = False,

    ) -> None:
        '''
        Send a coded msg-blob over the transport.

        '''
        __tracebackhide__: bool = hide_tb
        try:
            log.transport(
                '=> send IPC msg:\n\n'
                f'{pformat(payload)}\n'
            )
            # assert self._transport  # but why typing?
            await self._transport.send(
                payload,
                hide_tb=hide_tb,
            )
        except BaseException as _err:
            err = _err  # bind for introspection
            if not isinstance(_err, MsgTypeError):
                # assert err
                __tracebackhide__: bool = False
            else:
                assert err.cid

            raise

    async def recv(self) -> Any:
        assert self._transport
        return await self._transport.recv()

        # TODO: auto-reconnect features like 0mq/nanomsg?
        # -[ ] implement it manually with nods to SC prot
        #      possibly on multiple transport backends?
        #  -> seems like that might be re-inventing scalability
        #     prots tho no?
        # try:
        #     return await self._transport.recv()
        # except trio.BrokenResourceError:
        #     if self._autorecon:
        #         await self._reconnect()
        #         return await self.recv()
        #     raise

    async def aclose(self) -> None:

        log.transport(
            f'Closing channel to {self.uid} '
            f'{self.laddr} -> {self.raddr}'
        )
        assert self._transport
        await self._transport.stream.aclose()
        self._closed = True

    async def __aenter__(self):
        await self.connect()
        return self

    async def __aexit__(self, *args):
        await self.aclose(*args)

    def __aiter__(self):
        return self._aiter_msgs

    # ?TODO? run any reconnection sequence?
    # -[ ] prolly should be impl-ed as deco-API?
    #
    # async def _reconnect(self) -> None:
    #     """Handle connection failures by polling until a reconnect can be
    #     established.
    #     """
    #     down = False
    #     while True:
    #         try:
    #             with trio.move_on_after(3) as cancel_scope:
    #                 await self.connect()
    #             cancelled = cancel_scope.cancelled_caught
    #             if cancelled:
    #                 log.transport(
    #                     "Reconnect timed out after 3 seconds, retrying...")
    #                 continue
    #             else:
    #                 log.transport("Stream connection re-established!")

    #                 # on_recon = self._recon_seq
    #                 # if on_recon:
    #                 #     await on_recon(self)

    #             break
    #         except (OSError, ConnectionRefusedError):
    #             if not down:
    #                 down = True
    #                 log.transport(
    #                     f"Connection to {self.raddr} went down, waiting"
    #                     " for re-establishment")
    #             await trio.sleep(1)

    async def _iter_msgs(
        self
    ) -> AsyncGenerator[Any, None]:
        '''
        Yield `MsgType` IPC msgs decoded and delivered from
        an underlying `MsgTransport` protocol.

        This is a streaming routine also implemented as an async-gen
        func (same as `MsgTransport._iter_pkts()`) which gets allocated by
        a `.__call__()` inside `.__init__()` where it is assigned to
        the `._aiter_msgs` attr.

        '''
        assert self._transport
        while True:
            try:
                async for msg in self._transport:
                    match msg:
                        # NOTE: if transport/interchange delivers
                        # a type error, we pack it with the far
                        # end peer `Actor.uid` and relay the
                        # `Error`-msg upward to the `._rpc` stack
                        # for normal RAE handling.
                        case MsgTypeError():
                            yield pack_from_raise(
                                local_err=msg,
                                cid=msg.cid,

                                # XXX we pack it here bc lower
                                # layers have no notion of an
                                # actor-id ;)
                                src_uid=self.uid,
                            )
                        case _:
                            yield msg

            except trio.BrokenResourceError:

                # if not self._autorecon:
                raise

            await self.aclose()

            # if self._autorecon:  # attempt reconnect
            #     await self._reconnect()
            #     continue

    def connected(self) -> bool:
        return self._transport.connected() if self._transport else False


@acm
async def _connect_chan(
    host: str,
    port: int

) -> typing.AsyncGenerator[Channel, None]:
    '''
    Create and connect a channel with disconnect on context manager
    teardown.

    '''
    chan = Channel((host, port))
    await chan.connect()
    yield chan
    with trio.CancelScope(shield=True):
        await chan.aclose()
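
# An illustrative usage sketch of the above helper (assuming a peer
# actor is listening on the default localhost registry port used
# elsewhere in this diff):
#
# >>> async with _connect_chan('127.0.0.1', 1616) as chan:
# ...     await chan.send(msg)
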

tractor/_portal.py

@ -43,7 +43,7 @@ from .trionics import maybe_open_nursery
from ._state import (
    current_actor,
)
from .ipc import Channel
from ._ipc import Channel
from .log import get_logger
from .msg import (
    # Error,

@ -52,8 +52,8 @@ from .msg import (
    Return,
)
from ._exceptions import (
    # unpack_error,
    NoResult,
    TransportClosed,
)
from ._context import (
    Context,

@ -107,10 +107,6 @@ class Portal:
        # point.
        self._expect_result_ctx: Context|None = None
        self._streams: set[MsgStream] = set()

        # TODO, this should be PRIVATE (and never used publicly)! since it's just
        # a cached ref to the local runtime instead of calling
        # `current_actor()` everywhere.. XD
        self.actor: Actor = current_actor()

    @property

@ -175,7 +171,7 @@ class Portal:
        # not expecting a "main" result
        if self._expect_result_ctx is None:
            log.warning(
                f"Portal for {self.channel.aid} not expecting a final"
                f"Portal for {self.channel.uid} not expecting a final"
                " result?\nresult() should only be called if subactor"
                " was spawned with `ActorNursery.run_in_actor()`")
            return NoResult

@ -188,7 +184,7 @@ class Portal:
            (
                self._final_result_msg,
                self._final_result_pld,
            ) = await self._expect_result_ctx._pld_rx.recv_msg(
            ) = await self._expect_result_ctx._pld_rx.recv_msg_w_pld(
                ipc=self._expect_result_ctx,
                expect_msg=Return,
            )

@ -222,7 +218,7 @@ class Portal:
        # IPC calls
        if self._streams:
            log.cancel(
                f"Cancelling all streams with {self.channel.aid}")
                f"Cancelling all streams with {self.channel.uid}")
            for stream in self._streams.copy():
                try:
                    await stream.aclose()

@ -267,7 +263,7 @@ class Portal:
            return False

        reminfo: str = (
            f'c)=> {self.channel.aid}\n'
            f'c)=> {self.channel.uid}\n'
            f' |_{chan}\n'
        )
        log.cancel(

@ -305,34 +301,14 @@ class Portal:
            return False

        except (
            # XXX, should never really get raised unless we aren't
            # wrapping them in the below type by mistake?
            #
            # Leaving the catch here for now until we're very sure
            # all the cases (for various tpt protos) have indeed been
            # re-wrapped ;p
            trio.ClosedResourceError,
            trio.BrokenResourceError,

            TransportClosed,
        ) as tpt_err:
            report: str = (
                f'IPC chan for actor already closed or broken?\n\n'
                f'{self.channel.aid}\n'
        ):
            log.debug(
                'IPC chan for actor already closed or broken?\n\n'
                f'{self.channel.uid}\n'
                f' |_{self.channel}\n'
            )
            match tpt_err:
                case TransportClosed():
                    log.debug(report)
                case _:
                    report += (
                        f'\n'
                        f'Unhandled low-level transport-closed/error during\n'
                        f'Portal.cancel_actor()` request?\n'
                        f'<{type(tpt_err).__name__}( {tpt_err} )>\n'
                    )
                    log.warning(report)

            return False

    # TODO: do we still need this for low level `Actor`-runtime

@ -528,12 +504,8 @@ class LocalPortal:
        return its result.

        '''
        obj = (
            self.actor
            if ns == 'self'
            else importlib.import_module(ns)
        )
        func: Callable = getattr(obj, func_name)
        obj = self.actor if ns == 'self' else importlib.import_module(ns)
        func = getattr(obj, func_name)
        return await func(**kwargs)


@ -571,18 +543,17 @@ async def open_portal(
        await channel.connect()
        was_connected = True

    if channel.aid is None:
        await channel._do_handshake(
            aid=actor.aid,
        )
    if channel.uid is None:
        await actor._do_handshake(channel)

    msg_loop_cs: trio.CancelScope|None = None
    if start_msg_loop:
        from . import _rpc
        from ._runtime import process_messages
        msg_loop_cs = await tn.start(
            partial(
                _rpc.process_messages,
                chan=channel,
                process_messages,
                actor,
                channel,
                # if the local task is cancelled we want to keep
                # the msg loop running until our block ends
                shield=True,

737 tractor/_root.py

@ -18,9 +18,7 @@
Root actor runtime ignition(s).

'''
from contextlib import (
    asynccontextmanager as acm,
)
from contextlib import asynccontextmanager as acm
from functools import partial
import importlib
import inspect

@ -28,10 +26,7 @@ import logging
import os
import signal
import sys
from typing import (
    Any,
    Callable,
)
from typing import Callable
import warnings


@ -48,111 +43,33 @@ from .devx import _debug
from . import _spawn
from . import _state
from . import log
from .ipc import (
    _connect_chan,
)
from ._addr import (
    Address,
    UnwrappedAddress,
    default_lo_addrs,
    mk_uuid,
    wrap_address,
)
from ._exceptions import (
    RuntimeFailure,
    is_multi_cancelled,
)
from ._ipc import _connect_chan
from ._exceptions import is_multi_cancelled


# set at startup and after forks
_default_host: str = '127.0.0.1'
_default_port: int = 1616

# default registry always on localhost
_default_lo_addrs: list[tuple[str, int]] = [(
    _default_host,
    _default_port,
)]


logger = log.get_logger('tractor')


# TODO: stick this in a `@acm` defined in `devx._debug`?
# -[ ] also maybe consider making this a `wrapt`-deco to
#      save an indent level?
#
@acm
async def maybe_block_bp(
    debug_mode: bool,
    maybe_enable_greenback: bool,
) -> bool:
    # Override the global debugger hook to make it play nice with
    # ``trio``, see much discussion in:
    # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018
    builtin_bp_handler: Callable = sys.breakpointhook
    orig_bp_path: str|None = os.environ.get(
        'PYTHONBREAKPOINT',
        None,
    )
    bp_blocked: bool
    if (
        debug_mode
        and maybe_enable_greenback
        and (
            maybe_mod := await _debug.maybe_init_greenback(
                raise_not_found=False,
            )
        )
    ):
        logger.info(
            f'Found `greenback` installed @ {maybe_mod}\n'
            'Enabling `tractor.pause_from_sync()` support!\n'
        )
        os.environ['PYTHONBREAKPOINT'] = (
            'tractor.devx._debug._sync_pause_from_builtin'
        )
        _state._runtime_vars['use_greenback'] = True
        bp_blocked = False

    else:
        # TODO: disable `breakpoint()` by default (without
        # `greenback`) since it will break any multi-actor
        # usage by a clobbered TTY's stdstreams!
        def block_bps(*args, **kwargs):
            raise RuntimeError(
                'Trying to use `breakpoint()` eh?\n\n'
                'Welp, `tractor` blocks `breakpoint()` built-in calls by default!\n'
                'If you need to use it please install `greenback` and set '
                '`debug_mode=True` when opening the runtime '
                '(either via `.open_nursery()` or `open_root_actor()`)\n'
            )

        sys.breakpointhook = block_bps
        # lol ok,
        # https://docs.python.org/3/library/sys.html#sys.breakpointhook
        os.environ['PYTHONBREAKPOINT'] = "0"
        bp_blocked = True

    try:
        yield bp_blocked
    finally:
        # restore any prior built-in `breakpoint()` hook state
        if builtin_bp_handler is not None:
            sys.breakpointhook = builtin_bp_handler

        if orig_bp_path is not None:
            os.environ['PYTHONBREAKPOINT'] = orig_bp_path

        else:
            # clear env back to having no entry
            os.environ.pop('PYTHONBREAKPOINT', None)

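# A tiny illustrative sketch (not project code) of the stdlib hook
# mechanics relied on above: `breakpoint()` dispatches through
# `sys.breakpointhook`, which the PYTHONBREAKPOINT env var can disable:
#
# >>> import sys
# >>> def noisy_hook(*args, **kwargs):
# ...     print('breakpoint() was called!')
# >>> sys.breakpointhook = noisy_hook
# >>> breakpoint()                 # prints instead of entering pdb
# >>> sys.breakpointhook = sys.__breakpointhook__  # restore default
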
@acm
async def open_root_actor(

    *,
    # defaults are above
    registry_addrs: list[UnwrappedAddress]|None = None,
    registry_addrs: list[tuple[str, int]]|None = None,

    # defaults are above
    arbiter_addr: tuple[UnwrappedAddress]|None = None,

    enable_transports: list[
        # TODO, this should eventually be the pairs as
        # defined by (codec, proto) as on `MsgTransport.
        _state.TransportProtocolKey,
    ]|None = None,
    arbiter_addr: tuple[str, int]|None = None,

    name: str|None = 'root',


@ -194,336 +111,350 @@ async def open_root_actor(
    Runtime init entry point for ``tractor``.

    '''
    # XXX NEVER allow nested actor-trees!
    if already_actor := _state.current_actor(err_on_no_runtime=False):
        rtvs: dict[str, Any] = _state._runtime_vars
        root_mailbox: list[str, int] = rtvs['_root_mailbox']
        registry_addrs: list[list[str, int]] = rtvs['_registry_addrs']
        raise RuntimeFailure(
            f'A current actor already exists !?\n'
            f'({already_actor}\n'
            f'\n'
            f'You can NOT open a second root actor from within '
            f'an existing tree and the current root of this '
            f'already exists !!\n'
            f'\n'
            f'_root_mailbox: {root_mailbox!r}\n'
            f'_registry_addrs: {registry_addrs!r}\n'
        )
    _debug.hide_runtime_frames()
    __tracebackhide__: bool = hide_tb

    async with maybe_block_bp(
        debug_mode=debug_mode,
        maybe_enable_greenback=maybe_enable_greenback,
    # TODO: stick this in a `@cm` defined in `devx._debug`?
    #
    # Override the global debugger hook to make it play nice with
    # ``trio``, see much discussion in:
    # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018
    builtin_bp_handler: Callable = sys.breakpointhook
    orig_bp_path: str|None = os.environ.get(
        'PYTHONBREAKPOINT',
        None,
    )
    if (
        debug_mode
        and maybe_enable_greenback
        and (
            maybe_mod := await _debug.maybe_init_greenback(
                raise_not_found=False,
            )
        )
    ):
        if enable_transports is None:
            enable_transports: list[str] = _state.current_ipc_protos()
        else:
            _state._runtime_vars['_enable_tpts'] = enable_transports
        logger.info(
            f'Found `greenback` installed @ {maybe_mod}\n'
            'Enabling `tractor.pause_from_sync()` support!\n'
        )
        os.environ['PYTHONBREAKPOINT'] = (
            'tractor.devx._debug._sync_pause_from_builtin'
        )
        _state._runtime_vars['use_greenback'] = True

        # TODO! support multi-tpts per actor!
        # Bo
        if not len(enable_transports) == 1:
    else:
        # TODO: disable `breakpoint()` by default (without
        # `greenback`) since it will break any multi-actor
        # usage by a clobbered TTY's stdstreams!
        def block_bps(*args, **kwargs):
            raise RuntimeError(
                f'No multi-tpt support yet!\n'
                f'enable_transports={enable_transports!r}\n'
                'Trying to use `breakpoint()` eh?\n\n'
                'Welp, `tractor` blocks `breakpoint()` built-in calls by default!\n'
                'If you need to use it please install `greenback` and set '
                '`debug_mode=True` when opening the runtime '
                '(either via `.open_nursery()` or `open_root_actor()`)\n'
            )

    _debug.hide_runtime_frames()
    __tracebackhide__: bool = hide_tb
        sys.breakpointhook = block_bps
        # lol ok,
        # https://docs.python.org/3/library/sys.html#sys.breakpointhook
        os.environ['PYTHONBREAKPOINT'] = "0"

    # attempt to retrieve ``trio``'s sigint handler and stash it
    # on our debugger lock state.
    _debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT)
    # attempt to retrieve ``trio``'s sigint handler and stash it
    # on our debugger lock state.
    _debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT)

    # mark top most level process as root actor
    _state._runtime_vars['_is_root'] = True
    # mark top most level process as root actor
    _state._runtime_vars['_is_root'] = True

    # caps based rpc list
    enable_modules = (
        enable_modules
        or
        []
    # caps based rpc list
    enable_modules = (
        enable_modules
        or
        []
    )

    if rpc_module_paths:
        warnings.warn(
            "`rpc_module_paths` is now deprecated, use "
            " `enable_modules` instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        enable_modules.extend(rpc_module_paths)

    if start_method is not None:
        _spawn.try_set_start_method(start_method)

    if arbiter_addr is not None:
        warnings.warn(
            '`arbiter_addr` is now deprecated\n'
            'Use `registry_addrs: list[tuple]` instead..',
            DeprecationWarning,
            stacklevel=2,
        )
        registry_addrs = [arbiter_addr]

    registry_addrs: list[tuple[str, int]] = (
        registry_addrs
        or
        _default_lo_addrs
    )
    assert registry_addrs

    loglevel = (
        loglevel
        or log._default_loglevel
    ).upper()

    if (
        debug_mode
        and _spawn._spawn_method == 'trio'
    ):
        _state._runtime_vars['_debug_mode'] = True

        # expose internal debug module to every actor allowing for
        # use of ``await tractor.pause()``
        enable_modules.append('tractor.devx._debug')

        # if debug mode gets enabled *at least* use that level of
        # logging for some informative console prompts.
        if (
            logging.getLevelName(
                # lul, need the upper case for the -> int map?
                # sweet "dynamic function behaviour" stdlib...
                loglevel,
            ) > logging.getLevelName('PDB')
        ):
            loglevel = 'PDB'


    elif debug_mode:
        raise RuntimeError(
            "Debug mode is only supported for the `trio` backend!"
        )

    if rpc_module_paths:
        warnings.warn(
            "`rpc_module_paths` is now deprecated, use "
            " `enable_modules` instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        enable_modules.extend(rpc_module_paths)
    assert loglevel
    _log = log.get_console_log(loglevel)
    assert _log

    if start_method is not None:
        _spawn.try_set_start_method(start_method)
    # TODO: factor this into `.devx._stackscope`!!
    if (
        debug_mode
        and
        enable_stack_on_sig
    ):
        from .devx._stackscope import enable_stack_on_sig
        enable_stack_on_sig()

    # TODO! remove this ASAP!
    if arbiter_addr is not None:
        warnings.warn(
            '`arbiter_addr` is now deprecated\n'
            'Use `registry_addrs: list[tuple]` instead..',
            DeprecationWarning,
            stacklevel=2,
        )
        registry_addrs = [arbiter_addr]
    # closed into below ping task-func
    ponged_addrs: list[tuple[str, int]] = []

    if not registry_addrs:
        registry_addrs: list[UnwrappedAddress] = default_lo_addrs(
            enable_transports
    async def ping_tpt_socket(
        addr: tuple[str, int],
        timeout: float = 1,
    ) -> None:
        '''
        Attempt temporary connection to see if a registry is
        listening at the requested address by a transport layer
        ping.

        If a connection can't be made quickly we assume no
        server is listening at that addr.

        '''
        try:
            # TODO: this connect-and-bail forces us to have to
            # carefully rewrap TCP 104-connection-reset errors as
            # EOF so as to avoid propagating cancel-causing errors
            # to the channel-msg loop machinery. Likely it would
            # be better to eventually have a "discovery" protocol
            # with basic handshake instead?
            with trio.move_on_after(timeout):
                async with _connect_chan(*addr):
                    ponged_addrs.append(addr)

        except OSError:
            # TODO: make this a "discovery" log level?
            logger.info(
                f'No actor registry found @ {addr}\n'
            )

    assert registry_addrs
    async with trio.open_nursery() as tn:
        for addr in registry_addrs:
            tn.start_soon(
                ping_tpt_socket,
                tuple(addr),  # TODO: just drop this requirement?
            )

    loglevel = (
        loglevel
        or log._default_loglevel
    ).upper()
    trans_bind_addrs: list[tuple[str, int]] = []

    if (
        debug_mode
        and _spawn._spawn_method == 'trio'
    ):
        _state._runtime_vars['_debug_mode'] = True

        # expose internal debug module to every actor allowing for
        # use of ``await tractor.pause()``
        enable_modules.append('tractor.devx._debug')

        # if debug mode gets enabled *at least* use that level of
        # logging for some informative console prompts.
        if (
            logging.getLevelName(
                # lul, need the upper case for the -> int map?
                # sweet "dynamic function behaviour" stdlib...
                loglevel,
            ) > logging.getLevelName('PDB')
        ):
            loglevel = 'PDB'


    elif debug_mode:
    # Create a new local root-actor instance which IS NOT THE
    # REGISTRAR
    if ponged_addrs:
        if ensure_registry:
            raise RuntimeError(
                "Debug mode is only supported for the `trio` backend!"
                f'Failed to open `{name}`@{ponged_addrs}: '
                'registry socket(s) already bound'
            )

    assert loglevel
    _log = log.get_console_log(loglevel)
    assert _log
        # we were able to connect to an arbiter
        logger.info(
            f'Registry(s) seem(s) to exist @ {ponged_addrs}'
        )

    # TODO: factor this into `.devx._stackscope`!!
        actor = Actor(
            name=name or 'anonymous',
            registry_addrs=ponged_addrs,
            loglevel=loglevel,
            enable_modules=enable_modules,
        )
        # DO NOT use the registry_addrs as the transport server
        # addrs for this new non-registrar, root-actor.
        for host, port in ponged_addrs:
            # NOTE: zero triggers dynamic OS port allocation
            trans_bind_addrs.append((host, 0))

    # Start this local actor as the "registrar", aka a regular
    # actor who manages the local registry of "mailboxes" of
    # other process-tree-local sub-actors.
    else:

        # NOTE that if the current actor IS THE REGISTRAR, the
        # following init steps are taken:
        # - the transport layer server is bound to each (host, port)
        #   pair defined in provided registry_addrs, or the default.
        trans_bind_addrs = registry_addrs

        # - it is normally desirable for any registrar to stay up
        #   indefinitely until either all registered (child/sub)
        #   actors are terminated (via SC supervision) or,
        #   a re-election process has taken place.
        # NOTE: all of ^ which is not implemented yet - see:
        # https://github.com/goodboy/tractor/issues/216
        # https://github.com/goodboy/tractor/pull/348
        # https://github.com/goodboy/tractor/issues/296

        actor = Arbiter(
            name or 'registrar',
            registry_addrs=registry_addrs,
            loglevel=loglevel,
            enable_modules=enable_modules,
        )
    # XXX, in case the root actor runtime was actually run from
    # `tractor.to_asyncio.run_as_asyncio_guest()` and NOT
    # `.trio.run()`.
    actor._infected_aio = _state._runtime_vars['_is_infected_aio']

    # Start up main task set via core actor-runtime nurseries.
    try:
        # assign process-local actor
        _state._current_actor = actor

        # start local channel-server and fake the portal API
        # NOTE: this won't block since we provide the nursery
        ml_addrs_str: str = '\n'.join(
            f'@{addr}' for addr in trans_bind_addrs
        )
        logger.info(
            f'Starting local {actor.uid} on the following transport addrs:\n'
            f'{ml_addrs_str}'
        )

        # start the actor runtime in a new task
        async with trio.open_nursery(
            strict_exception_groups=False,
            # ^XXX^ TODO? instead unpack any RAE as per "loose" style?
        ) as nursery:

            # ``_runtime.async_main()`` creates an internal nursery
            # and blocks here until any underlying actor(-process)
            # tree has terminated thereby conducting so called
            # "end-to-end" structured concurrency throughout an
            # entire hierarchical python sub-process set; all
            # "actor runtime" primitives are SC-compat and thus all
            # transitively spawned actors/processes must be as
            # well.
            await nursery.start(
                partial(
                    async_main,
                    actor,
                    accept_addrs=trans_bind_addrs,
                    parent_addr=None
                )
            )
            try:
                yield actor
            except (
                Exception,
                BaseExceptionGroup,
            ) as err:

                # TODO, in beginning to handle the subsubactor with
                # crashed grandparent cases..
                #
                # was_locked: bool = await _debug.maybe_wait_for_debugger(
                #     child_in_debug=True,
                # )
                # XXX NOTE XXX see equiv note inside
                # `._runtime.Actor._stream_handler()` where in the
                # non-root or root-that-opened-this-manually case we
                # wait for the local actor-nursery to exit before
                # exiting the transport channel handler.
                entered: bool = await _debug._maybe_enter_pm(
                    err,
                    api_frame=inspect.currentframe(),
                    debug_filter=debug_filter,
                )

                if (
                    not entered
                    and
                    not is_multi_cancelled(
                        err,
                    )
                ):
                    logger.exception('Root actor crashed\n')

                # ALWAYS re-raise any error bubbled up from the
                # runtime!
                raise

            finally:
                # NOTE: not sure if we'll ever need this but it's
                # possibly better for even more determinism?
                # logger.cancel(
                #     f'Waiting on {len(nurseries)} nurseries in root..')
                # nurseries = actor._actoruid2nursery.values()
                # async with trio.open_nursery() as tempn:
                #     for an in nurseries:
                #         tempn.start_soon(an.exited.wait)

                logger.info(
                    'Closing down root actor'
                )
                await actor.cancel(None)  # self cancel
    finally:
        _state._current_actor = None
        _state._last_actor_terminated = actor

        # restore built-in `breakpoint()` hook state
        if (
            debug_mode
            and
            enable_stack_on_sig
            maybe_enable_greenback
        ):
            from .devx._stackscope import enable_stack_on_sig
            enable_stack_on_sig()
            if builtin_bp_handler is not None:
                sys.breakpointhook = builtin_bp_handler

    # closed into below ping task-func
    ponged_addrs: list[UnwrappedAddress] = []
            if orig_bp_path is not None:
                os.environ['PYTHONBREAKPOINT'] = orig_bp_path

    async def ping_tpt_socket(
        addr: UnwrappedAddress,
        timeout: float = 1,
    ) -> None:
        '''
        Attempt temporary connection to see if a registry is
        listening at the requested address by a transport layer
        ping.
            else:
                # clear env back to having no entry
                os.environ.pop('PYTHONBREAKPOINT', None)

        If a connection can't be made quickly we assume no
        server is listening at that addr.

        '''
        try:
            # TODO: this connect-and-bail forces us to have to
            # carefully rewrap TCP 104-connection-reset errors as
            # EOF so as to avoid propagating cancel-causing errors
            # to the channel-msg loop machinery. Likely it would
            # be better to eventually have a "discovery" protocol
            # with basic handshake instead?
            with trio.move_on_after(timeout):
                async with _connect_chan(addr):
                    ponged_addrs.append(addr)

        except OSError:
            # TODO: make this a "discovery" log level?
            logger.info(
                f'No actor registry found @ {addr}\n'
            )

    async with trio.open_nursery() as tn:
        for addr in registry_addrs:
            tn.start_soon(
                ping_tpt_socket,
                addr,
            )

    trans_bind_addrs: list[UnwrappedAddress] = []

    # Create a new local root-actor instance which IS NOT THE
    # REGISTRAR
    if ponged_addrs:
        if ensure_registry:
            raise RuntimeError(
                f'Failed to open `{name}`@{ponged_addrs}: '
                'registry socket(s) already bound'
            )

        # we were able to connect to an arbiter
        logger.info(
            f'Registry(s) seem(s) to exist @ {ponged_addrs}'
        )

        actor = Actor(
            name=name or 'anonymous',
            uuid=mk_uuid(),
            registry_addrs=ponged_addrs,
            loglevel=loglevel,
            enable_modules=enable_modules,
        )
        # DO NOT use the registry_addrs as the transport server
        # addrs for this new non-registrar, root-actor.
        for addr in ponged_addrs:
            waddr: Address = wrap_address(addr)
            trans_bind_addrs.append(
                waddr.get_random(bindspace=waddr.bindspace)
            )

    # Start this local actor as the "registrar", aka a regular
    # actor who manages the local registry of "mailboxes" of
    # other process-tree-local sub-actors.
    else:

        # NOTE that if the current actor IS THE REGISTRAR, the
        # following init steps are taken:
        # - the transport layer server is bound to each addr
        #   pair defined in provided registry_addrs, or the default.
        trans_bind_addrs = registry_addrs

        # - it is normally desirable for any registrar to stay up
        #   indefinitely until either all registered (child/sub)
        #   actors are terminated (via SC supervision) or,
        #   a re-election process has taken place.
        # NOTE: all of ^ which is not implemented yet - see:
        # https://github.com/goodboy/tractor/issues/216
        # https://github.com/goodboy/tractor/pull/348
        # https://github.com/goodboy/tractor/issues/296

        actor = Arbiter(
            name=name or 'registrar',
            uuid=mk_uuid(),
            registry_addrs=registry_addrs,
            loglevel=loglevel,
            enable_modules=enable_modules,
        )
        # XXX, in case the root actor runtime was actually run from
        # `tractor.to_asyncio.run_as_asyncio_guest()` and NOT
        # `.trio.run()`.
        actor._infected_aio = _state._runtime_vars['_is_infected_aio']

        # Start up main task set via core actor-runtime nurseries.
        try:
            # assign process-local actor
            _state._current_actor = actor

            # start local channel-server and fake the portal API
            # NOTE: this won't block since we provide the nursery
            ml_addrs_str: str = '\n'.join(
                f'@{addr}' for addr in trans_bind_addrs
            )
            logger.info(
                f'Starting local {actor.uid} on the following transport addrs:\n'
                f'{ml_addrs_str}'
            )

            # start the actor runtime in a new task
            async with trio.open_nursery(
                strict_exception_groups=False,
                # ^XXX^ TODO? instead unpack any RAE as per "loose" style?
            ) as nursery:

                # ``_runtime.async_main()`` creates an internal nursery
                # and blocks here until any underlying actor(-process)
                # tree has terminated thereby conducting so called
                # "end-to-end" structured concurrency throughout an
                # entire hierarchical python sub-process set; all
                # "actor runtime" primitives are SC-compat and thus all
                # transitively spawned actors/processes must be as
                # well.
                await nursery.start(
                    partial(
                        async_main,
                        actor,
                        accept_addrs=trans_bind_addrs,
                        parent_addr=None
                    )
                )
                try:
                    yield actor
                except (
                    Exception,
                    BaseExceptionGroup,
                ) as err:

                    # TODO, in beginning to handle the subsubactor with
                    # crashed grandparent cases..
                    #
                    # was_locked: bool = await _debug.maybe_wait_for_debugger(
                    #     child_in_debug=True,
                    # )
                    # XXX NOTE XXX see equiv note inside
                    # `._runtime.Actor._stream_handler()` where in the
                    # non-root or root-that-opened-this-manually case we
                    # wait for the local actor-nursery to exit before
                    # exiting the transport channel handler.
                    entered: bool = await _debug._maybe_enter_pm(
                        err,
                        api_frame=inspect.currentframe(),
                        debug_filter=debug_filter,
                    )

                    if (
                        not entered
                        and
                        not is_multi_cancelled(
                            err,
                        )
                    ):
                        logger.exception(
                            'Root actor crashed\n'
                            f'>x)\n'
                            f' |_{actor}\n'
                        )

                    # ALWAYS re-raise any error bubbled up from the
                    # runtime!
                    raise

                finally:
                    # NOTE: not sure if we'll ever need this but it's
                    # possibly better for even more determinism?
                    # logger.cancel(
                    #     f'Waiting on {len(nurseries)} nurseries in root..')
                    # nurseries = actor._actoruid2nursery.values()
                    # async with trio.open_nursery() as tempn:
                    #     for an in nurseries:
                    #         tempn.start_soon(an.exited.wait)

                    logger.info(
                        f'Closing down root actor\n'
                        f'>)\n'
                        f'|_{actor}\n'
                    )
                    await actor.cancel(None)  # self cancel
        finally:
            _state._current_actor = None
            _state._last_actor_terminated = actor
            logger.runtime(
                f'Root actor terminated\n'
                f')>\n'
                f' |_{actor}\n'
            )
    logger.runtime("Root actor terminated")


def run_daemon(

@ -531,7 +462,7 @@ def run_daemon(

    # runtime kwargs
    name: str | None = 'root',
    registry_addrs: list[UnwrappedAddress]|None = None,
    registry_addrs: list[tuple[str, int]] = _default_lo_addrs,

    start_method: str | None = None,
    debug_mode: bool = False,

|
|||
TaskStatus,
|
||||
)
|
||||
|
||||
from .ipc import Channel
|
||||
from ._ipc import Channel
|
||||
from ._context import (
|
||||
Context,
|
||||
)
|
||||
|
@ -649,10 +649,6 @@ async def _invoke(
|
|||
)
|
||||
# set and shuttle final result to "parent"-side task.
|
||||
ctx._result = res
|
||||
log.runtime(
|
||||
f'Sending result msg and exiting {ctx.side!r}\n'
|
||||
f'{return_msg}\n'
|
||||
)
|
||||
await chan.send(return_msg)
|
||||
|
||||
# NOTE: this happens IFF `ctx._scope.cancel()` is
|
||||
|
@ -869,6 +865,7 @@ async def try_ship_error_to_remote(
|
|||
|
||||
|
||||
async def process_messages(
|
||||
actor: Actor,
|
||||
chan: Channel,
|
||||
shield: bool = False,
|
||||
task_status: TaskStatus[CancelScope] = trio.TASK_STATUS_IGNORED,
|
||||
|
@ -906,7 +903,6 @@ async def process_messages(
|
|||
(as utilized inside `Portal.cancel_actor()` ).
|
||||
|
||||
'''
|
||||
actor: Actor = _state.current_actor()
|
||||
assert actor._service_n # runtime state sanity
|
||||
|
||||
# TODO: once `trio` get's an "obvious way" for req/resp we
|
||||
|
@ -1156,7 +1152,7 @@ async def process_messages(
|
|||
trio.Event(),
|
||||
)
|
||||
|
||||
# XXX RUNTIME-SCOPED! remote (likely internal) error
|
||||
# runtime-scoped remote (internal) error
|
||||
# (^- bc no `Error.cid` -^)
|
||||
#
|
||||
# NOTE: this is the non-rpc error case, that
|
||||
|
@ -1219,10 +1215,8 @@ async def process_messages(
|
|||
# -[ ] figure out how this will break with other transports?
|
||||
tc.report_n_maybe_raise(
|
||||
message=(
|
||||
f'peer IPC channel closed abruptly?\n'
|
||||
f'\n'
|
||||
f'<=x[\n'
|
||||
f' {chan}\n'
|
||||
f'peer IPC channel closed abruptly?\n\n'
|
||||
f'<=x {chan}\n'
|
||||
f' |_{chan.raddr}\n\n'
|
||||
)
|
||||
+
|
||||
|
|
1102 tractor/_runtime.py
File diff suppressed because it is too large.

tractor/_spawn.py

@ -46,23 +46,19 @@ from tractor._state import (
    _runtime_vars,
)
from tractor.log import get_logger
from tractor._addr import UnwrappedAddress
from tractor._portal import Portal
from tractor._runtime import Actor
from tractor._entry import _mp_main
from tractor._exceptions import ActorFailure
from tractor.msg.types import (
    Aid,
    SpawnSpec,
)


if TYPE_CHECKING:
    from ipc import IPCServer
    from ._supervise import ActorNursery
ProcessType = TypeVar('ProcessType', mp.Process, trio.Process)


log = get_logger('tractor')

# placeholder for an mp start context if so using that backend

@ -167,7 +163,7 @@ async def exhaust_portal(
        # TODO: merge with above?
        log.warning(
            'Cancelled portal result waiter task:\n'
            f'uid: {portal.channel.aid}\n'
            f'uid: {portal.channel.uid}\n'
            f'error: {err}\n'
        )
        return err

@ -175,7 +171,7 @@ async def exhaust_portal(
    else:
        log.debug(
            f'Returning final result from portal:\n'
            f'uid: {portal.channel.aid}\n'
            f'uid: {portal.channel.uid}\n'
            f'result: {final}\n'
        )
        return final

@ -328,12 +324,12 @@ async def soft_kill(
    see `.hard_kill()`).

    '''
    peer_aid: Aid = portal.channel.aid
    uid: tuple[str, str] = portal.channel.uid
    try:
        log.cancel(
            f'Soft killing sub-actor via portal request\n'
            f'\n'
            f'(c=> {peer_aid}\n'
            f'(c=> {portal.chan.uid}\n'
            f' |_{proc}\n'
        )
        # wait on sub-proc to signal termination

@ -382,7 +378,7 @@ async def soft_kill(
    if proc.poll() is None:  # type: ignore
        log.warning(
            'Subactor still alive after cancel request?\n\n'
            f'uid: {peer_aid}\n'
            f'uid: {uid}\n'
            f'|_{proc}\n'
        )
        n.cancel_scope.cancel()

@ -396,15 +392,14 @@ async def new_proc(
    errors: dict[tuple[str, str], Exception],

    # passed through to actor main
    bind_addrs: list[UnwrappedAddress],
    parent_addr: UnwrappedAddress,
    bind_addrs: list[tuple[str, int]],
    parent_addr: tuple[str, int],
    _runtime_vars: dict[str, Any],  # serialized and sent to _child

    *,

    infect_asyncio: bool = False,
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED,
    proc_kwargs: dict[str, any] = {}
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED

) -> None:

@ -424,7 +419,6 @@ async def new_proc(
        _runtime_vars,  # run time vars
        infect_asyncio=infect_asyncio,
        task_status=task_status,
        proc_kwargs=proc_kwargs
    )


@ -435,13 +429,12 @@ async def trio_proc(
    errors: dict[tuple[str, str], Exception],

    # passed through to actor main
    bind_addrs: list[UnwrappedAddress],
    parent_addr: UnwrappedAddress,
    bind_addrs: list[tuple[str, int]],
    parent_addr: tuple[str, int],
    _runtime_vars: dict[str, Any],  # serialized and sent to _child
    *,
    infect_asyncio: bool = False,
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED,
    proc_kwargs: dict[str, any] = {}
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED

) -> None:
    '''

@ -463,9 +456,6 @@ async def trio_proc(
        # the OS; it otherwise can be passed via the parent channel if
        # we prefer in the future (for privacy).
        "--uid",
        # TODO, how to pass this over "wire" encodings like
        # cmdline args?
        # -[ ] maybe we can add an `Aid.min_tuple()` ?
        str(subactor.uid),
        # Address the child must connect to on startup
        "--parent_addr",

@ -483,10 +473,9 @@ async def trio_proc(

    cancelled_during_spawn: bool = False
    proc: trio.Process|None = None
    ipc_server: IPCServer = actor_nursery._actor.ipc_server
    try:
        try:
            proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd, **proc_kwargs)
            proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd)
            log.runtime(
                'Started new child\n'
                f'|_{proc}\n'

@ -495,7 +484,7 @@ async def trio_proc(
            # wait for actor to spawn and connect back to us
            # channel should have handshake completed by the
            # local actor by the time we get a ref to it
            event, chan = await ipc_server.wait_for_peer(
            event, chan = await actor_nursery._actor.wait_for_peer(
                subactor.uid
            )


@ -528,15 +517,15 @@ async def trio_proc(

            # send a "spawning specification" which configures the
            # initial runtime state of the child.
            sspec = SpawnSpec(
                _parent_main_data=subactor._parent_main_data,
                enable_modules=subactor.enable_modules,
                reg_addrs=subactor.reg_addrs,
                bind_addrs=bind_addrs,
                _runtime_vars=_runtime_vars,
            await chan.send(
                SpawnSpec(
                    _parent_main_data=subactor._parent_main_data,
                    enable_modules=subactor.enable_modules,
                    reg_addrs=subactor.reg_addrs,
                    bind_addrs=bind_addrs,
                    _runtime_vars=_runtime_vars,
                )
            )
            log.runtime(f'Sending spawn spec: {str(sspec)}')
            await chan.send(sspec)

            # track subactor in current nursery
            curr_actor: Actor = current_actor()

@ -646,13 +635,12 @@ async def mp_proc(
    subactor: Actor,
    errors: dict[tuple[str, str], Exception],
    # passed through to actor main
    bind_addrs: list[UnwrappedAddress],
    parent_addr: UnwrappedAddress,
    bind_addrs: list[tuple[str, int]],
    parent_addr: tuple[str, int],
    _runtime_vars: dict[str, Any],  # serialized and sent to _child
    *,
    infect_asyncio: bool = False,
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED,
    proc_kwargs: dict[str, any] = {}
    task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED

) -> None:

@ -727,14 +715,12 @@ async def mp_proc(

    log.runtime(f"Started {proc}")

    ipc_server: IPCServer = actor_nursery._actor.ipc_server
    try:
        # wait for actor to spawn and connect back to us
        # channel should have handshake completed by the
        # local actor by the time we get a ref to it
        event, chan = await ipc_server.wait_for_peer(
            subactor.uid,
        )
        event, chan = await actor_nursery._actor.wait_for_peer(
            subactor.uid)

        # XXX: monkey patch poll API to match the ``subprocess`` API..
        # not sure why they don't expose this but kk.

tractor/_state.py

@ -14,19 +14,16 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Per actor-process runtime state mgmt APIs.
"""
Per process state

'''
"""
from __future__ import annotations
from contextvars import (
    ContextVar,
)
import os
from pathlib import Path
from typing import (
    Any,
    Literal,
    TYPE_CHECKING,
)


@ -37,28 +34,14 @@ if TYPE_CHECKING:
    from ._context import Context


# default IPC transport protocol settings
TransportProtocolKey = Literal[
    'tcp',
    'uds',
]
_def_tpt_proto: TransportProtocolKey = 'tcp'

_current_actor: Actor|None = None  # type: ignore # noqa
_last_actor_terminated: Actor|None = None

# TODO: mk this a `msgspec.Struct`!
_runtime_vars: dict[str, Any] = {
    '_debug_mode': False,
    # root of actor-process tree info
    '_is_root': False,  # bool
    '_root_mailbox': (None, None),  # tuple[str|None, str|None]
    '_root_addrs': [],  # tuple[str|None, str|None]

    # parent->child ipc protocol caps
    '_enable_tpts': [_def_tpt_proto],

    # registrar info
    '_is_root': False,
    '_root_mailbox': (None, None),
    '_registry_addrs': [],

    '_is_infected_aio': False,

@ -116,7 +99,7 @@ def current_actor(
    return _current_actor


def is_root_process() -> bool:
def is_main_process() -> bool:
    '''
    Bool determining if this actor is running in the top-most process.


@ -125,10 +108,8 @@ def is_root_process() -> bool:
    return mp.current_process().name == 'MainProcess'


is_main_process = is_root_process


def is_debug_mode() -> bool:
# TODO, more verby name?
def debug_mode() -> bool:
    '''
    Bool determining if "debug mode" is on which enables
    remote subactor pdb entry on crashes.

@ -137,9 +118,6 @@ def is_debug_mode() -> bool:
    return bool(_runtime_vars['_debug_mode'])


debug_mode = is_debug_mode


def is_root_process() -> bool:
    return _runtime_vars['_is_root']


@ -165,34 +143,3 @@ def current_ipc_ctx(
        f'|_{current_task()}\n'
    )
    return ctx


# std ODE (mutable) app state location
_rtdir: Path = Path(os.environ['XDG_RUNTIME_DIR'])


def get_rt_dir(
    subdir: str = 'tractor'
) -> Path:
    '''
    Return the user "runtime dir" where most userspace apps stick
    their IPC and cache related system util-files; we take hold
    of a `'XDG_RUNTIME_DIR'/tractor/` subdir by default.

    '''
    rtdir: Path = _rtdir / subdir
    if not rtdir.is_dir():
        rtdir.mkdir()
    return rtdir

def current_ipc_protos() -> list[str]:
|
||||
'''
|
||||
Return the list of IPC transport protocol keys currently
|
||||
in use by this actor.
|
||||
|
||||
The keys are as declared by `MsgTransport` and `Address`
|
||||
concrete-backend sub-types defined throughout `tractor.ipc`.
|
||||
|
||||
'''
|
||||
return _runtime_vars['_enable_tpts']
|
||||
|
|
|
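A short usage sketch (not part of the diff) of the runtime-dir helpers shown above; it assumes a Linux host with `XDG_RUNTIME_DIR` set, since the module-level `_rtdir` lookup requires it:

    from pathlib import Path
    from tractor._state import current_ipc_protos, get_rt_dir

    # created on first use under $XDG_RUNTIME_DIR/tractor/
    sock_dir: Path = get_rt_dir()
    print(sock_dir)              # e.g. /run/user/1000/tractor
    print(current_ipc_protos())  # -> ['tcp'] unless `_runtime_vars` was overridden
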
@ -45,18 +45,16 @@ from .trionics import (
    BroadcastReceiver,
)
from tractor.msg import (
    Error,
    Return,
    Stop,
    # Return,
    # Stop,
    MsgType,
    PayloadT,
    Yield,
)

if TYPE_CHECKING:
    from ._runtime import Actor
    from ._context import Context
    from .ipc import Channel
    from ._ipc import Channel


log = get_logger(__name__)

@ -72,7 +70,8 @@ class MsgStream(trio.abc.Channel):
    A bidirectional message stream for receiving logically sequenced
    values over an inter-actor IPC `Channel`.


    This is the type returned to a local task which entered either
    `Portal.open_stream_from()` or `Context.open_stream()`.

    Termination rules:

@ -95,9 +94,6 @@ class MsgStream(trio.abc.Channel):
        self._rx_chan = rx_chan
        self._broadcaster = _broadcaster

        # any actual IPC msg which is effectively an `EndOfStream`
        self._stop_msg: bool|Stop = False

        # flag to denote end of stream
        self._eoc: bool|trio.EndOfChannel = False
        self._closed: bool|trio.ClosedResourceError = False

@ -129,67 +125,16 @@ class MsgStream(trio.abc.Channel):
    def receive_nowait(
        self,
        expect_msg: MsgType = Yield,
    ) -> PayloadT:
    ):
        ctx: Context = self._ctx
        (
            msg,
            pld,
        ) = ctx._pld_rx.recv_msg_nowait(
        return ctx._pld_rx.recv_pld_nowait(
            ipc=self,
            expect_msg=expect_msg,
        )

        # ?TODO, maybe factor this into a hyper-common `unwrap_pld()`
        #
        match msg:

            # XXX, these never seem to ever hit? cool?
            case Stop():
                log.cancel(
                    f'Msg-stream was ended via stop msg\n'
                    f'{msg}'
                )
            case Error():
                log.error(
                    f'Msg-stream was ended via error msg\n'
                    f'{msg}'
                )

            # XXX NOTE, always set any final result on the ctx to
            # avoid teardown race conditions where previously this msg
            # would be consumed silently (by `.aclose()` doing its
            # own "msg drain loop" but WITHOUT those `drained: list[MsgType]`
            # being post-close-processed!
            #
            # !!TODO, see the equiv todo-comment in `.receive()`
            # around the `if drained:` where we should prolly
            # ACTUALLY be doing this post-close processing??
            #
            case Return(pld=pld):
                log.warning(
                    f'Msg-stream final result msg for IPC ctx?\n'
                    f'{msg}'
                )
                # XXX TODO, this **should be covered** by higher
                # scoped runtime-side method calls such as
                # `Context._deliver_msg()`, so you should never
                # really see the warning above or else something
                # racy/out-of-order is likely going on between
                # actor-runtime-side push tasks and the user-app-side
                # consume tasks!
                # -[ ] figure out that set of race cases and fix!
                # -[ ] possibly return the `msg` given an input
                #    arg-flag is set so we can process the `Return`
                #    from the `.aclose()` caller?
                #
                # breakpoint()  # to debug this RACE CASE!
                ctx._result = pld
                ctx._outcome_msg = msg

        return pld

    async def receive(
        self,

        hide_tb: bool = False,
    ):
        '''

@ -209,7 +154,7 @@ class MsgStream(trio.abc.Channel):
        # except trio.EndOfChannel:
        #     raise StopAsyncIteration
        #
        # see `.aclose()` for notes on the old behaviour prior to
        # see ``.aclose()`` for notes on the old behaviour prior to
        # introducing this
        if self._eoc:
            raise self._eoc

@ -220,11 +165,7 @@ class MsgStream(trio.abc.Channel):
        src_err: Exception|None = None  # orig tb
        try:
            ctx: Context = self._ctx
            pld = await ctx._pld_rx.recv_pld(
                ipc=self,
                expect_msg=Yield,
            )
            return pld
            return await ctx._pld_rx.recv_pld(ipc=self)

        # XXX: the stream terminates on either of:
        # - `self._rx_chan.receive()` raising after manual closure

@ -233,7 +174,7 @@ class MsgStream(trio.abc.Channel):
        # - via a `Stop`-msg received from remote peer task.
        #   NOTE
        #   |_ previously this was triggered by calling
        #   `._rx_chan.aclose()` on the send side of the channel
        #   ``._rx_chan.aclose()`` on the send side of the channel
        #   inside `Actor._deliver_ctx_payload()`, but now the 'stop'
        #   message handling gets delegated to `PldRx.recv_pld()`
        #   internals.

@ -257,14 +198,11 @@ class MsgStream(trio.abc.Channel):
            # terminated and signal this local iterator to stop
            drained: list[Exception|dict] = await self.aclose()
            if drained:
                # ^^^^^^^^TODO? pass these to the `._ctx._drained_msgs:
                # deque` and then iterate them as part of any
                # `.wait_for_result()` call?
                #
                # -[ ] move the match-case processing from
                #    `.receive_nowait()` instead to right here, use it from
                #    a `for msg in drained:` post-proc loop?
                #
                # ?TODO? pass these to the `._ctx._drained_msgs: deque`
                # and then iterate them as part of any `.wait_for_result()` call?
                #
                # from .devx import pause
                # await pause()
                log.warning(
                    'Drained context msgs during closure\n\n'
                    f'{drained}'

@ -327,6 +265,9 @@ class MsgStream(trio.abc.Channel):
        - more or less we try to maintain adherence to trio's `.aclose()` semantics:
          https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose
        '''

        # rx_chan = self._rx_chan

        # XXX NOTE XXX
        # it's SUPER IMPORTANT that we ensure we don't DOUBLE
        # DRAIN msgs on closure so avoid getting stuck hanging on

@ -338,16 +279,15 @@ class MsgStream(trio.abc.Channel):
            # this stream has already been closed so silently succeed as
            # per ``trio.AsyncResource`` semantics.
            # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose
            # import tractor
            # await tractor.pause()
            return []

        ctx: Context = self._ctx
        drained: list[Exception|dict] = []
        while not drained:
            try:
                maybe_final_msg: Yield|Return = self.receive_nowait(
                    expect_msg=Yield|Return,
                maybe_final_msg = self.receive_nowait(
                    # allow_msgs=[Yield, Return],
                    expect_msg=Yield,
                )
                if maybe_final_msg:
                    log.debug(

@ -432,31 +372,18 @@ class MsgStream(trio.abc.Channel):
        # await rx_chan.aclose()

        if not self._eoc:
            this_side: str = self._ctx.side
            peer_side: str = self._ctx.peer_side
            message: str = (
                f'Stream self-closed by {this_side!r}-side before EoC from {peer_side!r}\n'
                f'Stream self-closed by {self._ctx.side!r}-side before EoC\n'
                # } bc a stream is a "scope"/msging-phase inside an IPC
                f'c}}>\n'
                f'x}}>\n'
                f' |_{self}\n'
            )
            if (
                (rx_chan := self._rx_chan)
                and
                (stats := rx_chan.statistics()).tasks_waiting_receive
            ):
                message += (
                    f'AND there are still reader tasks,\n'
                    f'\n'
                    f'{stats}\n'
                )

            log.cancel(message)
            self._eoc = trio.EndOfChannel(message)

        # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX?
        # => NO, DEFINITELY NOT! <=
        # if we're a bi-dir `MsgStream` BECAUSE this same
        # if we're a bi-dir ``MsgStream`` BECAUSE this same
        # core-msg-loop mem recv-chan is used to deliver the
        # potential final result from the surrounding inter-actor
        # `Context` so we don't want to close it until that

@ -596,17 +523,8 @@ class MsgStream(trio.abc.Channel):
            trio.ClosedResourceError,
            trio.BrokenResourceError,
            BrokenPipeError,
        ) as _trans_err:
            trans_err = _trans_err
            if (
                hide_tb
                and
                self._ctx.chan._exc is trans_err
                # ^XXX, IOW, only if the channel is marked errored
                # for the same reason as whatever its underlying
                # transport raised, do we keep the full low-level tb
                # suppressed from the user.
            ):
        ) as trans_err:
            if hide_tb:
                raise type(trans_err)(
                    *trans_err.args
                ) from trans_err

@ -812,12 +730,13 @@ async def open_stream_from_ctx(
        # sanity, can remove?
        assert eoc is stream._eoc

        log.runtime(
        log.warning(
            'Stream was terminated by EoC\n\n'
            # NOTE: won't show the error <Type> but
            # does show txt followed by IPC msg.
            f'{str(eoc)}\n'
        )

    finally:
        if ctx._portal:
            try:

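Since the `MsgStream` docstring above names `Portal.open_stream_from()` as one of its entry points, a hedged consumer sketch follows; the `counter` fn and actor name are made up for illustration and exact `Portal` method signatures may differ across versions:

    import trio
    import tractor

    async def counter():
        # a plain async-gen executed inside the subactor
        for i in range(3):
            yield i

    async def main():
        async with tractor.open_nursery() as an:
            portal = await an.start_actor(
                'streamer',
                enable_modules=[__name__],
            )
            # the acm yields a `MsgStream` instance
            async with portal.open_stream_from(counter) as stream:
                async for value in stream:  # terminates cleanly on EoC
                    print(value)

            await portal.cancel_actor()

    trio.run(main)
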
@ -22,20 +22,13 @@ from contextlib import asynccontextmanager as acm
from functools import partial
import inspect
from pprint import pformat
from typing import (
    TYPE_CHECKING,
)
from typing import TYPE_CHECKING
import typing
import warnings

import trio


from .devx._debug import maybe_wait_for_debugger
from ._addr import (
    UnwrappedAddress,
    mk_uuid,
)
from ._state import current_actor, is_main_process
from .log import get_logger, get_loglevel
from ._runtime import Actor

@ -44,21 +37,18 @@ from ._exceptions import (
    is_multi_cancelled,
    ContextCancelled,
)
from ._root import (
    open_root_actor,
)
from ._root import open_root_actor
from . import _state
from . import _spawn


if TYPE_CHECKING:
    import multiprocessing as mp
    # from .ipc._server import IPCServer
    from .ipc import IPCServer


log = get_logger(__name__)

_default_bind_addr: tuple[str, int] = ('127.0.0.1', 0)


class ActorNursery:
    '''

@ -140,9 +130,8 @@

        *,

        bind_addrs: list[UnwrappedAddress]|None = None,
        bind_addrs: list[tuple[str, int]] = [_default_bind_addr],
        rpc_module_paths: list[str]|None = None,
        enable_transports: list[str] = [_state._def_tpt_proto],
        enable_modules: list[str]|None = None,
        loglevel: str|None = None,  # set log level per subactor
        debug_mode: bool|None = None,

@ -152,7 +141,6 @@
        # a `._ria_nursery` since the dependent APIs have been
        # removed!
        nursery: trio.Nursery|None = None,
        proc_kwargs: dict[str, any] = {}

    ) -> Portal:
        '''

@ -189,9 +177,7 @@
            enable_modules.extend(rpc_module_paths)

        subactor = Actor(
            name=name,
            uuid=mk_uuid(),

            name,
            # modules allowed to invoke funcs from
            enable_modules=enable_modules,
            loglevel=loglevel,

@ -199,7 +185,7 @@
            # verbatim relay this actor's registrar addresses
            registry_addrs=current_actor().reg_addrs,
        )
        parent_addr: UnwrappedAddress = self._actor.accept_addr
        parent_addr = self._actor.accept_addr
        assert parent_addr

        # start a task to spawn a process

@ -218,7 +204,6 @@
                parent_addr,
                _rtv,  # run time vars
                infect_asyncio=infect_asyncio,
                proc_kwargs=proc_kwargs
            )
        )

@ -237,12 +222,11 @@
        *,

        name: str | None = None,
        bind_addrs: UnwrappedAddress|None = None,
        bind_addrs: tuple[str, int] = [_default_bind_addr],
        rpc_module_paths: list[str] | None = None,
        enable_modules: list[str] | None = None,
        loglevel: str | None = None,  # set log level per subactor
        infect_asyncio: bool = False,
        proc_kwargs: dict[str, any] = {},

        **kwargs,  # explicit args to ``fn``

@ -273,7 +257,6 @@
            # use the run_in_actor nursery
            nursery=self._ria_nursery,
            infect_asyncio=infect_asyncio,
            proc_kwargs=proc_kwargs
        )

        # XXX: don't allow stream funcs

@ -318,13 +301,8 @@
        children: dict = self._children
        child_count: int = len(children)
        msg: str = f'Cancelling actor nursery with {child_count} children\n'

        server: IPCServer = self._actor.ipc_server

        with trio.move_on_after(3) as cs:
            async with trio.open_nursery(
                strict_exception_groups=False,
            ) as tn:
            async with trio.open_nursery() as tn:

                subactor: Actor
                proc: trio.Process

@ -343,7 +321,7 @@

                else:
                    if portal is None:  # actor hasn't fully spawned yet
                        event: trio.Event = server._peer_connected[subactor.uid]
                        event = self._actor._peer_connected[subactor.uid]
                        log.warning(
                            f"{subactor.uid} never finished spawning?"
                        )

@ -359,7 +337,7 @@
                        if portal is None:
                            # cancelled while waiting on the event
                            # to arrive
                            chan = server._peers[subactor.uid][-1]
                            chan = self._actor._peers[subactor.uid][-1]
                            if chan:
                                portal = Portal(chan)
                            else:  # there's no other choice left

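For context, a minimal sketch of driving the `ActorNursery.run_in_actor()` API shown above from user code; `cube` is a made-up fn and the result-wait method name is an assumption based on this diff's `Portal` usage:

    import trio
    import tractor

    async def cube(x: int) -> int:
        return x ** 3

    async def main():
        async with tractor.open_nursery() as an:
            portal = await an.run_in_actor(cube, x=3)
            # blocks until the one-shot subactor task returns
            assert await portal.result() == 27

    trio.run(main)
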
@ -26,9 +26,6 @@ import os
import pathlib

import tractor
from tractor.devx._debug import (
    BoxedMaybeException,
)
from .pytest import (
    tractor_test as tractor_test
)

@ -101,13 +98,12 @@ async def expect_ctxc(
    '''
    if yay:
        try:
            yield (maybe_exc := BoxedMaybeException())
            yield
            raise RuntimeError('Never raised ctxc?')
        except tractor.ContextCancelled as ctxc:
            maybe_exc.value = ctxc
        except tractor.ContextCancelled:
            if reraise:
                raise
            else:
                return
    else:
        yield (maybe_exc := BoxedMaybeException())
        yield

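A hedged sketch of how a test might drive the `expect_ctxc()` acm above; the ctx setup is elided and hypothetical:

    import tractor
    from tractor._testing import expect_ctxc

    async def check_cancel(ctx: tractor.Context):
        # `yay=True` asserts a ctxc *must* arrive; `reraise=False`
        # swallows it once caught.
        async with expect_ctxc(yay=True, reraise=False):
            await ctx.wait_for_result()  # expected to raise ContextCancelled
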
@ -1,70 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Random IPC addr generation for isolating
the discovery space between test sessions.

Might be eventually useful to expose as a util set from
our `tractor.discovery` subsys?

'''
import random
from typing import (
    Type,
)
from tractor import (
    _addr,
)


def get_rando_addr(
    tpt_proto: str,
    *,

    # choose random port at import time
    _rando_port: int = random.randint(1000, 9999)

) -> tuple[str, str|int]:
    '''
    Used to globally override the runtime to the
    per-test-session-dynamic addr so that all tests never conflict
    with any other actor tree using the default.

    '''
    addr_type: Type[_addr.Address] = _addr._address_types[tpt_proto]
    def_reg_addr: tuple[str, int] = _addr._default_lo_addrs[tpt_proto]

    # this is the "unwrapped" form expected to be passed to
    # `.open_root_actor()` by test body.
    testrun_reg_addr: tuple[str, int|str]
    match tpt_proto:
        case 'tcp':
            testrun_reg_addr = (
                addr_type.def_bindspace,
                _rando_port,
            )

        # NOTE, file-name uniqueness (no-collisions) will be based on
        # the runtime-directory and root (pytest-proc's) pid.
        case 'uds':
            testrun_reg_addr = addr_type.get_random().unwrap()

    # XXX, as sanity it should never be the same as the default for the
    # host-singleton registry actor.
    assert def_reg_addr != testrun_reg_addr

    return testrun_reg_addr

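A usage sketch of the helper above, per its own docstring; the `registry_addrs` kwarg name follows the `_runtime_vars['_registry_addrs']` / `reg_addrs` usage elsewhere in this diff but is still an assumption:

    import tractor
    from tractor._testing.addr import get_rando_addr

    async def main():
        reg_addr = get_rando_addr(tpt_proto='tcp')
        # hand the session-unique "unwrapped" addr to the root actor so
        # this tree's registry never collides with the default one.
        async with tractor.open_root_actor(registry_addrs=[reg_addr]):
            ...
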
@ -26,46 +26,29 @@ from functools import (
import inspect
import platform

import pytest
import tractor
import trio


def tractor_test(fn):
    '''
    Decorator for async test fns to decorator-wrap them as "native"
    looking sync funcs runnable by `pytest` and auto invoked with
    `trio.run()` (much like the `pytest-trio` plugin's approach).
    Decorator for async test funcs to present them as "native"
    looking sync funcs runnable by `pytest` using `trio.run()`.

    Further the test fn body will be invoked AFTER booting the actor
    runtime, i.e. from inside a `tractor.open_root_actor()` block AND
    with various runtime and tooling parameters implicitly passed as
    requested by the test session's config; see immediately below.
    Use:

    Basic deco use:
    ---------------
    @tractor_test
    async def test_whatever():
        await ...

    @tractor_test
    async def test_whatever():
        await ...
    If fixtures:

    - ``reg_addr`` (a socket addr tuple where arbiter is listening)
    - ``loglevel`` (logging level passed to tractor internals)
    - ``start_method`` (subprocess spawning backend)

    Runtime config via special fixtures:
    ------------------------------------
    If any of the following fixtures are requested by the wrapped test
    fn (via normal func-args declaration),

    - `reg_addr` (a socket addr tuple where arbiter is listening)
    - `loglevel` (logging level passed to tractor internals)
    - `start_method` (subprocess spawning backend)

    (TODO support)
    - `tpt_proto` (IPC transport protocol key)

    they will be automatically injected to each test as normally
    expected as well as passed to the initial
    `tractor.open_root_actor()` funcargs.

    are defined in the `pytest` fixture space they will be automatically
    injected to tests declaring these funcargs.
    '''
    @wraps(fn)
    def wrapper(

@ -128,164 +111,3 @@ def tractor_test(fn):
        return trio.run(main)

    return wrapper


def pytest_addoption(
    parser: pytest.Parser,
):
    # parser.addoption(
    #     "--ll",
    #     action="store",
    #     dest='loglevel',
    #     default='ERROR', help="logging level to set when testing"
    # )

    parser.addoption(
        "--spawn-backend",
        action="store",
        dest='spawn_backend',
        default='trio',
        help="Process spawning backend to use for test run",
    )

    parser.addoption(
        "--tpdb",
        "--debug-mode",
        action="store_true",
        dest='tractor_debug_mode',
        # default=False,
        help=(
            'Enable a flag that can be used by tests to set the '
            '`debug_mode: bool` for engaging the internal '
            'multi-proc debugger sys.'
        ),
    )

    # provide which IPC transport protocols opting-in test suites
    # should accumulatively run against.
    parser.addoption(
        "--tpt-proto",
        nargs='+',  # accumulate-multiple-args
        action="store",
        dest='tpt_protos',
        default=['tcp'],
        help="Transport protocol to use under the `tractor.ipc.Channel`",
    )


def pytest_configure(config):
    backend = config.option.spawn_backend
    tractor._spawn.try_set_start_method(backend)


@pytest.fixture(scope='session')
def debug_mode(request) -> bool:
    '''
    Flag state for whether `--tpdb` (for `tractor`-py-debugger)
    was passed to the test run.

    Normally tests should pass this directly to `.open_root_actor()`
    to allow the user to opt into suite-wide crash handling.

    '''
    debug_mode: bool = request.config.option.tractor_debug_mode
    return debug_mode


@pytest.fixture(scope='session')
def spawn_backend(request) -> str:
    return request.config.option.spawn_backend


@pytest.fixture(scope='session')
def tpt_protos(request) -> list[str]:

    # allow quoting on CLI
    proto_keys: list[str] = [
        proto_key.replace('"', '').replace("'", "")
        for proto_key in request.config.option.tpt_protos
    ]

    # ?TODO, eventually support multiple protos per test-sesh?
    if len(proto_keys) > 1:
        pytest.fail(
            'We only support one `--tpt-proto <key>` atm!\n'
        )

    # XXX ensure we support the protocol by name via lookup!
    for proto_key in proto_keys:
        addr_type = tractor._addr._address_types[proto_key]
        assert addr_type.proto_key == proto_key

    yield proto_keys


@pytest.fixture(
    scope='session',
    autouse=True,
)
def tpt_proto(
    tpt_protos: list[str],
) -> str:
    proto_key: str = tpt_protos[0]

    from tractor import _state
    if _state._def_tpt_proto != proto_key:
        _state._def_tpt_proto = proto_key

    yield proto_key


@pytest.fixture(scope='session')
def reg_addr(
    tpt_proto: str,
) -> tuple[str, int|str]:
    '''
    Deliver a test-sesh unique registry address such
    that each run's (tests which use this fixture) will
    have no conflicts/cross-talk when running simultaneously
    nor will interfere with other live `tractor` apps active
    on the same network-host (namespace).

    '''
    from tractor._testing.addr import get_rando_addr
    return get_rando_addr(
        tpt_proto=tpt_proto,
    )


def pytest_generate_tests(
    metafunc: pytest.Metafunc,
):
    spawn_backend: str = metafunc.config.option.spawn_backend

    if not spawn_backend:
        # XXX some weird windows bug with `pytest`?
        spawn_backend = 'trio'

    # TODO: maybe just use the literal `._spawn.SpawnMethodKey`?
    assert spawn_backend in (
        'mp_spawn',
        'mp_forkserver',
        'trio',
    )

    # NOTE: used-to-be-used to dynamically parametrize tests for when
    # you just passed --spawn-backend=`mp` on the cli, but now we expect
    # that cli input to be manually specified, BUT, maybe we'll do
    # something like this again in the future?
    if 'start_method' in metafunc.fixturenames:
        metafunc.parametrize(
            "start_method",
            [spawn_backend],
            scope='module',
        )

    # TODO, parametrize any `tpt_proto: str` declaring tests!
    # proto_tpts: list[str] = metafunc.config.option.proto_tpts
    # if 'tpt_proto' in metafunc.fixturenames:
    #     metafunc.parametrize(
    #         'tpt_proto',
    #         proto_tpts,  # TODO, double check this list usage!
    #         scope='module',
    #     )

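An in-process equivalent of invoking the CLI options registered above (the paths are illustrative):

    import pytest

    pytest.main([
        '--spawn-backend=trio',
        '--tpt-proto', 'tcp',
        '--tpdb',  # opt into the multi-proc crash-handling flag
        'tests/',
    ])
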
@ -1,35 +0,0 @@
import os
import random


def generate_sample_messages(
    amount: int,
    rand_min: int = 0,
    rand_max: int = 0,
    silent: bool = False
) -> tuple[list[bytes], int]:

    msgs = []
    size = 0

    if not silent:
        print(f'\ngenerating {amount} messages...')

    for i in range(amount):
        msg = f'[{i:08}]'.encode('utf-8')

        if rand_max > 0:
            msg += os.urandom(
                random.randint(rand_min, rand_max))

        size += len(msg)

        msgs.append(msg)

        if not silent and i and i % 10_000 == 0:
            print(f'{i} generated')

    if not silent:
        print(f'done, {size:,} bytes in total')

    return msgs, size

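A quick usage example of the removed helper above, fully grounded in its signature: build 10k messages each padded with 20-40 random bytes and sanity-check the reported total:

    msgs, total = generate_sample_messages(
        10_000,
        rand_min=20,
        rand_max=40,
        silent=True,
    )
    assert total == sum(len(m) for m in msgs)
    assert msgs[0].startswith(b'[00000000]')
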
@ -73,7 +73,6 @@ from tractor.log import get_logger
from tractor._context import Context
from tractor import _state
from tractor._exceptions import (
    DebugRequestError,
    InternalError,
    NoRuntime,
    is_multi_cancelled,

@ -92,11 +91,7 @@ from tractor._state import (
if TYPE_CHECKING:
    from trio.lowlevel import Task
    from threading import Thread
    from tractor.ipc import (
        Channel,
        IPCServer,
        # _server,  # TODO? export at top level?
    )
    from tractor._ipc import Channel
    from tractor._runtime import (
        Actor,
    )

@ -1438,7 +1433,6 @@ def any_connected_locker_child() -> bool:

    '''
    actor: Actor = current_actor()
    server: IPCServer = actor.ipc_server

    if not is_root_process():
        raise InternalError('This is a root-actor only API!')

@ -1448,7 +1442,7 @@ def any_connected_locker_child() -> bool:
        and
        (uid_in_debug := ctx.chan.uid)
    ):
        chans: list[tractor.Channel] = server._peers.get(
        chans: list[tractor.Channel] = actor._peers.get(
            tuple(uid_in_debug)
        )
        if chans:

@ -1746,6 +1740,13 @@ def sigint_shield(
_pause_msg: str = 'Opening a pdb REPL in paused actor'


class DebugRequestError(RuntimeError):
    '''
    Failed to request stdio lock from root actor!

    '''


_repl_fail_msg: str|None = (
    'Failed to REPL via `_pause()` '
)

@ -3008,7 +3009,6 @@ async def _maybe_enter_pm(
        [BaseException|BaseExceptionGroup],
        bool,
    ] = lambda err: not is_multi_cancelled(err),
    **_pause_kws,

):
    if (

@ -3035,7 +3035,6 @@ async def _maybe_enter_pm(
        await post_mortem(
            api_frame=api_frame,
            tb=tb,
            **_pause_kws,
        )
        return True

@ -237,7 +237,7 @@ def enable_stack_on_sig(
    try:
        import stackscope
    except ImportError:
        log.error(
        log.warning(
            '`stackscope` not installed for use in debug mode!'
        )
        return None

@ -255,8 +255,8 @@ def enable_stack_on_sig(
        dump_tree_on_sig,
    )
    log.devx(
        f'Enabling trace-trees on `SIGUSR1` '
        f'since `stackscope` is installed @ \n'
        'Enabling trace-trees on `SIGUSR1` '
        'since `stackscope` is installed @ \n'
        f'{stackscope!r}\n\n'
        f'With `SIGUSR1` handler\n'
        f'|_{dump_tree_on_sig}\n'

@ -19,7 +19,6 @@ Pretty formatters for use throughout the code base.
Mostly handy for logging and exception message content.

'''
import sys
import textwrap
import traceback

@ -116,85 +115,6 @@ def pformat_boxed_tb(
    )


def pformat_exc(
    exc: Exception,
    header: str = '',
    message: str = '',
    body: str = '',
    with_type_header: bool = True,
) -> str:

    # XXX when the currently raised exception is this instance,
    # we do not ever use the "type header" style repr.
    is_being_raised: bool = False
    if (
        (curr_exc := sys.exception())
        and
        curr_exc is exc
    ):
        is_being_raised: bool = True

    with_type_header: bool = (
        with_type_header
        and
        not is_being_raised
    )

    # <RemoteActorError( .. )> style
    if (
        with_type_header
        and
        not header
    ):
        header: str = f'<{type(exc).__name__}('

    message: str = (
        message
        or
        exc.message
    )
    if message:
        # split off the first line so, if needed, it isn't
        # indented the same like the "boxed content" which
        # since there is no `.tb_str` is just the `.message`.
        lines: list[str] = message.splitlines()
        first: str = lines[0]
        message: str = message.removeprefix(first)

        # with a type-style header we,
        # - have no special message "first line" extraction/handling
        # - place the message a space in from the header:
        #  `MsgTypeError( <message> ..`
        #                 ^-here
        # - indent the `.message` inside the type body.
        if with_type_header:
            first = f' {first} )>'

        message: str = textwrap.indent(
            message,
            prefix=' '*2,
        )
        message: str = first + message

    tail: str = ''
    if (
        with_type_header
        and
        not message
    ):
        tail: str = '>'

    return (
        header
        +
        message
        +
        f'{body}'
        +
        tail
    )


def pformat_caller_frame(
    stack_limit: int = 1,
    box_tb: bool = True,

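A hedged usage sketch for `pformat_exc()` above; note the helper reads a `.message` attr (present on tractor's error types), so for a stock exception we set it manually here purely for illustration:

    try:
        raise ValueError('bad input\nsecond line of detail')
    except ValueError as exc:
        exc.message = str(exc)  # stand-in for the `.message` attr it expects
        # no `<ValueError( .. )>` header is rendered here since
        # `sys.exception()` *is* this instance (the "is being raised"
        # branch above disables the type header).
        print(pformat_exc(exc))
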
@ -45,8 +45,6 @@ __all__ = ['pub']
log = get_logger('messaging')


# TODO! this needs to be reworked to use the modern
# `Context`/`MsgStream` APIs!!
async def fan_out_to_ctxs(
    pub_async_gen_func: typing.Callable,  # it's an async gen ... gd mypy
    topics2ctxs: dict[str, list],

@ -1,24 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
A modular IPC layer supporting the power of cross-process SC!

'''
from ._chan import (
    _connect_chan as _connect_chan,
    Channel as Channel
)

@ -1,457 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Inter-process comms abstractions

"""
from __future__ import annotations
from collections.abc import AsyncGenerator
from contextlib import (
    asynccontextmanager as acm,
    contextmanager as cm,
)
import platform
from pprint import pformat
import typing
from typing import (
    Any,
    TYPE_CHECKING,
)
import warnings

import trio

from ._types import (
    transport_from_addr,
    transport_from_stream,
)
from tractor._addr import (
    is_wrapped_addr,
    wrap_address,
    Address,
    UnwrappedAddress,
)
from tractor.log import get_logger
from tractor._exceptions import (
    MsgTypeError,
    pack_from_raise,
    TransportClosed,
)
from tractor.msg import (
    Aid,
    MsgCodec,
)

if TYPE_CHECKING:
    from ._transport import MsgTransport


log = get_logger(__name__)

_is_windows = platform.system() == 'Windows'


class Channel:
    '''
    An inter-process channel for communication between (remote) actors.

    Wraps a ``MsgStream``: transport + encoding IPC connection.

    Currently we only support ``trio.SocketStream`` for transport
    (aka TCP) and the ``msgpack`` interchange format via the ``msgspec``
    codec library.

    '''
    def __init__(

        self,
        transport: MsgTransport|None = None,
        # TODO: optional reconnection support?
        # auto_reconnect: bool = False,
        # on_reconnect: typing.Callable[..., typing.Awaitable] = None,

    ) -> None:

        # self._recon_seq = on_reconnect
        # self._autorecon = auto_reconnect

        # Either created in ``.connect()`` or passed in by
        # user in ``.from_stream()``.
        self._transport: MsgTransport|None = transport

        # set after handshake - always info from peer end
        self.aid: Aid|None = None

        self._aiter_msgs = self._iter_msgs()
        self._exc: Exception|None = None
        # ^XXX! ONLY set if a remote actor sends an `Error`-msg
        self._closed: bool = False

        # flag set by ``Portal.cancel_actor()`` indicating remote
        # (possibly peer) cancellation of the far end actor
        # runtime.
        self._cancel_called: bool = False

    @property
    def uid(self) -> tuple[str, str]:
        '''
        Peer actor's unique id.

        '''
        msg: str = (
            f'`{type(self).__name__}.uid` is now deprecated.\n'
            'Use the new `.aid: tractor.msg.Aid` (struct) instead '
            'which also provides additional named (optional) fields '
            'beyond just the `.name` and `.uuid`.'
        )
        warnings.warn(
            msg,
            DeprecationWarning,
            stacklevel=2,
        )
        peer_aid: Aid = self.aid
        return (
            peer_aid.name,
            peer_aid.uuid,
        )

    @property
    def stream(self) -> trio.abc.Stream | None:
        return self._transport.stream if self._transport else None

    @property
    def msgstream(self) -> MsgTransport:
        log.info(
            '`Channel.msgstream` is an old name, use `._transport`'
        )
        return self._transport

    @property
    def transport(self) -> MsgTransport:
        return self._transport

    @classmethod
    def from_stream(
        cls,
        stream: trio.abc.Stream,
    ) -> Channel:
        transport_cls = transport_from_stream(stream)
        return Channel(
            transport=transport_cls(stream)
        )

    @classmethod
    async def from_addr(
        cls,
        addr: UnwrappedAddress,
        **kwargs
    ) -> Channel:

        if not is_wrapped_addr(addr):
            addr: Address = wrap_address(addr)

        transport_cls = transport_from_addr(addr)
        transport = await transport_cls.connect_to(
            addr,
            **kwargs,
        )
        assert transport.raddr == addr
        chan = Channel(transport=transport)
        log.runtime(
            f'Connected channel IPC transport\n'
            f'[>\n'
            f' |_{chan}\n'
        )
        return chan

    @cm
    def apply_codec(
        self,
        codec: MsgCodec,
    ) -> None:
        '''
        Temporarily override the underlying IPC msg codec for
        dynamic enforcement of messaging schema.

        '''
        orig: MsgCodec = self._transport.codec
        try:
            self._transport.codec = codec
            yield
        finally:
            self._transport.codec = orig

    # TODO: do a .src/.dst: str for maddrs?
    def pformat(self) -> str:
        if not self._transport:
            return '<Channel with inactive transport?>'

        tpt: MsgTransport = self._transport
        tpt_name: str = type(tpt).__name__
        tpt_status: str = (
            'connected' if self.connected()
            else 'closed'
        )
        return (
            f'<Channel(\n'
            f' |_status: {tpt_status!r}\n'
            f'   _closed={self._closed}\n'
            f'   _cancel_called={self._cancel_called}\n'
            f'\n'
            f' |_peer: {self.aid}\n'
            f'\n'
            f' |_msgstream: {tpt_name}\n'
            f'   proto={tpt.laddr.proto_key!r}\n'
            f'   layer={tpt.layer_key!r}\n'
            f'   laddr={tpt.laddr}\n'
            f'   raddr={tpt.raddr}\n'
            f'   codec={tpt.codec_key!r}\n'
            f'   stream={tpt.stream}\n'
            f'   maddr={tpt.maddr!r}\n'
            f'   drained={tpt.drained}\n'
            f'   _send_lock={tpt._send_lock.statistics()}\n'
            f')>\n'
        )

    # NOTE: making this return a value that can be passed to
    # `eval()` is entirely **optional** FYI!
    # https://docs.python.org/3/library/functions.html#repr
    # https://docs.python.org/3/reference/datamodel.html#object.__repr__
    #
    # Currently we target **readability** from a (console)
    # logging perspective over `eval()`-ability since we do NOT
    # target serializing non-struct instances!
    # def __repr__(self) -> str:
    __str__ = pformat
    __repr__ = pformat

    @property
    def laddr(self) -> Address|None:
        return self._transport.laddr if self._transport else None

    @property
    def raddr(self) -> Address|None:
        return self._transport.raddr if self._transport else None

    # TODO: something like,
    # `pdbp.hideframe_on(errors=[MsgTypeError])`
    # instead of the `try/except` hack we have rn..
    # seems like a pretty useful thing to have in general
    # along with being able to filter certain stack frame(s / sets)
    # possibly based on the current log-level?
    async def send(
        self,
        payload: Any,

        hide_tb: bool = True,

    ) -> None:
        '''
        Send a coded msg-blob over the transport.

        '''
        __tracebackhide__: bool = hide_tb
        try:
            log.transport(
                '=> send IPC msg:\n\n'
                f'{pformat(payload)}\n'
            )
            # assert self._transport  # but why typing?
            await self._transport.send(
                payload,
                hide_tb=hide_tb,
            )
        except (
            BaseException,
            MsgTypeError,
            TransportClosed,
        ) as _err:
            err = _err  # bind for introspection
            match err:
                case MsgTypeError():
                    try:
                        assert err.cid
                    except KeyError:
                        raise err
                case TransportClosed():
                    log.transport(
                        f'Transport stream closed due to\n'
                        f'{err.repr_src_exc()}\n'
                    )

                case _:
                    # never suppress non-tpt sources
                    __tracebackhide__: bool = False
            raise

    async def recv(self) -> Any:
        assert self._transport
        return await self._transport.recv()

        # TODO: auto-reconnect features like 0mq/nanomsg?
        # -[ ] implement it manually with nods to SC prot
        #      possibly on multiple transport backends?
        #  -> seems like that might be re-inventing scalability
        #     prots tho no?
        # try:
        #     return await self._transport.recv()
        # except trio.BrokenResourceError:
        #     if self._autorecon:
        #         await self._reconnect()
        #         return await self.recv()
        #     raise

    async def aclose(self) -> None:

        log.transport(
            f'Closing channel to {self.aid} '
            f'{self.laddr} -> {self.raddr}'
        )
        assert self._transport
        await self._transport.stream.aclose()
        self._closed = True

    async def __aenter__(self):
        await self.connect()
        return self

    async def __aexit__(self, *args):
        await self.aclose(*args)

    def __aiter__(self):
        return self._aiter_msgs

    # ?TODO? run any reconnection sequence?
    # -[ ] prolly should be impl-ed as deco-API?
    #
    # async def _reconnect(self) -> None:
    #     """Handle connection failures by polling until a reconnect can be
    #     established.
    #     """
    #     down = False
    #     while True:
    #         try:
    #             with trio.move_on_after(3) as cancel_scope:
    #                 await self.connect()
    #             cancelled = cancel_scope.cancelled_caught
    #             if cancelled:
    #                 log.transport(
    #                     "Reconnect timed out after 3 seconds, retrying...")
    #                 continue
    #             else:
    #                 log.transport("Stream connection re-established!")

    #                 # on_recon = self._recon_seq
    #                 # if on_recon:
    #                 #     await on_recon(self)

    #             break
    #         except (OSError, ConnectionRefusedError):
    #             if not down:
    #                 down = True
    #                 log.transport(
    #                     f"Connection to {self.raddr} went down, waiting"
    #                     " for re-establishment")
    #             await trio.sleep(1)

    async def _iter_msgs(
        self
    ) -> AsyncGenerator[Any, None]:
        '''
        Yield `MsgType` IPC msgs decoded and delivered from
        an underlying `MsgTransport` protocol.

        This is a streaming routine also implemented as an async-gen
        func (same as `MsgTransport._iter_pkts()`) gets allocated by
        a `.__call__()` inside `.__init__()` where it is assigned to
        the `._aiter_msgs` attr.

        '''
        assert self._transport
        while True:
            try:
                async for msg in self._transport:
                    match msg:
                        # NOTE: if transport/interchange delivers
                        # a type error, we pack it with the far
                        # end peer `Actor.uid` and relay the
                        # `Error`-msg upward to the `._rpc` stack
                        # for normal RAE handling.
                        case MsgTypeError():
                            yield pack_from_raise(
                                local_err=msg,
                                cid=msg.cid,

                                # XXX we pack it here bc lower
                                # layers have no notion of an
                                # actor-id ;)
                                src_uid=self.uid,
                            )
                        case _:
                            yield msg

            except trio.BrokenResourceError:

                # if not self._autorecon:
                raise

            await self.aclose()

            # if self._autorecon:  # attempt reconnect
            #     await self._reconnect()
            #     continue

    def connected(self) -> bool:
        return self._transport.connected() if self._transport else False

    async def _do_handshake(
        self,
        aid: Aid,

    ) -> Aid:
        '''
        Exchange `(name, UUIDs)` identifiers as the first
        communication step with any (peer) remote `Actor`.

        These are essentially the "mailbox addresses" found in
        "actor model" parlance.

        '''
        await self.send(aid)
        peer_aid: Aid = await self.recv()
        log.runtime(
            f'Received handshake with peer actor,\n'
            f'{peer_aid}\n'
        )
        # NOTE, we always are referencing the remote peer!
        self.aid = peer_aid
        return peer_aid


@acm
async def _connect_chan(
    addr: UnwrappedAddress
) -> typing.AsyncGenerator[Channel, None]:
    '''
    Create and connect a channel with disconnect on context manager
    teardown.

    '''
    chan = await Channel.from_addr(addr)
    yield chan
    with trio.CancelScope(shield=True):
        await chan.aclose()

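A hedged sketch of dialing a peer with the removed `_connect_chan()` acm above (re-exported by the `tractor.ipc` package shown earlier); the TCP address is hypothetical:

    import trio
    from tractor.ipc import _connect_chan

    async def main():
        # connect, exchange a couple of raw msg-blobs, auto-close on exit
        async with _connect_chan(('127.0.0.1', 1616)) as chan:
            await chan.send({'hello': 'peer'})
            reply = await chan.recv()
            print(chan)  # `.pformat()`'d status/transport summary

    trio.run(main)
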
@ -1,163 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
'''
File-descriptor-sharing on `linux` by "wilhelm_of_bohemia".

'''
from __future__ import annotations
import os
import array
import socket
import tempfile
from pathlib import Path
from contextlib import ExitStack

import trio
import tractor
from tractor.ipc import RBToken


actor_name = 'ringd'


_rings: dict[str, dict] = {}


async def _attach_to_ring(
    ring_name: str
) -> tuple[int, int, int]:
    actor = tractor.current_actor()

    fd_amount = 3
    sock_path = (
        Path(tempfile.gettempdir())
        /
        f'{os.getpid()}-pass-ring-fds-{ring_name}-to-{actor.name}.sock'
    )
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(sock_path)
    sock.listen(1)

    async with (
        tractor.find_actor(actor_name) as ringd,
        ringd.open_context(
            _pass_fds,
            name=ring_name,
            sock_path=sock_path
        ) as (ctx, _sent)
    ):
        # prepare array to receive FD
        fds = array.array("i", [0] * fd_amount)

        conn, _ = sock.accept()

        # receive FD
        msg, ancdata, flags, addr = conn.recvmsg(
            1024,
            socket.CMSG_LEN(fds.itemsize * fd_amount)
        )

        for (
            cmsg_level,
            cmsg_type,
            cmsg_data,
        ) in ancdata:
            if (
                cmsg_level == socket.SOL_SOCKET
                and
                cmsg_type == socket.SCM_RIGHTS
            ):
                fds.frombytes(cmsg_data[:fds.itemsize * fd_amount])
                break
        else:
            raise RuntimeError("Receiver: No FDs received")

        conn.close()
        sock.close()
        sock_path.unlink()

        return RBToken.from_msg(
            await ctx.wait_for_result()
        )


@tractor.context
async def _pass_fds(
    ctx: tractor.Context,
    name: str,
    sock_path: str
) -> RBToken:
    global _rings
    token = _rings[name]
    client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    client.connect(sock_path)
    await ctx.started()
    fds = array.array('i', token.fds)
    client.sendmsg([b'FDs'], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fds)])
    client.close()
    return token


@tractor.context
async def _open_ringbuf(
    ctx: tractor.Context,
    name: str,
    buf_size: int
) -> RBToken:
    global _rings
    is_owner = False
    if name not in _rings:
        stack = ExitStack()
        token = stack.enter_context(
            tractor.open_ringbuf(
                name,
                buf_size=buf_size
            )
        )
        _rings[name] = {
            'token': token,
            'stack': stack,
        }
        is_owner = True

    ring = _rings[name]
    await ctx.started()

    try:
        await trio.sleep_forever()

    except tractor.ContextCancelled:
        ...

    finally:
        if is_owner:
            ring['stack'].close()


async def open_ringbuf(
    name: str,
    buf_size: int
) -> RBToken:
    async with (
        tractor.find_actor(actor_name) as ringd,
        ringd.open_context(
            _open_ringbuf,
            name=name,
            buf_size=buf_size
        ) as (rd_ctx, _)
    ):
        yield await _attach_to_ring(name)
        await rd_ctx.cancel()

@ -1,153 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
'''
Linux specifics, for now we are only exposing EventFD

'''
import os
import errno

import cffi
import trio

ffi = cffi.FFI()

# Declare the C functions and types we plan to use.
#    - eventfd: for creating the event file descriptor
#    - write: for writing to the file descriptor
#    - read: for reading from the file descriptor
#    - close: for closing the file descriptor
ffi.cdef(
    '''
    int eventfd(unsigned int initval, int flags);

    ssize_t write(int fd, const void *buf, size_t count);
    ssize_t read(int fd, void *buf, size_t count);

    int close(int fd);
    '''
)


# Open the default dynamic library (essentially 'libc' in most cases)
C = ffi.dlopen(None)


# Constants from <sys/eventfd.h>, if needed.
EFD_SEMAPHORE = 1
EFD_CLOEXEC = 0o2000000
EFD_NONBLOCK = 0o4000


def open_eventfd(initval: int = 0, flags: int = 0) -> int:
    '''
    Open an eventfd with the given initial value and flags.
    Returns the file descriptor on success, otherwise raises OSError.

    '''
    fd = C.eventfd(initval, flags)
    if fd < 0:
        raise OSError(errno.errorcode[ffi.errno], 'eventfd failed')
    return fd


def write_eventfd(fd: int, value: int) -> int:
    '''
    Write a 64-bit integer (uint64_t) to the eventfd's counter.

    '''
    # Create a uint64_t* in C, store `value`
    data_ptr = ffi.new('uint64_t *', value)

    # Call write(fd, data_ptr, 8)
    # We expect to write exactly 8 bytes (sizeof(uint64_t))
    ret = C.write(fd, data_ptr, 8)
    if ret < 0:
        raise OSError(errno.errorcode[ffi.errno], 'write to eventfd failed')
    return ret


def read_eventfd(fd: int) -> int:
    '''
    Read a 64-bit integer (uint64_t) from the eventfd, returning the value.
    Reading resets the counter to 0 (unless using EFD_SEMAPHORE).

    '''
    # Allocate an 8-byte buffer in C for reading
    buf = ffi.new('char[]', 8)

    ret = C.read(fd, buf, 8)
    if ret < 0:
        raise OSError(errno.errorcode[ffi.errno], 'read from eventfd failed')
    # Convert the 8 bytes we read into a Python integer
    data_bytes = ffi.unpack(buf, 8)  # returns a Python bytes object of length 8
    value = int.from_bytes(data_bytes, byteorder='little', signed=False)
    return value


def close_eventfd(fd: int) -> int:
    '''
    Close the eventfd.

    '''
    ret = C.close(fd)
    if ret < 0:
        raise OSError(errno.errorcode[ffi.errno], 'close failed')


class EventFD:
    '''
    Use a previously opened eventfd(2), meant to be used in
    sub-actors after root actor opens the eventfds then passes
    them through pass_fds

    '''

    def __init__(
        self,
        fd: int,
        omode: str
    ):
        self._fd: int = fd
        self._omode: str = omode
        self._fobj = None

    @property
    def fd(self) -> int | None:
        return self._fd

    def write(self, value: int) -> int:
        return write_eventfd(self._fd, value)

    async def read(self) -> int:
        return await trio.to_thread.run_sync(
            read_eventfd, self._fd,
            abandon_on_cancel=True
        )

    def open(self):
        self._fobj = os.fdopen(self._fd, self._omode)

    def close(self):
        if self._fobj:
            self._fobj.close()

    def __enter__(self):
        self.open()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

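A usage sketch for the removed `EventFD` wrapper above: bump the kernel-side counter then do a thread-offloaded blocking read; the module path is an assumption based on the `from ._linux import ...` usage in the ring-buffer module below:

    import trio
    from tractor.ipc._linux import EventFD, open_eventfd  # path is an assumption

    async def main():
        fd: int = open_eventfd()
        with EventFD(fd, 'r') as ev:
            ev.write(3)                  # bump the 64-bit counter
            assert await ev.read() == 3  # read + reset (no EFD_SEMAPHORE)

    trio.run(main)
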
@ -1,45 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
'''
Utils to tame mp non-SC madness

'''
def disable_mantracker():
    '''
    Disable all ``multiprocessing`` "resource tracking" machinery since
    it's an absolute multi-threaded mess of non-SC madness.

    '''
    from multiprocessing import resource_tracker as mantracker

    # Tell the "resource tracker" thing to fuck off.
    class ManTracker(mantracker.ResourceTracker):
        def register(self, name, rtype):
            pass

        def unregister(self, name, rtype):
            pass

        def ensure_running(self):
            pass

    # "know your land and know your prey"
    # https://www.dailymotion.com/video/x6ozzco
    mantracker._resource_tracker = ManTracker()
    mantracker.register = mantracker._resource_tracker.register
    mantracker.ensure_running = mantracker._resource_tracker.ensure_running
    mantracker.unregister = mantracker._resource_tracker.unregister
    mantracker.getfd = mantracker._resource_tracker.getfd

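A sketch of when to apply the patch above: call it before creating any `SharedMemory` segments so the mp resource-tracker can't unlink them behind your back, exactly as the ring-buffer module below does at import time; the module path is an assumption:

    from multiprocessing.shared_memory import SharedMemory
    from tractor.ipc._mp_bs import disable_mantracker  # path is an assumption

    disable_mantracker()
    shm = SharedMemory(name='demo-seg', size=4096, create=True)
    # ... hand `shm.name` to another process without the tracker
    # reaping the segment out from under it ...
    shm.close()
    shm.unlink()
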
@@ -1,253 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
IPC Reliable RingBuffer implementation

'''
from __future__ import annotations
import errno  # needed for the non-blocking read loop below
from contextlib import contextmanager as cm
from multiprocessing.shared_memory import SharedMemory

import trio
from msgspec import (
    Struct,
    to_builtins
)

from ._linux import (
    EFD_NONBLOCK,
    open_eventfd,
    EventFD
)
from ._mp_bs import disable_mantracker


disable_mantracker()


class RBToken(Struct, frozen=True):
    '''
    RingBuffer token contains necessary info to open the two
    eventfds and the shared memory

    '''
    shm_name: str
    write_eventfd: int
    wrap_eventfd: int
    buf_size: int

    def as_msg(self):
        return to_builtins(self)

    @classmethod
    def from_msg(cls, msg: dict) -> RBToken:
        if isinstance(msg, RBToken):
            return msg

        return RBToken(**msg)


@cm
def open_ringbuf(
    shm_name: str,
    buf_size: int = 10 * 1024,
    write_efd_flags: int = 0,
    wrap_efd_flags: int = 0
) -> RBToken:
    shm = SharedMemory(
        name=shm_name,
        size=buf_size,
        create=True
    )
    try:
        token = RBToken(
            shm_name=shm_name,
            write_eventfd=open_eventfd(flags=write_efd_flags),
            wrap_eventfd=open_eventfd(flags=wrap_efd_flags),
            buf_size=buf_size
        )
        yield token

    finally:
        shm.unlink()


class RingBuffSender(trio.abc.SendStream):
    '''
    IPC Reliable Ring Buffer sender side implementation

    `eventfd(2)` is used for wrap around sync, and also to signal
    writes to the reader.

    '''
    def __init__(
        self,
        token: RBToken,
        start_ptr: int = 0,
    ):
        token = RBToken.from_msg(token)
        self._shm = SharedMemory(
            name=token.shm_name,
            size=token.buf_size,
            create=False
        )
        self._write_event = EventFD(token.write_eventfd, 'w')
        self._wrap_event = EventFD(token.wrap_eventfd, 'r')
        self._ptr = start_ptr

    @property
    def key(self) -> str:
        return self._shm.name

    @property
    def size(self) -> int:
        return self._shm.size

    @property
    def ptr(self) -> int:
        return self._ptr

    @property
    def write_fd(self) -> int:
        return self._write_event.fd

    @property
    def wrap_fd(self) -> int:
        return self._wrap_event.fd

    async def send_all(self, data: bytes | bytearray | memoryview):
        # while data is larger than the remaining buf
        target_ptr = self.ptr + len(data)
        while target_ptr > self.size:
            # write all bytes that fit
            remaining = self.size - self.ptr
            self._shm.buf[self.ptr:] = data[:remaining]
            # signal write and wait for reader wrap around
            self._write_event.write(remaining)
            await self._wrap_event.read()

            # wrap around and trim already written bytes
            self._ptr = 0
            data = data[remaining:]
            target_ptr = self._ptr + len(data)

        # remaining data fits in the buffer
        self._shm.buf[self.ptr:target_ptr] = data
        self._write_event.write(len(data))
        self._ptr = target_ptr

    async def wait_send_all_might_not_block(self):
        raise NotImplementedError

    async def aclose(self):
        self._write_event.close()
        self._wrap_event.close()
        self._shm.close()

    async def __aenter__(self):
        self._write_event.open()
        self._wrap_event.open()
        return self


class RingBuffReceiver(trio.abc.ReceiveStream):
    '''
    IPC Reliable Ring Buffer receiver side implementation

    `eventfd(2)` is used for wrap around sync, and also to signal
    writes to the reader.

    '''
    def __init__(
        self,
        token: RBToken,
        start_ptr: int = 0,
        flags: int = 0
    ):
        token = RBToken.from_msg(token)
        self._shm = SharedMemory(
            name=token.shm_name,
            size=token.buf_size,
            create=False
        )
        self._write_event = EventFD(token.write_eventfd, 'w')
        self._wrap_event = EventFD(token.wrap_eventfd, 'r')
        self._ptr = start_ptr
        self._flags = flags

    @property
    def key(self) -> str:
        return self._shm.name

    @property
    def size(self) -> int:
        return self._shm.size

    @property
    def ptr(self) -> int:
        return self._ptr

    @property
    def write_fd(self) -> int:
        return self._write_event.fd

    @property
    def wrap_fd(self) -> int:
        return self._wrap_event.fd

    async def receive_some(
        self,
        max_bytes: int | None = None,
        nb_timeout: float = 0.1
    ) -> memoryview:
        # if non blocking eventfd enabled, do polling
        # until next write, this allows signal handling
        if self._flags & EFD_NONBLOCK:  # test the flag bit; `|` is always truthy
            delta = None
            while delta is None:
                try:
                    delta = await self._write_event.read()

                except OSError as e:
                    # `.errno` is an int so compare against the constant
                    if e.errno == errno.EAGAIN:
                        continue

                    raise e

        else:
            delta = await self._write_event.read()

        # fetch next segment and advance ptr
        next_ptr = self._ptr + delta
        segment = self._shm.buf[self._ptr:next_ptr]
        self._ptr = next_ptr

        if self.ptr == self.size:
            # reached the end, signal wrap around
            self._ptr = 0
            self._wrap_event.write(1)

        return segment

    async def aclose(self):
        self._write_event.close()
        self._wrap_event.close()
        self._shm.close()

    async def __aenter__(self):
        self._write_event.open()
        self._wrap_event.open()
        return self
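A minimal usage sketch of the ring-buffer pair above. The module path (`tractor.ipc._ringbuf`) and the single-process co-location of both ends are assumptions for illustration; in practice the sender and receiver would live in separate actors on a Linux host with `eventfd(2)` support:

# Hypothetical single-process demo; names and module path are assumed.
import trio
from tractor.ipc._ringbuf import (
    open_ringbuf,
    RingBuffSender,
    RingBuffReceiver,
)


async def main():
    with open_ringbuf('demo-ringbuf', buf_size=64) as token:
        async with (
            RingBuffSender(token) as sender,
            RingBuffReceiver(token) as receiver,
        ):
            await sender.send_all(b'hello ringbuf')
            # reads up to the sender's last signaled write
            assert bytes(await receiver.receive_some()) == b'hello ringbuf'


trio.run(main)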
File diff suppressed because it is too large
@@ -1,812 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
SC friendly shared memory management geared at real-time
processing.

Support for ``numpy`` compatible array-buffers is provided but is
considered optional within the context of this runtime-library.

"""
from __future__ import annotations
from sys import byteorder
import time
from typing import Optional
from multiprocessing import shared_memory as shm
from multiprocessing.shared_memory import (
    SharedMemory,
    ShareableList,
)

from msgspec import (
    Struct,
    to_builtins
)
import tractor

from tractor.ipc._mp_bs import disable_mantracker
from tractor.log import get_logger


_USE_POSIX = getattr(shm, '_USE_POSIX', False)
if _USE_POSIX:
    from _posixshmem import shm_unlink


try:
    import numpy as np
    from numpy.lib import recfunctions as rfn
    # TODO ruff complains with,
    # warning| F401: `nptyping` imported but unused; consider using
    # `importlib.util.find_spec` to test for availability
    import nptyping  # noqa
except ImportError:
    pass


log = get_logger(__name__)


disable_mantracker()


class SharedInt:
    '''
    Wrapper around a single entry shared memory array which
    holds an ``int`` value used as an index counter.

    '''
    def __init__(
        self,
        shm: SharedMemory,
    ) -> None:
        self._shm = shm

    @property
    def value(self) -> int:
        return int.from_bytes(self._shm.buf, byteorder)

    @value.setter
    def value(self, value) -> None:
        self._shm.buf[:] = value.to_bytes(self._shm.size, byteorder)

    def destroy(self) -> None:
        if _USE_POSIX:
            # We manually unlink to bypass all the "resource tracker"
            # nonsense meant for non-SC systems.
            name = self._shm.name
            try:
                shm_unlink(name)
            except FileNotFoundError:
                # might be a teardown race here?
                log.warning(f'Shm for {name} already unlinked?')
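For reference, a tiny sketch of driving `SharedInt` directly over a manually allocated 4-byte segment (the segment name is made up; POSIX shm assumed for the unlink):

# Sketch: share a single int counter via a 4-byte shm segment.
from multiprocessing.shared_memory import SharedMemory

idx = SharedInt(
    shm=SharedMemory(
        name='demo_counter',  # hypothetical key
        create=True,
        size=4,  # std int
    )
)
idx.value = 42
assert idx.value == 42

idx._shm.close()
idx.destroy()  # manual unlink (no-op on non-POSIX platforms)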
class NDToken(Struct, frozen=True):
    '''
    Internal representation of a shared memory ``numpy`` array "token"
    which can be used to key and load a system (OS) wide shm entry
    and correctly read the array by type signature.

    This type is msg safe.

    '''
    shm_name: str  # this serves as a "key" value
    shm_first_index_name: str
    shm_last_index_name: str
    dtype_descr: tuple
    size: int  # in struct-array index / row terms

    # TODO: use nptyping here on dtypes
    @property
    def dtype(self) -> list[tuple[str, str, tuple[int, ...]]]:
        return np.dtype(
            list(
                map(tuple, self.dtype_descr)
            )
        ).descr

    def as_msg(self):
        return to_builtins(self)

    @classmethod
    def from_msg(cls, msg: dict) -> NDToken:
        if isinstance(msg, NDToken):
            return msg

        # TODO: native struct decoding
        # return _token_dec.decode(msg)

        msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
        return NDToken(**msg)


# _token_dec = msgspec.msgpack.Decoder(NDToken)

# TODO: this api?
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
# _known_tokens = tractor.ContextStack('_known_tokens', )
# _known_tokens = trio.RunVar('shms', {})

# TODO: this should maybe be provided via
# a `.trionics.maybe_open_context()` wrapper factory?
# process-local store of keys to tokens
_known_tokens: dict[str, NDToken] = {}


def get_shm_token(key: str) -> NDToken | None:
    '''
    Convenience func to check if a token
    for the provided key is known by this process.

    Returns either the ``numpy`` token or a string for a shared list.

    '''
    return _known_tokens.get(key)


def _make_token(
    key: str,
    size: int,
    dtype: np.dtype,

) -> NDToken:
    '''
    Create a serializable token that can be used
    to access a shared array.

    '''
    return NDToken(
        shm_name=key,
        shm_first_index_name=key + "_first",
        shm_last_index_name=key + "_last",
        dtype_descr=tuple(np.dtype(dtype).descr),
        size=size,
    )
class ShmArray:
    '''
    A shared memory ``numpy.ndarray`` API.

    An underlying shared memory buffer is allocated based on
    a user specified ``numpy.ndarray``. This fixed size array
    can be read and written to by pushing data both onto the "front"
    or "back" of a set index range. The indexes for the "first" and
    "last" index are themselves stored in shared memory (accessed via
    ``SharedInt`` interfaces) values such that multiple processes can
    interact with the same array using a synchronized-index.

    '''
    def __init__(
        self,
        shmarr: np.ndarray,
        first: SharedInt,
        last: SharedInt,
        shm: SharedMemory,
        # readonly: bool = True,
    ) -> None:
        self._array = shmarr

        # indexes for first and last indices corresponding
        # to filled data
        self._first = first
        self._last = last

        self._len = len(shmarr)
        self._shm = shm
        self._post_init: bool = False

        # pushing data does not write the index (aka primary key)
        self._write_fields: list[str] | None = None
        dtype = shmarr.dtype
        if dtype.fields:
            self._write_fields = list(shmarr.dtype.fields.keys())[1:]

    # TODO: ringbuf api?

    @property
    def _token(self) -> NDToken:
        return NDToken(
            shm_name=self._shm.name,
            shm_first_index_name=self._first._shm.name,
            shm_last_index_name=self._last._shm.name,
            dtype_descr=tuple(self._array.dtype.descr),
            size=self._len,
        )

    @property
    def token(self) -> dict:
        """Shared memory token that can be serialized and used by
        another process to attach to this array.
        """
        return self._token.as_msg()

    @property
    def index(self) -> int:
        return self._last.value % self._len

    @property
    def array(self) -> np.ndarray:
        '''
        Return an up-to-date ``np.ndarray`` view of the
        so-far-written data to the underlying shm buffer.

        '''
        a = self._array[self._first.value:self._last.value]

        # first, last = self._first.value, self._last.value
        # a = self._array[first:last]

        # TODO: eventually comment this once we've not seen it in the
        # wild in a long time..
        # XXX: race where first/last indexes cause a reader
        # to load an empty array..
        if len(a) == 0 and self._post_init:
            raise RuntimeError('Empty array race condition hit!?')
            # breakpoint()

        return a

    def ustruct(
        self,
        fields: Optional[list[str]] = None,

        # type that all field values will be cast to
        # in the returned view.
        common_dtype: np.dtype = float,

    ) -> np.ndarray:

        array = self._array

        if fields:
            selection = array[fields]
            # fcount = len(fields)
        else:
            selection = array
            # fcount = len(array.dtype.fields)

        # XXX: manual ``.view()`` attempt that also doesn't work.
        # uview = selection.view(
        #     dtype='<f16',
        # ).reshape(-1, 4, order='A')

        # assert len(selection) == len(uview)

        u = rfn.structured_to_unstructured(
            selection,
            # dtype=float,
            copy=True,
        )

        # unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf)
        # array[:] = a[:]
        return u
        # return ShmArray(
        #     shmarr=u,
        #     first=self._first,
        #     last=self._last,
        #     shm=self._shm
        # )

    def last(
        self,
        length: int = 1,

    ) -> np.ndarray:
        '''
        Return the last ``length``'s worth of ("row") entries from the
        array.

        '''
        return self.array[-length:]

    def push(
        self,
        data: np.ndarray,

        field_map: Optional[dict[str, str]] = None,
        prepend: bool = False,
        update_first: bool = True,
        start: int | None = None,

    ) -> int:
        '''
        Ring buffer like "push" to append data
        into the buffer and return updated "last" index.

        NB: no actual ring logic yet to give a "loop around" on overflow
        condition, lel.

        '''
        length = len(data)

        if prepend:
            index = (start or self._first.value) - length

            if index < 0:
                raise ValueError(
                    f'Array size of {self._len} was overrun during prepend.\n'
                    f'You have passed {abs(index)} too many datums.'
                )

        else:
            index = start if start is not None else self._last.value

        end = index + length

        if field_map:
            src_names, dst_names = zip(*field_map.items())
        else:
            dst_names = src_names = self._write_fields

        try:
            self._array[
                list(dst_names)
            ][index:end] = data[list(src_names)][:]

            # NOTE: there was a race here between updating
            # the first and last indices and when the next reader
            # tries to access ``.array`` (which due to the index
            # overlap will be empty). Pretty sure we've fixed it now
            # but leaving this here as a reminder.
            if (
                prepend
                and update_first
                and length
            ):
                assert index < self._first.value

            if (
                index < self._first.value
                and update_first
            ):
                assert prepend, 'prepend=True not passed but index decreased?'
                self._first.value = index

            elif not prepend:
                self._last.value = end

            self._post_init = True
            return end

        except ValueError as err:
            if field_map:
                raise

            # should raise if diff detected
            self.diff_err_fields(data)
            raise err

    def diff_err_fields(
        self,
        data: np.ndarray,
    ) -> None:
        # reraise with any field discrepancy
        our_fields, their_fields = (
            set(self._array.dtype.fields),
            set(data.dtype.fields),
        )

        only_in_ours = our_fields - their_fields
        only_in_theirs = their_fields - our_fields

        if only_in_ours:
            raise TypeError(
                f"Input array is missing field(s): {only_in_ours}"
            )
        elif only_in_theirs:
            raise TypeError(
                f"Input array has unknown field(s): {only_in_theirs}"
            )

    # TODO: support "silent" prepends that don't update ._first.value?
    def prepend(
        self,
        data: np.ndarray,
    ) -> int:
        end = self.push(data, prepend=True)
        assert end

    def close(self) -> None:
        self._first._shm.close()
        self._last._shm.close()
        self._shm.close()

    def destroy(self) -> None:
        if _USE_POSIX:
            # We manually unlink to bypass all the "resource tracker"
            # nonsense meant for non-SC systems.
            shm_unlink(self._shm.name)

        self._first.destroy()
        self._last.destroy()

    def flush(self) -> None:
        # TODO: flush to storage backend like marketstore?
        ...
def open_shm_ndarray(
    size: int,
    key: str | None = None,
    dtype: np.dtype | None = None,
    append_start_index: int | None = None,
    readonly: bool = False,

) -> ShmArray:
    '''
    Open a shared memory ``numpy`` array using the standard library.

    This call unlinks (aka permanently destroys) the buffer on teardown
    and thus should be used from the parent-most accessor (process).

    '''
    # create new shared mem segment for which we
    # have write permission
    a = np.zeros(size, dtype=dtype)
    a['index'] = np.arange(len(a))

    shm = SharedMemory(
        name=key,
        create=True,
        size=a.nbytes
    )
    array = np.ndarray(
        a.shape,
        dtype=a.dtype,
        buffer=shm.buf
    )
    array[:] = a[:]
    array.setflags(write=int(not readonly))

    token = _make_token(
        key=key,
        size=size,
        dtype=dtype,
    )

    # create single entry arrays for storing the first and last indices
    first = SharedInt(
        shm=SharedMemory(
            name=token.shm_first_index_name,
            create=True,
            size=4,  # std int
        )
    )

    last = SharedInt(
        shm=SharedMemory(
            name=token.shm_last_index_name,
            create=True,
            size=4,  # std int
        )
    )

    # Start the "real-time" append-updated (or "pushed-to") section
    # after some start index: ``append_start_index``. This allows appending
    # from a start point in the array which isn't the 0 index and looks
    # something like,
    # -------------------------
    # |              |        i
    # _________________________
    # <-------------> <------->
    #  history         real-time
    #
    # Once fully "prepended", the history section will leave the
    # ``ShmArray._start.value: int = 0`` and the yet-to-be written
    # real-time section will start at ``ShmArray.index: int``.

    # this sets the index to nearly 2/3rds into the length of
    # the buffer leaving at least a "days worth of second samples"
    # for the real-time section.
    if append_start_index is None:
        append_start_index = round(size * 0.616)

    last.value = first.value = append_start_index

    shmarr = ShmArray(
        array,
        first,
        last,
        shm,
    )

    assert shmarr._token == token
    _known_tokens[key] = shmarr.token

    # "unlink" created shm on process teardown by
    # pushing teardown calls onto actor context stack
    stack = tractor.current_actor().lifetime_stack
    stack.callback(shmarr.close)
    stack.callback(shmarr.destroy)

    return shmarr
def attach_shm_ndarray(
    token: tuple[str, str, tuple[str, str]],
    readonly: bool = True,

) -> ShmArray:
    '''
    Attach to an existing shared memory array previously
    created by another process using ``open_shared_array``.

    No new shared mem is allocated but wrapper types for read/write
    access are constructed.

    '''
    token = NDToken.from_msg(token)
    key = token.shm_name

    if key in _known_tokens:
        assert NDToken.from_msg(_known_tokens[key]) == token, "WTF"

    # XXX: ugh, looks like due to the ``shm_open()`` C api we can't
    # actually place files in a subdir, see discussion here:
    # https://stackoverflow.com/a/11103289

    # attach to array buffer and view as per dtype
    _err: Optional[Exception] = None
    for _ in range(3):
        try:
            shm = SharedMemory(
                name=key,
                create=False,
            )
            break
        except OSError as oserr:
            _err = oserr
            time.sleep(0.1)
    else:
        if _err:
            raise _err

    shmarr = np.ndarray(
        (token.size,),
        dtype=token.dtype,
        buffer=shm.buf
    )
    shmarr.setflags(write=int(not readonly))

    first = SharedInt(
        shm=SharedMemory(
            name=token.shm_first_index_name,
            create=False,
            size=4,  # std int
        ),
    )
    last = SharedInt(
        shm=SharedMemory(
            name=token.shm_last_index_name,
            create=False,
            size=4,  # std int
        ),
    )

    # make sure we can read
    first.value

    sha = ShmArray(
        shmarr,
        first,
        last,
        shm,
    )
    # read test
    sha.array

    # Stash key -> token knowledge for future queries
    # via `maybe_open_shm_array()` but only after we know
    # we can attach.
    if key not in _known_tokens:
        _known_tokens[key] = token

    # "close" attached shm on actor teardown
    tractor.current_actor().lifetime_stack.callback(sha.close)

    return sha
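Taken together, the open/attach pair above gives a cross-actor round trip roughly like the following sketch. The key and dtype are made up for illustration, and an active `tractor` runtime is assumed since teardown hooks are pushed onto the actor's `lifetime_stack`:

# Sketch of the shared-ndarray round trip; 'demo_ohlc' is hypothetical.
import numpy as np

dtype = np.dtype([('index', int), ('value', float)])

# parent-most accessor: allocates (and eventually unlinks) the segment
shma = open_shm_ndarray(
    size=1024,
    key='demo_ohlc',
    dtype=dtype,
)
shma.push(np.array([(0, 3.14)], dtype=dtype))

# any other actor: attach via the msg-safe token
reader = attach_shm_ndarray(shma.token, readonly=True)
assert reader.array['value'][-1] == 3.14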
def maybe_open_shm_ndarray(
    key: str,  # unique identifier for segment
    size: int,
    dtype: np.dtype | None = None,
    append_start_index: int = 0,
    readonly: bool = True,

) -> tuple[ShmArray, bool]:
    '''
    Attempt to attach to a shared memory block using a "key" lookup
    to registered blocks in the user's overall "system" registry
    (presumes you don't have the block's explicit token).

    This function is meant to solve the problem of discovering whether
    a shared array token has been allocated or discovered by the actor
    running in **this** process. Systems where multiple actors may seek
    to access a common block can use this function to attempt to acquire
    a token as discovered by the actors who have previously stored
    a "key" -> ``NDToken`` map in an actor local (aka python global)
    variable.

    If you know the explicit ``NDToken`` for your memory segment instead
    use ``attach_shm_array``.

    '''
    try:
        # see if we already know this key
        token = _known_tokens[key]
        return (
            attach_shm_ndarray(
                token=token,
                readonly=readonly,
            ),
            False,  # not newly opened
        )
    except KeyError:
        log.warning(f"Could not find {key} in shms cache")
        if dtype:
            token = _make_token(
                key,
                size=size,
                dtype=dtype,
            )
        else:

            try:
                return (
                    attach_shm_ndarray(
                        token=token,
                        readonly=readonly,
                    ),
                    False,
                )
            except FileNotFoundError:
                log.warning(f"Could not attach to shm with token {token}")

    # This actor does not know about memory
    # associated with the provided "key".
    # Attempt to open a block and expect
    # to fail if a block has been allocated
    # on the OS by someone else.
    return (
        open_shm_ndarray(
            key=key,
            size=size,
            dtype=dtype,
            append_start_index=append_start_index,
            readonly=readonly,
        ),
        True,
    )
class ShmList(ShareableList):
    '''
    Carbon copy of ``.shared_memory.ShareableList`` with a few
    enhancements:

    - readonly mode via instance var flag `._readonly: bool`
    - ``.__getitem__()`` accepts ``slice`` inputs
    - exposes the underlying buffer "name" as a ``.key: str``

    '''
    def __init__(
        self,
        sequence: list | None = None,
        *,
        name: str | None = None,
        readonly: bool = True

    ) -> None:
        self._readonly = readonly
        self._key = name
        return super().__init__(
            sequence=sequence,
            name=name,
        )

    @property
    def key(self) -> str:
        return self._key

    @property
    def readonly(self) -> bool:
        return self._readonly

    def __setitem__(
        self,
        position,
        value,

    ) -> None:

        # mimic ``numpy`` error
        if self._readonly:
            raise ValueError('assignment destination is read-only')

        return super().__setitem__(position, value)

    def __getitem__(
        self,
        indexish,
    ) -> list:

        # NOTE: this is a non-writeable view (copy?) of the buffer
        # in a new list instance.
        if isinstance(indexish, slice):
            return list(self)[indexish]

        return super().__getitem__(indexish)

    # TODO: should we offer a `.array` and `.push()` equivalent
    # to the `ShmArray`?
    # currently we have the following limitations:
    # - can't write slices of input using traditional slice-assign
    #   syntax due to the ``ShareableList.__setitem__()`` implementation.
    # - ``list(shmlist)`` returns a non-mutable copy instead of
    #   a writeable view which would be handier for numpy-style ops.
def open_shm_list(
    key: str,
    sequence: list | None = None,
    size: int = int(2 ** 10),
    dtype: float | int | bool | str | bytes | None = float,
    readonly: bool = True,

) -> ShmList:

    if sequence is None:
        default = {
            float: 0.,
            int: 0,
            bool: True,
            str: 'doggy',
            None: None,
        }[dtype]
        sequence = [default] * size

    shml = ShmList(
        sequence=sequence,
        name=key,
        readonly=readonly,
    )

    # "close" attached shm on actor teardown
    try:
        actor = tractor.current_actor()
        actor.lifetime_stack.callback(shml.shm.close)
        actor.lifetime_stack.callback(shml.shm.unlink)
    except RuntimeError:
        log.warning('tractor runtime not active, skipping teardown steps')

    return shml


def attach_shm_list(
    key: str,
    readonly: bool = False,

) -> ShmList:

    return ShmList(
        name=key,
        readonly=readonly,
    )
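And a matching sketch for the `ShmList` helpers above. The key name is hypothetical; outside a running `tractor` actor the teardown registration just logs a warning:

# Writer side opens with write access, reader attaches by the same key.
shml = open_shm_list(
    key='demo_list',
    size=8,
    dtype=int,
    readonly=False,
)
shml[0] = 42

reader = attach_shm_list(key='demo_list', readonly=True)
assert reader[0] == 42
assert reader[0:2] == [42, 0]  # slice support from `ShmList.__getitem__()`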
@@ -1,256 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
TCP implementation of tractor.ipc._transport.MsgTransport protocol

'''
from __future__ import annotations
import ipaddress
from typing import (
    ClassVar,
)
# from contextlib import (
#     asynccontextmanager as acm,
# )

import msgspec
import trio
from trio import (
    SocketListener,
    open_tcp_listeners,
)

from tractor.msg import MsgCodec
from tractor.log import get_logger
from tractor.ipc._transport import (
    MsgTransport,
    MsgpackTransport,
)


log = get_logger(__name__)


class TCPAddress(
    msgspec.Struct,
    frozen=True,
):
    _host: str
    _port: int

    def __post_init__(self):
        try:
            ipaddress.ip_address(self._host)
        except ValueError as valerr:
            raise ValueError(
                f'Invalid {type(self).__name__}._host = {self._host!r}\n'
            ) from valerr

    proto_key: ClassVar[str] = 'tcp'
    unwrapped_type: ClassVar[type] = tuple[str, int]
    def_bindspace: ClassVar[str] = '127.0.0.1'

    # ?TODO, actually validate ipv4/6 with stdlib's `ipaddress`
    @property
    def is_valid(self) -> bool:
        '''
        Predicate to ensure a valid socket-address pair.

        '''
        return (
            self._port != 0
            and
            (ipaddr := ipaddress.ip_address(self._host))
            and not (
                ipaddr.is_reserved
                or
                ipaddr.is_unspecified
                or
                ipaddr.is_link_local
                or
                ipaddr.is_multicast
                or
                ipaddr.is_global
            )
        )
        # ^XXX^ see various properties of invalid addrs here,
        # https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv4Address

    @property
    def bindspace(self) -> str:
        return self._host

    @property
    def domain(self) -> str:
        return self._host

    @classmethod
    def from_addr(
        cls,
        addr: tuple[str, int]
    ) -> TCPAddress:
        match addr:
            case (str(), int()):
                return TCPAddress(addr[0], addr[1])
            case _:
                raise ValueError(
                    f'Invalid unwrapped address for {cls}\n'
                    f'{addr}\n'
                )

    def unwrap(self) -> tuple[str, int]:
        return (
            self._host,
            self._port,
        )

    @classmethod
    def get_random(
        cls,
        bindspace: str = def_bindspace,
    ) -> TCPAddress:
        return TCPAddress(bindspace, 0)

    @classmethod
    def get_root(cls) -> TCPAddress:
        return TCPAddress(
            '127.0.0.1',
            1616,
        )

    def __repr__(self) -> str:
        return (
            f'{type(self).__name__}[{self.unwrap()}]'
        )

    @classmethod
    def get_transport(
        cls,
        codec: str = 'msgpack',
    ) -> MsgTransport:
        match codec:
            case 'msgpack':
                return MsgpackTCPStream
            case _:
                raise ValueError(
                    f'No IPC transport with {codec!r} supported !'
                )


async def start_listener(
    addr: TCPAddress,
    **kwargs,
) -> SocketListener:
    '''
    Start a TCP socket listener on the given `TCPAddress`.

    '''
    log.info(
        f'Attempting to bind TCP socket\n'
        f'>[\n'
        f'|_{addr}\n'
    )
    # ?TODO, maybe we should just change the lower-level call this is
    # using internally per-listener?
    listeners: list[SocketListener] = await open_tcp_listeners(
        host=addr._host,
        port=addr._port,
        **kwargs
    )
    # NOTE, for now we don't expect non-singleton-resolving
    # domain-addresses/multi-homed-hosts.
    # (though it is supported by `open_tcp_listeners()`)
    assert len(listeners) == 1
    listener = listeners[0]
    host, port = listener.socket.getsockname()[:2]

    log.info(
        f'Listening on TCP socket\n'
        f'[>\n'
        f' |_{addr}\n'
    )
    return listener


# TODO: typing oddity.. not sure why we have to inherit here, but it
# seems to be an issue with `get_msg_transport()` returning
# a `Type[Protocol]`; probably should make a `mypy` issue?
class MsgpackTCPStream(MsgpackTransport):
    '''
    A ``trio.SocketStream`` delivering ``msgpack`` formatted data
    using the ``msgspec`` codec lib.

    '''
    address_type = TCPAddress
    layer_key: int = 4

    @property
    def maddr(self) -> str:
        host, port = self.raddr.unwrap()
        return (
            # TODO, use `ipaddress` from stdlib to handle
            # first detecting which of `ipv4/6` before
            # choosing the routing prefix part.
            f'/ipv4/{host}'

            f'/{self.address_type.proto_key}/{port}'
            # f'/{self.chan.uid[0]}'
            # f'/{self.cid}'

            # f'/cid={cid_head}..{cid_tail}'
            # TODO: ? not use this ^ right ?
        )

    def connected(self) -> bool:
        return self.stream.socket.fileno() != -1

    @classmethod
    async def connect_to(
        cls,
        destaddr: TCPAddress,
        prefix_size: int = 4,
        codec: MsgCodec|None = None,
        **kwargs
    ) -> MsgpackTCPStream:
        stream = await trio.open_tcp_stream(
            *destaddr.unwrap(),
            **kwargs
        )
        return MsgpackTCPStream(
            stream,
            prefix_size=prefix_size,
            codec=codec
        )

    @classmethod
    def get_stream_addrs(
        cls,
        stream: trio.SocketStream
    ) -> tuple[
        TCPAddress,
        TCPAddress,
    ]:
        # TODO, what types are these?
        lsockname = stream.socket.getsockname()
        l_sockaddr: tuple[str, int] = tuple(lsockname[:2])
        rsockname = stream.socket.getpeername()
        r_sockaddr: tuple[str, int] = tuple(rsockname[:2])
        return (
            TCPAddress.from_addr(l_sockaddr),
            TCPAddress.from_addr(r_sockaddr),
        )
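A hedged connect-side sketch for the transport above; it assumes a peer is already listening on the root address and that the task's `MsgCodec` contextvar default is in place (as the runtime normally arranges):

# Hypothetical client-side usage of `MsgpackTCPStream.connect_to()`.
import trio
from tractor.ipc._tcp import TCPAddress, MsgpackTCPStream


async def connect_demo():
    addr = TCPAddress.from_addr(('127.0.0.1', 1616))
    tpt = await MsgpackTCPStream.connect_to(addr)
    print(tpt.maddr)  # e.g. '/ipv4/127.0.0.1/tcp/1616'
    assert tpt.connected()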
@@ -1,514 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
typing.Protocol based generic msg API, implement this class to add
backends for tractor.ipc.Channel

'''
from __future__ import annotations
from typing import (
    runtime_checkable,
    Type,
    Protocol,
    # TypeVar,
    ClassVar,
    TYPE_CHECKING,
)
from collections.abc import (
    AsyncGenerator,
    AsyncIterator,
)
import struct

import trio
import msgspec
from tricycle import BufferedReceiveStream

from tractor.log import get_logger
from tractor._exceptions import (
    MsgTypeError,
    TransportClosed,
    _mk_send_mte,
    _mk_recv_mte,
)
from tractor.msg import (
    _ctxvar_MsgCodec,
    # _codec,  XXX see `self._codec` sanity/debug checks
    MsgCodec,
    MsgType,
    types as msgtypes,
    pretty_struct,
)

if TYPE_CHECKING:
    from tractor._addr import Address

log = get_logger(__name__)


# (codec, transport)
MsgTransportKey = tuple[str, str]


# from tractor.msg.types import MsgType
# ?TODO? this should be our `Union[*msgtypes.__spec__]` alias now right..?
# => BLEH, except can't bc prots must inherit typevar or param-spec
# vars..
# MsgType = TypeVar('MsgType')


@runtime_checkable
class MsgTransport(Protocol):
    #
    # class MsgTransport(Protocol[MsgType]):
    # ^-TODO-^ consider using a generic def and indexing with our
    # eventual msg definition/types?
    # - https://docs.python.org/3/library/typing.html#typing.Protocol

    stream: trio.SocketStream
    drained: list[MsgType]

    address_type: ClassVar[Type[Address]]
    codec_key: ClassVar[str]

    # XXX: should this instead be called `.sendall()`?
    async def send(self, msg: MsgType) -> None:
        ...

    async def recv(self) -> MsgType:
        ...

    def __aiter__(self) -> MsgType:
        ...

    def connected(self) -> bool:
        ...

    # defining this sync otherwise it causes a mypy error because it
    # can't figure out it's a generator i guess?..?
    def drain(self) -> AsyncIterator[dict]:
        ...

    @classmethod
    def key(cls) -> MsgTransportKey:
        return (
            cls.codec_key,
            cls.address_type.proto_key,
        )

    @property
    def laddr(self) -> Address:
        ...

    @property
    def raddr(self) -> Address:
        ...

    @property
    def maddr(self) -> str:
        ...

    @classmethod
    async def connect_to(
        cls,
        addr: Address,
        **kwargs
    ) -> MsgTransport:
        ...

    @classmethod
    def get_stream_addrs(
        cls,
        stream: trio.abc.Stream
    ) -> tuple[
        Address,  # local
        Address   # remote
    ]:
        '''
        Return the transport protocol's address pair for the local
        and remote-peer side.

        '''
        ...

    # TODO, such that all `.raddr`s for each `SocketStream` are
    # delivered?
    # -[ ] move `.open_listener()` here and internally track the
    #     listener set, per address?
    # def get_peers(
    #     self,
    # ) -> list[Address]:
    #     ...
class MsgpackTransport(MsgTransport):

    # TODO: better naming for this?
    # -[ ] check how libp2p does naming for such things?
    codec_key: str = 'msgpack'

    def __init__(
        self,
        stream: trio.abc.Stream,
        prefix_size: int = 4,

        # XXX optionally provided codec pair for `msgspec`:
        # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
        #
        # TODO: define this as a `Codec` struct which can be
        # overriden dynamically by the application/runtime?
        codec: MsgCodec = None,

    ) -> None:
        self.stream = stream
        (
            self._laddr,
            self._raddr,
        ) = self.get_stream_addrs(stream)

        # create read loop instance
        self._aiter_pkts = self._iter_packets()
        self._send_lock = trio.StrictFIFOLock()

        # public i guess?
        self.drained: list[dict] = []

        self.recv_stream = BufferedReceiveStream(
            transport_stream=stream
        )
        self.prefix_size = prefix_size

        # allow for custom IPC msg interchange format
        # dynamic override Bo
        self._task = trio.lowlevel.current_task()

        # XXX for ctxvar debug only!
        # self._codec: MsgCodec = (
        #     codec
        #     or
        #     _codec._ctxvar_MsgCodec.get()
        # )

    async def _iter_packets(self) -> AsyncGenerator[dict, None]:
        '''
        Yield `bytes`-blob decoded packets from the underlying TCP
        stream using the current task's `MsgCodec`.

        This is a streaming routine implemented as an async generator
        func (which was the original design, but could be changed?)
        and is allocated by a `.__call__()` inside `.__init__()` where
        it is assigned to the `._aiter_pkts` attr.

        '''
        decodes_failed: int = 0

        tpt_name: str = f'{type(self).__name__!r}'
        while True:
            try:
                header: bytes = await self.recv_stream.receive_exactly(4)
            except (
                ValueError,
                ConnectionResetError,

                # not sure entirely why we need this but without it we
                # seem to be getting racy failures here on
                # arbiter/registry name subs..
                trio.BrokenResourceError,

            ) as trans_err:

                loglevel = 'transport'
                match trans_err:
                    # case (
                    #     ConnectionResetError()
                    # ):
                    #     loglevel = 'transport'

                    # peer actor (graceful??) TCP EOF but `tricycle`
                    # seems to raise a 0-bytes-read?
                    case ValueError() if (
                        'unclean EOF' in trans_err.args[0]
                    ):
                        pass

                    # peer actor (task) prolly shutdown quickly due
                    # to cancellation
                    case trio.BrokenResourceError() if (
                        'Connection reset by peer' in trans_err.args[0]
                    ):
                        pass

                    # unless the disconnect condition falls under "a
                    # normal operation breakage" we usually console warn
                    # about it.
                    case _:
                        loglevel: str = 'warning'

                raise TransportClosed(
                    message=(
                        f'{tpt_name} already closed by peer\n'
                    ),
                    src_exc=trans_err,
                    loglevel=loglevel,
                ) from trans_err

            # XXX definitely can happen if transport is closed
            # manually by another `trio.lowlevel.Task` in the
            # same actor; we use this in some simulated fault
            # testing for ex, but generally should never happen
            # under normal operation!
            #
            # NOTE: as such we always re-raise this error from the
            # RPC msg loop!
            except trio.ClosedResourceError as cre:
                closure_err = cre

                raise TransportClosed(
                    message=(
                        f'{tpt_name} was already closed locally ?\n'
                    ),
                    src_exc=closure_err,
                    loglevel='error',
                    raise_on_report=(
                        'another task closed this fd' in closure_err.args
                    ),
                ) from closure_err

            # graceful TCP EOF disconnect
            if header == b'':
                raise TransportClosed(
                    message=(
                        f'{tpt_name} already gracefully closed\n'
                    ),
                    loglevel='transport',
                )

            size: int
            size, = struct.unpack("<I", header)

            log.transport(f'received header {size}')  # type: ignore
            msg_bytes: bytes = await self.recv_stream.receive_exactly(size)

            log.transport(f"received {msg_bytes}")  # type: ignore
            try:
                # NOTE: lookup the `trio.Task.context`'s var for
                # the current `MsgCodec`.
                codec: MsgCodec = _ctxvar_MsgCodec.get()

                # XXX for ctxvar debug only!
                # if self._codec.pld_spec != codec.pld_spec:
                #     assert (
                #         task := trio.lowlevel.current_task()
                #     ) is not self._task
                #     self._task = task
                #     self._codec = codec
                #     log.runtime(
                #         f'Using new codec in {self}.recv()\n'
                #         f'codec: {self._codec}\n\n'
                #         f'msg_bytes: {msg_bytes}\n'
                #     )
                yield codec.decode(msg_bytes)

            # XXX NOTE: since the below error derives from
            # `DecodeError` we need to catch it specially
            # and always raise such that spec violations
            # are never allowed to be caught silently!
            except msgspec.ValidationError as verr:
                msgtyperr: MsgTypeError = _mk_recv_mte(
                    msg=msg_bytes,
                    codec=codec,
                    src_validation_error=verr,
                )
                # XXX deliver up to `Channel.recv()` where
                # a re-raise and `Error`-pack can inject the far
                # end actor `.uid`.
                yield msgtyperr

            except (
                msgspec.DecodeError,
                UnicodeDecodeError,
            ):
                if decodes_failed < 4:
                    # ignore decoding errors for now and assume they have to
                    # do with a channel drop - hope that receiving from the
                    # channel will raise an expected error and bubble up.
                    try:
                        msg_str: str|bytes = msg_bytes.decode()
                    except UnicodeDecodeError:
                        msg_str = msg_bytes

                    log.exception(
                        'Failed to decode msg?\n'
                        f'{codec}\n\n'
                        'Rxed bytes from wire:\n\n'
                        f'{msg_str!r}\n'
                    )
                    decodes_failed += 1
                else:
                    raise

    async def send(
        self,
        msg: msgtypes.MsgType,

        strict_types: bool = True,
        hide_tb: bool = True,

    ) -> None:
        '''
        Send a msgpack encoded py-object-blob-as-msg over TCP.

        If `strict_types == True` then a `MsgTypeError` will be raised on any
        invalid msg type

        '''
        __tracebackhide__: bool = hide_tb

        # XXX see `trio._sync.AsyncContextManagerMixin` for details
        # on the `.acquire()`/`.release()` sequencing..
        async with self._send_lock:

            # NOTE: lookup the `trio.Task.context`'s var for
            # the current `MsgCodec`.
            codec: MsgCodec = _ctxvar_MsgCodec.get()

            # XXX for ctxvar debug only!
            # if self._codec.pld_spec != codec.pld_spec:
            #     self._codec = codec
            #     log.runtime(
            #         f'Using new codec in {self}.send()\n'
            #         f'codec: {self._codec}\n\n'
            #         f'msg: {msg}\n'
            #     )

            if type(msg) not in msgtypes.__msg_types__:
                if strict_types:
                    raise _mk_send_mte(
                        msg,
                        codec=codec,
                    )
                else:
                    log.warning(
                        'Sending non-`Msg`-spec msg?\n\n'
                        f'{msg}\n'
                    )

            try:
                bytes_data: bytes = codec.encode(msg)
            except TypeError as _err:
                typerr = _err
                msgtyperr: MsgTypeError = _mk_send_mte(
                    msg,
                    codec=codec,
                    message=(
                        f'IPC-msg-spec violation in\n\n'
                        f'{pretty_struct.Struct.pformat(msg)}'
                    ),
                    src_type_error=typerr,
                )
                raise msgtyperr from typerr

            # supposedly the fastest says,
            # https://stackoverflow.com/a/54027962
            size: bytes = struct.pack("<I", len(bytes_data))
            try:
                return await self.stream.send_all(size + bytes_data)
            except (
                trio.BrokenResourceError,
            ) as bre:
                trans_err = bre
                tpt_name: str = f'{type(self).__name__!r}'
                match trans_err:
                    case trio.BrokenResourceError() if (
                        '[Errno 32] Broken pipe' in trans_err.args[0]
                        # ^XXX, specific to UDS transport and its,
                        # well, "speediness".. XD
                        # |_ likely todo with races related to how fast
                        #    the socket is setup/torn-down on linux
                        #    as it pertains to rando pings from the
                        #    `.discovery` subsys and protos.
                    ):
                        raise TransportClosed.from_src_exc(
                            message=(
                                f'{tpt_name} already closed by peer\n'
                            ),
                            body=f'{self}\n',
                            src_exc=trans_err,
                            raise_on_report=True,
                            loglevel='transport',
                        ) from bre

                    # unless the disconnect condition falls under "a
                    # normal operation breakage" we usually console warn
                    # about it.
                    case _:
                        log.exception(
                            f'{tpt_name} layer failed pre-send ??\n'
                        )
                        raise trans_err

        # ?TODO? does it help ever to dynamically show this
        # frame?
        # try:
        #     <the-above_code>
        # except BaseException as _err:
        #     err = _err
        #     if not isinstance(err, MsgTypeError):
        #         __tracebackhide__: bool = False
        #     raise

    async def recv(self) -> msgtypes.MsgType:
        return await self._aiter_pkts.asend(None)

    async def drain(self) -> AsyncIterator[dict]:
        '''
        Drain the stream's remaining messages sent from
        the far end until the connection is closed by
        the peer.

        '''
        try:
            async for msg in self._iter_packets():
                self.drained.append(msg)
        except TransportClosed:
            for msg in self.drained:
                yield msg

    def __aiter__(self):
        return self._aiter_pkts

    @property
    def laddr(self) -> Address:
        return self._laddr

    @property
    def raddr(self) -> Address:
        return self._raddr

    def pformat(self) -> str:
        return (
            f'<{type(self).__name__}(\n'
            f' |_peers: 2\n'
            f'   laddr: {self._laddr}\n'
            f'   raddr: {self._raddr}\n'
            # f'\n'
            f' |_task: {self._task}\n'
            f')>\n'
        )

    __repr__ = __str__ = pformat
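The wire format used by `MsgpackTransport` above is just a 4-byte little-endian length prefix followed by the msgpack blob. A self-contained sketch of that framing, using plain `msgspec.msgpack` in place of tractor's `MsgCodec`:

# Frame/unframe one message the way `.send()`/`._iter_packets()` do.
import struct
import msgspec


def frame(payload: dict) -> bytes:
    blob: bytes = msgspec.msgpack.encode(payload)
    return struct.pack('<I', len(blob)) + blob


def unframe(wire: bytes) -> dict:
    size, = struct.unpack('<I', wire[:4])
    return msgspec.msgpack.decode(wire[4:4 + size])


assert unframe(frame({'cmd': 'ping'})) == {'cmd': 'ping'}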
@@ -1,123 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
IPC subsys type-lookup helpers?

'''
from typing import (
    Type,
    # TYPE_CHECKING,
)

import trio
import socket

from tractor.ipc._transport import (
    MsgTransportKey,
    MsgTransport
)
from tractor.ipc._tcp import (
    TCPAddress,
    MsgpackTCPStream,
)
from tractor.ipc._uds import (
    UDSAddress,
    MsgpackUDSStream,
)

# if TYPE_CHECKING:
#     from tractor._addr import Address


Address = TCPAddress|UDSAddress

# manually updated list of all supported msg transport types
_msg_transports = [
    MsgpackTCPStream,
    MsgpackUDSStream
]


# convert a MsgTransportKey to the corresponding transport type
_key_to_transport: dict[
    MsgTransportKey,
    Type[MsgTransport],
] = {
    ('msgpack', 'tcp'): MsgpackTCPStream,
    ('msgpack', 'uds'): MsgpackUDSStream,
}

# convert an Address wrapper to its corresponding transport type
_addr_to_transport: dict[
    Type[TCPAddress|UDSAddress],
    Type[MsgTransport]
] = {
    TCPAddress: MsgpackTCPStream,
    UDSAddress: MsgpackUDSStream,
}


def transport_from_addr(
    addr: Address,
    codec_key: str = 'msgpack',
) -> Type[MsgTransport]:
    '''
    Given a destination address and a desired codec, find the
    corresponding `MsgTransport` type.

    '''
    try:
        return _addr_to_transport[type(addr)]

    except KeyError:
        raise NotImplementedError(
            f'No known transport for address {repr(addr)}'
        )


def transport_from_stream(
    stream: trio.abc.Stream,
    codec_key: str = 'msgpack'
) -> Type[MsgTransport]:
    '''
    Given an arbitrary `trio.abc.Stream` and a desired codec,
    find the corresponding `MsgTransport` type.

    '''
    transport = None
    if isinstance(stream, trio.SocketStream):
        sock: socket.socket = stream.socket
        match sock.family:
            case socket.AF_INET | socket.AF_INET6:
                transport = 'tcp'

            case socket.AF_UNIX:
                transport = 'uds'

            case _:
                raise NotImplementedError(
                    f'Unsupported socket family: {sock.family}'
                )

    if not transport:
        raise NotImplementedError(
            f'Could not figure out transport type for stream type {type(stream)}'
        )

    key = (codec_key, transport)

    return _key_to_transport[key]
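Example lookups against the tables above (the address values are illustrative):

# Resolve an address wrapper to its transport type.
from tractor.ipc._tcp import TCPAddress

addr = TCPAddress.from_addr(('127.0.0.1', 1616))
tpt_type = transport_from_addr(addr)
assert tpt_type is MsgpackTCPStream
assert tpt_type.key() == ('msgpack', 'tcp')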
@@ -1,422 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Unix Domain Socket implementation of tractor.ipc._transport.MsgTransport protocol

'''
from __future__ import annotations
from pathlib import Path
import os
from socket import (
    AF_UNIX,
    SOCK_STREAM,
    SO_PASSCRED,
    SO_PEERCRED,
    SOL_SOCKET,
)
import struct
from typing import (
    TYPE_CHECKING,
    ClassVar,
)

import msgspec
import trio
from trio import (
    socket,
    SocketListener,
)
from trio._highlevel_open_unix_stream import (
    close_on_error,
    has_unix,
)

from tractor.msg import MsgCodec
from tractor.log import get_logger
from tractor.ipc._transport import (
    MsgpackTransport,
)
from .._state import (
    get_rt_dir,
    current_actor,
    is_root_process,
)

if TYPE_CHECKING:
    from ._runtime import Actor


log = get_logger(__name__)


def unwrap_sockpath(
    sockpath: Path,
) -> tuple[Path, Path]:
    return (
        sockpath.parent,
        sockpath.name,
    )


class UDSAddress(
    msgspec.Struct,
    frozen=True,
):
    filedir: str|Path|None
    filename: str|Path
    maybe_pid: int|None = None

    # TODO, maybe we should use better field and value
    # -[x] really this is a `.protocol_key` not a "name" of anything.
    # -[ ] consider a 'unix' proto-key instead?
    # -[ ] need to check what other mult-transport frameworks do
    #     like zmq, nng, uri-spec et al!
    proto_key: ClassVar[str] = 'uds'
    unwrapped_type: ClassVar[type] = tuple[str, int]
    def_bindspace: ClassVar[Path] = get_rt_dir()

    @property
    def bindspace(self) -> Path:
        '''
        We replicate the "ip-set-of-hosts" part of a UDS socket as
        just the sub-directory in which we allocate socket files.

        '''
        return (
            self.filedir
            or
            self.def_bindspace
            # or
            # get_rt_dir()
        )

    @property
    def sockpath(self) -> Path:
        return self.bindspace / self.filename

    @property
    def is_valid(self) -> bool:
        '''
        We block socket files not allocated under the runtime subdir.

        '''
        return self.bindspace in self.sockpath.parents

    @classmethod
    def from_addr(
        cls,
        addr: (
            tuple[Path|str, Path|str]|Path|str
        ),
    ) -> UDSAddress:
        match addr:
            case tuple()|list():
                filedir = Path(addr[0])
                filename = Path(addr[1])
                return UDSAddress(
                    filedir=filedir,
                    filename=filename,
                    # maybe_pid=pid,
                )
            # NOTE, in case we ever decide to just `.unwrap()`
            # to a `Path|str`?
            case str()|Path():
                sockpath: Path = Path(addr)
                return UDSAddress(*unwrap_sockpath(sockpath))
            case _:
                # import pdbp; pdbp.set_trace()
                raise TypeError(
                    f'Bad unwrapped-address for {cls} !\n'
                    f'{addr!r}\n'
                )

    def unwrap(self) -> tuple[str, int]:
        # XXX NOTE, since this gets passed DIRECTLY to
        # `.ipc._uds.open_unix_socket_w_passcred()`
        return (
            str(self.filedir),
            str(self.filename),
        )

    @classmethod
    def get_random(
        cls,
        bindspace: Path|None = None,  # default netns
    ) -> UDSAddress:

        filedir: Path = bindspace or cls.def_bindspace
        pid: int = os.getpid()
        actor: Actor|None = current_actor(
            err_on_no_runtime=False,
        )
        if actor:
            sockname: str = '::'.join(actor.uid) + f'@{pid}'
        else:
            prefix: str = '<unknown-actor>'
            if is_root_process():
                prefix: str = 'root'
            sockname: str = f'{prefix}@{pid}'

        sockpath: Path = Path(f'{sockname}.sock')
        return UDSAddress(
            filedir=filedir,
            filename=sockpath,
            maybe_pid=pid,
        )

    @classmethod
    def get_root(cls) -> UDSAddress:
        def_uds_filename: Path = 'registry@1616.sock'
        return UDSAddress(
            filedir=cls.def_bindspace,
            filename=def_uds_filename,
            # maybe_pid=1616,
        )

    # ?TODO, maybe we should just use our `.msg.pretty_struct.Struct`
    # for this instead?
    # -[ ] is it too "multi-line"y tho?
    #     the compact tuple/.unwrapped() form is simple enough?
    #
    def __repr__(self) -> str:
        if not (pid := self.maybe_pid):
            pid: str = '<unknown-peer-pid>'

        body: str = (
            f'({self.filedir}, {self.filename}, {pid})'
        )
        return (
            f'{type(self).__name__}'
            f'['
            f'{body}'
            f']'
        )
async def start_listener(
|
||||
addr: UDSAddress,
|
||||
**kwargs,
|
||||
) -> SocketListener:
|
||||
# sock = addr._sock = socket.socket(
|
||||
sock = socket.socket(
|
||||
socket.AF_UNIX,
|
||||
socket.SOCK_STREAM
|
||||
)
|
||||
log.info(
|
||||
f'Attempting to bind UDS socket\n'
|
||||
f'>[\n'
|
||||
f'|_{addr}\n'
|
||||
)
|
||||
|
||||
bindpath: Path = addr.sockpath
|
||||
try:
|
||||
await sock.bind(str(bindpath))
|
||||
except (
|
||||
FileNotFoundError,
|
||||
) as fdne:
|
||||
raise ConnectionError(
|
||||
f'Bad UDS socket-filepath-as-address ??\n'
|
||||
f'{addr}\n'
|
||||
f' |_sockpath: {addr.sockpath}\n'
|
||||
) from fdne
|
||||
|
||||
sock.listen(1)
|
||||
log.info(
|
||||
f'Listening on UDS socket\n'
|
||||
f'[>\n'
|
||||
f' |_{addr}\n'
|
||||
)
|
||||
return SocketListener(sock)
|
||||
|
||||
|
||||
def close_listener(
|
||||
addr: UDSAddress,
|
||||
lstnr: SocketListener,
|
||||
) -> None:
|
||||
'''
|
||||
Close and remove the listening unix socket's path.
|
||||
|
||||
'''
|
||||
lstnr.socket.close()
|
||||
os.unlink(addr.sockpath)
|
||||
|
||||
|
||||
async def open_unix_socket_w_passcred(
|
||||
filename: str|bytes|os.PathLike[str]|os.PathLike[bytes],
|
||||
) -> trio.SocketStream:
|
||||
'''
|
||||
Literally the exact same as `trio.open_unix_socket()` except we set the additiona
|
||||
`socket.SO_PASSCRED` option to ensure the server side (the process calling `accept()`)
|
||||
can extract the connecting peer's credentials, namely OS specific process
|
||||
related IDs.
|
||||
|
||||
See this SO for "why" the extra opts,
|
||||
- https://stackoverflow.com/a/7982749
|
||||
|
||||
'''
|
||||
if not has_unix:
|
||||
raise RuntimeError("Unix sockets are not supported on this platform")
|
||||
|
||||
# much more simplified logic vs tcp sockets - one socket type and only one
|
||||
# possible location to connect to
|
||||
sock = trio.socket.socket(AF_UNIX, SOCK_STREAM)
|
||||
sock.setsockopt(SOL_SOCKET, SO_PASSCRED, 1)
|
||||
with close_on_error(sock):
|
||||
await sock.connect(os.fspath(filename))
|
||||
|
||||
return trio.SocketStream(sock)
|
||||
|
||||
|
||||
def get_peer_info(sock: trio.socket.socket) -> tuple[
|
||||
int, # pid
|
||||
int, # uid
|
||||
int, # guid
|
||||
]:
|
||||
'''
|
||||
Deliver the connecting peer's "credentials"-info as defined in
|
||||
a very Linux specific way..
|
||||
|
||||
For more deats see,
|
||||
- `man accept`,
|
||||
- `man unix`,
|
||||
|
||||
this great online guide to all things sockets,
|
||||
- https://beej.us/guide/bgnet/html/split-wide/man-pages.html#setsockoptman
|
||||
|
||||
AND this **wonderful SO answer**
|
||||
- https://stackoverflow.com/a/7982749
|
||||
|
||||
'''
|
||||
creds: bytes = sock.getsockopt(
|
||||
SOL_SOCKET,
|
||||
SO_PEERCRED,
|
||||
struct.calcsize('3i')
|
||||
)
|
||||
# i.e a tuple of the fields,
|
||||
# pid: int, "process"
|
||||
# uid: int, "user"
|
||||
# gid: int, "group"
|
||||
return struct.unpack('3i', creds)
|
||||
|
||||
|
||||
class MsgpackUDSStream(MsgpackTransport):
|
||||
'''
|
||||
A `trio.SocketStream` around a Unix-Domain-Socket transport
|
||||
delivering `msgpack` encoded msgs using the `msgspec` codec lib.
|
||||
|
||||
'''
|
||||
address_type = UDSAddress
|
||||
layer_key: int = 4
|
||||
|
||||
@property
|
||||
def maddr(self) -> str:
|
||||
if not self.raddr:
|
||||
return '<unknown-peer>'
|
||||
|
||||
filepath: Path = Path(self.raddr.unwrap()[0])
|
||||
return (
|
||||
f'/{self.address_type.proto_key}/{filepath}'
|
||||
# f'/{self.chan.uid[0]}'
|
||||
# f'/{self.cid}'
|
||||
|
||||
# f'/cid={cid_head}..{cid_tail}'
|
||||
# TODO: ? not use this ^ right ?
|
||||
)
|
||||
|
||||
def connected(self) -> bool:
|
||||
return self.stream.socket.fileno() != -1
|
||||
|
||||
@classmethod
|
||||
async def connect_to(
|
||||
cls,
|
||||
addr: UDSAddress,
|
||||
prefix_size: int = 4,
|
||||
codec: MsgCodec|None = None,
|
||||
**kwargs
|
||||
) -> MsgpackUDSStream:
|
||||
|
||||
|
||||
sockpath: Path = addr.sockpath
|
||||
#
|
||||
# ^XXX NOTE, we don't provide any out-of-band `.pid` info
|
||||
# (like, over the socket as extra msgs) since the (augmented)
|
||||
# `.setsockopt()` call tells the OS provide it; the client
|
||||
# pid can then be read on server/listen() side via
|
||||
# `get_peer_info()` above.
|
||||
try:
|
||||
stream = await open_unix_socket_w_passcred(
|
||||
str(sockpath),
|
||||
**kwargs
|
||||
)
|
||||
except (
|
||||
FileNotFoundError,
|
||||
) as fdne:
|
||||
raise ConnectionError(
|
||||
f'Bad UDS socket-filepath-as-address ??\n'
|
||||
f'{addr}\n'
|
||||
f' |_sockpath: {sockpath}\n'
|
||||
) from fdne
|
||||
|
||||
stream = MsgpackUDSStream(
|
||||
stream,
|
||||
prefix_size=prefix_size,
|
||||
codec=codec
|
||||
)
|
||||
stream._raddr = addr
|
||||
return stream
|
||||
|
||||
@classmethod
|
||||
def get_stream_addrs(
|
||||
cls,
|
||||
stream: trio.SocketStream
|
||||
) -> tuple[
|
||||
Path,
|
||||
int,
|
||||
]:
|
||||
sock: trio.socket.socket = stream.socket
|
||||
|
||||
# NOTE XXX, it's unclear why one or the other ends up being
|
||||
# `bytes` versus the socket-file-path, i presume it's
|
||||
# something to do with who is the server (called `.listen()`)?
|
||||
# maybe could be better implemented using another info-query
|
||||
# on the socket like,
|
||||
# https://beej.us/guide/bgnet/html/split-wide/system-calls-or-bust.html#gethostnamewho-am-i
|
||||
sockname: str|bytes = sock.getsockname()
|
||||
# https://beej.us/guide/bgnet/html/split-wide/system-calls-or-bust.html#getpeernamewho-are-you
|
||||
peername: str|bytes = sock.getpeername()
|
||||
match (peername, sockname):
|
||||
case (str(), bytes()):
|
||||
sock_path: Path = Path(peername)
|
||||
case (bytes(), str()):
|
||||
sock_path: Path = Path(sockname)
|
||||
(
|
||||
peer_pid,
|
||||
_,
|
||||
_,
|
||||
) = get_peer_info(sock)
|
||||
|
||||
filedir, filename = unwrap_sockpath(sock_path)
|
||||
laddr = UDSAddress(
|
||||
filedir=filedir,
|
||||
filename=filename,
|
||||
maybe_pid=os.getpid(),
|
||||
)
|
||||
raddr = UDSAddress(
|
||||
filedir=filedir,
|
||||
filename=filename,
|
||||
maybe_pid=peer_pid
|
||||
)
|
||||
return (laddr, raddr)
|
|
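Worth noting for reviewers: the `SO_PASSCRED`/`SO_PEERCRED` pairing removed above is plain stdlib socket usage; a minimal, Linux-only sketch of the listen-side credential read (the socket path is illustrative only):

import socket
import struct

# server side: accept a UDS connection then read the peer's
# (pid, uid, gid) triple exactly as `get_peer_info()` did above.
srv = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
srv.bind('/tmp/demo.sock')  # hypothetical path
srv.listen(1)
conn, _ = srv.accept()

# `SO_PEERCRED` packs (pid, uid, gid) as three native ints
pid, uid, gid = struct.unpack(
    '3i',
    conn.getsockopt(
        socket.SOL_SOCKET,
        socket.SO_PEERCRED,
        struct.calcsize('3i'),
    ),
)
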
@ -92,7 +92,7 @@ class StackLevelAdapter(LoggerAdapter):
    ) -> None:
        '''
        IPC transport level msg IO; generally anything below
        `.ipc.Channel` and friends.
        `._ipc.Channel` and friends.

        '''
        return self.log(5, msg)

@ -285,7 +285,7 @@ def get_logger(
    # NOTE: for handling modules that use ``get_logger(__name__)``
    # we make the following stylistic choice:
    # - always avoid duplicate project-package token
    #   in msg output: i.e. tractor.tractor.ipc._chan.py in header
    #   in msg output: i.e. tractor.tractor _ipc.py in header
    #   looks ridiculous XD
    # - never show the leaf module name in the {name} part
    #   since in python the {filename} is always this same

@ -33,7 +33,6 @@ from ._codec import (
    apply_codec as apply_codec,
    mk_codec as mk_codec,
    mk_dec as mk_dec,
    MsgCodec as MsgCodec,
    MsgDec as MsgDec,
    current_codec as current_codec,

@ -61,7 +61,6 @@ from tractor.msg.pretty_struct import Struct
from tractor.msg.types import (
    mk_msg_spec,
    MsgType,
    PayloadMsg,
)
from tractor.log import get_logger

@ -81,7 +80,6 @@ class MsgDec(Struct):

    '''
    _dec: msgpack.Decoder
    # _ext_types_box: Struct|None = None

    @property
    def dec(self) -> msgpack.Decoder:

@ -181,126 +179,23 @@ class MsgDec(Struct):

def mk_dec(
    spec: Union[Type[Struct]]|Type|None,

    # NOTE, required for ad-hoc type extensions to the underlying
    # serialization proto (which is default `msgpack`),
    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
    spec: Union[Type[Struct]]|Any = Any,
    dec_hook: Callable|None = None,
    ext_types: list[Type]|None = None,

) -> MsgDec:
    '''
    Create an IPC msg decoder, a slightly higher level wrapper around
    a `msgspec.msgpack.Decoder` which provides,

    - easier introspection of the underlying type spec via
      the `.spec` and `.spec_str` attrs,
    - `.hook` access to the `Decoder.dec_hook()`,
    - automatic custom extension-types decode support when
      `dec_hook()` is provided such that any `PayloadMsg.pld` tagged
      as a type from `ext_types` (presuming the `MsgCodec.encode()` also used
      a `.enc_hook()`) is processed and constructed by a `PldRx` implicitly.

    NOTE, as mentioned a `MsgDec` is normally used for `PayloadMsg.pld: PayloadT` field
    decoding inside an IPC-ctx-oriented `PldRx`.
    Create an IPC msg decoder, normally used as the
    `PayloadMsg.pld: PayloadT` field decoder inside a `PldRx`.

    '''
    if (
        spec is None
        and
        ext_types is None
    ):
        raise TypeError(
            f'Missing type-`spec` for msg decoder!\n'
            f'\n'
            f'`spec=None` is **only** permitted if custom extension types '
            f'are provided via `ext_types`, meaning it must be non-`None`.\n'
            f'\n'
            f'In this case it is presumed that only the `ext_types`, '
            f'which must be handled by a paired `dec_hook()`, '
            f'will be permitted within the payload type-`spec`!\n'
            f'\n'
            f'spec = {spec!r}\n'
            f'dec_hook = {dec_hook!r}\n'
            f'ext_types = {ext_types!r}\n'
        )

    if dec_hook:
        if ext_types is None:
            raise TypeError(
                f'If extending the serializable types with a custom decode hook (`dec_hook()`), '
                f'you must also provide the expected type set that the hook will handle '
                f'via a `ext_types: Union[Type]|None = None` argument!\n'
                f'\n'
                f'dec_hook = {dec_hook!r}\n'
                f'ext_types = {ext_types!r}\n'
            )

        # XXX, i *thought* we would require a boxing struct as per docs,
        # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
        # |_ see comment,
        #  > Note that typed deserialization is required for
        #  > successful roundtripping here, so we pass `MyMessage` to
        #  > `Decoder`.
        #
        # BUT, turns out as long as you spec a union with `Raw` it
        # will work? kk B)
        #
        # maybe_box_struct = mk_boxed_ext_struct(ext_types)
        spec = Raw | Union[*ext_types]

    return MsgDec(
        _dec=msgpack.Decoder(
            type=spec,  # like `MsgType[Any]`
            dec_hook=dec_hook,
        ),
    )


# TODO? remove since didn't end up needing this?
def mk_boxed_ext_struct(
    ext_types: list[Type],
) -> Struct:
    # NOTE, originally was to wrap non-msgpack-supported "extension
    # types" in a field-typed boxing struct, see notes around the
    # `dec_hook()` branch in `mk_dec()`.
    ext_types_union = Union[*ext_types]
    repr_ext_types_union: str = (
        str(ext_types_union)
        or
        "|".join(ext_types)
    )
    BoxedExtType = msgspec.defstruct(
        f'BoxedExts[{repr_ext_types_union}]',
        fields=[
            ('boxed', ext_types_union),
        ],
    )
    return BoxedExtType


def unpack_spec_types(
    spec: Union[Type]|Type,
) -> set[Type]:
    '''
    Given an input type-`spec`, either a lone type
    or a `Union` of types (like `str|int|MyThing`),
    return a set of individual types.

    When `spec` is not a type-union returns `{spec,}`.

    '''
    spec_subtypes: set[Union[Type]] = set(
        getattr(
            spec,
            '__args__',
            {spec,},
        )
    )
    return spec_subtypes


def mk_msgspec_table(
    dec: msgpack.Decoder,
    msg: MsgType|None = None,

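For reference, the hook pairing that `mk_dec()` validates above can be exercised with plain `msgspec` outside of `tractor`; a standalone sketch where the `Point` type is an illustrative assumption, not part of this diff:

import msgspec

class Point:
    # a plain (non-`msgspec.Struct`) type, so codec hooks are required
    def __init__(self, x: float, y: float) -> None:
        self.x, self.y = x, y

def enc_hook(obj: object) -> object:
    if isinstance(obj, Point):
        return (obj.x, obj.y)  # wire form: a 2-array
    raise NotImplementedError(type(obj))

def dec_hook(typ: type, obj: object) -> object:
    if typ is Point:
        return Point(*obj)
    raise NotImplementedError(typ)

enc = msgspec.msgpack.Encoder(enc_hook=enc_hook)
dec = msgspec.msgpack.Decoder(type=Point, dec_hook=dec_hook)
pt: Point = dec.decode(enc.encode(Point(1.0, 2.0)))
assert (pt.x, pt.y) == (1.0, 2.0)
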
@ -378,8 +273,6 @@ class MsgCodec(Struct):
    _dec: msgpack.Decoder
    _pld_spec: Type[Struct]|Raw|Any

    # _ext_types_box: Struct|None = None

    def __repr__(self) -> str:
        speclines: str = textwrap.indent(
            pformat_msgspec(codec=self),

@ -446,15 +339,12 @@ class MsgCodec(Struct):

    def encode(
        self,
        py_obj: Any|PayloadMsg,
        py_obj: Any,

        use_buf: bool = False,
        # ^-XXX-^ uhh why am i getting this?
        # |_BufferError: Existing exports of data: object cannot be re-sized

        as_ext_type: bool = False,
        hide_tb: bool = True,

    ) -> bytes:
        '''
        Encode input python objects to `msgpack` bytes for

@ -464,46 +354,11 @@ class MsgCodec(Struct):
        https://jcristharif.com/msgspec/perf-tips.html#reusing-an-output-buffer

        '''
        __tracebackhide__: bool = hide_tb
        if use_buf:
            self._enc.encode_into(py_obj, self._buf)
            return self._buf

        return self._enc.encode(py_obj)
        # try:
        #     return self._enc.encode(py_obj)
        # except TypeError as typerr:
        #     typerr.add_note(
        #         '|_src error from `msgspec`'
        #         # f'|_{self._enc.encode!r}'
        #     )
        #     raise typerr

        # TODO! REMOVE once i'm confident we won't ever need it!
        #
        # box: Struct = self._ext_types_box
        # if (
        #     as_ext_type
        #     or
        #     (
        #         # XXX NOTE, auto-detect if the input type
        #         box
        #         and
        #         (ext_types := unpack_spec_types(
        #             spec=box.__annotations__['boxed'])
        #         )
        #     )
        # ):
        #     match py_obj:
        #         # case PayloadMsg(pld=pld) if (
        #         #     type(pld) in ext_types
        #         # ):
        #         #     py_obj.pld = box(boxed=py_obj)
        #         #     breakpoint()
        #         case _ if (
        #             type(py_obj) in ext_types
        #         ):
        #             py_obj = box(boxed=py_obj)
        else:
            return self._enc.encode(py_obj)

    @property
    def dec(self) -> msgpack.Decoder:

@ -523,30 +378,21 @@ class MsgCodec(Struct):
        return self._dec.decode(msg)


# ?TODO? time to remove this finally?
#
# -[x] TODO: a sub-decoder system as well?
#  => No! already re-architected to include a "payload-receiver"
#     now found in `._ops`.
# [x] TODO: a sub-decoder system as well? => No!
#
# -[x] do we still want to try and support the sub-decoder with
#   `.Raw` technique in the case that the `Generic` approach gives
#   future grief?
#  => well YES but NO, since we went with the `PldRx` approach
#     instead!
#  => NO, since we went with the `PldRx` approach instead B)
#
# IF however you want to see the code that was staged for this
# from wayyy back, see the pure removal commit.


def mk_codec(
    ipc_pld_spec: Union[Type[Struct]]|Any|Raw = Raw,
    # tagged-struct-types-union set for `Decoder`ing of payloads, as
    # per https://jcristharif.com/msgspec/structs.html#tagged-unions.
    # NOTE that the default `Raw` here **is very intentional** since
    # the `PldRx._pld_dec: MsgDec` is responsible for per ipc-ctx-task
    # decoding of msg-specs defined by the user as part of **their**
    # `tractor` "app's" type-limited IPC msg-spec.
    # struct type unions set for `Decoder`
    # https://jcristharif.com/msgspec/structs.html#tagged-unions
    ipc_pld_spec: Union[Type[Struct]]|Any = Any,

    # TODO: offering a per-msg(-field) type-spec such that
    # the fields can be dynamically NOT decoded and left as `Raw`

@ -559,18 +405,13 @@ def mk_codec(

    libname: str = 'msgspec',

    # settings for encoding-to-send extension-types,
    # proxy as `Struct(**kwargs)` for ad-hoc type extensions
    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
    # dec_hook: Callable|None = None,
    # ------ - ------
    dec_hook: Callable|None = None,
    enc_hook: Callable|None = None,
    ext_types: list[Type]|None = None,

    # optionally provided msg-decoder from which we pull its,
    # |_.dec_hook()
    # |_.type
    ext_dec: MsgDec|None = None
    # ------ - ------
    #
    # ?TODO? other params we might want to support
    # Encoder:
    # write_buffer_size=write_buffer_size,
    #

@ -584,44 +425,26 @@ def mk_codec(
    `msgspec` ;).

    '''
    pld_spec = ipc_pld_spec
    if enc_hook:
        if not ext_types:
            raise TypeError(
                f'If extending the serializable types with a custom encode hook (`enc_hook()`), '
                f'you must also provide the expected type set that the hook will handle '
                f'via a `ext_types: Union[Type]|None = None` argument!\n'
                f'\n'
                f'enc_hook = {enc_hook!r}\n'
                f'ext_types = {ext_types!r}\n'
            )

    dec_hook: Callable|None = None
    if ext_dec:
        dec: msgspec.Decoder = ext_dec.dec
        dec_hook = dec.dec_hook
        pld_spec |= dec.type
        if ext_types:
            pld_spec |= Union[*ext_types]

    # (manually) generate a msg-spec (how appropes) for all relevant
    # payload-boxing-struct-msg-types, parameterizing the
    # `PayloadMsg.pld: PayloadT` for the decoder such that all msgs
    # in our SC-RPC-protocol will automatically decode to
    # a type-"limited" payload (`Struct`) object (set).
    # (manually) generate a msg-payload-spec for all relevant
    # god-boxing-msg subtypes, parameterizing the `PayloadMsg.pld: PayloadT`
    # for the decoder such that all sub-type msgs in our SCIPP
    # will automatically decode to a type-"limited" payload (`Struct`)
    # object (set).
    (
        ipc_msg_spec,
        msg_types,
    ) = mk_msg_spec(
        payload_type_union=pld_spec,
        payload_type_union=ipc_pld_spec,
    )
    assert len(ipc_msg_spec.__args__) == len(msg_types)
    assert ipc_msg_spec

    msg_spec_types: set[Type] = unpack_spec_types(ipc_msg_spec)
    assert (
        len(ipc_msg_spec.__args__) == len(msg_types)
        and
        len(msg_spec_types) == len(msg_types)
    )
    # TODO: use this shim instead?
    # bc.. unification, err somethin?
    # dec: MsgDec = mk_dec(
    #     spec=ipc_msg_spec,
    #     dec_hook=dec_hook,
    # )

    dec = msgpack.Decoder(
        type=ipc_msg_spec,

@ -630,29 +453,22 @@ def mk_codec(
    enc = msgpack.Encoder(
        enc_hook=enc_hook,
    )

    codec = MsgCodec(
        _enc=enc,
        _dec=dec,
        _pld_spec=pld_spec,
        _pld_spec=ipc_pld_spec,
    )

    # sanity on expected backend support
    assert codec.lib.__name__ == libname

    return codec


# instance of the default `msgspec.msgpack` codec settings, i.e.
# no custom structs, hooks or other special types.
#
# XXX NOTE XXX, this will break our `Context.start()` call!
#
# * by default we roundtrip the started pld-`value` and if you apply
#   this codec (globally anyway with `apply_codec()`) then the
#   `roundtripped` value will include a non-`.pld: Raw` which will
#   then type-error on the consequent `._ops.validate_payload_msg()`..
#
_def_msgspec_codec: MsgCodec = mk_codec(
    ipc_pld_spec=Any,
)
_def_msgspec_codec: MsgCodec = mk_codec(ipc_pld_spec=Any)

# The built-in IPC `Msg` spec.
# Our composing "shuttle" protocol which allows `tractor`-app code

@ -660,13 +476,13 @@ _def_msgspec_codec: MsgCodec = mk_codec(
# https://jcristharif.com/msgspec/supported-types.html
#
_def_tractor_codec: MsgCodec = mk_codec(
    ipc_pld_spec=Raw,  # XXX should be default right!?
    # TODO: use this for debug mode locking prot?
    # ipc_pld_spec=Any,
    ipc_pld_spec=Raw,
)

# -[x] TODO, IDEALLY provides for per-`trio.Task` specificity of the
# TODO: IDEALLY provides for per-`trio.Task` specificity of the
# IPC msging codec used by the transport layer when doing
# `Channel.send()/.recv()` of wire data.
# => impled as our `PldRx` which is `Context` scoped B)

# ContextVar-TODO: DIDN'T WORK, kept resetting in every new task to default!?
# _ctxvar_MsgCodec: ContextVar[MsgCodec] = ContextVar(

@ -743,6 +559,17 @@ def apply_codec(
    )
    token: Token = var.set(codec)

    # ?TODO? for TreeVar approach which copies from the
    # cancel-scope of the prior value, NOT the prior task
    # See the docs:
    # - https://tricycle.readthedocs.io/en/latest/reference.html#tree-variables
    # - https://github.com/oremanj/tricycle/blob/master/tricycle/_tests/test_tree_var.py
    #  ^- see docs for @cm `.being()` API
    # with _ctxvar_MsgCodec.being(codec):
    #     new = _ctxvar_MsgCodec.get()
    #     assert new is codec
    #     yield codec

    try:
        yield var.get()
    finally:

@ -753,19 +580,6 @@ def apply_codec(
        )
        assert var.get() is orig

# ?TODO? for TreeVar approach which copies from the
# cancel-scope of the prior value, NOT the prior task
#
# See the docs:
# - https://tricycle.readthedocs.io/en/latest/reference.html#tree-variables
# - https://github.com/oremanj/tricycle/blob/master/tricycle/_tests/test_tree_var.py
#  ^- see docs for @cm `.being()` API
#
# with _ctxvar_MsgCodec.being(codec):
#     new = _ctxvar_MsgCodec.get()
#     assert new is codec
#     yield codec


def current_codec() -> MsgCodec:
    '''

@ -785,7 +599,6 @@ def limit_msg_spec(
    # -> related to the `MsgCodec._payload_decs` stuff above..
    # tagged_structs: list[Struct]|None = None,

    hide_tb: bool = True,
    **codec_kwargs,

) -> MsgCodec:

@ -796,7 +609,7 @@ def limit_msg_spec(
    for all IPC contexts in use by the current `trio.Task`.

    '''
    __tracebackhide__: bool = hide_tb
    __tracebackhide__: bool = True
    curr_codec: MsgCodec = current_codec()
    msgspec_codec: MsgCodec = mk_codec(
        ipc_pld_spec=payload_spec,

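A quick orientation sketch for the `apply_codec()` override semantics shown above; the `str`-only payload-spec is an illustrative choice, not something this diff prescribes:

from tractor.msg import (
    apply_codec,
    current_codec,
    mk_codec,
)

orig = current_codec()
# task-scoped override via the runtime's codec `ContextVar`
with apply_codec(mk_codec(ipc_pld_spec=str)) as codec:
    assert current_codec() is codec
# the prior codec is restored on exit
assert current_codec() is orig
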
@ -1,94 +0,0 @@
# tractor: structured concurrent "actors".
# Copyright 2018-eternity Tyler Goodlet.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Type-extension-utils for codec-ing (python) objects not
covered by the `msgspec.msgpack` protocol.

See the various API docs from `msgspec`.

extending from native types,
- https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types

converters,
- https://jcristharif.com/msgspec/converters.html
- https://jcristharif.com/msgspec/api.html#msgspec.convert

`Raw` fields,
- https://jcristharif.com/msgspec/api.html#raw
- support for `.convert()` and `Raw`,
  |_ https://jcristharif.com/msgspec/changelog.html

'''
from types import (
    ModuleType,
)
import typing
from typing import (
    Type,
    Union,
)


def dec_type_union(
    type_names: list[str],
    mods: list[ModuleType] = []
) -> Type|Union[Type]:
    '''
    Look up types by name, compile into a list and then create and
    return a `typing.Union` from the full set.

    '''
    # import importlib
    types: list[Type] = []
    for type_name in type_names:
        for mod in [
            typing,
            # importlib.import_module(__name__),
        ] + mods:
            if type_ref := getattr(
                mod,
                type_name,
                False,
            ):
                types.append(type_ref)

    # special case handling only..
    # ipc_pld_spec: Union[Type] = eval(
    #     pld_spec_str,
    #     {},  # globals
    #     {'typing': typing},  # locals
    # )

    return Union[*types]


def enc_type_union(
    union_or_type: Union[Type]|Type,
) -> list[str]:
    '''
    Encode a type-union or single type to a list of type-name-strings
    ready for IPC interchange.

    '''
    type_strs: list[str] = []
    for typ in getattr(
        union_or_type,
        '__args__',
        {union_or_type,},
    ):
        type_strs.append(typ.__qualname__)

    return type_strs

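Taken together the two removed helpers formed a simple name-based roundtrip; a hedged sketch using stdlib types only (assuming both functions in scope as defined above):

import builtins
import decimal
from decimal import Decimal

# flatten a union to qualnames for the wire..
wire: list[str] = enc_type_union(int | Decimal)  # -> ['int', 'Decimal']
# ..then rebuild it by module lookup on the rx side; note the defining
# modules must be passed via `mods` for any non-`typing` names.
assert dec_type_union(wire, mods=[builtins, decimal]) == int | Decimal
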
@ -50,9 +50,7 @@ from tractor._exceptions import (
    _mk_recv_mte,
    pack_error,
)
from tractor._state import (
    current_ipc_ctx,
)
from tractor._state import current_ipc_ctx
from ._codec import (
    mk_dec,
    MsgDec,

@ -80,7 +78,7 @@ if TYPE_CHECKING:
log = get_logger(__name__)


_def_any_pldec: MsgDec[Any] = mk_dec(spec=Any)
_def_any_pldec: MsgDec[Any] = mk_dec()


class PldRx(Struct):

@ -110,11 +108,33 @@ class PldRx(Struct):
    # TODO: better to bind it here?
    # _rx_mc: trio.MemoryReceiveChannel
    _pld_dec: MsgDec
    _ctx: Context|None = None
    _ipc: Context|MsgStream|None = None

    @property
    def pld_dec(self) -> MsgDec:
        return self._pld_dec

    # TODO: a better name?
    # -[ ] when would this be used as it avoids needing to pass the
    #      ipc prim to every method
    @cm
    def wraps_ipc(
        self,
        ipc_prim: Context|MsgStream,

    ) -> PldRx:
        '''
        Apply this payload receiver to an IPC primitive type, one
        of `Context` or `MsgStream`.

        '''
        self._ipc = ipc_prim
        try:
            yield self
        finally:
            self._ipc = None

    @cm
    def limit_plds(
        self,

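A hedged usage sketch for the new `wraps_ipc()` cm, assuming a `ctx: Context` handle from a surrounding `Portal.open_context()` block (the private-attr access mirrors the internals above and is illustrative only):

pld_rx: PldRx = ctx._pld_rx
with pld_rx.wraps_ipc(ctx) as rx:
    # the rx is bound to `ctx` for this scope..
    assert rx._ipc is ctx
# ..and unbound again on exit
assert pld_rx._ipc is None
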
@ -128,10 +148,6 @@ class PldRx(Struct):
        exit.

        '''
        # TODO, ensure we pull the current `MsgCodec`'s custom
        # dec/enc_hook settings as well ?
        # -[ ] see `._codec.mk_codec()` inputs
        #
        orig_dec: MsgDec = self._pld_dec
        limit_dec: MsgDec = mk_dec(
            spec=spec,

@ -147,7 +163,7 @@ class PldRx(Struct):
    def dec(self) -> msgpack.Decoder:
        return self._pld_dec.dec

    def recv_msg_nowait(
    def recv_pld_nowait(
        self,
        # TODO: make this `MsgStream` compat as well, see above^
        # ipc_prim: Context|MsgStream,

@ -158,95 +174,34 @@ class PldRx(Struct):
        hide_tb: bool = False,
        **dec_pld_kwargs,

    ) -> tuple[
        MsgType[PayloadT],
        PayloadT,
    ]:
        '''
        Attempt a non-blocking receive of a message from the `._rx_chan` and
        unwrap its payload, delivering the pair to the caller.

        '''
    ) -> Any|Raw:
        __tracebackhide__: bool = hide_tb

        msg: MsgType = (
            ipc_msg
            or
            # sync-rx msg from underlying IPC feeder (mem-)chan
            ipc._rx_chan.receive_nowait()
        )
        pld: PayloadT = self.decode_pld(
        return self.decode_pld(
            msg,
            ipc=ipc,
            expect_msg=expect_msg,
            hide_tb=hide_tb,
            **dec_pld_kwargs,
        )
        return (
            msg,
            pld,
        )

    async def recv_msg(
        self,
        ipc: Context|MsgStream,
        expect_msg: MsgType,

        # NOTE: ONLY for handling `Stop`-msgs that arrive during
        # a call to `drain_to_final_msg()` above!
        passthrough_non_pld_msgs: bool = True,
        hide_tb: bool = True,

        **decode_pld_kwargs,

    ) -> tuple[MsgType, PayloadT]:
        '''
        Retrieve the next avail IPC msg, decode its payload, and
        return the (msg, pld) pair.

        '''
        __tracebackhide__: bool = hide_tb
        msg: MsgType = await ipc._rx_chan.receive()
        match msg:
            case Return()|Error():
                log.runtime(
                    f'Rxed final outcome msg\n'
                    f'{msg}\n'
                )
            case Stop():
                log.runtime(
                    f'Rxed stream stopped msg\n'
                    f'{msg}\n'
                )
                if passthrough_non_pld_msgs:
                    return msg, None

        # TODO: is there some way we can inject the decoded
        # payload into an existing output buffer for the original
        # msg instance?
        pld: PayloadT = self.decode_pld(
            msg,
            ipc=ipc,
            expect_msg=expect_msg,
            hide_tb=hide_tb,

            **decode_pld_kwargs,
        )
        return (
            msg,
            pld,
        )

    async def recv_pld(
        self,
        ipc: Context|MsgStream,
        ipc_msg: MsgType[PayloadT]|None = None,
        ipc_msg: MsgType|None = None,
        expect_msg: Type[MsgType]|None = None,
        hide_tb: bool = True,

        **dec_pld_kwargs,

    ) -> PayloadT:
    ) -> Any|Raw:
        '''
        Receive a `MsgType`, then decode and return its `.pld` field.

@ -258,13 +213,6 @@ class PldRx(Struct):
            # async-rx msg from underlying IPC feeder (mem-)chan
            await ipc._rx_chan.receive()
        )
        if (
            type(msg) is Return
        ):
            log.info(
                f'Rxed final result msg\n'
                f'{msg}\n'
            )
        return self.decode_pld(
            msg=msg,
            ipc=ipc,

@ -453,6 +401,45 @@ class PldRx(Struct):
            __tracebackhide__: bool = False
            raise

    dec_msg = decode_pld

    async def recv_msg_w_pld(
        self,
        ipc: Context|MsgStream,
        expect_msg: MsgType,

        # NOTE: generally speaking only for handling `Stop`-msgs that
        # arrive during a call to `drain_to_final_msg()` above!
        passthrough_non_pld_msgs: bool = True,
        hide_tb: bool = True,
        **kwargs,

    ) -> tuple[MsgType, PayloadT]:
        '''
        Retrieve the next avail IPC msg, decode its payload, and return
        the pair of refs.

        '''
        __tracebackhide__: bool = hide_tb
        msg: MsgType = await ipc._rx_chan.receive()

        if passthrough_non_pld_msgs:
            match msg:
                case Stop():
                    return msg, None

        # TODO: is there some way we can inject the decoded
        # payload into an existing output buffer for the original
        # msg instance?
        pld: PayloadT = self.decode_pld(
            msg,
            ipc=ipc,
            expect_msg=expect_msg,
            hide_tb=hide_tb,
            **kwargs,
        )
        return msg, pld


@cm
def limit_plds(

@ -468,16 +455,11 @@ def limit_plds(

    '''
    __tracebackhide__: bool = True
    curr_ctx: Context|None = current_ipc_ctx()
    if curr_ctx is None:
        raise RuntimeError(
            'No IPC `Context` is active !?\n'
            'Did you open `limit_plds()` from outside '
            'a `Portal.open_context()` scope-block?'
        )
    try:
        curr_ctx: Context = current_ipc_ctx()
        rx: PldRx = curr_ctx._pld_rx
        orig_pldec: MsgDec = rx.pld_dec

        with rx.limit_plds(
            spec=spec,
            **dec_kwargs,

@ -487,11 +469,6 @@ def limit_plds(
            f'{pldec}\n'
        )
        yield pldec

    except BaseException:
        __tracebackhide__: bool = False
        raise

    finally:
        log.runtime(
            'Reverted to previous payload-decoder\n\n'

@ -545,8 +522,8 @@ async def maybe_limit_plds(
async def drain_to_final_msg(
    ctx: Context,

    msg_limit: int = 6,
    hide_tb: bool = True,
    msg_limit: int = 6,

) -> tuple[
    Return|None,

@ -575,8 +552,8 @@ async def drain_to_final_msg(
    even after ctx closure and the `.open_context()` block exit.

    '''
    __tracebackhide__: bool = hide_tb
    raise_overrun: bool = not ctx._allow_overruns
    parent_never_opened_stream: bool = ctx._stream is None

    # wait for a final context result by collecting (but
    # basically ignoring) any bi-dir-stream msgs still in transit

@ -585,14 +562,13 @@ async def drain_to_final_msg(
    result_msg: Return|Error|None = None
    while not (
        ctx.maybe_error
        and
        not ctx._final_result_is_set()
        and not ctx._final_result_is_set()
    ):
        try:
            # receive all msgs, scanning for either a final result
            # or error; the underlying call should never raise any
            # remote error directly!
            msg, pld = await ctx._pld_rx.recv_msg(
            msg, pld = await ctx._pld_rx.recv_msg_w_pld(
                ipc=ctx,
                expect_msg=Return,
                raise_error=False,

@ -639,11 +615,6 @@ async def drain_to_final_msg(
                )
                __tracebackhide__: bool = False

            else:
                log.cancel(
                    f'IPC ctx cancelled externally during result drain ?\n'
                    f'{ctx}'
                )
            # CASE 2: mask the local cancelled-error(s)
            # only when we are sure the remote error is
            # the source cause of this local task's

@ -675,24 +646,17 @@ async def drain_to_final_msg(
            case Yield():
                pre_result_drained.append(msg)
                if (
                    not parent_never_opened_stream
                    and (
                        (ctx._stream.closed
                         and
                         (reason := 'stream was already closed')
                        ) or
                        (ctx.cancel_acked
                         and
                         (reason := 'ctx cancelled other side')
                        )
                        or (ctx._cancel_called
                            and
                            (reason := 'ctx called `.cancel()`')
                        )
                        or (len(pre_result_drained) > msg_limit
                            and
                            (reason := f'"yield" limit={msg_limit}')
                        )
                    (ctx._stream.closed
                     and (reason := 'stream was already closed')
                    )
                    or (ctx.cancel_acked
                        and (reason := 'ctx cancelled other side')
                    )
                    or (ctx._cancel_called
                        and (reason := 'ctx called `.cancel()`')
                    )
                    or (len(pre_result_drained) > msg_limit
                        and (reason := f'"yield" limit={msg_limit}')
                    )
                ):
                    log.cancel(

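Both variants of the `Yield`-drain condition above lean on the same trick: a walrus assignment inside a short-circuiting `or`-chain records which branch fired. A standalone sketch with illustrative flag names:

# the first truthy branch both wins the `or` and records "why"
stream_closed: bool = False
cancel_called: bool = True

if (
    (stream_closed and (reason := 'stream closed'))
    or
    (cancel_called and (reason := 'cancel requested'))
):
    print(f'draining stopped: {reason}')  # -> cancel requested
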
@ -710,7 +674,7 @@ async def drain_to_final_msg(
                    # drain up to the `msg_limit` hoping to get
                    # a final result or error/ctxc.
                    else:
                        report: str = (
                        log.warning(
                            'Ignoring "yield" msg during `ctx.result()` drain..\n'
                            f'<= {ctx.chan.uid}\n'
                            f'  |_{ctx._nsf}()\n\n'

@ -719,14 +683,6 @@ async def drain_to_final_msg(

                            f'{pretty_struct.pformat(msg)}\n'
                        )
                        if parent_never_opened_stream:
                            report = (
                                f'IPC ctx never opened stream on {ctx.side!r}-side!\n'
                                f'\n'
                                # f'{ctx}\n'
                            ) + report

                        log.warning(report)
                        continue

                # stream terminated, but no result yet..

@ -818,7 +774,6 @@ async def drain_to_final_msg(
        f'{ctx.outcome}\n'
    )

    __tracebackhide__: bool = hide_tb
    return (
        result_msg,
        pre_result_drained,

@ -31,7 +31,6 @@ from typing import (
    Type,
    TypeVar,
    TypeAlias,
    # TYPE_CHECKING,
    Union,
)

@ -48,7 +47,6 @@ from tractor.msg import (
    pretty_struct,
)
from tractor.log import get_logger
# from tractor._addr import UnwrappedAddress


log = get_logger('tractor.msgspec')

@ -143,16 +141,9 @@ class Aid(
    '''
    name: str
    uuid: str
    pid: int|None = None

    # TODO? can/should we extend this field set?
    # -[ ] use built-in support for UUIDs? `uuid.UUID` which has
    #      multi-protocol support
    #      https://jcristharif.com/msgspec/supported-types.html#uuid
    #
    # -[ ] as per the `.ipc._uds` / `._addr` comments, maybe we
    #      should also include at least `.pid` (equiv to port for tcp)
    #      and/or host-part always?
    # TODO: use built-in support for UUIDs?
    # -[ ] `uuid.UUID` which has multi-protocol support
    #      https://jcristharif.com/msgspec/supported-types.html#uuid

class SpawnSpec(

@ -170,15 +161,14 @@ class SpawnSpec(
    # a hard `Struct` def for all of these fields!
    _parent_main_data: dict
    _runtime_vars: dict[str, Any]
    # ^NOTE see `._state._runtime_vars: dict`

    # module import capability
    enable_modules: dict[str, str]

    # TODO: not just sockaddr pairs?
    # -[ ] abstract into a `TransportAddr` type?
    reg_addrs: list[tuple[str, str|int]]
    bind_addrs: list[tuple[str, str|int]]|None
    reg_addrs: list[tuple[str, int]]
    bind_addrs: list[tuple[str, int]]


# TODO: caps based RPC support in the payload?

@ -609,15 +599,15 @@ def mk_msg_spec(
        Msg[payload_type_union],
        Generic[PayloadT],
    )
    # defstruct_bases: tuple = (
    #     Msg,  # [payload_type_union],
    #     # Generic[PayloadT],
    #     # ^-XXX-^: not allowed? lul..
    # )
    defstruct_bases: tuple = (
        Msg,  # [payload_type_union],
        # Generic[PayloadT],
        # ^-XXX-^: not allowed? lul..
    )
    ipc_msg_types: list[Msg] = []

    idx_msg_types: list[Msg] = []
    # defs_msg_types: list[Msg] = []
    defs_msg_types: list[Msg] = []
    nc_msg_types: list[Msg] = []

    for msgtype in __msg_types__:

@ -635,7 +625,7 @@ def mk_msg_spec(
        # TODO: wait why do we need the dynamic version here?
        # XXX ANSWER XXX -> BC INHERITANCE.. don't work w generics..
        #
        # NOTE previously bc msgtypes WERE NOT inheriting
        # NOTE previously bc msgtypes WERE NOT inheritting
        # directly the `Generic[PayloadT]` type, the manual method
        # of generic-paraming with `.__class_getitem__()` wasn't
        # working..

@ -672,35 +662,38 @@ def mk_msg_spec(

        # with `msgspec.structs.defstruct`
        # XXX ALSO DOESN'T WORK
        # defstruct_msgtype = defstruct(
        #     name=msgtype.__name__,
        #     fields=[
        #         ('cid', str),
        defstruct_msgtype = defstruct(
            name=msgtype.__name__,
            fields=[
                ('cid', str),

                # # XXX doesn't seem to work..
                # # ('pld', PayloadT),
                # XXX doesn't seem to work..
                # ('pld', PayloadT),

                ('pld', payload_type_union),
            ],
            bases=defstruct_bases,
        )
        defs_msg_types.append(defstruct_msgtype)

        #         ('pld', payload_type_union),
        #     ],
        #     bases=defstruct_bases,
        # )
        # defs_msg_types.append(defstruct_msgtype)
        # assert index_paramed_msg_type == manual_paramed_msg_subtype

        # paramed_msg_type = manual_paramed_msg_subtype

        # ipc_payload_msgs_type_union |= index_paramed_msg_type

    idx_spec: Union[Type[Msg]] = Union[*idx_msg_types]
    # def_spec: Union[Type[Msg]] = Union[*defs_msg_types]
    def_spec: Union[Type[Msg]] = Union[*defs_msg_types]
    nc_spec: Union[Type[Msg]] = Union[*nc_msg_types]

    specs: dict[str, Union[Type[Msg]]] = {
        'indexed_generics': idx_spec,
        # 'defstruct': def_spec,
        'defstruct': def_spec,
        'types_new_class': nc_spec,
    }
    msgtypes_table: dict[str, list[Msg]] = {
        'indexed_generics': idx_msg_types,
        # 'defstruct': defs_msg_types,
        'defstruct': defs_msg_types,
        'types_new_class': nc_msg_types,
    }

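For context on the `defstruct` branch being toggled above, a minimal standalone sketch of dynamic struct creation with `msgspec.defstruct` (the `MyMsg` name and payload union are illustrative, not from the diff):

from typing import Union
import msgspec
from msgspec import defstruct

# a hypothetical payload union to parameterize the `pld` field with
payload_union = Union[int, str, dict]

MyMsg = defstruct(
    name='MyMsg',
    fields=[
        ('cid', str),
        ('pld', payload_union),
    ],
)
msg = MyMsg(cid='42', pld={'key': 'value'})
wire: bytes = msgspec.msgpack.encode(msg)
assert msgspec.msgpack.decode(wire, type=MyMsg) == msg
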
@ -70,8 +70,7 @@ async def maybe_open_nursery(
        yield nursery
    else:
        async with lib.open_nursery(**kwargs) as nursery:
            if lib == trio:
                nursery.cancel_scope.shield = shield
            nursery.cancel_scope.shield = shield
            yield nursery

uv.lock
@ -1,23 +1,14 @@
|
|||
version = 1
|
||||
revision = 2
|
||||
revision = 1
|
||||
requires-python = ">=3.11"
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "24.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984, upload-time = "2024-12-16T06:59:29.899Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397, upload-time = "2024-12-16T06:59:26.977Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bidict"
|
||||
version = "0.23.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload-time = "2024-02-18T19:09:05.748Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload-time = "2024-02-18T19:09:04.156Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -27,51 +18,23 @@ source = { registry = "https://pypi.org/simple" }
|
|||
dependencies = [
|
||||
{ name = "pycparser" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" },
{ url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" },
{ url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 },
{ url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 },
{ url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 },
{ url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 },
{ url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 },
{ url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 },
]
[[package]]
name = "colorama"
version = "0.4.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
]
[[package]]
@@ -81,9 +44,9 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d3/7a/359f4d5df2353f26172b3cc39ea32daa39af8de522205f512f458923e677/colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2", size = 16624, upload-time = "2024-10-29T18:34:51.011Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d3/7a/359f4d5df2353f26172b3cc39ea32daa39af8de522205f512f458923e677/colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2", size = 16624 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/51/9b208e85196941db2f0654ad0357ca6388ab3ed67efdbfc799f35d1f83aa/colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff", size = 11424, upload-time = "2024-10-29T18:34:49.815Z" },
{ url = "https://files.pythonhosted.org/packages/e3/51/9b208e85196941db2f0654ad0357ca6388ab3ed67efdbfc799f35d1f83aa/colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff", size = 11424 },
]
[[package]]
@@ -95,98 +58,98 @@ dependencies = [
{ name = "outcome" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/dc/c1/ab3a42c0f3ed56df9cd33de1539b3198d98c6ccbaf88a73d6be0b72d85e0/greenback-1.2.1.tar.gz", hash = "sha256:de3ca656885c03b96dab36079f3de74bb5ba061da9bfe3bb69dccc866ef95ea3", size = 42597, upload-time = "2024-02-20T21:23:13.239Z" }
sdist = { url = "https://files.pythonhosted.org/packages/dc/c1/ab3a42c0f3ed56df9cd33de1539b3198d98c6ccbaf88a73d6be0b72d85e0/greenback-1.2.1.tar.gz", hash = "sha256:de3ca656885c03b96dab36079f3de74bb5ba061da9bfe3bb69dccc866ef95ea3", size = 42597 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/71/d0/b8dc79d5ecfffacad9c844b6ae76b9c6259935796d3c561deccbf8fa421d/greenback-1.2.1-py3-none-any.whl", hash = "sha256:98768edbbe4340091a9730cf64a683fcbaa3f2cb81e4ac41d7ed28d3b6f74b79", size = 28062, upload-time = "2024-02-20T21:23:12.031Z" },
{ url = "https://files.pythonhosted.org/packages/71/d0/b8dc79d5ecfffacad9c844b6ae76b9c6259935796d3c561deccbf8fa421d/greenback-1.2.1-py3-none-any.whl", hash = "sha256:98768edbbe4340091a9730cf64a683fcbaa3f2cb81e4ac41d7ed28d3b6f74b79", size = 28062 },
]
[[package]]
name = "greenlet"
version = "3.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022, upload-time = "2024-09-20T18:21:04.506Z" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/28/62/1c2665558618553c42922ed47a4e6d6527e2fa3516a8256c2f431c5d0441/greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", size = 272479, upload-time = "2024-09-20T17:07:22.332Z" },
{ url = "https://files.pythonhosted.org/packages/76/9d/421e2d5f07285b6e4e3a676b016ca781f63cfe4a0cd8eaecf3fd6f7a71ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", size = 640404, upload-time = "2024-09-20T17:36:45.588Z" },
{ url = "https://files.pythonhosted.org/packages/e5/de/6e05f5c59262a584e502dd3d261bbdd2c97ab5416cc9c0b91ea38932a901/greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", size = 652813, upload-time = "2024-09-20T17:39:19.052Z" },
{ url = "https://files.pythonhosted.org/packages/49/93/d5f93c84241acdea15a8fd329362c2c71c79e1a507c3f142a5d67ea435ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", size = 648517, upload-time = "2024-09-20T17:44:24.101Z" },
{ url = "https://files.pythonhosted.org/packages/15/85/72f77fc02d00470c86a5c982b8daafdf65d38aefbbe441cebff3bf7037fc/greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", size = 647831, upload-time = "2024-09-20T17:08:40.577Z" },
{ url = "https://files.pythonhosted.org/packages/f7/4b/1c9695aa24f808e156c8f4813f685d975ca73c000c2a5056c514c64980f6/greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", size = 602413, upload-time = "2024-09-20T17:08:31.728Z" },
{ url = "https://files.pythonhosted.org/packages/76/70/ad6e5b31ef330f03b12559d19fda2606a522d3849cde46b24f223d6d1619/greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", size = 1129619, upload-time = "2024-09-20T17:44:14.222Z" },
{ url = "https://files.pythonhosted.org/packages/f4/fb/201e1b932e584066e0f0658b538e73c459b34d44b4bd4034f682423bc801/greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", size = 1155198, upload-time = "2024-09-20T17:09:23.903Z" },
{ url = "https://files.pythonhosted.org/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", size = 298930, upload-time = "2024-09-20T17:25:18.656Z" },
{ url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260, upload-time = "2024-09-20T17:08:07.301Z" },
{ url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064, upload-time = "2024-09-20T17:36:47.628Z" },
{ url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420, upload-time = "2024-09-20T17:39:21.258Z" },
{ url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035, upload-time = "2024-09-20T17:44:26.501Z" },
{ url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105, upload-time = "2024-09-20T17:08:42.048Z" },
{ url = "https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077, upload-time = "2024-09-20T17:08:33.707Z" },
{ url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975, upload-time = "2024-09-20T17:44:15.989Z" },
{ url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955, upload-time = "2024-09-20T17:09:25.539Z" },
{ url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655, upload-time = "2024-09-20T17:21:22.427Z" },
{ url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990, upload-time = "2024-09-20T17:08:26.312Z" },
{ url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175, upload-time = "2024-09-20T17:36:48.983Z" },
{ url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425, upload-time = "2024-09-20T17:39:22.705Z" },
{ url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736, upload-time = "2024-09-20T17:44:28.544Z" },
{ url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347, upload-time = "2024-09-20T17:08:45.56Z" },
{ url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583, upload-time = "2024-09-20T17:08:36.85Z" },
{ url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039, upload-time = "2024-09-20T17:44:18.287Z" },
{ url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716, upload-time = "2024-09-20T17:09:27.112Z" },
{ url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490, upload-time = "2024-09-20T17:17:09.501Z" },
{ url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731, upload-time = "2024-09-20T17:36:50.376Z" },
{ url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304, upload-time = "2024-09-20T17:39:24.55Z" },
{ url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537, upload-time = "2024-09-20T17:44:31.102Z" },
{ url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506, upload-time = "2024-09-20T17:08:47.852Z" },
{ url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753, upload-time = "2024-09-20T17:08:38.079Z" },
{ url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731, upload-time = "2024-09-20T17:44:20.556Z" },
{ url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112, upload-time = "2024-09-20T17:09:28.753Z" },
{ url = "https://files.pythonhosted.org/packages/28/62/1c2665558618553c42922ed47a4e6d6527e2fa3516a8256c2f431c5d0441/greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", size = 272479 },
{ url = "https://files.pythonhosted.org/packages/76/9d/421e2d5f07285b6e4e3a676b016ca781f63cfe4a0cd8eaecf3fd6f7a71ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", size = 640404 },
{ url = "https://files.pythonhosted.org/packages/e5/de/6e05f5c59262a584e502dd3d261bbdd2c97ab5416cc9c0b91ea38932a901/greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", size = 652813 },
{ url = "https://files.pythonhosted.org/packages/49/93/d5f93c84241acdea15a8fd329362c2c71c79e1a507c3f142a5d67ea435ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", size = 648517 },
{ url = "https://files.pythonhosted.org/packages/15/85/72f77fc02d00470c86a5c982b8daafdf65d38aefbbe441cebff3bf7037fc/greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", size = 647831 },
{ url = "https://files.pythonhosted.org/packages/f7/4b/1c9695aa24f808e156c8f4813f685d975ca73c000c2a5056c514c64980f6/greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", size = 602413 },
{ url = "https://files.pythonhosted.org/packages/76/70/ad6e5b31ef330f03b12559d19fda2606a522d3849cde46b24f223d6d1619/greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", size = 1129619 },
{ url = "https://files.pythonhosted.org/packages/f4/fb/201e1b932e584066e0f0658b538e73c459b34d44b4bd4034f682423bc801/greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", size = 1155198 },
{ url = "https://files.pythonhosted.org/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", size = 298930 },
{ url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 },
{ url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 },
{ url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 },
{ url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 },
{ url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 },
{ url = "https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 },
{ url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 },
{ url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 },
{ url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 },
{ url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 },
{ url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 },
{ url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 },
{ url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 },
{ url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 },
{ url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 },
{ url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 },
{ url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 },
{ url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 },
{ url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 },
{ url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 },
{ url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 },
{ url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 },
{ url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 },
{ url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 },
{ url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 },
]
[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
]
[[package]]
name = "iniconfig"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" },
{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 },
]
[[package]]
name = "msgspec"
version = "0.19.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cf/9b/95d8ce458462b8b71b8a70fa94563b2498b89933689f3a7b8911edfae3d7/msgspec-0.19.0.tar.gz", hash = "sha256:604037e7cd475345848116e89c553aa9a233259733ab51986ac924ab1b976f8e", size = 216934, upload-time = "2024-12-27T17:40:28.597Z" }
sdist = { url = "https://files.pythonhosted.org/packages/cf/9b/95d8ce458462b8b71b8a70fa94563b2498b89933689f3a7b8911edfae3d7/msgspec-0.19.0.tar.gz", hash = "sha256:604037e7cd475345848116e89c553aa9a233259733ab51986ac924ab1b976f8e", size = 216934 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/24/d4/2ec2567ac30dab072cce3e91fb17803c52f0a37aab6b0c24375d2b20a581/msgspec-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa77046904db764b0462036bc63ef71f02b75b8f72e9c9dd4c447d6da1ed8f8e", size = 187939, upload-time = "2024-12-27T17:39:32.347Z" },
{ url = "https://files.pythonhosted.org/packages/2b/c0/18226e4328897f4f19875cb62bb9259fe47e901eade9d9376ab5f251a929/msgspec-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:047cfa8675eb3bad68722cfe95c60e7afabf84d1bd8938979dd2b92e9e4a9551", size = 182202, upload-time = "2024-12-27T17:39:33.633Z" },
{ url = "https://files.pythonhosted.org/packages/81/25/3a4b24d468203d8af90d1d351b77ea3cffb96b29492855cf83078f16bfe4/msgspec-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e78f46ff39a427e10b4a61614a2777ad69559cc8d603a7c05681f5a595ea98f7", size = 209029, upload-time = "2024-12-27T17:39:35.023Z" },
{ url = "https://files.pythonhosted.org/packages/85/2e/db7e189b57901955239f7689b5dcd6ae9458637a9c66747326726c650523/msgspec-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c7adf191e4bd3be0e9231c3b6dc20cf1199ada2af523885efc2ed218eafd011", size = 210682, upload-time = "2024-12-27T17:39:36.384Z" },
{ url = "https://files.pythonhosted.org/packages/03/97/7c8895c9074a97052d7e4a1cc1230b7b6e2ca2486714eb12c3f08bb9d284/msgspec-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f04cad4385e20be7c7176bb8ae3dca54a08e9756cfc97bcdb4f18560c3042063", size = 214003, upload-time = "2024-12-27T17:39:39.097Z" },
{ url = "https://files.pythonhosted.org/packages/61/61/e892997bcaa289559b4d5869f066a8021b79f4bf8e955f831b095f47a4cd/msgspec-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45c8fb410670b3b7eb884d44a75589377c341ec1392b778311acdbfa55187716", size = 216833, upload-time = "2024-12-27T17:39:41.203Z" },
{ url = "https://files.pythonhosted.org/packages/ce/3d/71b2dffd3a1c743ffe13296ff701ee503feaebc3f04d0e75613b6563c374/msgspec-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:70eaef4934b87193a27d802534dc466778ad8d536e296ae2f9334e182ac27b6c", size = 186184, upload-time = "2024-12-27T17:39:43.702Z" },
{ url = "https://files.pythonhosted.org/packages/b2/5f/a70c24f075e3e7af2fae5414c7048b0e11389685b7f717bb55ba282a34a7/msgspec-0.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f98bd8962ad549c27d63845b50af3f53ec468b6318400c9f1adfe8b092d7b62f", size = 190485, upload-time = "2024-12-27T17:39:44.974Z" },
{ url = "https://files.pythonhosted.org/packages/89/b0/1b9763938cfae12acf14b682fcf05c92855974d921a5a985ecc197d1c672/msgspec-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43bbb237feab761b815ed9df43b266114203f53596f9b6e6f00ebd79d178cdf2", size = 183910, upload-time = "2024-12-27T17:39:46.401Z" },
{ url = "https://files.pythonhosted.org/packages/87/81/0c8c93f0b92c97e326b279795f9c5b956c5a97af28ca0fbb9fd86c83737a/msgspec-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cfc033c02c3e0aec52b71710d7f84cb3ca5eb407ab2ad23d75631153fdb1f12", size = 210633, upload-time = "2024-12-27T17:39:49.099Z" },
{ url = "https://files.pythonhosted.org/packages/d0/ef/c5422ce8af73928d194a6606f8ae36e93a52fd5e8df5abd366903a5ca8da/msgspec-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d911c442571605e17658ca2b416fd8579c5050ac9adc5e00c2cb3126c97f73bc", size = 213594, upload-time = "2024-12-27T17:39:51.204Z" },
{ url = "https://files.pythonhosted.org/packages/19/2b/4137bc2ed45660444842d042be2cf5b18aa06efd2cda107cff18253b9653/msgspec-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:757b501fa57e24896cf40a831442b19a864f56d253679f34f260dcb002524a6c", size = 214053, upload-time = "2024-12-27T17:39:52.866Z" },
{ url = "https://files.pythonhosted.org/packages/9d/e6/8ad51bdc806aac1dc501e8fe43f759f9ed7284043d722b53323ea421c360/msgspec-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f0f65f29b45e2816d8bded36e6b837a4bf5fb60ec4bc3c625fa2c6da4124537", size = 219081, upload-time = "2024-12-27T17:39:55.142Z" },
{ url = "https://files.pythonhosted.org/packages/b1/ef/27dd35a7049c9a4f4211c6cd6a8c9db0a50647546f003a5867827ec45391/msgspec-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:067f0de1c33cfa0b6a8206562efdf6be5985b988b53dd244a8e06f993f27c8c0", size = 187467, upload-time = "2024-12-27T17:39:56.531Z" },
{ url = "https://files.pythonhosted.org/packages/3c/cb/2842c312bbe618d8fefc8b9cedce37f773cdc8fa453306546dba2c21fd98/msgspec-0.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f12d30dd6266557aaaf0aa0f9580a9a8fbeadfa83699c487713e355ec5f0bd86", size = 190498, upload-time = "2024-12-27T17:40:00.427Z" },
{ url = "https://files.pythonhosted.org/packages/58/95/c40b01b93465e1a5f3b6c7d91b10fb574818163740cc3acbe722d1e0e7e4/msgspec-0.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82b2c42c1b9ebc89e822e7e13bbe9d17ede0c23c187469fdd9505afd5a481314", size = 183950, upload-time = "2024-12-27T17:40:04.219Z" },
{ url = "https://files.pythonhosted.org/packages/e8/f0/5b764e066ce9aba4b70d1db8b087ea66098c7c27d59b9dd8a3532774d48f/msgspec-0.19.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19746b50be214a54239aab822964f2ac81e38b0055cca94808359d779338c10e", size = 210647, upload-time = "2024-12-27T17:40:05.606Z" },
{ url = "https://files.pythonhosted.org/packages/9d/87/bc14f49bc95c4cb0dd0a8c56028a67c014ee7e6818ccdce74a4862af259b/msgspec-0.19.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60ef4bdb0ec8e4ad62e5a1f95230c08efb1f64f32e6e8dd2ced685bcc73858b5", size = 213563, upload-time = "2024-12-27T17:40:10.516Z" },
{ url = "https://files.pythonhosted.org/packages/53/2f/2b1c2b056894fbaa975f68f81e3014bb447516a8b010f1bed3fb0e016ed7/msgspec-0.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac7f7c377c122b649f7545810c6cd1b47586e3aa3059126ce3516ac7ccc6a6a9", size = 213996, upload-time = "2024-12-27T17:40:12.244Z" },
{ url = "https://files.pythonhosted.org/packages/aa/5a/4cd408d90d1417e8d2ce6a22b98a6853c1b4d7cb7669153e4424d60087f6/msgspec-0.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5bc1472223a643f5ffb5bf46ccdede7f9795078194f14edd69e3aab7020d327", size = 219087, upload-time = "2024-12-27T17:40:14.881Z" },
{ url = "https://files.pythonhosted.org/packages/23/d8/f15b40611c2d5753d1abb0ca0da0c75348daf1252220e5dda2867bd81062/msgspec-0.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:317050bc0f7739cb30d257ff09152ca309bf5a369854bbf1e57dffc310c1f20f", size = 187432, upload-time = "2024-12-27T17:40:16.256Z" },
{ url = "https://files.pythonhosted.org/packages/24/d4/2ec2567ac30dab072cce3e91fb17803c52f0a37aab6b0c24375d2b20a581/msgspec-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa77046904db764b0462036bc63ef71f02b75b8f72e9c9dd4c447d6da1ed8f8e", size = 187939 },
{ url = "https://files.pythonhosted.org/packages/2b/c0/18226e4328897f4f19875cb62bb9259fe47e901eade9d9376ab5f251a929/msgspec-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:047cfa8675eb3bad68722cfe95c60e7afabf84d1bd8938979dd2b92e9e4a9551", size = 182202 },
{ url = "https://files.pythonhosted.org/packages/81/25/3a4b24d468203d8af90d1d351b77ea3cffb96b29492855cf83078f16bfe4/msgspec-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e78f46ff39a427e10b4a61614a2777ad69559cc8d603a7c05681f5a595ea98f7", size = 209029 },
{ url = "https://files.pythonhosted.org/packages/85/2e/db7e189b57901955239f7689b5dcd6ae9458637a9c66747326726c650523/msgspec-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c7adf191e4bd3be0e9231c3b6dc20cf1199ada2af523885efc2ed218eafd011", size = 210682 },
{ url = "https://files.pythonhosted.org/packages/03/97/7c8895c9074a97052d7e4a1cc1230b7b6e2ca2486714eb12c3f08bb9d284/msgspec-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f04cad4385e20be7c7176bb8ae3dca54a08e9756cfc97bcdb4f18560c3042063", size = 214003 },
{ url = "https://files.pythonhosted.org/packages/61/61/e892997bcaa289559b4d5869f066a8021b79f4bf8e955f831b095f47a4cd/msgspec-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45c8fb410670b3b7eb884d44a75589377c341ec1392b778311acdbfa55187716", size = 216833 },
{ url = "https://files.pythonhosted.org/packages/ce/3d/71b2dffd3a1c743ffe13296ff701ee503feaebc3f04d0e75613b6563c374/msgspec-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:70eaef4934b87193a27d802534dc466778ad8d536e296ae2f9334e182ac27b6c", size = 186184 },
{ url = "https://files.pythonhosted.org/packages/b2/5f/a70c24f075e3e7af2fae5414c7048b0e11389685b7f717bb55ba282a34a7/msgspec-0.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f98bd8962ad549c27d63845b50af3f53ec468b6318400c9f1adfe8b092d7b62f", size = 190485 },
{ url = "https://files.pythonhosted.org/packages/89/b0/1b9763938cfae12acf14b682fcf05c92855974d921a5a985ecc197d1c672/msgspec-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43bbb237feab761b815ed9df43b266114203f53596f9b6e6f00ebd79d178cdf2", size = 183910 },
{ url = "https://files.pythonhosted.org/packages/87/81/0c8c93f0b92c97e326b279795f9c5b956c5a97af28ca0fbb9fd86c83737a/msgspec-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cfc033c02c3e0aec52b71710d7f84cb3ca5eb407ab2ad23d75631153fdb1f12", size = 210633 },
{ url = "https://files.pythonhosted.org/packages/d0/ef/c5422ce8af73928d194a6606f8ae36e93a52fd5e8df5abd366903a5ca8da/msgspec-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d911c442571605e17658ca2b416fd8579c5050ac9adc5e00c2cb3126c97f73bc", size = 213594 },
{ url = "https://files.pythonhosted.org/packages/19/2b/4137bc2ed45660444842d042be2cf5b18aa06efd2cda107cff18253b9653/msgspec-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:757b501fa57e24896cf40a831442b19a864f56d253679f34f260dcb002524a6c", size = 214053 },
{ url = "https://files.pythonhosted.org/packages/9d/e6/8ad51bdc806aac1dc501e8fe43f759f9ed7284043d722b53323ea421c360/msgspec-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f0f65f29b45e2816d8bded36e6b837a4bf5fb60ec4bc3c625fa2c6da4124537", size = 219081 },
{ url = "https://files.pythonhosted.org/packages/b1/ef/27dd35a7049c9a4f4211c6cd6a8c9db0a50647546f003a5867827ec45391/msgspec-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:067f0de1c33cfa0b6a8206562efdf6be5985b988b53dd244a8e06f993f27c8c0", size = 187467 },
{ url = "https://files.pythonhosted.org/packages/3c/cb/2842c312bbe618d8fefc8b9cedce37f773cdc8fa453306546dba2c21fd98/msgspec-0.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f12d30dd6266557aaaf0aa0f9580a9a8fbeadfa83699c487713e355ec5f0bd86", size = 190498 },
{ url = "https://files.pythonhosted.org/packages/58/95/c40b01b93465e1a5f3b6c7d91b10fb574818163740cc3acbe722d1e0e7e4/msgspec-0.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82b2c42c1b9ebc89e822e7e13bbe9d17ede0c23c187469fdd9505afd5a481314", size = 183950 },
{ url = "https://files.pythonhosted.org/packages/e8/f0/5b764e066ce9aba4b70d1db8b087ea66098c7c27d59b9dd8a3532774d48f/msgspec-0.19.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19746b50be214a54239aab822964f2ac81e38b0055cca94808359d779338c10e", size = 210647 },
{ url = "https://files.pythonhosted.org/packages/9d/87/bc14f49bc95c4cb0dd0a8c56028a67c014ee7e6818ccdce74a4862af259b/msgspec-0.19.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60ef4bdb0ec8e4ad62e5a1f95230c08efb1f64f32e6e8dd2ced685bcc73858b5", size = 213563 },
{ url = "https://files.pythonhosted.org/packages/53/2f/2b1c2b056894fbaa975f68f81e3014bb447516a8b010f1bed3fb0e016ed7/msgspec-0.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac7f7c377c122b649f7545810c6cd1b47586e3aa3059126ce3516ac7ccc6a6a9", size = 213996 },
{ url = "https://files.pythonhosted.org/packages/aa/5a/4cd408d90d1417e8d2ce6a22b98a6853c1b4d7cb7669153e4424d60087f6/msgspec-0.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5bc1472223a643f5ffb5bf46ccdede7f9795078194f14edd69e3aab7020d327", size = 219087 },
{ url = "https://files.pythonhosted.org/packages/23/d8/f15b40611c2d5753d1abb0ca0da0c75348daf1252220e5dda2867bd81062/msgspec-0.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:317050bc0f7739cb30d257ff09152ca309bf5a369854bbf1e57dffc310c1f20f", size = 187432 },
]
[[package]]
@@ -196,18 +159,18 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "attrs" },
]
sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload-time = "2023-10-26T04:26:04.361Z" }
sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload-time = "2023-10-26T04:26:02.532Z" },
{ url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692 },
]
[[package]]
name = "packaging"
version = "24.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950, upload-time = "2024-11-08T09:47:47.202Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451, upload-time = "2024-11-08T09:47:44.722Z" },
{ url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 },
]
[[package]]
@@ -219,9 +182,9 @@ dependencies = [
{ name = "pygments" },
{ name = "tabcompleter" },
]
sdist = { url = "https://files.pythonhosted.org/packages/69/13/80da03638f62facbee76312ca9ee5941c017b080f2e4c6919fd4e87e16e3/pdbp-1.6.1.tar.gz", hash = "sha256:f4041642952a05df89664e166d5bd379607a0866ddd753c06874f65552bdf40b", size = 25322, upload-time = "2024-11-07T15:36:43.062Z" }
sdist = { url = "https://files.pythonhosted.org/packages/69/13/80da03638f62facbee76312ca9ee5941c017b080f2e4c6919fd4e87e16e3/pdbp-1.6.1.tar.gz", hash = "sha256:f4041642952a05df89664e166d5bd379607a0866ddd753c06874f65552bdf40b", size = 25322 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/29/93/d56fb9ba5569dc29d8263c72e46d21a2fd38741339ebf03f54cf7561828c/pdbp-1.6.1-py3-none-any.whl", hash = "sha256:f10bad2ee044c0e5c168cb0825abfdbdc01c50013e9755df5261b060bdd35c22", size = 21495, upload-time = "2024-11-07T15:36:41.061Z" },
{ url = "https://files.pythonhosted.org/packages/29/93/d56fb9ba5569dc29d8263c72e46d21a2fd38741339ebf03f54cf7561828c/pdbp-1.6.1-py3-none-any.whl", hash = "sha256:f10bad2ee044c0e5c168cb0825abfdbdc01c50013e9755df5261b060bdd35c22", size = 21495 },
]
[[package]]
@@ -231,18 +194,18 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "ptyprocess" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
{ url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 },
]
[[package]]
name = "pluggy"
version = "1.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" }
sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" },
{ url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
]
[[package]]
@@ -252,66 +215,51 @@ dependencies = [
dependencies = [
{ name = "wcwidth" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/e1/bd15cb8ffdcfeeb2bdc215de3c3cffca11408d829e4b8416dcfe71ba8854/prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab", size = 429087, upload-time = "2025-01-20T15:55:35.072Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/e1/bd15cb8ffdcfeeb2bdc215de3c3cffca11408d829e4b8416dcfe71ba8854/prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab", size = 429087 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/ea/d836f008d33151c7a1f62caf3d8dd782e4d15f6a43897f64480c2b8de2ad/prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198", size = 387816, upload-time = "2025-01-20T15:55:29.98Z" },
]
[[package]]
name = "psutil"
version = "7.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" },
{ url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" },
{ url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" },
{ url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" },
{ url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" },
{ url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" },
{ url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ea/d836f008d33151c7a1f62caf3d8dd782e4d15f6a43897f64480c2b8de2ad/prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198", size = 387816 },
]
[[package]]
name = "ptyprocess"
version = "0.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
{ url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 },
]
[[package]]
name = "pycparser"
version = "2.22"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" },
{ url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
]
[[package]]
name = "pygments"
version = "2.19.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" }
sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" },
{ url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 },
]

[[package]]
name = "pyperclip"
version = "1.9.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload-time = "2024-06-18T20:38:48.401Z" }
sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961 }

[[package]]
name = "pyreadline3"
version = "3.5.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" },
{ url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 },
]

[[package]]
@@ -324,36 +272,36 @@ dependencies = [
{ name = "packaging" },
{ name = "pluggy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" },
{ url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 },
]

[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
]

[[package]]
name = "sortedcontainers"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
{ url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 },
]

[[package]]
name = "stackscope"
version = "0.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/fc/20dbb993353f31230138f3c63f3f0c881d1853e70d7a30cd68d2ba4cf1e2/stackscope-0.2.2.tar.gz", hash = "sha256:f508c93eb4861ada466dd3ff613ca203962ceb7587ad013759f15394e6a4e619", size = 90479, upload-time = "2024-02-27T22:02:15.831Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/fc/20dbb993353f31230138f3c63f3f0c881d1853e70d7a30cd68d2ba4cf1e2/stackscope-0.2.2.tar.gz", hash = "sha256:f508c93eb4861ada466dd3ff613ca203962ceb7587ad013759f15394e6a4e619", size = 90479 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f1/5f/0a674fcafa03528089badb46419413f342537b5b57d2fefc9900fb8ee4e4/stackscope-0.2.2-py3-none-any.whl", hash = "sha256:c199b0cda738d39c993ee04eb01961b06b7e9aeb43ebf9fd6226cdd72ea9faf6", size = 80807, upload-time = "2024-02-27T22:02:13.692Z" },
{ url = "https://files.pythonhosted.org/packages/f1/5f/0a674fcafa03528089badb46419413f342537b5b57d2fefc9900fb8ee4e4/stackscope-0.2.2-py3-none-any.whl", hash = "sha256:c199b0cda738d39c993ee04eb01961b06b7e9aeb43ebf9fd6226cdd72ea9faf6", size = 80807 },
]

[[package]]
@@ -363,9 +311,9 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyreadline3", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/73/1a/ed3544579628c5709bae6fae2255e94c6982a9ff77d42d8ba59fd2f3b21a/tabcompleter-1.4.0.tar.gz", hash = "sha256:7562a9938e62f8e7c3be612c3ac4e14c5ec4307b58ba9031c148260e866e8814", size = 10431, upload-time = "2024-10-28T00:44:52.665Z" }
sdist = { url = "https://files.pythonhosted.org/packages/73/1a/ed3544579628c5709bae6fae2255e94c6982a9ff77d42d8ba59fd2f3b21a/tabcompleter-1.4.0.tar.gz", hash = "sha256:7562a9938e62f8e7c3be612c3ac4e14c5ec4307b58ba9031c148260e866e8814", size = 10431 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/65/44/bb509c3d2c0b5a87e7a5af1d5917a402a32ff026f777a6d7cb6990746cbb/tabcompleter-1.4.0-py3-none-any.whl", hash = "sha256:d744aa735b49c0a6cc2fb8fcd40077fec47425e4388301010b14e6ce3311368b", size = 6725, upload-time = "2024-10-28T00:44:51.267Z" },
{ url = "https://files.pythonhosted.org/packages/65/44/bb509c3d2c0b5a87e7a5af1d5917a402a32ff026f777a6d7cb6990746cbb/tabcompleter-1.4.0-py3-none-any.whl", hash = "sha256:d744aa735b49c0a6cc2fb8fcd40077fec47425e4388301010b14e6ce3311368b", size = 6725 },
]

[[package]]
@@ -373,8 +321,6 @@ name = "tractor"
version = "0.1.0a6.dev0"
source = { editable = "." }
dependencies = [
{ name = "bidict" },
{ name = "cffi" },
{ name = "colorlog" },
{ name = "msgspec" },
{ name = "pdbp" },
@@ -388,18 +334,14 @@ dev = [
{ name = "greenback" },
{ name = "pexpect" },
{ name = "prompt-toolkit" },
{ name = "psutil" },
{ name = "pyperclip" },
{ name = "pytest" },
{ name = "stackscope" },
{ name = "typing-extensions" },
{ name = "xonsh" },
]

[package.metadata]
requires-dist = [
{ name = "bidict", specifier = ">=0.23.1" },
{ name = "cffi", specifier = ">=1.17.1" },
{ name = "colorlog", specifier = ">=6.8.2,<7" },
{ name = "msgspec", specifier = ">=0.19.0" },
{ name = "pdbp", specifier = ">=1.6,<2" },
@@ -413,11 +355,9 @@ dev = [
{ name = "greenback", specifier = ">=1.2.1,<2" },
{ name = "pexpect", specifier = ">=4.9.0,<5" },
{ name = "prompt-toolkit", specifier = ">=3.0.50" },
{ name = "psutil", specifier = ">=7.0.0" },
{ name = "pyperclip", specifier = ">=1.9.0" },
{ name = "pytest", specifier = ">=8.3.5" },
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
{ name = "typing-extensions", specifier = ">=4.14.1" },
{ name = "xonsh", specifier = ">=0.19.2" },
]

@@ -428,9 +368,9 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "trio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f8/8e/fdd7bc467b40eedd0a5f2ed36b0d692c6e6f2473be00c8160e2e9f53adc1/tricycle-0.4.1.tar.gz", hash = "sha256:f56edb4b3e1bed3e2552b1b499b24a2dab47741e92e9b4d806acc5c35c9e6066", size = 41551, upload-time = "2024-02-02T20:41:15.298Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/8e/fdd7bc467b40eedd0a5f2ed36b0d692c6e6f2473be00c8160e2e9f53adc1/tricycle-0.4.1.tar.gz", hash = "sha256:f56edb4b3e1bed3e2552b1b499b24a2dab47741e92e9b4d806acc5c35c9e6066", size = 41551 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/c6/7cc05d60e21c683df99167db071ce5d848f5063c2a63971a8443466f603e/tricycle-0.4.1-py3-none-any.whl", hash = "sha256:67900995a73e7445e2c70250cdca04a778d9c3923dd960a97ad4569085e0fb3f", size = 35316, upload-time = "2024-02-02T20:41:14.108Z" },
{ url = "https://files.pythonhosted.org/packages/d7/c6/7cc05d60e21c683df99167db071ce5d848f5063c2a63971a8443466f603e/tricycle-0.4.1-py3-none-any.whl", hash = "sha256:67900995a73e7445e2c70250cdca04a778d9c3923dd960a97ad4569085e0fb3f", size = 35316 },
]

[[package]]
@@ -445,91 +385,82 @@ dependencies = [
{ name = "sniffio" },
{ name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952, upload-time = "2025-02-14T07:13:50.724Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920, upload-time = "2025-02-14T07:13:48.696Z" },
]

[[package]]
name = "typing-extensions"
version = "4.14.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" },
{ url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920 },
]

[[package]]
name = "wcwidth"
version = "0.2.13"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" },
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 },
]

[[package]]
name = "wrapt"
version = "1.17.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531, upload-time = "2025-01-14T10:35:45.465Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308, upload-time = "2025-01-14T10:33:33.992Z" },
{ url = "https://files.pythonhosted.org/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488, upload-time = "2025-01-14T10:33:35.264Z" },
{ url = "https://files.pythonhosted.org/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776, upload-time = "2025-01-14T10:33:38.28Z" },
{ url = "https://files.pythonhosted.org/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776, upload-time = "2025-01-14T10:33:40.678Z" },
{ url = "https://files.pythonhosted.org/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420, upload-time = "2025-01-14T10:33:41.868Z" },
{ url = "https://files.pythonhosted.org/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199, upload-time = "2025-01-14T10:33:43.598Z" },
{ url = "https://files.pythonhosted.org/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307, upload-time = "2025-01-14T10:33:48.499Z" },
{ url = "https://files.pythonhosted.org/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025, upload-time = "2025-01-14T10:33:51.191Z" },
{ url = "https://files.pythonhosted.org/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879, upload-time = "2025-01-14T10:33:52.328Z" },
{ url = "https://files.pythonhosted.org/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419, upload-time = "2025-01-14T10:33:53.551Z" },
{ url = "https://files.pythonhosted.org/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773, upload-time = "2025-01-14T10:33:56.323Z" },
{ url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799, upload-time = "2025-01-14T10:33:57.4Z" },
{ url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821, upload-time = "2025-01-14T10:33:59.334Z" },
{ url = "https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919, upload-time = "2025-01-14T10:34:04.093Z" },
{ url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721, upload-time = "2025-01-14T10:34:07.163Z" },
{ url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899, upload-time = "2025-01-14T10:34:09.82Z" },
{ url = "https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222, upload-time = "2025-01-14T10:34:11.258Z" },
{ url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707, upload-time = "2025-01-14T10:34:12.49Z" },
{ url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685, upload-time = "2025-01-14T10:34:15.043Z" },
{ url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567, upload-time = "2025-01-14T10:34:16.563Z" },
{ url = "https://files.pythonhosted.org/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672, upload-time = "2025-01-14T10:34:17.727Z" },
{ url = "https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865, upload-time = "2025-01-14T10:34:19.577Z" },
{ url = "https://files.pythonhosted.org/packages/ce/b9/0ffd557a92f3b11d4c5d5e0c5e4ad057bd9eb8586615cdaf901409920b14/wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125", size = 53800, upload-time = "2025-01-14T10:34:21.571Z" },
{ url = "https://files.pythonhosted.org/packages/c0/ef/8be90a0b7e73c32e550c73cfb2fa09db62234227ece47b0e80a05073b375/wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998", size = 38824, upload-time = "2025-01-14T10:34:22.999Z" },
{ url = "https://files.pythonhosted.org/packages/36/89/0aae34c10fe524cce30fe5fc433210376bce94cf74d05b0d68344c8ba46e/wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5", size = 38920, upload-time = "2025-01-14T10:34:25.386Z" },
{ url = "https://files.pythonhosted.org/packages/3b/24/11c4510de906d77e0cfb5197f1b1445d4fec42c9a39ea853d482698ac681/wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8", size = 88690, upload-time = "2025-01-14T10:34:28.058Z" },
{ url = "https://files.pythonhosted.org/packages/71/d7/cfcf842291267bf455b3e266c0c29dcb675b5540ee8b50ba1699abf3af45/wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6", size = 80861, upload-time = "2025-01-14T10:34:29.167Z" },
{ url = "https://files.pythonhosted.org/packages/d5/66/5d973e9f3e7370fd686fb47a9af3319418ed925c27d72ce16b791231576d/wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc", size = 89174, upload-time = "2025-01-14T10:34:31.702Z" },
{ url = "https://files.pythonhosted.org/packages/a7/d3/8e17bb70f6ae25dabc1aaf990f86824e4fd98ee9cadf197054e068500d27/wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2", size = 86721, upload-time = "2025-01-14T10:34:32.91Z" },
{ url = "https://files.pythonhosted.org/packages/6f/54/f170dfb278fe1c30d0ff864513cff526d624ab8de3254b20abb9cffedc24/wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b", size = 79763, upload-time = "2025-01-14T10:34:34.903Z" },
{ url = "https://files.pythonhosted.org/packages/4a/98/de07243751f1c4a9b15c76019250210dd3486ce098c3d80d5f729cba029c/wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504", size = 87585, upload-time = "2025-01-14T10:34:36.13Z" },
{ url = "https://files.pythonhosted.org/packages/f9/f0/13925f4bd6548013038cdeb11ee2cbd4e37c30f8bfd5db9e5a2a370d6e20/wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a", size = 36676, upload-time = "2025-01-14T10:34:37.962Z" },
{ url = "https://files.pythonhosted.org/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845", size = 38871, upload-time = "2025-01-14T10:34:39.13Z" },
{ url = "https://files.pythonhosted.org/packages/3d/bc/30f903f891a82d402ffb5fda27ec1d621cc97cb74c16fea0b6141f1d4e87/wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192", size = 56312, upload-time = "2025-01-14T10:34:40.604Z" },
{ url = "https://files.pythonhosted.org/packages/8a/04/c97273eb491b5f1c918857cd26f314b74fc9b29224521f5b83f872253725/wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b", size = 40062, upload-time = "2025-01-14T10:34:45.011Z" },
{ url = "https://files.pythonhosted.org/packages/4e/ca/3b7afa1eae3a9e7fefe499db9b96813f41828b9fdb016ee836c4c379dadb/wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0", size = 40155, upload-time = "2025-01-14T10:34:47.25Z" },
{ url = "https://files.pythonhosted.org/packages/89/be/7c1baed43290775cb9030c774bc53c860db140397047cc49aedaf0a15477/wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306", size = 113471, upload-time = "2025-01-14T10:34:50.934Z" },
{ url = "https://files.pythonhosted.org/packages/32/98/4ed894cf012b6d6aae5f5cc974006bdeb92f0241775addad3f8cd6ab71c8/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb", size = 101208, upload-time = "2025-01-14T10:34:52.297Z" },
{ url = "https://files.pythonhosted.org/packages/ea/fd/0c30f2301ca94e655e5e057012e83284ce8c545df7661a78d8bfca2fac7a/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681", size = 109339, upload-time = "2025-01-14T10:34:53.489Z" },
{ url = "https://files.pythonhosted.org/packages/75/56/05d000de894c4cfcb84bcd6b1df6214297b8089a7bd324c21a4765e49b14/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6", size = 110232, upload-time = "2025-01-14T10:34:55.327Z" },
{ url = "https://files.pythonhosted.org/packages/53/f8/c3f6b2cf9b9277fb0813418e1503e68414cd036b3b099c823379c9575e6d/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6", size = 100476, upload-time = "2025-01-14T10:34:58.055Z" },
{ url = "https://files.pythonhosted.org/packages/a7/b1/0bb11e29aa5139d90b770ebbfa167267b1fc548d2302c30c8f7572851738/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f", size = 106377, upload-time = "2025-01-14T10:34:59.3Z" },
{ url = "https://files.pythonhosted.org/packages/6a/e1/0122853035b40b3f333bbb25f1939fc1045e21dd518f7f0922b60c156f7c/wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555", size = 37986, upload-time = "2025-01-14T10:35:00.498Z" },
{ url = "https://files.pythonhosted.org/packages/09/5e/1655cf481e079c1f22d0cabdd4e51733679932718dc23bf2db175f329b76/wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c", size = 40750, upload-time = "2025-01-14T10:35:03.378Z" },
{ url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594, upload-time = "2025-01-14T10:35:44.018Z" },
{ url = "https://files.pythonhosted.org/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308 },
{ url = "https://files.pythonhosted.org/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488 },
{ url = "https://files.pythonhosted.org/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776 },
{ url = "https://files.pythonhosted.org/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776 },
{ url = "https://files.pythonhosted.org/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420 },
{ url = "https://files.pythonhosted.org/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199 },
{ url = "https://files.pythonhosted.org/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307 },
{ url = "https://files.pythonhosted.org/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025 },
{ url = "https://files.pythonhosted.org/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879 },
{ url = "https://files.pythonhosted.org/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419 },
{ url = "https://files.pythonhosted.org/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773 },
{ url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799 },
{ url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821 },
{ url = "https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919 },
{ url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721 },
{ url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899 },
{ url = "https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222 },
{ url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707 },
{ url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685 },
{ url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567 },
{ url = "https://files.pythonhosted.org/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672 },
{ url = "https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865 },
{ url = "https://files.pythonhosted.org/packages/ce/b9/0ffd557a92f3b11d4c5d5e0c5e4ad057bd9eb8586615cdaf901409920b14/wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125", size = 53800 },
{ url = "https://files.pythonhosted.org/packages/c0/ef/8be90a0b7e73c32e550c73cfb2fa09db62234227ece47b0e80a05073b375/wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998", size = 38824 },
{ url = "https://files.pythonhosted.org/packages/36/89/0aae34c10fe524cce30fe5fc433210376bce94cf74d05b0d68344c8ba46e/wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5", size = 38920 },
{ url = "https://files.pythonhosted.org/packages/3b/24/11c4510de906d77e0cfb5197f1b1445d4fec42c9a39ea853d482698ac681/wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8", size = 88690 },
{ url = "https://files.pythonhosted.org/packages/71/d7/cfcf842291267bf455b3e266c0c29dcb675b5540ee8b50ba1699abf3af45/wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6", size = 80861 },
{ url = "https://files.pythonhosted.org/packages/d5/66/5d973e9f3e7370fd686fb47a9af3319418ed925c27d72ce16b791231576d/wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc", size = 89174 },
{ url = "https://files.pythonhosted.org/packages/a7/d3/8e17bb70f6ae25dabc1aaf990f86824e4fd98ee9cadf197054e068500d27/wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2", size = 86721 },
{ url = "https://files.pythonhosted.org/packages/6f/54/f170dfb278fe1c30d0ff864513cff526d624ab8de3254b20abb9cffedc24/wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b", size = 79763 },
{ url = "https://files.pythonhosted.org/packages/4a/98/de07243751f1c4a9b15c76019250210dd3486ce098c3d80d5f729cba029c/wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504", size = 87585 },
{ url = "https://files.pythonhosted.org/packages/f9/f0/13925f4bd6548013038cdeb11ee2cbd4e37c30f8bfd5db9e5a2a370d6e20/wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a", size = 36676 },
{ url = "https://files.pythonhosted.org/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845", size = 38871 },
{ url = "https://files.pythonhosted.org/packages/3d/bc/30f903f891a82d402ffb5fda27ec1d621cc97cb74c16fea0b6141f1d4e87/wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192", size = 56312 },
{ url = "https://files.pythonhosted.org/packages/8a/04/c97273eb491b5f1c918857cd26f314b74fc9b29224521f5b83f872253725/wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b", size = 40062 },
{ url = "https://files.pythonhosted.org/packages/4e/ca/3b7afa1eae3a9e7fefe499db9b96813f41828b9fdb016ee836c4c379dadb/wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0", size = 40155 },
{ url = "https://files.pythonhosted.org/packages/89/be/7c1baed43290775cb9030c774bc53c860db140397047cc49aedaf0a15477/wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306", size = 113471 },
{ url = "https://files.pythonhosted.org/packages/32/98/4ed894cf012b6d6aae5f5cc974006bdeb92f0241775addad3f8cd6ab71c8/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb", size = 101208 },
{ url = "https://files.pythonhosted.org/packages/ea/fd/0c30f2301ca94e655e5e057012e83284ce8c545df7661a78d8bfca2fac7a/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681", size = 109339 },
{ url = "https://files.pythonhosted.org/packages/75/56/05d000de894c4cfcb84bcd6b1df6214297b8089a7bd324c21a4765e49b14/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6", size = 110232 },
{ url = "https://files.pythonhosted.org/packages/53/f8/c3f6b2cf9b9277fb0813418e1503e68414cd036b3b099c823379c9575e6d/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6", size = 100476 },
{ url = "https://files.pythonhosted.org/packages/a7/b1/0bb11e29aa5139d90b770ebbfa167267b1fc548d2302c30c8f7572851738/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f", size = 106377 },
{ url = "https://files.pythonhosted.org/packages/6a/e1/0122853035b40b3f333bbb25f1939fc1045e21dd518f7f0922b60c156f7c/wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555", size = 37986 },
{ url = "https://files.pythonhosted.org/packages/09/5e/1655cf481e079c1f22d0cabdd4e51733679932718dc23bf2db175f329b76/wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c", size = 40750 },
{ url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 },
]

[[package]]
name = "xonsh"
version = "0.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/68/4e/56e95a5e607eb3b0da37396f87cde70588efc8ef819ab16f02d5b8378dc4/xonsh-0.19.2.tar.gz", hash = "sha256:cfdd0680d954a2c3aefd6caddcc7143a3d06aa417ed18365a08219bb71b960b0", size = 799960, upload-time = "2025-02-11T17:10:43.563Z" }
sdist = { url = "https://files.pythonhosted.org/packages/68/4e/56e95a5e607eb3b0da37396f87cde70588efc8ef819ab16f02d5b8378dc4/xonsh-0.19.2.tar.gz", hash = "sha256:cfdd0680d954a2c3aefd6caddcc7143a3d06aa417ed18365a08219bb71b960b0", size = 799960 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/13/281094759df87b23b3c02dc4a16603ab08ea54d7f6acfeb69f3341137c7a/xonsh-0.19.2-py310-none-any.whl", hash = "sha256:ec7f163fd3a4943782aa34069d4e72793328c916a5975949dbec8536cbfc089b", size = 642301, upload-time = "2025-02-11T17:10:39.244Z" },
{ url = "https://files.pythonhosted.org/packages/29/41/a51e4c3918fe9a293b150cb949b1b8c6d45eb17dfed480dcb76ea43df4e7/xonsh-0.19.2-py311-none-any.whl", hash = "sha256:53c45f7a767901f2f518f9b8dd60fc653e0498e56e89825e1710bb0859985049", size = 642286, upload-time = "2025-02-11T17:10:41.678Z" },
{ url = "https://files.pythonhosted.org/packages/0a/93/9a77b731f492fac27c577dea2afb5a2bcc2a6a1c79be0c86c95498060270/xonsh-0.19.2-py312-none-any.whl", hash = "sha256:b24c619aa52b59eae4d35c4195dba9b19a2c548fb5c42c6f85f2b8ccb96807b5", size = 642386, upload-time = "2025-02-11T17:10:43.688Z" },
{ url = "https://files.pythonhosted.org/packages/be/75/070324769c1ff88d971ce040f4f486339be98e0a365c8dd9991eb654265b/xonsh-0.19.2-py313-none-any.whl", hash = "sha256:c53ef6c19f781fbc399ed1b382b5c2aac2125010679a3b61d643978273c27df0", size = 642873, upload-time = "2025-02-11T17:10:39.297Z" },
{ url = "https://files.pythonhosted.org/packages/fa/cb/2c7ccec54f5b0e73fdf7650e8336582ff0347d9001c5ef8271dc00c034fe/xonsh-0.19.2-py39-none-any.whl", hash = "sha256:bcc0225dc3847f1ed2f175dac6122fbcc54cea67d9c2dc2753d9615e2a5ff284", size = 634602, upload-time = "2025-02-11T17:10:37.004Z" },
{ url = "https://files.pythonhosted.org/packages/6c/13/281094759df87b23b3c02dc4a16603ab08ea54d7f6acfeb69f3341137c7a/xonsh-0.19.2-py310-none-any.whl", hash = "sha256:ec7f163fd3a4943782aa34069d4e72793328c916a5975949dbec8536cbfc089b", size = 642301 },
{ url = "https://files.pythonhosted.org/packages/29/41/a51e4c3918fe9a293b150cb949b1b8c6d45eb17dfed480dcb76ea43df4e7/xonsh-0.19.2-py311-none-any.whl", hash = "sha256:53c45f7a767901f2f518f9b8dd60fc653e0498e56e89825e1710bb0859985049", size = 642286 },
{ url = "https://files.pythonhosted.org/packages/0a/93/9a77b731f492fac27c577dea2afb5a2bcc2a6a1c79be0c86c95498060270/xonsh-0.19.2-py312-none-any.whl", hash = "sha256:b24c619aa52b59eae4d35c4195dba9b19a2c548fb5c42c6f85f2b8ccb96807b5", size = 642386 },
{ url = "https://files.pythonhosted.org/packages/be/75/070324769c1ff88d971ce040f4f486339be98e0a365c8dd9991eb654265b/xonsh-0.19.2-py313-none-any.whl", hash = "sha256:c53ef6c19f781fbc399ed1b382b5c2aac2125010679a3b61d643978273c27df0", size = 642873 },
{ url = "https://files.pythonhosted.org/packages/fa/cb/2c7ccec54f5b0e73fdf7650e8336582ff0347d9001c5ef8271dc00c034fe/xonsh-0.19.2-py39-none-any.whl", hash = "sha256:bcc0225dc3847f1ed2f175dac6122fbcc54cea67d9c2dc2753d9615e2a5ff284", size = 634602 },
]