Compare commits

169 commits: main ... modden_spa
| SHA1 |
|---|
| d28c7e17c6 |
| d23d8c1779 |
| 58cc57a422 |
| da913ef2bb |
| 96992bcbb9 |
| 6533285d7d |
| 8c39b8b124 |
| ededa2e88f |
| dd168184c3 |
| 37ee477aee |
| f067cf48a7 |
| c56d4b0a79 |
| 7cafb59ab7 |
| 7458f99733 |
| 4c3c3e4b56 |
| b29d33d603 |
| 1617e0ff2c |
| c025761f15 |
| 2e797ef7ee |
| c36deb1f4d |
| fa7e37d6ed |
| 364ea91983 |
| 7ae9b5319b |
| 6156ff95f8 |
| 9e3f41a5b1 |
| 7c22f76274 |
| 04c99c2749 |
| e536057fea |
| c6b4da5788 |
| 1f7f84fdfa |
| a5bdc6db66 |
| 9a18b57d38 |
| ed10632d97 |
| 299429a278 |
| 28fefe4ffe |
| 08a6a51cb8 |
| 50465d4b34 |
| 4f69af872c |
| 9bc6a61c93 |
| 23aa97692e |
| 1e5810e56c |
| b54cb6682c |
| 3ed309f019 |
| d08aeaeafe |
| c6ee4e5dc1 |
| ad5eee5666 |
| fc72d75061 |
| de1843dc84 |
| 930d498841 |
| 5ea112699d |
| e244747bc3 |
| 5a09ccf459 |
| ce1bcf6d36 |
| 28ba5e5435 |
| 10adf34be5 |
| 82dcaff8db |
| 621b252b0c |
| 20a089c331 |
| df50d78042 |
| 114ec36436 |
| 179d7d2b04 |
| f568fca98f |
| 6c9bc627d8 |
| 1d7cf7d1dd |
| 54a0a0000d |
| 0268b2ce91 |
| 81f8e2d4ac |
| bf0739c194 |
| 5fe3f58ea9 |
| 3e1d033708 |
| c35576e196 |
| 8ce26d692f |
| 7f29fd8dcf |
| 7fbada8a15 |
| 286e75d342 |
| df641d9d31 |
| 35b0c4bef0 |
| c4496f21fc |
| 7e0e627921 |
| 28ea8e787a |
| 0294455c5e |
| 734bc09b67 |
| 0bcdea28a0 |
| fdf3a1b01b |
| ce7b8a5e18 |
| 00024181cd |
| 814384848d |
| bea31f6d19 |
| 250275d98d |
| f415fc43ce |
| 3f15923537 |
| 87cd725adb |
| 48accbd28f |
| 227c9ea173 |
| d651f3d8e9 |
| ef0cfc4b20 |
| ecb525a2bc |
| b77d123edd |
| f4e63465de |
| df31047ecb |
| 131674eabd |
| 5a94e8fb5b |
| 0518b3ab04 |
| 2f0bed3018 |
| 9da3b63644 |
| 1d6f55543d |
| a3ed30e62b |
| 42d621bba7 |
| 2e81ccf5b4 |
| 022bf8ce75 |
| 0e9457299c |
| 6b1ceee19f |
| 1e689ee701 |
| 190845ce1d |
| 0c74b04c83 |
| 215fec1d41 |
| fcc8cee9d3 |
| ca3f7a1b6b |
| 87c1113de4 |
| 43b659dbe4 |
| 63b1488ab6 |
| 7eb31f3fea |
| 534e5d150d |
| e4a6223256 |
| ab2664da70 |
| ae326cbb9a |
| 07cec02303 |
| 2fdb8fc25a |
| 6d951c526a |
| 575a24adf1 |
| 919e462f88 |
| a09b8560bb |
| c4cd573b26 |
| d24a9e158f |
| 18a1634025 |
| 78c0d2b234 |
| 4314a59327 |
| e94f1261b5 |
| 86da79a854 |
| de89e3a9c4 |
| 7bed470f5c |
| fa9a9cfb1d |
| 3d0e95513c |
| ee151b00af |
| 22c14e235e |
| 1102843087 |
| e03bec5efc |
| bee2c36072 |
| b36b3d522f |
| 4ace8f6037 |
| 98a7326c85 |
| 46972df041 |
| 565d7c3ee5 |
| ac695a05bf |
| fc56971a2d |
| ee87cf0e29 |
| ebcb275cd8 |
| f745da9fb2 |
| 4f442efbd7 |
| f9a84f0732 |
| e0bf964ff0 |
| a9fc4c1b91 |
| b52ff270c5 |
| 1713ecd9f8 |
| edb82fdd78 |
| 339d787cf8 |
| c32b21b4b1 |
| 71477290fc |
| 9716d86825 |
@@ -8,70 +8,46 @@ on:
  workflow_dispatch:

jobs:
  # ------ sdist ------

  mypy:
    name: 'MyPy'
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Setup python
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: pip install -U . --upgrade-strategy eager -r requirements-test.txt

      - name: Run MyPy check
        run: mypy tractor/ --ignore-missing-imports --show-traceback

  # test that we can generate a software distribution and install it
  # thus avoid missing file issues after packaging.
  #
  # -[x] produce sdist with uv
  # ------ - ------
  sdist-linux:
    name: 'sdist'
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        uses: actions/checkout@v2

      - name: Install latest uv
        uses: astral-sh/setup-uv@v6
      - name: Setup python
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'

      - name: Build sdist as tar.gz
        run: uv build --sdist --python=3.13
      - name: Build sdist
        run: python setup.py sdist --formats=zip

      - name: Install sdist from .tar.gz
        run: python -m pip install dist/*.tar.gz

  # ------ type-check ------
  # mypy:
  #   name: 'MyPy'
  #   runs-on: ubuntu-latest

  #   steps:
  #     - name: Checkout
  #       uses: actions/checkout@v4

  #     - name: Install latest uv
  #       uses: astral-sh/setup-uv@v6

  #     # faster due to server caching?
  #     # https://docs.astral.sh/uv/guides/integration/github/#setting-up-python
  #     - name: "Set up Python"
  #       uses: actions/setup-python@v6
  #       with:
  #         python-version-file: "pyproject.toml"

  #     # w uv
  #     # - name: Set up Python
  #     #   run: uv python install

  #     - name: Setup uv venv
  #       run: uv venv .venv --python=3.13

  #     - name: Install
  #       run: uv sync --dev

  #     # TODO, ty cmd over repo
  #     # - name: type check with ty
  #     #   run: ty ./tractor/

  #     # - uses: actions/cache@v3
  #     #     name: Cache uv virtenv as default .venv
  #     #     with:
  #     #       path: ./.venv
  #     #       key: venv-${{ hashFiles('uv.lock') }}

  #     - name: Run MyPy check
  #       run: mypy tractor/ --ignore-missing-imports --show-traceback
      - name: Install sdist from .zips
        run: python -m pip install dist/*.zip


  testing-linux:
@@ -83,45 +59,32 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest]
        python-version: ['3.13']
        python: ['3.10']
        spawn_backend: [
          'trio',
          # 'mp_spawn',
          # 'mp_forkserver',
          'mp_spawn',
          'mp_forkserver',
        ]

    steps:

      - uses: actions/checkout@v4
      - name: Checkout
        uses: actions/checkout@v2

      - name: 'Install uv + py-${{ matrix.python-version }}'
        uses: astral-sh/setup-uv@v6
      - name: Setup python
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
          python-version: '${{ matrix.python }}'

      # GH way.. faster?
      # - name: setup-python@v6
      #   uses: actions/setup-python@v6
      #   with:
      #     python-version: '${{ matrix.python-version }}'
      - name: Install dependencies
        run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager

      # consider caching for speedups?
      # https://docs.astral.sh/uv/guides/integration/github/#caching

      - name: Install the project w uv
        run: uv sync --all-extras --dev

      # - name: Install dependencies
      #   run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager

      - name: List deps tree
        run: uv tree
      - name: List dependencies
        run: pip list

      - name: Run tests
        run: uv run pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx
        run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx

  # XXX legacy NOTE XXX
  #
  # We skip 3.10 on windows for now due to not having any collabs to
  # debug the CI failures. Anyone wanting to hack and solve them is very
  # welcome, but our primary user base is not using that OS.
default.nix
@@ -1,19 +0,0 @@
{ pkgs ? import <nixpkgs> {} }:
let
  nativeBuildInputs = with pkgs; [
    stdenv.cc.cc.lib
    uv
  ];

in
pkgs.mkShell {
  inherit nativeBuildInputs;

  LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath nativeBuildInputs;
  TMPDIR = "/tmp";

  shellHook = ''
    set -e
    uv venv .venv --python=3.12
  '';
}

docs/README.rst
@@ -1,20 +1,20 @@
|logo| ``tractor``: distributed structured concurrency
|logo| ``tractor``: next-gen Python parallelism

``tractor`` is a `structured concurrency`_ (SC), multi-processing_ runtime built on trio_.
|gh_actions|
|docs|

Fundamentally, ``tractor`` provides parallelism via
``trio``-"*actors*": independent Python **processes** (i.e.
*non-shared-memory threads*) which can schedule ``trio`` tasks whilst
maintaining *end-to-end SC* inside a *distributed supervision tree*.
``tractor`` is a `structured concurrent`_, (optionally
distributed_) multi-processing_ runtime built on trio_.

Fundamentally, ``tractor`` gives you parallelism via
``trio``-"*actors*": independent Python processes (aka
non-shared-memory threads) which maintain structured
concurrency (SC) *end-to-end* inside a *supervision tree*.

Cross-process (and thus cross-host) SC is accomplished through the
combined use of our,

- "actor nurseries_" which provide for spawning multiple, and
  possibly nested, Python processes each running a ``trio`` scheduled
  runtime - a call to ``trio.run()``,
- an "SC-transitive supervision protocol" enforced as an
  IPC-message-spec encapsulating all RPC-dialogs.
combined use of our "actor nurseries_" and an "SC-transitive IPC
protocol" constructed on top of multiple Pythons each running a ``trio``
scheduled runtime - a call to ``trio.run()``.

We believe the system adheres to the `3 axioms`_ of an "`actor model`_"
but likely **does not** look like what **you** probably *think* an "actor
@@ -27,7 +27,6 @@ The first step to grok ``tractor`` is to get an intermediate
knowledge of ``trio`` and **structured concurrency** B)

Some great places to start are,

- the seminal `blog post`_
- obviously the `trio docs`_
- wikipedia's nascent SC_ page
@@ -36,91 +35,22 @@ Some great places to start are,

Features
--------
- **It's just** a ``trio`` API!
- *Infinitely nestable* process trees running embedded ``trio`` tasks.
- Swappable, OS-specific, process spawning via multiple backends.
- Modular IPC stack, allowing for custom interchange formats (eg.
  as offered from `msgspec`_), varied transport protocols (TCP, RUDP,
  QUIC, wireguard), and OS-env specific higher-perf primitives (UDS,
  shm-ring-buffers).
- Optionally distributed_: all IPC and RPC APIs work over multi-host
  transports the same as local.
- Builtin high-level streaming API that enables your app to easily
  leverage the benefits of a "`cheap or nasty`_" `(un)protocol`_.
- A "native UX" around a multi-process safe debugger REPL using
  `pdbp`_ (a fork & fix of `pdb++`_)
- "Infected ``asyncio``" mode: support for starting an actor's
  runtime as a `guest`_ on the ``asyncio`` loop allowing us to
  provide stringent SC-style ``trio.Task``-supervision around any
  ``asyncio.Task`` spawned via our ``tractor.to_asyncio`` APIs.
- A **very naive** and still very much work-in-progress inter-actor
  `discovery`_ sys with plans to support multiple `modern protocol`_
  approaches.
- Various ``trio`` extension APIs via ``tractor.trionics`` such as,
  - task fan-out `broadcasting`_,
  - multi-task-single-resource-caching and fan-out-to-multi
    ``__aenter__()`` APIs for ``@acm`` functions,
  - (WIP) a ``TaskMngr``: one-cancels-one style nursery supervisor.


Status of `main` / infra
------------------------

- |gh_actions|
- |docs|


Install
-------
``tractor`` is still in an *alpha-near-beta-stage* for many
of its subsystems, however we are very close to having a stable
lowlevel runtime and API.

As such, it's currently recommended that you clone and install the
repo from source::

    pip install git+git://github.com/goodboy/tractor.git


We use the very hip `uv`_ for project mgmt::

    git clone https://github.com/goodboy/tractor.git
    cd tractor
    uv sync --dev
    uv run python examples/rpc_bidir_streaming.py

Consider activating a virtual/project-env before starting to hack on
the code base::

    # you could use plain ol' venvs
    # https://docs.astral.sh/uv/pip/environments/
    uv venv tractor_py313 --python 3.13

    # but @goodboy prefers the more explicit (and shell agnostic)
    # https://docs.astral.sh/uv/configuration/environment/#uv_project_environment
    UV_PROJECT_ENVIRONMENT="tractor_py313"

    # hint hint, enter @goodboy's fave shell B)
    uv run --dev xonsh

Alongside all this we ofc offer "releases" on PyPi::

    pip install tractor

Just note that YMMV since the main git branch is often much further
ahead than any latest release.


Example codez
-------------
In ``tractor``'s (very lacking) documentation we prefer to point to
example scripts in the repo over duplicating them in docs, but with
that in mind here are some definitive snippets to try and hook you
into digging deeper.
- **It's just** a ``trio`` API
- *Infinitely nestable* process trees
- Builtin IPC streaming APIs with task fan-out broadcasting
- A "native" multi-core debugger REPL using `pdbp`_ (a fork & fix of
  `pdb++`_ thanks to @mdmintz!)
- Support for a swappable, OS specific, process spawning layer
- A modular transport stack, allowing for custom serialization (eg. with
  `msgspec`_), communications protocols, and environment specific IPC
  primitives
- Support for spawning process-level-SC, inter-loop one-to-one-task oriented
  ``asyncio`` actors via "infected ``asyncio``" mode
- `structured chadcurrency`_ from the ground up


Run a func in a process
***********************
-----------------------
Use ``trio``'s style of focussing on *tasks as functions*:

.. code:: python
@@ -178,7 +108,7 @@ might want to check out `trio-parallel`_.


Zombie safe: self-destruct a process tree
*****************************************
-----------------------------------------
``tractor`` tries to protect you from zombies, no matter what.

.. code:: python
@@ -234,7 +164,7 @@ it **is a bug**.


"Native" multi-process debugging
********************************
--------------------------------
Using the magic of `pdbp`_ and our internal IPC, we've
been able to create a native feeling debugging experience for
any (sub-)process in your ``tractor`` tree.
@@ -289,7 +219,7 @@ We're hoping to add a respawn-from-repl system soon!


SC compatible bi-directional streaming
**************************************
--------------------------------------
Yes, you saw it here first; we provide 2-way streams
with reliable, transitive setup/teardown semantics.

@@ -381,7 +311,7 @@ hear your thoughts on!


Worker poolz are easy peasy
***************************
---------------------------
The initial ask from most new users is *"how do I make a worker
pool thing?"*.

@@ -403,10 +333,10 @@ This uses no extra threads, fancy semaphores or futures; all we need
is ``tractor``'s IPC!

"Infected ``asyncio``" mode
***************************
---------------------------
Have a bunch of ``asyncio`` code you want to force to be SC at the process level?

Check out our experimental system for `guest`_-mode controlled
Check out our experimental system for `guest-mode`_ controlled
``asyncio`` actors:

.. code:: python
@@ -512,7 +442,7 @@ We need help refining the `asyncio`-side channel API to be more


Higher level "cluster" APIs
***************************
---------------------------
To be extra terse the ``tractor`` devs have started hacking some "higher
level" APIs for managing actor trees/clusters. These interfaces should
generally be considered provisional for now but we encourage you to try
@@ -569,6 +499,18 @@ spawn a flat cluster:
.. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py


Install
-------
From PyPi::

    pip install tractor


From git::

    pip install git+git://github.com/goodboy/tractor.git


Under the hood
--------------
``tractor`` is an attempt to pair trionic_ `structured concurrency`_ with
@@ -672,32 +614,25 @@ channel`_!
.. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s
.. _trio gitter channel: https://gitter.im/python-trio/general
.. _matrix channel: https://matrix.to/#/!tractor:matrix.org
.. _broadcasting: https://github.com/goodboy/tractor/pull/229
.. _modern protocol: https://en.wikipedia.org/wiki/Rendezvous_protocol
.. _pdbp: https://github.com/mdmintz/pdbp
.. _pdb++: https://github.com/pdbpp/pdbpp
.. _cheap or nasty: https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern
.. _(un)protocol: https://zguide.zeromq.org/docs/chapter7/#Unprotocols
.. _discovery: https://zguide.zeromq.org/docs/chapter8/#Discovery
.. _modern protocol: https://en.wikipedia.org/wiki/Rendezvous_protocol
.. _guest mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops
.. _messages: https://en.wikipedia.org/wiki/Message_passing
.. _trio docs: https://trio.readthedocs.io/en/latest/
.. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _SC: https://en.wikipedia.org/wiki/Structured_concurrency
.. _libdill-docs: https://sustrik.github.io/libdill/structured-concurrency.html
.. _structured chadcurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony
.. _async generators: https://www.python.org/dev/peps/pep-0525/
.. _trio-parallel: https://github.com/richardsheridan/trio-parallel
.. _uv: https://docs.astral.sh/uv/
.. _msgspec: https://jcristharif.com/msgspec/
.. _guest: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops
.. _guest-mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops

..
   NOTE, on generating badge links from the UI
   https://docs.github.com/en/actions/how-tos/monitoring-and-troubleshooting-workflows/monitoring-workflows/adding-a-workflow-status-badge?ref=gitguardian-blog-automated-secrets-detection#using-the-ui
.. |gh_actions| image:: https://github.com/goodboy/tractor/actions/workflows/ci.yml/badge.svg?branch=main
    :target: https://github.com/goodboy/tractor/actions/workflows/ci.yml

.. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fgoodboy%2Ftractor%2Fbadge&style=popout-square
    :target: https://actions-badge.atrox.dev/goodboy/tractor/goto

.. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest
    :target: https://tractor.readthedocs.io/en/latest/?badge=latest
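The README's example code blocks are elided by this compare view (only
the hunk headers around them survive). For orientation, a minimal
sketch of the "run a func in a process" pattern it refers to, using
only APIs that appear elsewhere in this compare (``open_nursery()``,
``run_in_actor()``, ``Portal.result()``); the ``cube`` function name is
hypothetical:

.. code:: python

    import trio
    import tractor


    async def cube(x: int) -> int:
        # runs as the "main task" of a spawned subactor (subprocess)
        return x ** 3


    async def main():
        async with tractor.open_nursery() as an:
            # spawn a subactor, schedule `cube` in it, then wait on
            # its result from the parent side.
            portal = await an.run_in_actor(cube, x=3)
            assert await portal.result() == 27


    if __name__ == '__main__':
        trio.run(main)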
@@ -6,7 +6,6 @@ been an outage) and we want to ensure that despite being in debug mode
actor tree will eventually be cancelled without leaving any zombies.

'''
from contextlib import asynccontextmanager as acm
from functools import partial

from tractor import (
@@ -16,10 +15,64 @@ from tractor import (
    ContextCancelled,
    MsgStream,
    _testing,
    trionics,
)
import trio
import pytest


async def break_ipc(
    stream: MsgStream,
    method: str|None = None,
    pre_close: bool = False,

    def_method: str = 'eof',

) -> None:
    '''
    XXX: close the channel right after an error is raised
    purposely breaking the IPC transport to make sure the parent
    doesn't get stuck in debug or hang on the connection join.
    this more or less simulates an infinite msg-receive hang on
    the other end.

    '''
    # close channel via IPC prot msging before
    # any transport breakage
    if pre_close:
        await stream.aclose()

    method: str = method or def_method

    match method:
        case 'trans_aclose':
            await stream._ctx.chan.transport.stream.aclose()

        case 'eof':
            await stream._ctx.chan.transport.stream.send_eof()

        case 'msg':
            await stream._ctx.chan.send(None)

        # TODO: the actual real-world simulated cases like
        # transport layer hangs and/or lower layer 2-gens type
        # scenarios..
        #
        # -[ ] already have some issues for this general testing
        # area:
        #  - https://github.com/goodboy/tractor/issues/97
        #  - https://github.com/goodboy/tractor/issues/124
        #   - PR from @guille:
        #     https://github.com/goodboy/tractor/pull/149
        # case 'hang':
        # TODO: framework research:
        #
        # - https://github.com/GuoTengda1993/pynetem
        # - https://github.com/shopify/toxiproxy
        # - https://manpages.ubuntu.com/manpages/trusty/man1/wirefilter.1.html

        case _:
            raise RuntimeError(
                f'IPC break method unsupported: {method}'
            )


async def break_ipc_then_error(
@@ -27,17 +80,17 @@ async def break_ipc_then_error(
    break_ipc_with: str|None = None,
    pre_close: bool = False,
):
    await _testing.break_ipc(
        stream=stream,
        method=break_ipc_with,
        pre_close=pre_close,
    )
    async for msg in stream:
        await stream.send(msg)

    assert 0
        await break_ipc(
            stream=stream,
            method=break_ipc_with,
            pre_close=pre_close,
        )
        assert 0


# async def close_stream_and_error(
async def iter_ipc_stream(
    stream: MsgStream,
    break_ipc_with: str|None = None,
@@ -46,6 +99,20 @@ async def iter_ipc_stream(
    async for msg in stream:
        await stream.send(msg)

        # wipe out channel right before raising
        # await break_ipc(
        #     stream=stream,
        #     method=break_ipc_with,
        #     pre_close=pre_close,
        # )

        # send channel close msg at SC-prot level
        #
        # TODO: what should get raised here if anything?
        # await stream.aclose()

    # assert 0


@context
async def recv_and_spawn_net_killers(
@@ -59,54 +126,33 @@ async def recv_and_spawn_net_killers(
    Receive stream msgs and spawn some IPC killers mid-stream.

    '''
    broke_ipc: bool = False
    await ctx.started()
    async with (
        ctx.open_stream() as stream,
        trionics.collapse_eg(),
        trio.open_nursery() as tn,
        trio.open_nursery() as n,
    ):
        async for i in stream:
            print(f'child echoing {i}')
            if not broke_ipc:
                await stream.send(i)
            else:
                await trio.sleep(0.01)

            await stream.send(i)
            if (
                break_ipc_after
                and
                i >= break_ipc_after
                i > break_ipc_after
            ):
                broke_ipc = True
                tn.start_soon(
                    iter_ipc_stream,
                    stream,
                )
                tn.start_soon(
                '#################################\n'
                'Simulating CHILD-side IPC BREAK!\n'
                '#################################\n'
                n.start_soon(
                    partial(
                        break_ipc_then_error,
                        stream=stream,
                        pre_close=pre_close,
                    )
                )


@acm
async def stuff_hangin_ctlc(timeout: float = 1) -> None:

    with trio.move_on_after(timeout) as cs:
        yield timeout

    if cs.cancelled_caught:
        # pretend to be a user seeing no streaming action
        # thinking it's a hang, and then hitting ctl-c..
        print(
            f"i'm a user on the PARENT side and thingz hangin "
            f'after timeout={timeout} ???\n\n'
            'MASHING CTlR-C..!?\n'
        )
        raise KeyboardInterrupt
                n.start_soon(
                    iter_ipc_stream,
                    stream,
                )


async def main(
@@ -120,10 +166,12 @@
    break_parent_ipc_after: int|bool = False,
    break_child_ipc_after: int|bool = False,
    pre_close: bool = False,
    tpt_proto: str = 'tcp',

) -> None:

    # from tractor._state import _runtime_vars as rtv
    # rtv['_debug_mode'] = debug_mode

    async with (
        open_nursery(
            start_method=start_method,
@@ -132,7 +180,6 @@
            # a hang since it never engages due to broken IPC
            debug_mode=debug_mode,
            loglevel=loglevel,
            enable_transports=[tpt_proto],

        ) as an,
    ):
@@ -143,12 +190,10 @@
        )

        async with (
            stuff_hangin_ctlc(timeout=2) as timeout,
            _testing.expect_ctxc(
                yay=(
                    break_parent_ipc_after
                    or
                    break_child_ipc_after
                    or break_child_ipc_after,
                ),
                # TODO: we CAN'T remove this right?
                # since we need the ctxc to bubble up from either
@@ -160,14 +205,12 @@
                # and KBI in an eg?
                reraise=True,
            ),

            portal.open_context(
                recv_and_spawn_net_killers,
                break_ipc_after=break_child_ipc_after,
                pre_close=pre_close,
            ) as (ctx, sent),
        ):
            rx_eoc: bool = False
            ipc_break_sent: bool = False
            async with ctx.open_stream() as stream:
                for i in range(1000):
@@ -185,17 +228,17 @@
                            '#################################\n'
                        )

                        # TODO: other methods? see break func above.
                        # await stream._ctx.chan.send(None)
                        # await stream._ctx.chan.transport.stream.send_eof()
                        await stream._ctx.chan.transport.stream.aclose()

                        ipc_break_sent = True

                    # it actually breaks right here in the
                    # mp_spawn/forkserver backends and thus the
                    # zombie reaper never even kicks in?
                    # mp_spawn/forkserver backends and thus the zombie
                    # reaper never even kicks in?
                    print(f'parent sending {i}')
                    try:
                        print(f'parent sending {i}')
                        await stream.send(i)
                    except ContextCancelled as ctxc:
                        print(
@@ -208,19 +251,10 @@
                        # TODO: is this needed or no?
                        raise

                    except trio.ClosedResourceError:
                        # NOTE: don't send if we already broke the
                        # connection to avoid raising a closed-error
                        # such that we drop through to the ctl-c
                        # mashing by user.
                        await trio.sleep(0.01)
                    timeout: int = 1
                    print(f'Entering `stream.receive()` with timeout={timeout}\n')
                    with trio.move_on_after(timeout) as cs:

                    # timeout: int = 1
                    # with trio.move_on_after(timeout) as cs:
                    async with stuff_hangin_ctlc() as timeout:
                        print(
                            f'PARENT `stream.receive()` with timeout={timeout}\n'
                        )
                        # NOTE: in the parent side IPC failure case this
                        # will raise an ``EndOfChannel`` after the child
                        # is killed and sends a stop msg back to it's
@@ -232,30 +266,23 @@
                                f'{rx}\n'
                            )
                        except trio.EndOfChannel:
                            rx_eoc: bool = True
                            print('MsgStream got EoC for PARENT')
                            raise

            print(
                'Streaming finished and we got Eoc.\n'
                'Canceling `.open_context()` in root with\n'
                'CTlR-C..'
            )
            if rx_eoc:
                assert stream.closed
                try:
                    await stream.send(i)
                    pytest.fail('stream not closed?')
                except (
                    trio.ClosedResourceError,
                    trio.EndOfChannel,
                ) as send_err:
                    if rx_eoc:
                        assert send_err is stream._eoc
                    else:
                        assert send_err is stream._closed
                    if cs.cancelled_caught:
                        # pretend to be a user seeing no streaming action
                        # thinking it's a hang, and then hitting ctl-c..
                        print(
                            f"YOO i'm a PARENT user anddd thingz hangin..\n"
                            f'after timeout={timeout}\n'
                        )

            raise KeyboardInterrupt
                print(
                    "YOO i'm mad!\n"
                    'The send side is dun but thingz hangin..\n'
                    'MASHING CTlR-C Ctl-c..'
                )
                raise KeyboardInterrupt


if __name__ == '__main__':
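The ``main()`` entrypoint above is normally driven by the test
harness; a hypothetical direct run, assuming the parameter names shown
in its signature above (the values here are illustrative only), might
look like:

.. code:: python

    from functools import partial

    import trio

    # `main` is the coroutine defined in the example above; the kwarg
    # names come from its signature there, the values are hypothetical.
    if __name__ == '__main__':
        trio.run(
            partial(
                main,
                break_child_ipc_after=5,  # break IPC from the child side
                pre_close=True,           # close the stream before breaking transport
            )
        )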
@@ -1,16 +1,8 @@
'''
Examples of using the builtin `breakpoint()` from an `asyncio.Task`
running in a subactor spawned with `infect_asyncio=True`.

'''
import asyncio

import trio
import tractor
from tractor import (
    to_asyncio,
    Portal,
)
from tractor import to_asyncio


async def aio_sleep_forever():
@@ -25,21 +17,21 @@ async def bp_then_error(

) -> None:

    # sync with `trio`-side (caller) task
    # sync with ``trio``-side (caller) task
    to_trio.send_nowait('start')

    # NOTE: what happens here inside the hook needs some refinement..
    # => seems like it's still `.debug._set_trace()` but
    # => seems like it's still `._debug._set_trace()` but
    #    we set `Lock.local_task_in_debug = 'sync'`, we probably want
    #    some further, at least, meta-data about the task/actor in debug
    #    in terms of making it clear it's `asyncio` mucking about.
    breakpoint()  # asyncio-side
    #    some further, at least, meta-data about the task/actor in debug
    #    in terms of making it clear it's asyncio mucking about.
    breakpoint()

    # short checkpoint / delay
    await asyncio.sleep(0.5)  # asyncio-side
    await asyncio.sleep(0.5)

    if raise_after_bp:
        raise ValueError('asyncio side error!')
        raise ValueError('blah')

    # TODO: test case with this so that it gets cancelled?
    else:
@@ -57,21 +49,23 @@ async def trio_ctx(
    # this will block until the ``asyncio`` task sends a "first"
    # message, see first line in above func.
    async with (

        to_asyncio.open_channel_from(
            bp_then_error,
            # raise_after_bp=not bp_before_started,
            raise_after_bp=not bp_before_started,
        ) as (first, chan),

        trio.open_nursery() as tn,
        trio.open_nursery() as n,
    ):

        assert first == 'start'

        if bp_before_started:
            await tractor.pause()  # trio-side
            await tractor.breakpoint()

        await ctx.started(first)  # trio-side
        await ctx.started(first)

        tn.start_soon(
        n.start_soon(
            to_asyncio.run_task,
            aio_sleep_forever,
        )
@@ -79,50 +73,37 @@


async def main(
    bps_all_over: bool = True,

    # TODO, WHICH OF THESE HAZ BUGZ?
    cancel_from_root: bool = False,
    err_from_root: bool = False,
    bps_all_over: bool = False,

) -> None:

    async with tractor.open_nursery(
        debug_mode=True,
        maybe_enable_greenback=True,
        # loglevel='devx',
    ) as an:
        ptl: Portal = await an.start_actor(
    async with tractor.open_nursery() as n:

        p = await n.start_actor(
            'aio_daemon',
            enable_modules=[__name__],
            infect_asyncio=True,
            debug_mode=True,
            # loglevel='cancel',
            loglevel='cancel',
        )

        async with ptl.open_context(
        async with p.open_context(
            trio_ctx,
            bp_before_started=bps_all_over,
        ) as (ctx, first):

            assert first == 'start'

            # pause in parent to ensure no cross-actor
            # locking problems exist!
            await tractor.pause()  # trio-root

            if cancel_from_root:
                await ctx.cancel()

            if err_from_root:
                assert 0
            else:
                await trio.sleep_forever()
            if bps_all_over:
                await tractor.breakpoint()

            # await trio.sleep_forever()
            await ctx.cancel()
            assert 0

        # TODO: case where we cancel from trio-side while asyncio task
        # has debugger lock?
        # await ptl.cancel_actor()
        # await p.cancel_actor()


if __name__ == '__main__':
@@ -1,9 +0,0 @@
'''
Reproduce a bug where enabling debug mode for a sub-actor actually causes
a hang on teardown...

'''
import asyncio

import trio
import tractor
@@ -1,5 +1,5 @@
'''
Fast fail test with a `Context`.
Fast fail test with a context.

Ensure the partially initialized sub-actor process
doesn't cause a hang on error/cancel of the parent
@@ -4,15 +4,9 @@ import trio

async def breakpoint_forever():
    "Indefinitely re-enter debugger in child actor."
    try:
        while True:
            yield 'yo'
            await tractor.pause()
    except BaseException:
        tractor.log.get_console_log().exception(
            'Cancelled while trying to enter pause point!'
        )
        raise
    while True:
        yield 'yo'
        await tractor.breakpoint()


async def name_error():
@@ -21,14 +15,11 @@ async def name_error():


async def main():
    '''
    Test breakpoint in a streaming actor.

    '''
    """Test breakpoint in a streaming actor.
    """
    async with tractor.open_nursery(
        debug_mode=True,
        loglevel='cancel',
        # loglevel='devx',
        loglevel='error',
    ) as n:

        p0 = await n.start_actor('bp_forever', enable_modules=[__name__])
@@ -41,7 +32,7 @@
            try:
                await p1.run(name_error)
            except tractor.RemoteActorError as rae:
                assert rae.boxed_type is NameError
                assert rae.type is NameError

            async for i in stream:

@@ -10,7 +10,7 @@ async def name_error():
async def breakpoint_forever():
    "Indefinitely re-enter debugger in child actor."
    while True:
        await tractor.pause()
        await tractor.breakpoint()

        # NOTE: if the test never sent 'q'/'quit' commands
        # on the pdb repl, without this checkpoint line the
@@ -45,7 +45,6 @@ async def spawn_until(depth=0):
            )


# TODO: notes on the new boxed-relayed errors through proxy actors
async def main():
    """The main ``tractor`` routine.

@@ -40,7 +40,7 @@ async def main():
    """
    async with tractor.open_nursery(
        debug_mode=True,
        loglevel='devx',
        # loglevel='cancel',
    ) as n:

        # spawn both actors
@@ -6,7 +6,7 @@ async def breakpoint_forever():
    "Indefinitely re-enter debugger in child actor."
    while True:
        await trio.sleep(0.1)
        await tractor.pause()
        await tractor.breakpoint()


async def name_error():
@@ -38,7 +38,6 @@ async def main():
    """
    async with tractor.open_nursery(
        debug_mode=True,
        # loglevel='runtime',
    ) as n:

        # Spawn both actors, don't bother with collecting results
@@ -23,6 +23,5 @@ async def main():
            n.start_soon(debug_actor.run, die)
            n.start_soon(crash_boi.run, die)


if __name__ == '__main__':
    trio.run(main)
@@ -1,56 +0,0 @@
import trio
import tractor


@tractor.context
async def name_error(
    ctx: tractor.Context,
):
    '''
    Raise a `NameError`, catch it and enter `.post_mortem()`, then
    expect the `._rpc._invoke()` crash handler to also engage.

    '''
    try:
        getattr(doggypants)  # noqa (on purpose)
    except NameError:
        await tractor.post_mortem()
        raise


async def main():
    '''
    Test 3 `PdbREPL` entries:
      - one in the child due to manual `.post_mortem()`,
      - another in the child due to runtime RPC crash handling.
      - final one here in parent from the RAE.

    '''
    # XXX NOTE: ideally the REPL arrives at this frame in the parent
    # ONE UP FROM the inner ctx block below!
    async with tractor.open_nursery(
        debug_mode=True,
        # loglevel='cancel',
    ) as an:
        p: tractor.Portal = await an.start_actor(
            'child',
            enable_modules=[__name__],
        )

        # XXX should raise `RemoteActorError[NameError]`
        # AND be the active frame when REPL enters!
        try:
            async with p.open_context(name_error) as (ctx, first):
                assert first
        except tractor.RemoteActorError as rae:
            assert rae.boxed_type is NameError

            # manually handle in root's parent task
            await tractor.post_mortem()
            raise
        else:
            raise RuntimeError('IPC ctx should have remote errored!?')


if __name__ == '__main__':
    trio.run(main)
@@ -4,55 +4,21 @@ import sys
import trio
import tractor

# ensure mod-path is correct!
from tractor.devx.debug import (
    _sync_pause_from_builtin as _sync_pause_from_builtin,
)


async def main() -> None:
    async with tractor.open_nursery(debug_mode=True) as an:

    # initially unset, no entry.
    orig_pybp_var: int = os.environ.get('PYTHONBREAKPOINT')
    assert orig_pybp_var in {None, "0"}

    async with tractor.open_nursery(
        debug_mode=True,
        loglevel='devx',
        maybe_enable_greenback=True,
        # ^XXX REQUIRED to enable `breakpoint()` support (from sync
        # fns) and thus required here to avoid an assertion err
        # on the next line
    ):
        assert (
            (pybp_var := os.environ['PYTHONBREAKPOINT'])
            ==
            'tractor.devx.debug._sync_pause_from_builtin'
        )
        assert os.environ['PYTHONBREAKPOINT'] == 'tractor._debug._set_trace'

        # TODO: an assert that verifies the hook has indeed been, hooked
        # XD
        assert (
            (pybp_hook := sys.breakpointhook)
            is not tractor.devx.debug._set_trace
        )
        assert sys.breakpointhook is not tractor._debug._set_trace

        print(
            f'$PYTHONBREAKPOINT: {pybp_var!r}\n'
            f'`sys.breakpointhook`: {pybp_hook!r}\n'
        )
        breakpoint()  # first bp, tractor hook set.
        breakpoint()

    # XXX AFTER EXIT (of actor-runtime) verify the hook is unset..
    #
    # YES, this is weird but it's how stdlib docs say to do it..
    # https://docs.python.org/3/library/sys.html#sys.breakpointhook
    assert os.environ.get('PYTHONBREAKPOINT') is orig_pybp_var
    # TODO: an assert that verifies the hook is unhooked..
    assert sys.breakpointhook

    # now ensure a regular builtin pause still works
    breakpoint()  # last bp, stdlib hook restored

    breakpoint()

if __name__ == '__main__':
    trio.run(main)
@@ -10,7 +10,7 @@ async def main():

        await trio.sleep(0.1)

        await tractor.pause()
        await tractor.breakpoint()

        await trio.sleep(0.1)

@@ -2,16 +2,13 @@ import trio
import tractor


async def main(
    registry_addrs: tuple[str, int]|None = None
):
async def main():

    async with tractor.open_root_actor(
        debug_mode=True,
        # loglevel='runtime',
    ):
        while True:
            await tractor.pause()
            await tractor.breakpoint()


if __name__ == '__main__':
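Many of these example diffs are the same one-line rename: the old
branch's ``tractor.breakpoint()`` becomes ``tractor.pause()`` on
``main``. A minimal sketch of the pattern common to both, assuming
only the ``open_root_actor(debug_mode=True)`` usage shown in the diff
just above:

.. code:: python

    import trio
    import tractor


    async def main():
        # `debug_mode=True` arms tractor's multi-process debugger REPL;
        # the explicit entry below is `tractor.breakpoint()` on the old
        # branch and `tractor.pause()` on `main`.
        async with tractor.open_root_actor(debug_mode=True):
            await tractor.pause()


    if __name__ == '__main__':
        trio.run(main)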
@@ -24,9 +24,10 @@ async def spawn_until(depth=0):


async def main():
    '''
    The process tree should look as approximately as follows when the
    debugger first engages:
    """The main ``tractor`` routine.

    The process tree should look as approximately as follows when the debugger
    first engages:

    python examples/debugging/multi_nested_subactors_bp_forever.py
    ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...)
@@ -36,11 +37,10 @@
    └─ python -m tractor._child --uid ('spawner0', '1d42012b ...)
       └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...)

    '''
    """
    async with tractor.open_nursery(
        debug_mode=True,
        loglevel='devx',
        enable_transports=['uds'],
        loglevel='warning'
    ) as n:

        # spawn both actors
@@ -1,35 +0,0 @@
import trio
import tractor


async def main():
    async with tractor.open_root_actor(
        debug_mode=True,
        loglevel='cancel',
    ) as _root:

        # manually trigger self-cancellation and wait
        # for it to fully trigger.
        _root.cancel_soon()
        await _root._cancel_complete.wait()
        print('root cancelled')

        # now ensure we can still use the REPL
        try:
            await tractor.pause()
        except trio.Cancelled as _taskc:
            assert (root_cs := _root._root_tn.cancel_scope).cancel_called
            # NOTE^^ above logic but inside `open_root_actor()` and
            # passed to the `shield=` expression is effectively what
            # we're testing here!
            await tractor.pause(shield=root_cs.cancel_called)

        # XXX, if shield logic *is wrong* inside `open_root_actor()`'s
        # crash-handler block this should never be interacted,
        # instead `trio.Cancelled` would be bubbled up: the original
        # BUG.
        assert 0


if __name__ == '__main__':
    trio.run(main)
@@ -1,84 +0,0 @@
'''
Verify we can dump a `stackscope` tree on a hang.

'''
import os
import signal

import trio
import tractor

@tractor.context
async def start_n_shield_hang(
    ctx: tractor.Context,
):
    # actor: tractor.Actor = tractor.current_actor()

    # sync to parent-side task
    await ctx.started(os.getpid())

    print('Entering shield sleep..')
    with trio.CancelScope(shield=True):
        await trio.sleep_forever()  # in subactor

    # XXX NOTE ^^^ since this shields, we expect
    # the zombie reaper (aka T800) to engage on
    # SIGINT from the user and eventually hard-kill
    # this subprocess!


async def main(
    from_test: bool = False,
) -> None:

    async with (
        tractor.open_nursery(
            debug_mode=True,
            enable_stack_on_sig=True,
            # maybe_enable_greenback=False,
            loglevel='devx',
            enable_transports=['uds'],
        ) as an,
    ):
        ptl: tractor.Portal = await an.start_actor(
            'hanger',
            enable_modules=[__name__],
            debug_mode=True,
        )
        async with ptl.open_context(
            start_n_shield_hang,
        ) as (ctx, cpid):

            _, proc, _ = an._children[ptl.chan.uid]
            assert cpid == proc.pid

            print(
                'Yo my child hanging..?\n'
                # "i'm a user who wants to see a `stackscope` tree!\n"
            )

            # XXX simulate the wrapping test's "user actions"
            # (i.e. if a human didn't run this manually but wants to
            # know what they should do to reproduce test behaviour)
            if from_test:
                print(
                    f'Sending SIGUSR1 to {cpid!r}!\n'
                )
                os.kill(
                    cpid,
                    signal.SIGUSR1,
                )

                # simulate user cancelling program
                await trio.sleep(0.5)
                os.kill(
                    os.getpid(),
                    signal.SIGINT,
                )
            else:
                # actually let user send the ctl-c
                await trio.sleep_forever()  # in root


if __name__ == '__main__':
    trio.run(main)
|  | @ -1,88 +0,0 @@ | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def cancellable_pause_loop( | ||||
|     task_status: trio.TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED | ||||
| ): | ||||
|     with trio.CancelScope() as cs: | ||||
|         task_status.started(cs) | ||||
|         for _ in range(3): | ||||
|             try: | ||||
|                 # ON first entry, there is no level-triggered | ||||
|                 # cancellation yet, so this checkpoint just does | ||||
|                 # a parent-task ctx-switch; the scope will then | ||||
|                 # raise at the NEXT checkpoint we hit. | ||||
|                 await trio.lowlevel.checkpoint() | ||||
|                 await tractor.pause() | ||||
| 
 | ||||
|                 cs.cancel() | ||||
| 
 | ||||
|                 # parent should have called `cs.cancel()` by now | ||||
|                 await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|             except trio.Cancelled: | ||||
|                 print('INSIDE SHIELDED PAUSE') | ||||
|                 await tractor.pause(shield=True) | ||||
|         else: | ||||
|             # should raise it again, bubbling up to parent | ||||
|             print('BUBBLING trio.Cancelled to parent task-nursery') | ||||
|             await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def pm_on_cancelled(): | ||||
|     async with trio.open_nursery() as tn: | ||||
|         tn.cancel_scope.cancel() | ||||
|         try: | ||||
|             await trio.sleep_forever() | ||||
|         except trio.Cancelled: | ||||
|             # should also raise `Cancelled` since | ||||
|             # we didn't pass `shield=True`. | ||||
|             try: | ||||
|                 await tractor.post_mortem(hide_tb=False) | ||||
|             except trio.Cancelled as taskc: | ||||
| 
 | ||||
|                 # should enter just fine, in fact it should | ||||
|                 # be debugging the internals of the previous | ||||
|                 # sin-shield call above Bo | ||||
|                 await tractor.post_mortem( | ||||
|                     hide_tb=False, | ||||
|                     shield=True, | ||||
|                 ) | ||||
|                 raise taskc | ||||
| 
 | ||||
|         else: | ||||
|             raise RuntimeError("Didn't cancel as expected!?") | ||||
| 
 | ||||
| 
 | ||||
| async def cancelled_before_pause( | ||||
| ): | ||||
|     ''' | ||||
|     Verify that using a shielded pause works despite surrounding | ||||
|     cancellation called state in the calling task. | ||||
| 
 | ||||
|     ''' | ||||
|     async with trio.open_nursery() as tn: | ||||
|         cs: trio.CancelScope = await tn.start(cancellable_pause_loop) | ||||
|         await trio.sleep(0.1) | ||||
| 
 | ||||
|     assert cs.cancelled_caught | ||||
| 
 | ||||
|     await pm_on_cancelled() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|     ) as n: | ||||
|         portal: tractor.Portal = await n.run_in_actor( | ||||
|             cancelled_before_pause, | ||||
|         ) | ||||
|         await portal.result() | ||||
| 
 | ||||
|         # ensure the same works in the root actor! | ||||
|         await pm_on_cancelled() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
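
The comments above lean on `trio`'s level-triggered cancellation semantics; a standalone demo of just the "raises at the NEXT checkpoint" behaviour (pure `trio`, no `tractor`):

```python
import trio

async def main():
    with trio.CancelScope() as cs:
        cs.cancel()
        # non-checkpointing code keeps running after `.cancel()`..
        print('still running after cancel()')
        # ..but the next checkpoint raises `trio.Cancelled`,
        # which the scope then absorbs on exit.
        await trio.lowlevel.checkpoint()
        print('never reached')
    assert cs.cancelled_caught

trio.run(main)
```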
|  | @ -4,9 +4,9 @@ import trio | |||
| 
 | ||||
| async def gen(): | ||||
|     yield 'yo' | ||||
|     await tractor.pause() | ||||
|     await tractor.breakpoint() | ||||
|     yield 'yo' | ||||
|     await tractor.pause() | ||||
|     await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -15,7 +15,7 @@ async def just_bp( | |||
| ) -> None: | ||||
| 
 | ||||
|     await ctx.started() | ||||
|     await tractor.pause() | ||||
|     await tractor.breakpoint() | ||||
| 
 | ||||
|     # TODO: bps and errors in this call.. | ||||
|     async for val in gen(): | ||||
|  | @ -33,11 +33,8 @@ async def just_bp( | |||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         enable_transports=['uds'], | ||||
|         loglevel='devx', | ||||
|     ) as n: | ||||
|         p = await n.start_actor( | ||||
|             'bp_boi', | ||||
|  |  | |||
|  | @ -3,20 +3,17 @@ import tractor | |||
| 
 | ||||
| 
 | ||||
| async def breakpoint_forever(): | ||||
|     ''' | ||||
|     Indefinitely re-enter debugger in child actor. | ||||
| 
 | ||||
|     ''' | ||||
|     """Indefinitely re-enter debugger in child actor. | ||||
|     """ | ||||
|     while True: | ||||
|         await trio.sleep(0.1) | ||||
|         await tractor.pause() | ||||
|         await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='cancel', | ||||
|     ) as n: | ||||
| 
 | ||||
|         portal = await n.run_in_actor( | ||||
|  |  | |||
|  | @ -3,26 +3,16 @@ import tractor | |||
| 
 | ||||
| 
 | ||||
| async def name_error(): | ||||
|     getattr(doggypants)  # noqa (on purpose) | ||||
|     getattr(doggypants) | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         # loglevel='transport', | ||||
|     ) as an: | ||||
|     ) as n: | ||||
| 
 | ||||
|         # TODO: ideally the REPL arrives at this frame in the parent, | ||||
|         # ABOVE the @api_frame of `Portal.run_in_actor()` (which | ||||
|         # should eventually not even be a portal method ... XD) | ||||
|         # await tractor.pause() | ||||
|         p: tractor.Portal = await an.run_in_actor(name_error) | ||||
| 
 | ||||
|         # with this style, should raise on this line | ||||
|         await p.result() | ||||
| 
 | ||||
|         # with this alt style it should raise at `open_nursery()` | ||||
|         # return await p.result() | ||||
|         portal = await n.run_in_actor(name_error) | ||||
|         await portal.result() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|  |  | |||
|  | @ -1,169 +0,0 @@ | |||
| from functools import partial | ||||
| import time | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| # TODO: only import these when not running from test harness? | ||||
| # can we detect `pexpect` usage maybe? | ||||
| # from tractor.devx.debug import ( | ||||
| #     get_lock, | ||||
| #     get_debug_req, | ||||
| # ) | ||||
| 
 | ||||
| 
 | ||||
| def sync_pause( | ||||
|     use_builtin: bool = False, | ||||
|     error: bool = False, | ||||
|     hide_tb: bool = True, | ||||
|     pre_sleep: float|None = None, | ||||
| ): | ||||
|     if pre_sleep: | ||||
|         time.sleep(pre_sleep) | ||||
| 
 | ||||
|     if use_builtin: | ||||
|         breakpoint(hide_tb=hide_tb) | ||||
| 
 | ||||
|     else: | ||||
|         # TODO: maybe for testing some kind of cm style interface | ||||
|         # where the `._set_trace()` call doesn't happen until block | ||||
|         # exit? | ||||
|         # assert get_lock().ctx_in_debug is None | ||||
|         # assert get_debug_req().repl is None | ||||
|         tractor.pause_from_sync() | ||||
|         # assert get_debug_req().repl is None | ||||
| 
 | ||||
|     if error: | ||||
|         raise RuntimeError('yoyo sync code error') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def start_n_sync_pause( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     actor: tractor.Actor = tractor.current_actor() | ||||
| 
 | ||||
|     # sync to parent-side task | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     print(f'Entering `sync_pause()` in subactor: {actor.uid}\n') | ||||
|     sync_pause() | ||||
|     print(f'Exited `sync_pause()` in subactor: {actor.uid}\n') | ||||
| 
 | ||||
| 
 | ||||
| async def main() -> None: | ||||
|     async with ( | ||||
|         tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|             maybe_enable_greenback=True, | ||||
|             enable_stack_on_sig=True, | ||||
|             # loglevel='warning', | ||||
|             # loglevel='devx', | ||||
|         ) as an, | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         # just from root task | ||||
|         sync_pause() | ||||
| 
 | ||||
|         p: tractor.Portal  = await an.start_actor( | ||||
|             'subactor', | ||||
|             enable_modules=[__name__], | ||||
|             # infect_asyncio=True, | ||||
|             debug_mode=True, | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: 3 sub-actor usage cases: | ||||
|         # -[x] via a `.open_context()` | ||||
|         # -[ ] via a `.run_in_actor()` call | ||||
|         # -[ ] via a `.run()` | ||||
|         # -[ ] via a `.to_thread.run_sync()` in subactor | ||||
|         async with p.open_context( | ||||
|             start_n_sync_pause, | ||||
|         ) as (ctx, first): | ||||
|             assert first is None | ||||
| 
 | ||||
|             # TODO: handle bg-thread-in-root-actor special cases! | ||||
|             # | ||||
|             # there are a couple very subtle situations possible here | ||||
|             # and they are likely to become more important as cpython | ||||
|             # moves to support no-GIL. | ||||
|             # | ||||
|             # Cases: | ||||
|             # 1. root-actor bg-threads that call `.pause_from_sync()` | ||||
|             #   whilst an in-tree subactor also is using ` .pause()`. | ||||
|             # |_ since the root-actor bg thread can not | ||||
|             #   `Lock._debug_lock.acquire_nowait()` without running | ||||
|             #   a `trio.Task`, AND because the | ||||
|             #   `PdbREPL.set_continue()` is called from that | ||||
|             #   bg-thread, we can not `._debug_lock.release()` | ||||
|             #   either! | ||||
|             #  |_ this results in no actor-tree `Lock` being used | ||||
|             #    on behalf of the bg-thread and thus the subactor's | ||||
|             #    task and the thread trying to use stdio | ||||
|             #    simultaneously, which results in the classic TTY | ||||
|             #    clobbering! | ||||
|             # | ||||
|             # 2. multiple sync-bg-threads that call | ||||
|             #   `.pause_from_sync()` where one is scheduled via | ||||
|             #   `Nursery.start_soon(to_thread.run_sync)` in a bg | ||||
|             #   task. | ||||
|             # | ||||
|             #   Due to the GIL, the threads never truly try to step | ||||
|             #   through the REPL simultaneously, BUT their `logging` | ||||
|             #   and traceback outputs are interleaved since the GIL | ||||
|             #   (seemingly) switches threads on every REPL-input | ||||
|             #   from the user.. | ||||
|             # | ||||
|             #   Soo, the context switching semantics of the GIL | ||||
|             #   result in a very confusing and messy interaction UX | ||||
|             #   since eval and (tb) print output is NOT synced to | ||||
|             #   each REPL-cycle (like we normally make it via | ||||
|             #   a `.set_continue()` callback triggering the | ||||
|             #   `Lock.release()`). Ideally we can solve this | ||||
|             #   usability issue NOW because this will of course be | ||||
|             #   that much more important when eventually there is no | ||||
|             #   GIL! | ||||
| 
 | ||||
|             # XXX should cause double REPL entry and thus TTY | ||||
|             # clobbering due to case 1. above! | ||||
|             tn.start_soon( | ||||
|                 partial( | ||||
|                     trio.to_thread.run_sync, | ||||
|                     partial( | ||||
|                         sync_pause, | ||||
|                         use_builtin=False, | ||||
|                         # pre_sleep=0.5, | ||||
|                     ), | ||||
|                     abandon_on_cancel=True, | ||||
|                     thread_name='start_soon_root_bg_thread', | ||||
|                 ) | ||||
|             ) | ||||
| 
 | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|             # XXX should cause double REPL entry and thus TTY | ||||
|             # clobbering due to case 2. above! | ||||
|             await trio.to_thread.run_sync( | ||||
|                 partial( | ||||
|                     sync_pause, | ||||
|                     # NOTE this already works fine since in the new | ||||
|                     # thread the `breakpoint()` built-in is never | ||||
|                     # overloaded, thus NO locking is used, HOWEVER | ||||
|                     # the case 2. from above still exists! | ||||
|                     use_builtin=True, | ||||
|                 ), | ||||
|                 # TODO: with this `False` we can hang!??! | ||||
|                 # abandon_on_cancel=False, | ||||
|                 abandon_on_cancel=True, | ||||
|                 thread_name='inline_root_bg_thread', | ||||
|             ) | ||||
| 
 | ||||
|         await ctx.cancel() | ||||
| 
 | ||||
|         # TODO: case where we cancel from trio-side while asyncio task | ||||
|         # has debugger lock? | ||||
|         await p.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
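
Case 1. above boils down to the fact that a bare bg thread can't drive `trio` primitives directly; outside `tractor`'s lock machinery the canonical workaround is re-entering the loop via `trio.from_thread`. A small sketch (using a `Semaphore` since, unlike `trio.Lock`, it has no per-task owner):

```python
import trio

async def main():
    sem = trio.Semaphore(1)

    def thread_fn():
        # a thread can't `await sem.acquire()` itself; it must
        # hop back into the trio loop that spawned it.
        trio.from_thread.run(sem.acquire)
        try:
            print('held from bg thread')
        finally:
            trio.from_thread.run_sync(sem.release)

    await trio.to_thread.run_sync(thread_fn)

trio.run(main)
```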
|  | @ -1,11 +1,6 @@ | |||
| import time | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     ActorNursery, | ||||
|     MsgStream, | ||||
|     Portal, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # this is the first 2 actors, streamer_1 and streamer_2 | ||||
|  | @ -17,18 +12,14 @@ async def stream_data(seed): | |||
| 
 | ||||
| # this is the third actor; the aggregator | ||||
| async def aggregate(seed): | ||||
|     ''' | ||||
|     Ensure that the two streams we receive match but only stream | ||||
|     """Ensure that the two streams we receive match but only stream | ||||
|     a single set of values to the parent. | ||||
| 
 | ||||
|     ''' | ||||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery() as an: | ||||
|         portals: list[Portal] = [] | ||||
|     """ | ||||
|     async with tractor.open_nursery() as nursery: | ||||
|         portals = [] | ||||
|         for i in range(1, 3): | ||||
| 
 | ||||
|             # fork/spawn call | ||||
|             portal = await an.start_actor( | ||||
|             # fork point | ||||
|             portal = await nursery.start_actor( | ||||
|                 name=f'streamer_{i}', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|  | @ -52,11 +43,7 @@ async def aggregate(seed): | |||
|         async with trio.open_nursery() as n: | ||||
| 
 | ||||
|             for portal in portals: | ||||
|                 n.start_soon( | ||||
|                     push_to_chan, | ||||
|                     portal, | ||||
|                     send_chan.clone(), | ||||
|                 ) | ||||
|                 n.start_soon(push_to_chan, portal, send_chan.clone()) | ||||
| 
 | ||||
|             # close this local task's reference to send side | ||||
|             await send_chan.aclose() | ||||
|  | @ -73,7 +60,7 @@ async def aggregate(seed): | |||
| 
 | ||||
|             print("FINISHED ITERATING in aggregator") | ||||
| 
 | ||||
|         await an.cancel() | ||||
|         await nursery.cancel() | ||||
|         print("WAITING on `ActorNursery` to finish") | ||||
|     print("AGGREGATOR COMPLETE!") | ||||
| 
 | ||||
|  | @ -88,21 +75,18 @@ async def main() -> list[int]: | |||
| 
 | ||||
|     ''' | ||||
|     # yes, a nursery which spawns `trio`-"actors" B) | ||||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery( | ||||
|         loglevel='cancel', | ||||
|         # debug_mode=True, | ||||
|     ) as an: | ||||
|     nursery: tractor.ActorNursery | ||||
|     async with tractor.open_nursery() as nursery: | ||||
| 
 | ||||
|         seed = int(1e3) | ||||
|         pre_start = time.time() | ||||
| 
 | ||||
|         portal: Portal = await an.start_actor( | ||||
|         portal: tractor.Portal = await nursery.start_actor( | ||||
|             name='aggregator', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         stream: MsgStream | ||||
|         stream: tractor.MsgStream | ||||
|         async with portal.open_stream_from( | ||||
|             aggregate, | ||||
|             seed=seed, | ||||
|  | @ -111,12 +95,11 @@ async def main() -> list[int]: | |||
|             start = time.time() | ||||
|             # the portal call returns exactly what you'd expect | ||||
|             # as if the remote "aggregate" function was called locally | ||||
|             result_stream: list[int] = [] | ||||
|             result_stream = [] | ||||
|             async for value in stream: | ||||
|                 result_stream.append(value) | ||||
| 
 | ||||
|         cancelled: bool = await portal.cancel_actor() | ||||
|         assert cancelled | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
|         print(f"STREAM TIME = {time.time() - start}") | ||||
|         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") | ||||
|  |  | |||
|  | @ -8,10 +8,7 @@ This uses no extra threads, fancy semaphores or futures; all we need | |||
| is ``tractor``'s channels. | ||||
| 
 | ||||
| """ | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| from contextlib import asynccontextmanager | ||||
| from typing import Callable | ||||
| import itertools | ||||
| import math | ||||
|  | @ -19,6 +16,7 @@ import time | |||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| from async_generator import aclosing | ||||
| 
 | ||||
| 
 | ||||
| PRIMES = [ | ||||
|  | @ -46,7 +44,7 @@ async def is_prime(n): | |||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| @asynccontextmanager | ||||
| async def worker_pool(workers=4): | ||||
|     """Though it's a trivial special case for ``tractor``, the well | ||||
|     known "worker pool" seems to be the defacto "but, I want this | ||||
|  |  | |||
|  | @ -3,18 +3,20 @@ import trio | |||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def sleepy_jane() -> None: | ||||
|     uid: tuple = tractor.current_actor().uid | ||||
| async def sleepy_jane(): | ||||
|     uid = tractor.current_actor().uid | ||||
|     print(f'Yo i am actor {uid}') | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     Spawn a flat actor cluster, with one process per detected core. | ||||
|     Spawn a flat actor cluster, with one process per | ||||
|     detected core. | ||||
| 
 | ||||
|     ''' | ||||
|     portal_map: dict[str, tractor.Portal] | ||||
|     results: dict[str, str] | ||||
| 
 | ||||
|     # look at this hip new syntax! | ||||
|     async with ( | ||||
|  | @ -23,15 +25,11 @@ async def main(): | |||
|             modules=[__name__] | ||||
|         ) as portal_map, | ||||
| 
 | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         trio.open_nursery() as n, | ||||
|     ): | ||||
| 
 | ||||
|         for (name, portal) in portal_map.items(): | ||||
|             tn.start_soon( | ||||
|                 portal.run, | ||||
|                 sleepy_jane, | ||||
|             ) | ||||
|             n.start_soon(portal.run, sleepy_jane) | ||||
| 
 | ||||
|         await trio.sleep(0.5) | ||||
| 
 | ||||
|  | @ -43,4 +41,4 @@ if __name__ == '__main__': | |||
|     try: | ||||
|         trio.run(main) | ||||
|     except KeyboardInterrupt: | ||||
|         print('trio cancelled by KBI') | ||||
|         pass | ||||
|  |  | |||
|  | @ -13,7 +13,7 @@ async def simple_rpc( | |||
| 
 | ||||
|     ''' | ||||
|     # signal to parent that we're up much like | ||||
|     # ``trio.TaskStatus.started()`` | ||||
|     # ``trio_typing.TaskStatus.started()`` | ||||
|     await ctx.started(data + 1) | ||||
| 
 | ||||
|     async with ctx.open_stream() as stream: | ||||
|  |  | |||
|  | @ -9,7 +9,7 @@ async def main(service_name): | |||
|     async with tractor.open_nursery() as an: | ||||
|         await an.start_actor(service_name) | ||||
| 
 | ||||
|         async with tractor.get_registry() as portal: | ||||
|         async with tractor.get_arbiter('127.0.0.1', 1616) as portal: | ||||
|             print(f"Arbiter is listening on {portal.channel}") | ||||
| 
 | ||||
|         async with tractor.wait_for_actor(service_name) as sockaddr: | ||||
|  |  | |||
|  | @ -1,85 +0,0 @@ | |||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from functools import partial | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| log = tractor.log.get_logger( | ||||
|     name=__name__ | ||||
| ) | ||||
| 
 | ||||
| _lock: trio.Lock|None = None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def acquire_singleton_lock( | ||||
| ) -> None: | ||||
|     global _lock | ||||
|     if _lock is None: | ||||
|         log.info('Allocating LOCK') | ||||
|         _lock = trio.Lock() | ||||
| 
 | ||||
|     log.info('TRYING TO LOCK ACQUIRE') | ||||
|     async with _lock: | ||||
|         log.info('ACQUIRED') | ||||
|         yield _lock | ||||
| 
 | ||||
|     log.info('RELEASED') | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| async def hold_lock_forever( | ||||
|     task_status=trio.TASK_STATUS_IGNORED | ||||
| ): | ||||
|     async with ( | ||||
|         tractor.trionics.maybe_raise_from_masking_exc(), | ||||
|         acquire_singleton_lock() as lock, | ||||
|     ): | ||||
|         task_status.started(lock) | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     ignore_special_cases: bool, | ||||
|     loglevel: str = 'info', | ||||
|     debug_mode: bool = True, | ||||
| ): | ||||
|     async with ( | ||||
|         trio.open_nursery() as tn, | ||||
| 
 | ||||
|         # tractor.trionics.maybe_raise_from_masking_exc() | ||||
|         # ^^^ XXX NOTE, interestingly putting the unmasker | ||||
|         # here does not exhibit the same behaviour ?? | ||||
|     ): | ||||
|         if not ignore_special_cases: | ||||
|             from tractor.trionics import _taskc | ||||
|             _taskc._mask_cases.clear() | ||||
| 
 | ||||
|         _lock = await tn.start( | ||||
|             hold_lock_forever, | ||||
|         ) | ||||
|         with trio.move_on_after(0.2): | ||||
|             await tn.start( | ||||
|                 hold_lock_forever, | ||||
|             ) | ||||
| 
 | ||||
|         tn.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| # XXX, manual test as script | ||||
| if __name__ == '__main__': | ||||
|     tractor.log.get_console_log(level='info') | ||||
|     for case in [True, False]: | ||||
|         log.info( | ||||
|             f'\n' | ||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' | ||||
|             f'ignore_special_cases: {case!r}\n' | ||||
|         ) | ||||
|         trio.run(partial( | ||||
|             main, | ||||
|             ignore_special_cases=case, | ||||
|             loglevel='info', | ||||
|         )) | ||||
|  | @ -1,195 +0,0 @@ | |||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
|     # TODO, any diff in async case(s)?? | ||||
|     # asynccontextmanager as acm, | ||||
| ) | ||||
| from functools import partial | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| log = tractor.log.get_logger( | ||||
|     name=__name__ | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def teardown_on_exc( | ||||
|     raise_from_handler: bool = False, | ||||
| ): | ||||
|     ''' | ||||
|     You could also have a teardown handler which catches any exc and | ||||
|     does some required teardown. In this case the problem is | ||||
|     compounded UNLESS you ensure the handler's scope is OUTSIDE the | ||||
|     `tx.aclose()`.. that is, in the caller's enclosing scope. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         yield | ||||
|     except BaseException as _berr: | ||||
|         berr = _berr | ||||
|         log.exception( | ||||
|             f'Handling termination teardown in child due to,\n' | ||||
|             f'{berr!r}\n' | ||||
|         ) | ||||
|         if raise_from_handler: | ||||
|             # XXX teardown ops XXX | ||||
|             # on termination these steps say need to be run to | ||||
|             # ensure wider system consistency (like the state of | ||||
|             # remote connections/services). | ||||
|             # | ||||
|             # HOWEVER, any bug in this teardown code is also | ||||
|             # masked by the `tx.aclose()`! | ||||
|             # this is also true if `_tn.cancel_scope` is | ||||
|             # `.cancel_called` by the parent in a graceful | ||||
|             # request case.. | ||||
| 
 | ||||
|             # simulate a bug in teardown handler. | ||||
|             raise RuntimeError( | ||||
|                 'woopsie teardown bug!' | ||||
|             ) | ||||
| 
 | ||||
|         raise  # no teardown bug. | ||||
| 
 | ||||
| 
 | ||||
| async def finite_stream_to_rent( | ||||
|     tx: trio.abc.SendChannel, | ||||
|     child_errors_mid_stream: bool, | ||||
|     raise_unmasked: bool, | ||||
| 
 | ||||
|     task_status: trio.TaskStatus[ | ||||
|         trio.CancelScope, | ||||
|     ] = trio.TASK_STATUS_IGNORED, | ||||
| ): | ||||
|     async with ( | ||||
|         # XXX without this unmasker the mid-streaming RTE is never | ||||
|         # reported since it is masked by the `tx.aclose()` | ||||
|         # call which in turn raises `Cancelled`! | ||||
|         # | ||||
|         # NOTE, this is WITHOUT doing any exception handling | ||||
|         # inside the child task! | ||||
|         # | ||||
|         # TODO, uncomment next LoC to see the suppressed beg[RTE]! | ||||
|         tractor.trionics.maybe_raise_from_masking_exc( | ||||
|             raise_unmasked=raise_unmasked, | ||||
|         ), | ||||
| 
 | ||||
|         tx as tx,  # .aclose() is the guilty masker chkpt! | ||||
| 
 | ||||
|         # XXX, this ONLY matters in the | ||||
|         # `child_errors_mid_stream=False` case oddly!? | ||||
|         # THAT IS, if no tn is opened in that case then the | ||||
|         # test will not fail; it raises the RTE correctly? | ||||
|         # | ||||
|         # -> so it seems this new scope somehow affects the form of | ||||
|         #    the eventual error in the parent EG? | ||||
|         tractor.trionics.maybe_open_nursery( | ||||
|             nursery=( | ||||
|                 None | ||||
|                 if not child_errors_mid_stream | ||||
|                 else True | ||||
|             ), | ||||
|         ) as _tn, | ||||
|     ): | ||||
|         # pass our scope back to parent for supervision / | ||||
|         # control. | ||||
|         cs: trio.CancelScope|None = ( | ||||
|             None | ||||
|             if _tn is True | ||||
|             else _tn.cancel_scope | ||||
|         ) | ||||
|         task_status.started(cs) | ||||
| 
 | ||||
|         with teardown_on_exc( | ||||
|             raise_from_handler=not child_errors_mid_stream, | ||||
|         ): | ||||
|             for i in range(100): | ||||
|                 log.debug( | ||||
|                     f'Child tx {i!r}\n' | ||||
|                 ) | ||||
|                 if ( | ||||
|                     child_errors_mid_stream | ||||
|                     and | ||||
|                     i == 66 | ||||
|                 ): | ||||
|                     # oh wait but WOOPS there's a bug | ||||
|                     # in that teardown code!? | ||||
|                     raise RuntimeError( | ||||
|                         'woopsie, a mid-streaming bug!?' | ||||
|                     ) | ||||
| 
 | ||||
|                 await tx.send(i) | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     # TODO! toggle this for the 2 cases! | ||||
|     # 1. child errors mid-stream while parent is also requesting | ||||
|     #   (graceful) cancel of that child streamer. | ||||
|     # | ||||
|     # 2. child contains a teardown handler which contains a | ||||
|     #   bug and raises. | ||||
|     # | ||||
|     child_errors_mid_stream: bool, | ||||
| 
 | ||||
|     raise_unmasked: bool = False, | ||||
|     loglevel: str = 'info', | ||||
| ): | ||||
|     tractor.log.get_console_log(level=loglevel) | ||||
| 
 | ||||
|     # the `.aclose()` being checkpoints on these | ||||
|     # is the source of the problem.. | ||||
|     tx, rx = trio.open_memory_channel(1) | ||||
| 
 | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         rx as rx, | ||||
|     ): | ||||
|         _child_cs = await tn.start( | ||||
|             partial( | ||||
|                 finite_stream_to_rent, | ||||
|                 child_errors_mid_stream=child_errors_mid_stream, | ||||
|                 raise_unmasked=raise_unmasked, | ||||
|                 tx=tx, | ||||
|             ) | ||||
|         ) | ||||
|         async for msg in rx: | ||||
|             log.debug( | ||||
|                 f'Rent rx {msg!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # simulate some external cancellation | ||||
|             # request **JUST BEFORE** the child errors. | ||||
|             if msg == 65: | ||||
|                 log.cancel( | ||||
|                     f'Cancelling parent on,\n' | ||||
|                     f'msg={msg}\n' | ||||
|                     f'\n' | ||||
|                     f'Simulates OOB cancel request!\n' | ||||
|                 ) | ||||
|                 tn.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| # XXX, manual test as script | ||||
| if __name__ == '__main__': | ||||
|     tractor.log.get_console_log(level='info') | ||||
|     for case in [True, False]: | ||||
|         log.info( | ||||
|             f'\n' | ||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' | ||||
|             f'child_errors_midstream: {case!r}\n' | ||||
|         ) | ||||
|         try: | ||||
|             trio.run(partial( | ||||
|                 main, | ||||
|                 child_errors_mid_stream=case, | ||||
|                 # raise_unmasked=True, | ||||
|                 loglevel='info', | ||||
|             )) | ||||
|         except Exception as _exc: | ||||
|             exc = _exc | ||||
|             log.exception( | ||||
|                 'Should have raised an RTE or Cancelled?\n' | ||||
|             ) | ||||
|             breakpoint() | ||||
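
Stripped of the harness above, the masking behaviour both of these scripts probe reduces to a few lines of pure `trio`: an in-flight exception is silently replaced when a `finally` block hits a cancelled checkpoint:

```python
import trio

async def main():
    with trio.move_on_after(0.1) as cs:
        try:
            raise RuntimeError('the real bug')
        finally:
            # once the deadline hits, this checkpoint raises
            # `trio.Cancelled`, masking the RuntimeError above!
            await trio.sleep(1)
    assert cs.cancelled_caught  # ..and the RTE is swallowed silently

trio.run(main)
```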
|  | @ -1,18 +0,0 @@ | |||
| First generate a built distribution: | ||||
| 
 | ||||
| ``` | ||||
| python -m pip install --upgrade build | ||||
| python -m build --sdist --outdir dist/alpha5/ | ||||
| ``` | ||||
| 
 | ||||
| Then try a test ``pypi`` upload: | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload --repository testpypi dist/alpha5/* | ||||
| ``` | ||||
| 
 | ||||
| Then push to `pypi` for realz: | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload dist/alpha5/* | ||||
| ``` | ||||
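
Optionally, sanity check the test upload in a fresh venv before the real push (standard `testpypi` install pattern; deps still resolve from regular `pypi`):

```
python -m pip install \
  --index-url https://test.pypi.org/simple/ \
  --extra-index-url https://pypi.org/simple/ \
  tractor
```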
							
								
								
									
126 pyproject.toml
							|  | @ -1,117 +1,3 @@ | |||
| [build-system] | ||||
| requires = ["hatchling"] | ||||
| build-backend = "hatchling.build" | ||||
| 
 | ||||
| # ------ build-system ------ | ||||
| 
 | ||||
| [project] | ||||
| name = "tractor" | ||||
| version = "0.1.0a6dev0" | ||||
| description = 'structured concurrent `trio`-"actors"' | ||||
| authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }] | ||||
| requires-python = ">= 3.11" | ||||
| readme = "docs/README.rst" | ||||
| license = "AGPL-3.0-or-later" | ||||
| keywords = [ | ||||
|   "trio", | ||||
|   "async", | ||||
|   "concurrency", | ||||
|   "structured concurrency", | ||||
|   "actor model", | ||||
|   "distributed", | ||||
|   "multiprocessing", | ||||
| ] | ||||
| classifiers = [ | ||||
|   "Development Status :: 3 - Alpha", | ||||
|   "Operating System :: POSIX :: Linux", | ||||
|   "Framework :: Trio", | ||||
|   "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||
|   "Programming Language :: Python :: Implementation :: CPython", | ||||
|   "Programming Language :: Python :: 3 :: Only", | ||||
|   "Programming Language :: Python :: 3.11", | ||||
|   "Topic :: System :: Distributed Computing", | ||||
| ] | ||||
| dependencies = [ | ||||
|   # trio runtime and friends | ||||
|   # (poetry) proper range specs, | ||||
|   # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||
|   # TODO, for 3.13 we must go to `0.27` which means we have to | ||||
|   # disable strict egs or port to handling them internally! | ||||
|   "trio>0.27", | ||||
|   "tricycle>=0.4.1,<0.5", | ||||
|   "wrapt>=1.16.0,<2", | ||||
|   "colorlog>=6.8.2,<7", | ||||
|   # built-in multi-actor `pdb` REPL | ||||
|   "pdbp>=1.6,<2", # windows only (from `pdbp`) | ||||
|   # typed IPC msging | ||||
|   "msgspec>=0.19.0", | ||||
|   "cffi>=1.17.1", | ||||
|   "bidict>=0.23.1", | ||||
| ] | ||||
| 
 | ||||
| # ------ project ------ | ||||
| 
 | ||||
| [dependency-groups] | ||||
| dev = [ | ||||
|   # test suite | ||||
|   # TODO: maybe some of these layout choices? | ||||
|   # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||
|   "pytest>=8.3.5", | ||||
|   "pexpect>=4.9.0,<5", | ||||
|   # `tractor.devx` tooling | ||||
|   "greenback>=1.2.1,<2", | ||||
|   "stackscope>=0.2.2,<0.3", | ||||
|   # ^ requires this? | ||||
|   "typing-extensions>=4.14.1", | ||||
| 
 | ||||
|   "pyperclip>=1.9.0", | ||||
|   "prompt-toolkit>=3.0.50", | ||||
|   "xonsh>=0.19.2", | ||||
|   "psutil>=7.0.0", | ||||
| ] | ||||
| # TODO, add these with sane versions; were originally in | ||||
| # `requirements-docs.txt`.. | ||||
| # docs = [ | ||||
| #   "sphinx>=" | ||||
| #   "sphinx_book_theme>=" | ||||
| # ] | ||||
| 
 | ||||
| # ------ dependency-groups ------ | ||||
| 
 | ||||
| [tool.uv.sources] | ||||
| # XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)` | ||||
| # for the `pp` alias.. | ||||
| # pdbp = { path = "../pdbp", editable = true } | ||||
| 
 | ||||
| # TODO, distributed (multi-host) extensions | ||||
| # linux kernel networking | ||||
| # 'pyroute2' | ||||
| 
 | ||||
| # ------ tool.uv.sources ------ | ||||
| 
 | ||||
| [tool.uv] | ||||
| # XXX NOTE, prefer the sys python bc apparently the distros from | ||||
| # `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s | ||||
| # tab-completion, likely due to linking against `libedit` over `readline`.. | ||||
| # |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions | ||||
| # |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux | ||||
| # | ||||
| # https://docs.astral.sh/uv/reference/settings/#python-preference | ||||
| python-preference = 'system' | ||||
| 
 | ||||
| # ------ tool.uv ------ | ||||
| 
 | ||||
| [tool.hatch.build.targets.sdist] | ||||
| include = ["tractor"] | ||||
| 
 | ||||
| [tool.hatch.build.targets.wheel] | ||||
| include = ["tractor"] | ||||
| 
 | ||||
| # ------ tool.hatch ------ | ||||
| 
 | ||||
| [tool.towncrier] | ||||
| package = "tractor" | ||||
| filename = "NEWS.rst" | ||||
|  | @ -121,27 +7,26 @@ title_format = "tractor {version} ({project_date})" | |||
| template = "nooz/_template.rst" | ||||
| all_bullets = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   [[tool.towncrier.type]] | ||||
|   directory = "feature" | ||||
|   name = "Features" | ||||
|   showcontent = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   [[tool.towncrier.type]] | ||||
|   directory = "bugfix" | ||||
|   name = "Bug Fixes" | ||||
|   showcontent = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   [[tool.towncrier.type]] | ||||
|   directory = "doc" | ||||
|   name = "Improved Documentation" | ||||
|   showcontent = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   [[tool.towncrier.type]] | ||||
|   directory = "trivial" | ||||
|   name = "Trivial/Internal Changes" | ||||
|   showcontent = true | ||||
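
For reference, a typical fragment-to-release flow with this config (hypothetical fragment name; assumes fragments live under `nooz/` per the `template` path above):

```
echo 'Fixed the thing.' > nooz/420.bugfix.rst
towncrier build --version 0.1.0a7
```

`towncrier` then renders all pending fragments into `NEWS.rst` using `title_format` and offers to delete the consumed files.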
| 
 | ||||
| # ------ tool.towncrier ------ | ||||
| 
 | ||||
| [tool.pytest.ini_options] | ||||
| minversion = '6.0' | ||||
|  | @ -157,8 +42,7 @@ addopts = [ | |||
|   '--show-capture=no', | ||||
| ] | ||||
| log_cli = false | ||||
| 
 | ||||
| # TODO: maybe some of these layout choices? | ||||
| # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||
| # pythonpath = "src" | ||||
| 
 | ||||
| # ------ tool.pytest ------ | ||||
|  |  | |||
|  | @ -1,8 +0,0 @@ | |||
| # vim: ft=ini | ||||
| # pytest.ini for tractor | ||||
| 
 | ||||
| [pytest] | ||||
| # don't show frickin captured logs AGAIN in the report.. | ||||
| addopts = --show-capture='no' | ||||
| log_cli = false | ||||
| ; minversion = 6.0 | ||||
|  | @ -0,0 +1,2 @@ | |||
| sphinx | ||||
| sphinx_book_theme | ||||
|  | @ -0,0 +1,9 @@ | |||
| pytest | ||||
| pytest-trio | ||||
| pytest-timeout | ||||
| pdbp | ||||
| mypy | ||||
| trio_typing | ||||
| pexpect | ||||
| towncrier | ||||
| numpy | ||||
							
								
								
									
82 ruff.toml
							|  | @ -1,82 +0,0 @@ | |||
| # from default `ruff.toml` @ | ||||
| # https://docs.astral.sh/ruff/configuration/ | ||||
| 
 | ||||
| # Exclude a variety of commonly ignored directories. | ||||
| exclude = [ | ||||
|     ".bzr", | ||||
|     ".direnv", | ||||
|     ".eggs", | ||||
|     ".git", | ||||
|     ".git-rewrite", | ||||
|     ".hg", | ||||
|     ".ipynb_checkpoints", | ||||
|     ".mypy_cache", | ||||
|     ".nox", | ||||
|     ".pants.d", | ||||
|     ".pyenv", | ||||
|     ".pytest_cache", | ||||
|     ".pytype", | ||||
|     ".ruff_cache", | ||||
|     ".svn", | ||||
|     ".tox", | ||||
|     ".venv", | ||||
|     ".vscode", | ||||
|     "__pypackages__", | ||||
|     "_build", | ||||
|     "buck-out", | ||||
|     "build", | ||||
|     "dist", | ||||
|     "node_modules", | ||||
|     "site-packages", | ||||
|     "venv", | ||||
| ] | ||||
| 
 | ||||
| # Same as Black. | ||||
| line-length = 88 | ||||
| indent-width = 4 | ||||
| 
 | ||||
| # Assume Python 3.11 | ||||
| target-version = "py311" | ||||
| 
 | ||||
| [lint] | ||||
| # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`)  codes by default. | ||||
| # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or | ||||
| # McCabe complexity (`C901`) by default. | ||||
| select = ["E4", "E7", "E9", "F"] | ||||
| ignore = [ | ||||
|   'E402',  # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/ | ||||
| ] | ||||
| 
 | ||||
| # Allow fix for all enabled rules (when `--fix`) is provided. | ||||
| fixable = ["ALL"] | ||||
| unfixable = [] | ||||
| 
 | ||||
| # Allow unused variables when underscore-prefixed. | ||||
| # dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" | ||||
| 
 | ||||
| [format] | ||||
| # Use single quotes in `ruff format`. | ||||
| quote-style = "single" | ||||
| 
 | ||||
| # Like Black, indent with spaces, rather than tabs. | ||||
| indent-style = "space" | ||||
| 
 | ||||
| # Like Black, respect magic trailing commas. | ||||
| skip-magic-trailing-comma = false | ||||
| 
 | ||||
| # Like Black, automatically detect the appropriate line ending. | ||||
| line-ending = "auto" | ||||
| 
 | ||||
| # Enable auto-formatting of code examples in docstrings. Markdown, | ||||
| # reStructuredText code/literal blocks and doctests are all supported. | ||||
| # | ||||
| # This is currently disabled by default, but it is planned for this | ||||
| # to be opt-out in the future. | ||||
| docstring-code-format = false | ||||
| 
 | ||||
| # Set the line length limit used when formatting code snippets in | ||||
| # docstrings. | ||||
| # | ||||
| # This only has an effect when the `docstring-code-format` setting is | ||||
| # enabled. | ||||
| docstring-code-line-length = "dynamic" | ||||
|  | @ -0,0 +1,101 @@ | |||
| #!/usr/bin/env python | ||||
| # | ||||
| # tractor: structured concurrent "actors". | ||||
| # | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| from setuptools import setup | ||||
| 
 | ||||
| with open('docs/README.rst', encoding='utf-8') as f: | ||||
|     readme = f.read() | ||||
| 
 | ||||
| 
 | ||||
| setup( | ||||
|     name="tractor", | ||||
|     version='0.1.0a6dev0',  # alpha zone | ||||
|     description='structured concurrent `trio`-"actors"', | ||||
|     long_description=readme, | ||||
|     license='AGPLv3', | ||||
|     author='Tyler Goodlet', | ||||
|     maintainer='Tyler Goodlet', | ||||
|     maintainer_email='goodboy_foss@protonmail.com', | ||||
|     url='https://github.com/goodboy/tractor', | ||||
|     platforms=['linux', 'windows'], | ||||
|     packages=[ | ||||
|         'tractor', | ||||
|         'tractor.experimental',  # wacky ideas | ||||
|         'tractor.trionics',  # trio extensions | ||||
|         'tractor.msg',  # lowlevel data types | ||||
|         'tractor.devx',  # "dev-experience" | ||||
|     ], | ||||
|     install_requires=[ | ||||
| 
 | ||||
|         # trio related | ||||
|         # proper range spec: | ||||
|         # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||
|         'trio >= 0.22', | ||||
|         'async_generator', | ||||
|         'trio_typing', | ||||
|         'exceptiongroup', | ||||
| 
 | ||||
|         # tooling | ||||
|         'stackscope', | ||||
|         'tricycle', | ||||
|         'trio_typing', | ||||
|         'colorlog', | ||||
|         'wrapt', | ||||
| 
 | ||||
|         # IPC serialization | ||||
|         'msgspec', | ||||
| 
 | ||||
|         # debug mode REPL | ||||
|         'pdbp', | ||||
| 
 | ||||
|         # TODO: distributed transport using | ||||
|         # linux kernel networking | ||||
|         # 'pyroute2', | ||||
| 
 | ||||
|         # pip ref docs on these specs: | ||||
|         # https://pip.pypa.io/en/stable/reference/requirement-specifiers/#examples | ||||
|         # and pep: | ||||
|         # https://peps.python.org/pep-0440/#version-specifiers | ||||
| 
 | ||||
|     ], | ||||
|     tests_require=['pytest'], | ||||
|     python_requires=">=3.10", | ||||
|     keywords=[ | ||||
|         'trio', | ||||
|         'async', | ||||
|         'concurrency', | ||||
|         'structured concurrency', | ||||
|         'actor model', | ||||
|         'distributed', | ||||
|         'multiprocessing' | ||||
|     ], | ||||
|     classifiers=[ | ||||
|         "Development Status :: 3 - Alpha", | ||||
|         "Operating System :: POSIX :: Linux", | ||||
|         "Operating System :: Microsoft :: Windows", | ||||
|         "Framework :: Trio", | ||||
|         "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||
|         "Programming Language :: Python :: Implementation :: CPython", | ||||
|         "Programming Language :: Python :: 3 :: Only", | ||||
|         "Programming Language :: Python :: 3.10", | ||||
|         "Intended Audience :: Science/Research", | ||||
|         "Intended Audience :: Developers", | ||||
|         "Topic :: System :: Distributed Computing", | ||||
|     ], | ||||
| ) | ||||
|  | @ -1,27 +1,24 @@ | |||
| """ | ||||
| Top level of the testing suites! | ||||
| 
 | ||||
| ``tractor`` testing!! | ||||
| """ | ||||
| from __future__ import annotations | ||||
| import sys | ||||
| import subprocess | ||||
| import os | ||||
| import random | ||||
| import signal | ||||
| import platform | ||||
| import time | ||||
| 
 | ||||
| import pytest | ||||
| import tractor | ||||
| from tractor._testing import ( | ||||
|     examples_dir as examples_dir, | ||||
|     tractor_test as tractor_test, | ||||
|     expect_ctxc as expect_ctxc, | ||||
| ) | ||||
| 
 | ||||
| pytest_plugins: list[str] = [ | ||||
|     'pytester', | ||||
|     'tractor._testing.pytest', | ||||
| ] | ||||
| 
 | ||||
| # TODO: include wtv plugin(s) we build in `._testing.pytest`? | ||||
| pytest_plugins = ['pytester'] | ||||
| 
 | ||||
| # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | ||||
| if platform.system() == 'Windows': | ||||
|  | @ -33,11 +30,7 @@ else: | |||
|     _KILL_SIGNAL = signal.SIGKILL | ||||
|     _INT_SIGNAL = signal.SIGINT | ||||
|     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value | ||||
|     _PROC_SPAWN_WAIT = ( | ||||
|         0.6 | ||||
|         if sys.version_info < (3, 7) | ||||
|         else 0.4 | ||||
|     ) | ||||
|     _PROC_SPAWN_WAIT = 0.6 if sys.version_info < (3, 7) else 0.4 | ||||
| 
 | ||||
| 
 | ||||
| no_windows = pytest.mark.skipif( | ||||
|  | @ -46,12 +39,7 @@ no_windows = pytest.mark.skipif( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_addoption( | ||||
|     parser: pytest.Parser, | ||||
| ): | ||||
|     # ?TODO? should this be exposed from our `._testing.pytest` | ||||
|     # plugin or should we make it more explicit with `--tl` for | ||||
|     # tractor logging like we do in other client projects? | ||||
| def pytest_addoption(parser): | ||||
|     parser.addoption( | ||||
|         "--ll", | ||||
|         action="store", | ||||
|  | @ -59,10 +47,39 @@ def pytest_addoption( | |||
|         default='ERROR', help="logging level to set when testing" | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--spawn-backend", | ||||
|         action="store", | ||||
|         dest='spawn_backend', | ||||
|         default='trio', | ||||
|         help="Processing spawning backend to use for test run", | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--tpdb", "--debug-mode", | ||||
|         action="store_true", | ||||
|         dest='tractor_debug_mode', | ||||
|         # default=False, | ||||
|         help=( | ||||
|             'Enable a flag that can be used by tests to set the ' | ||||
|             '`debug_mode: bool` for engaging the internal ' | ||||
|             'multi-proc debugger sys.' | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_configure(config): | ||||
|     backend = config.option.spawn_backend | ||||
|     tractor._spawn.try_set_start_method(backend) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def debug_mode(request): | ||||
|     return request.config.option.tractor_debug_mode | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session', autouse=True) | ||||
| def loglevel(request): | ||||
|     import tractor | ||||
|     orig = tractor.log._default_loglevel | ||||
|     level = tractor.log._default_loglevel = request.config.option.loglevel | ||||
|     tractor.log.get_console_log(level) | ||||
|  | @ -70,44 +87,88 @@ def loglevel(request): | |||
|     tractor.log._default_loglevel = orig | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def spawn_backend(request) -> str: | ||||
|     return request.config.option.spawn_backend | ||||
| 
 | ||||
| 
 | ||||
| _ci_env: bool = os.environ.get('CI', False) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def ci_env() -> bool: | ||||
|     ''' | ||||
|     Detect CI environment. | ||||
|     Detect CI environment. | ||||
| 
 | ||||
|     ''' | ||||
|     return _ci_env | ||||
| 
 | ||||
| 
 | ||||
| def sig_prog( | ||||
|     proc: subprocess.Popen, | ||||
|     sig: int, | ||||
|     canc_timeout: float = 0.1, | ||||
| ) -> int: | ||||
| # TODO: also move this to `._testing` for now? | ||||
| # -[ ] possibly generalize and re-use for multi-tree spawning | ||||
| #    along with the new stuff for multi-addrs in distribute_dis | ||||
| #    branch? | ||||
| # | ||||
| # choose randomly at import time | ||||
| _reg_addr: tuple[str, int] = ( | ||||
|     '127.0.0.1', | ||||
|     random.randint(1000, 9999), | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def reg_addr() -> tuple[str, int]: | ||||
| 
 | ||||
|     # globally override the runtime to the per-test-session-dynamic | ||||
|     # addr so that all tests never conflict with any other actor | ||||
|     # tree using the default. | ||||
|     from tractor import _root | ||||
|     _root._default_lo_addrs = [_reg_addr] | ||||
| 
 | ||||
|     return _reg_addr | ||||
| 
 | ||||
| 
 | ||||
| def pytest_generate_tests(metafunc): | ||||
|     spawn_backend = metafunc.config.option.spawn_backend | ||||
| 
 | ||||
|     if not spawn_backend: | ||||
|         # XXX some weird windows bug with `pytest`? | ||||
|         spawn_backend = 'trio' | ||||
| 
 | ||||
|     # TODO: maybe just use the literal `._spawn.SpawnMethodKey`? | ||||
|     assert spawn_backend in ( | ||||
|         'mp_spawn', | ||||
|         'mp_forkserver', | ||||
|         'trio', | ||||
|     ) | ||||
| 
 | ||||
|     # NOTE: used to be used to dynamically parametrize tests for when | ||||
|     # you just passed --spawn-backend=`mp` on the cli, but now we expect | ||||
|     # that cli input to be manually specified, BUT, maybe we'll do | ||||
|     # something like this again in the future? | ||||
|     if 'start_method' in metafunc.fixturenames: | ||||
|         metafunc.parametrize("start_method", [spawn_backend], scope='module') | ||||
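
For reference, the backend value here comes straight from the `--spawn-backend` CLI flag added above, e.g.:

```
pytest --spawn-backend=mp_forkserver tests/
```

..after which any test requesting the `start_method` fixture is parametrized with that single value.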
| 
 | ||||
| 
 | ||||
| def sig_prog(proc, sig): | ||||
|     "Kill the actor-process with ``sig``." | ||||
|     proc.send_signal(sig) | ||||
|     time.sleep(canc_timeout) | ||||
|     time.sleep(0.1) | ||||
|     if not proc.poll(): | ||||
|         # TODO: why sometimes does SIGINT not work on teardown? | ||||
|         # seems to happen only when trace logging enabled? | ||||
|         proc.send_signal(_KILL_SIGNAL) | ||||
|     ret: int = proc.wait() | ||||
|     ret = proc.wait() | ||||
|     assert ret | ||||
| 
 | ||||
| 
 | ||||
| # TODO: factor into @cm and move to `._testing`? | ||||
| @pytest.fixture | ||||
| def daemon( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
|     tpt_proto: str, | ||||
| 
 | ||||
| ) -> subprocess.Popen: | ||||
| ): | ||||
|     ''' | ||||
|     Run a daemon root actor as a separate actor-process tree and | ||||
|     "remote registrar" for discovery-protocol related tests. | ||||
|  | @ -118,100 +179,28 @@ def daemon( | |||
|         loglevel: str = 'info' | ||||
| 
 | ||||
|     code: str = ( | ||||
|         "import tractor; " | ||||
|         "tractor.run_daemon([], " | ||||
|         "registry_addrs={reg_addrs}, " | ||||
|         "debug_mode={debug_mode}, " | ||||
|         "loglevel={ll})" | ||||
|             "import tractor; " | ||||
|             "tractor.run_daemon([], registry_addrs={reg_addrs}, loglevel={ll})" | ||||
|     ).format( | ||||
|         reg_addrs=str([reg_addr]), | ||||
|         ll="'{}'".format(loglevel) if loglevel else None, | ||||
|         debug_mode=debug_mode, | ||||
|     ) | ||||
|     cmd: list[str] = [ | ||||
|         sys.executable, | ||||
|         '-c', code, | ||||
|     ] | ||||
|     # breakpoint() | ||||
|     kwargs = {} | ||||
|     if platform.system() == 'Windows': | ||||
|         # without this, tests hang on windows forever | ||||
|         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP | ||||
| 
 | ||||
|     proc: subprocess.Popen = testdir.popen( | ||||
|     proc = testdir.popen( | ||||
|         cmd, | ||||
|         stdout=subprocess.PIPE, | ||||
|         stderr=subprocess.PIPE, | ||||
|         **kwargs, | ||||
|     ) | ||||
| 
 | ||||
|     # UDS sockets are **really** fast to bind()/listen()/connect() | ||||
|     # so it's often required that we delay a bit more starting | ||||
|     # the first actor-tree.. | ||||
|     if tpt_proto == 'uds': | ||||
|         global _PROC_SPAWN_WAIT | ||||
|         _PROC_SPAWN_WAIT = 0.6 | ||||
| 
 | ||||
|     time.sleep(_PROC_SPAWN_WAIT) | ||||
| 
 | ||||
|     assert not proc.returncode | ||||
|     time.sleep(_PROC_SPAWN_WAIT) | ||||
|     yield proc | ||||
|     sig_prog(proc, _INT_SIGNAL) | ||||
| 
 | ||||
|     # XXX! yeah.. just be reaaal careful with this bc sometimes it | ||||
|     # can lock up on the `_io.BufferedReader` and hang.. | ||||
|     stderr: str = proc.stderr.read().decode() | ||||
|     if stderr: | ||||
|         print( | ||||
|             f'Daemon actor tree produced STDERR:\n' | ||||
|             f'{proc.args}\n' | ||||
|             f'\n' | ||||
|             f'{stderr}\n' | ||||
|         ) | ||||
|     if proc.returncode != -2: | ||||
|         raise RuntimeError( | ||||
|             'Daemon actor tree failed !?\n' | ||||
|             f'{proc.args}\n' | ||||
|         ) | ||||
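
A sketch of how a discovery test might lean on this fixture (hypothetical test body; the `registry_addrs` kwarg mirrors the `run_daemon()` call above and the registrar's actor name is an assumption):

```python
import trio
import tractor

def test_sees_remote_registrar(daemon, reg_addr):
    async def main():
        # point this tree's registry at the daemon's address
        async with tractor.open_nursery(registry_addrs=[reg_addr]):
            # 'root' is assumed to be the daemon registrar's name
            async with tractor.wait_for_actor('root') as portal:
                assert portal.channel

    trio.run(main)
```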
| 
 | ||||
| 
 | ||||
| # @pytest.fixture(autouse=True) | ||||
| # def shared_last_failed(pytestconfig): | ||||
| #     val = pytestconfig.cache.get("example/value", None) | ||||
| #     breakpoint() | ||||
| #     if val is None: | ||||
| #         pytestconfig.cache.set("example/value", val) | ||||
| #     return val | ||||
| 
 | ||||
| 
 | ||||
| # TODO: a way to let test scripts (like from `examples/`) | ||||
| # guarantee they won't `registry_addrs` collide! | ||||
| # -[ ] maybe use some kinda standard `def main()` arg-spec that | ||||
| #     we can introspect from a fixture that is called from the test | ||||
| #     body? | ||||
| # -[ ] test and figure out typing for below prototype! Bp | ||||
| # | ||||
| # @pytest.fixture | ||||
| # def set_script_runtime_args( | ||||
| #     reg_addr: tuple, | ||||
| # ) -> Callable[[...], None]: | ||||
| 
 | ||||
| #     def import_n_partial_in_args_n_triorun( | ||||
| #         script: Path,  # under examples? | ||||
| #         **runtime_args, | ||||
| #     ) -> Callable[[], Any]:  # a `partial`-ed equiv of `trio.run()` | ||||
| 
 | ||||
| #         # NOTE, below is taken from | ||||
| #         # `.test_advanced_faults.test_ipc_channel_break_during_stream` | ||||
| #         mod: ModuleType = import_path( | ||||
| #             examples_dir() / 'advanced_faults' | ||||
| #             / 'ipc_failure_during_stream.py', | ||||
| #             root=examples_dir(), | ||||
| #             consider_namespace_packages=False, | ||||
| #         ) | ||||
| #         return partial( | ||||
| #             trio.run, | ||||
| #             partial( | ||||
| #                 mod.main, | ||||
| #                 **runtime_args, | ||||
| #             ) | ||||
| #         ) | ||||
| #     return import_n_partial_in_args_n_triorun | ||||
|  |  | |||
|  | @ -1,253 +0,0 @@ | |||
| ''' | ||||
| `tractor.devx.*` tooling sub-pkg test space. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import time | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
| ) | ||||
| from pexpect.spawnbase import SpawnBase | ||||
| 
 | ||||
| from tractor._testing import ( | ||||
|     mk_cmd, | ||||
| ) | ||||
| from tractor.devx.debug import ( | ||||
|     _pause_msg as _pause_msg, | ||||
|     _crash_msg as _crash_msg, | ||||
|     _repl_fail_msg as _repl_fail_msg, | ||||
|     _ctlc_ignore_header as _ctlc_ignore_header, | ||||
| ) | ||||
| from ..conftest import ( | ||||
|     _ci_env, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from pexpect import pty_spawn | ||||
| 
 | ||||
| 
 | ||||
| # a fn that sub-instantiates a `pexpect.spawn()` | ||||
| # and returns it. | ||||
| type PexpectSpawner = Callable[[str], pty_spawn.spawn] | ||||
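| # a (hypothetical) minimal usage sketch of the `spawn` fixture | ||||
| # returned below; names like `sync_bp` refer to scripts under | ||||
| # `examples/debugging/` as used by the actual suites: | ||||
| # | ||||
| # def test_something(spawn: PexpectSpawner): | ||||
| #     child = spawn('sync_bp') | ||||
| #     child.expect(PROMPT)  # block until the `pdbp` prompt renders | ||||
| #     child.sendline('c')   # send 'continue' to the REPL | ||||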
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
| ) -> PexpectSpawner: | ||||
|     ''' | ||||
|     Use the `pexpect` module shipped via `testdir.spawn()` to | ||||
|     run an `./examples/..` script by name. | ||||
| 
 | ||||
|     ''' | ||||
|     if start_method != 'trio': | ||||
|         pytest.skip( | ||||
|             '`pexpect` based tests only supported on `trio` backend' | ||||
|         ) | ||||
| 
 | ||||
|     def unset_colors(): | ||||
|         ''' | ||||
|         Python 3.13 introduced colored tracebacks that break pattern | ||||
|         matching: | ||||
| 
 | ||||
|         https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS | ||||
|         https://docs.python.org/3/using/cmdline.html#using-on-controlling-color | ||||
| 
 | ||||
|         ''' | ||||
|         import os | ||||
|         os.environ['PYTHON_COLORS'] = '0' | ||||
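|         # (equivalent to exporting `PYTHON_COLORS=0` in the shell | ||||
|         # before launching, e.g. `PYTHON_COLORS=0 python script.py`) | ||||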
| 
 | ||||
|     def _spawn( | ||||
|         cmd: str, | ||||
|         **mkcmd_kwargs, | ||||
|     ) -> pty_spawn.spawn: | ||||
|         unset_colors() | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd( | ||||
|                 cmd, | ||||
|                 **mkcmd_kwargs, | ||||
|             ), | ||||
|             expect_timeout=3, | ||||
|             # preexec_fn=unset_colors, | ||||
|             # ^TODO? get `pytest` core to expose underlying | ||||
|             # `pexpect.spawn()` stuff? | ||||
|         ) | ||||
| 
 | ||||
|     # returned so that each test can pass the input script name. | ||||
|     return _spawn  # the `PexpectSpawner`, type alias. | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     params=[False, True], | ||||
|     ids='ctl-c={}'.format, | ||||
| ) | ||||
| def ctlc( | ||||
|     request, | ||||
|     ci_env: bool, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     use_ctlc = request.param | ||||
| 
 | ||||
|     node = request.node | ||||
|     markers = node.own_markers | ||||
|     for mark in markers: | ||||
|         if mark.name == 'has_nested_actors': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' | ||||
|                 f'The test can sometimes run fine locally but until we ' | ||||
|                 f'solve this issue this CI test will be xfail:\n' | ||||
|                 'https://github.com/goodboy/tractor/issues/320' | ||||
|             ) | ||||
| 
 | ||||
|         if mark.name == 'ctlcs_bish': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n' | ||||
|                 f'The test and/or underlying example script can *sometimes* run fine ' | ||||
|                 f'locally but more than likely, until the cpython peeps get their sh#$ together, ' | ||||
|                 f'this test will definitely not behave like `trio` under SIGINT..\n' | ||||
|             ) | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # with the added color-char encoding.. | ||||
|         from tractor.devx.debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
| 
 | ||||
| 
 | ||||
| def expect( | ||||
|     child, | ||||
| 
 | ||||
|     # normally a `pdb` prompt by default | ||||
|     patt: str, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Expect wrapper that prints last seen console | ||||
|     data before failing. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         child.expect( | ||||
|             patt, | ||||
|             **kwargs, | ||||
|         ) | ||||
|     except TIMEOUT: | ||||
|         before = str(child.before.decode()) | ||||
|         print(before) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| PROMPT = r"\(Pdb\+\)" | ||||
| 
 | ||||
| 
 | ||||
| def in_prompt_msg( | ||||
|     child: SpawnBase, | ||||
|     parts: list[str], | ||||
| 
 | ||||
|     pause_on_false: bool = False, | ||||
|     err_on_false: bool = False, | ||||
|     print_prompt_on_false: bool = True, | ||||
| 
 | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Predicate check that (the prompt's) std-streams output contains | ||||
|     all the `str`-parts. | ||||
| 
 | ||||
|     Can be used in test asserts for bulk matching expected | ||||
|     log/REPL output for a given `pdb` interact point. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     before: str = str(child.before.decode()) | ||||
|     for part in parts: | ||||
|         if part not in before: | ||||
|             if pause_on_false: | ||||
|                 import pdbp | ||||
|                 pdbp.set_trace() | ||||
| 
 | ||||
|             if print_prompt_on_false: | ||||
|                 print(before) | ||||
| 
 | ||||
|             if err_on_false: | ||||
|                 raise ValueError( | ||||
|                     f'Could not find pattern in `before` output?\n' | ||||
|                     f'part: {part!r}\n' | ||||
|                 ) | ||||
|             return False | ||||
| 
 | ||||
|     return True | ||||
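| # a (hypothetical) usage sketch of `in_prompt_msg()` from a test | ||||
| # body; `child` is assumed to be a `spawn()`-ed `pexpect` child | ||||
| # sitting at a `pdbp` prompt: | ||||
| # | ||||
| # child.expect(PROMPT) | ||||
| # assert in_prompt_msg( | ||||
| #     child, | ||||
| #     parts=[_pause_msg, "('root'"], | ||||
| # ) | ||||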
| 
 | ||||
| 
 | ||||
| # TODO: support terminal color-char stripping so we can match | ||||
| # against call stack frame output from the 'll' command and the like! | ||||
| # -[ ] SO answer for stripping ANSI codes: https://stackoverflow.com/a/14693789 | ||||
| def assert_before( | ||||
|     child: SpawnBase, | ||||
|     patts: list[str], | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     assert in_prompt_msg( | ||||
|         child=child, | ||||
|         parts=patts, | ||||
| 
 | ||||
|         # since this is an "assert" helper ;) | ||||
|         err_on_false=True, | ||||
|         **kwargs | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def do_ctlc( | ||||
|     child, | ||||
|     count: int = 3, | ||||
|     delay: float = 0.1, | ||||
|     patt: str|None = None, | ||||
| 
 | ||||
|     # expect repl UX to reprint the prompt after every | ||||
|     # ctrl-c send. | ||||
|     # XXX: no idea but, in CI this never seems to work even on 3.10 so | ||||
|     # needs some further investigation potentially... | ||||
|     expect_prompt: bool = not _ci_env, | ||||
| 
 | ||||
| ) -> str|None: | ||||
| 
 | ||||
|     before: str|None = None | ||||
| 
 | ||||
|     # make sure ctl-c sends don't do anything but repeat output | ||||
|     for _ in range(count): | ||||
|         time.sleep(delay) | ||||
|         child.sendcontrol('c') | ||||
| 
 | ||||
|         # TODO: figure out why this makes CI fail.. | ||||
|         # if you run this test manually it works just fine.. | ||||
|         if expect_prompt: | ||||
|             time.sleep(delay) | ||||
|             child.expect(PROMPT) | ||||
|             before = str(child.before.decode()) | ||||
|             time.sleep(delay) | ||||
| 
 | ||||
|             if patt: | ||||
|                 # should see the last line on console | ||||
|                 assert patt in before | ||||
| 
 | ||||
|     # return the console content up to the final prompt | ||||
|     return before | ||||
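| # a (hypothetical) sketch of driving `do_ctlc()` from a test body; | ||||
| # the `delay` is bumped above its 0.1 default when racing a | ||||
| # subactor's pause point (see real usage in the REPL suites below): | ||||
| # | ||||
| # if ctlc: | ||||
| #     do_ctlc( | ||||
| #         child, | ||||
| #         delay=0.4,  # avoid racing the parent's actor-cancel | ||||
| #     ) | ||||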
|  | @ -1,381 +0,0 @@ | |||
| ''' | ||||
| That "foreign loop/thread" debug REPL support better ALSO WORK! | ||||
| 
 | ||||
| Same as `test_native_pause.py`. | ||||
| All these tests can be understood (somewhat) by running the | ||||
| equivalent `examples/debugging/` scripts manually. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| # from functools import partial | ||||
| # import itertools | ||||
| import time | ||||
| # from typing import ( | ||||
| #     Iterator, | ||||
| # ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     # _ci_env, | ||||
|     do_ctlc, | ||||
|     PROMPT, | ||||
|     # expect, | ||||
|     in_prompt_msg, | ||||
|     assert_before, | ||||
|     _pause_msg, | ||||
|     _crash_msg, | ||||
|     _ctlc_ignore_header, | ||||
|     # _repl_fail_msg, | ||||
| ) | ||||
| 
 | ||||
| @cm | ||||
| def maybe_expect_timeout( | ||||
|     ctlc: bool = False, | ||||
| ) -> None: | ||||
|     try: | ||||
|         yield | ||||
|     except TIMEOUT: | ||||
|         # breakpoint() | ||||
|         if ctlc: | ||||
|             pytest.xfail( | ||||
|                 'Some kinda redic threading SIGINT bug i think?\n' | ||||
|                 'See the notes in `examples/debugging/sync_bp.py`..\n' | ||||
|             ) | ||||
|         raise | ||||
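| # e.g. a sketch of wrapping a final EOF-expect, (as done at the end | ||||
| # of `test_pause_from_sync()` below), so a ctl-c induced timeout is | ||||
| # xfail-ed instead of failing the whole suite: | ||||
| # | ||||
| # with maybe_expect_timeout(ctlc=ctlc): | ||||
| #     child.expect(EOF) | ||||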
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_pause_from_sync( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from sync functions AND from | ||||
|     any thread spawned with `trio.to_thread.run_sync()`. | ||||
| 
 | ||||
|     `examples/debugging/sync_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('sync_bp') | ||||
| 
 | ||||
|     # first `sync_pause()` after nurseries open | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # pre-prompt line | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|         # ^NOTE^ subactor not spawned yet; don't need extra delay. | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # first `await tractor.pause()` inside `p.open_context()` body | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # XXX shouldn't see gb loaded message with PDB loglevel! | ||||
|     # assert not in_prompt_msg( | ||||
|     #     child, | ||||
|     #     ['`greenback` portal opened!'], | ||||
|     # ) | ||||
|     # should be same root task | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     # XXX, fwiw without a brief sleep here the SIGINT might actually | ||||
|     # trigger "subactor" cancellation by its parent  before the | ||||
|     # shield-handler is engaged. | ||||
|     # | ||||
|     # => similar to the `delay` input to `do_ctlc()` below, setting | ||||
|     # this too low can cause the test to fail since the `subactor` | ||||
|     # suffers a race where the root/parent sends an actor-cancel | ||||
|     # prior to the context task hitting its pause point (and thus | ||||
|     # engaging the `sigint_shield()` handler in time); this value | ||||
|     # seems to be good enuf? | ||||
|     time.sleep(0.6) | ||||
| 
 | ||||
|     # one of the bg thread or subactor should have | ||||
|     # `Lock.acquire()`-ed | ||||
|     # (NOT both, which will result in REPL clobbering!) | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
|         'subactor': [ | ||||
|             "'start_n_sync_pause'", | ||||
|             "('subactor'", | ||||
|         ], | ||||
|         'inline_root_bg_thread': [ | ||||
|             "<Thread(inline_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|         'start_soon_root_bg_thread': [ | ||||
|             "<Thread(start_soon_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
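|     # drive the REPL until each of the 3 pause-points above has | ||||
|     # been observed (in any order); each matched key is popped so | ||||
|     # the loop terminates once all prompts have been seen. | ||||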
|     conts: int = 0  # for debugging below matching logic on failure | ||||
|     while attach_patts: | ||||
|         child.sendline('c') | ||||
|         conts += 1 | ||||
|         child.expect(PROMPT) | ||||
|         before = str(child.before.decode()) | ||||
|         for key in attach_patts: | ||||
|             if key in before: | ||||
|                 attach_key: str = key | ||||
|                 expected_patts: str = attach_patts.pop(key) | ||||
|                 assert_before( | ||||
|                     child, | ||||
|                     [_pause_msg] | ||||
|                     + | ||||
|                     expected_patts | ||||
|                 ) | ||||
|                 break | ||||
|         else: | ||||
|             pytest.fail( | ||||
|                 f'No keys found?\n\n' | ||||
|                 f'{attach_patts.keys()}\n\n' | ||||
|                 f'{before}\n' | ||||
|             ) | ||||
| 
 | ||||
|         # ensure no other task/threads engaged a REPL | ||||
|         # at the same time as the one that was detected above. | ||||
|         for key, other_patts in attach_patts.copy().items(): | ||||
|             assert not in_prompt_msg( | ||||
|                 child, | ||||
|                 other_patts, | ||||
|             ) | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc( | ||||
|                 child, | ||||
|                 patt=attach_key, | ||||
|                 # NOTE same as comment above | ||||
|                 delay=0.4, | ||||
|             ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # XXX TODO, weird threading bug it seems despite the | ||||
|     # `abandon_on_cancel: bool` setting to | ||||
|     # `trio.to_thread.run_sync()`.. | ||||
|     with maybe_expect_timeout( | ||||
|         ctlc=ctlc, | ||||
|     ): | ||||
|         child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def expect_any_of( | ||||
|     attach_patts: dict[str, list[str]], | ||||
|     child,   # what type? | ||||
|     ctlc: bool = False, | ||||
|     prompt: str = _ctlc_ignore_header, | ||||
|     ctlc_delay: float = .4, | ||||
| 
 | ||||
| ) -> list[str]: | ||||
|     ''' | ||||
|     Expect any one of the pattern `list[str]`s provided in | ||||
|     `attach_patts` to match the next prompt's output. | ||||
| 
 | ||||
|     Used to test racing prompts from multiple actors and/or | ||||
|     tasks using a common root process' `pdbp` REPL. | ||||
| 
 | ||||
|     ''' | ||||
|     assert attach_patts | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     for attach_key in attach_patts: | ||||
|         if attach_key in before: | ||||
|             expected_patts: str = attach_patts.pop(attach_key) | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 expected_patts | ||||
|             ) | ||||
|             break  # from for | ||||
|     else: | ||||
|         pytest.fail( | ||||
|             f'No keys found?\n\n' | ||||
|             f'{attach_patts.keys()}\n\n' | ||||
|             f'{before}\n' | ||||
|         ) | ||||
| 
 | ||||
|     # ensure no other task/threads engaged a REPL | ||||
|     # at the same time as the one that was detected above. | ||||
|     for key, other_patts in attach_patts.copy().items(): | ||||
|         assert not in_prompt_msg( | ||||
|             child, | ||||
|             other_patts, | ||||
|         ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             patt=prompt, | ||||
|             # NOTE same as comment above | ||||
|             delay=ctlc_delay, | ||||
|         ) | ||||
| 
 | ||||
|     return expected_patts | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_sync_pause_from_aio_task( | ||||
|     spawn, | ||||
| 
 | ||||
|     ctlc: bool | ||||
|     # ^TODO, fix for `asyncio`!! | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from an `asyncio.Task` spawned using | ||||
|     APIs in `.to_asyncio`. | ||||
| 
 | ||||
|     `examples/debugging/asyncio_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('asyncio_bp') | ||||
| 
 | ||||
|     # RACE on whether trio/asyncio task bps first | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # first pause in guest-mode (aka "infecting") | ||||
|         # `trio.Task`. | ||||
|         'trio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task 'trio_ctx'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # `breakpoint()` from `asyncio.Task`. | ||||
|         'asyncio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task pending name='Task-2' coro=<greenback_shim()", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
|     } | ||||
| 
 | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     # NOW in race order, | ||||
|     # - the asyncio-task will error | ||||
|     # - the root-actor parent task will pause | ||||
|     # | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # error raised in `asyncio.Task` | ||||
|         "raise ValueError('asyncio side error!')": [ | ||||
|             _crash_msg, | ||||
|             "<Task 'trio_ctx'", | ||||
|             "@ ('aio_daemon'", | ||||
|             "ValueError: asyncio side error!", | ||||
| 
 | ||||
|             # XXX, we no longer show this frame by default! | ||||
|             # 'return await chan.receive()',  # `.to_asyncio` impl internals in tb | ||||
|         ], | ||||
| 
 | ||||
|         # parent-side propagation via actor-nursery/portal | ||||
|         # "tractor._exceptions.RemoteActorError: remote task raised a 'ValueError'": [ | ||||
|         "remote task raised a 'ValueError'": [ | ||||
|             _crash_msg, | ||||
|             "src_uid=('aio_daemon'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # a final pause in root-actor | ||||
|         "<Task '__main__.main'": [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     assert not attach_patts | ||||
| 
 | ||||
|     # final boxed error propagates to root | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "remote task raised a 'ValueError'", | ||||
|             "ValueError: asyncio side error!", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     # with maybe_expect_timeout(): | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_sync_pause_from_non_greenbacked_aio_task(): | ||||
|     ''' | ||||
|     Where the `breakpoint()` caller task is NOT spawned by | ||||
|     `tractor.to_asyncio` and thus never activates | ||||
|     a `greenback.ensure_portal()` beforehand, presumably bc the task | ||||
|     was started by some lib/dep, as is often seen in the field. | ||||
| 
 | ||||
|     Ensure sync pausing works when the pause is in, | ||||
| 
 | ||||
|     - the root actor running in infected-mode? | ||||
|       |_ since we don't need any IPC to acquire the debug lock? | ||||
|       |_ is there some way to handle this like the non-main-thread case? | ||||
| 
 | ||||
|     All other cases need to error out appropriately right? | ||||
| 
 | ||||
|     - for any subactor we can't avoid needing the repl lock.. | ||||
|       |_ is there a way to hook into `asyncio.ensure_future(obj)`? | ||||
| 
 | ||||
|     ''' | ||||
|     pass | ||||
|  | @ -1,306 +0,0 @@ | |||
| ''' | ||||
| That "native" runtime-hackin toolset better be dang useful! | ||||
| 
 | ||||
| Verify the function of a variety of "developer-experience" tools we | ||||
| offer from the `.devx` sub-pkg: | ||||
| 
 | ||||
| - use of the lovely `stackscope` for dumping actor `trio`-task trees | ||||
|   during operation and hangs. | ||||
| 
 | ||||
| TODO: | ||||
| - demonstration of `CallerInfo` call stack frame filtering such that | ||||
|   for logging and REPL purposes a user sees exactly the layers needed | ||||
|   when debugging a problem inside the stack vs. in their app. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| import os | ||||
| import signal | ||||
| import time | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     expect, | ||||
|     assert_before, | ||||
|     in_prompt_msg, | ||||
|     PROMPT, | ||||
|     _pause_msg, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     # TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..conftest import PexpectSpawner | ||||
| 
 | ||||
| 
 | ||||
| def test_shield_pause( | ||||
|     spawn: PexpectSpawner, | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `tractor.pause()/.post_mortem()` API works inside an | ||||
|     already cancelled `trio.CancelScope` and that you can step to the | ||||
|     next checkpoint wherein the `Cancelled` will get raised. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn( | ||||
|         'shield_hang_in_sub' | ||||
|     ) | ||||
|     expect( | ||||
|         child, | ||||
|         'Yo my child hanging..?', | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             'Entering shield sleep..', | ||||
|             'Enabling trace-trees on `SIGUSR1` since `stackscope` is installed @', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     script_pid: int = child.pid | ||||
|     print( | ||||
|         f'Sending SIGUSR1 to {script_pid}\n' | ||||
|         f'(kill -s SIGUSR1 {script_pid})\n' | ||||
|     ) | ||||
|     os.kill( | ||||
|         script_pid, | ||||
|         signal.SIGUSR1, | ||||
|     ) | ||||
|     time.sleep(0.2) | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "end-of-\('root'", | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # 'Trying to dump `stackscope` tree..', | ||||
|             # 'Dumping `stackscope` tree for actor', | ||||
|             "('root'",  # uid line | ||||
| 
 | ||||
|             # TODO!? this used to show? | ||||
|             # -[ ] mk reproducible for @oremanj? | ||||
|             # | ||||
|             # parent block point (non-shielded) | ||||
|             # 'await trio.sleep_forever()  # in root', | ||||
|         ] | ||||
|     ) | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "end-of-\('hanger'", | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # relay to the sub should be reported | ||||
|             'Relaying `SIGUSR1`[10] to sub-actor', | ||||
| 
 | ||||
|             "('hanger'",  # uid line | ||||
| 
 | ||||
|             # TODO!? SEE ABOVE | ||||
|             # hanger LOC where it's shield-halted | ||||
|             # 'await trio.sleep_forever()  # in subactor', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # simulate the user sending a ctl-c to the hanging program. | ||||
|     # this should result in the terminator kicking in since | ||||
|     # the sub is shield blocking and can't respond to SIGINT. | ||||
|     os.kill( | ||||
|         child.pid, | ||||
|         signal.SIGINT, | ||||
|     ) | ||||
|     from tractor._supervise import _shutdown_msg | ||||
|     expect( | ||||
|         child, | ||||
|         # 'Shutting down actor runtime', | ||||
|         _shutdown_msg, | ||||
|         timeout=6, | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             'raise KeyboardInterrupt', | ||||
|             # 'Shutting down actor runtime', | ||||
|             '#T-800 deployed to collect zombie B0', | ||||
|             "'--uid', \"('hanger',", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def test_breakpoint_hook_restored( | ||||
|     spawn: PexpectSpawner, | ||||
| ): | ||||
|     ''' | ||||
|     Ensures our actor runtime sets a custom `breakpoint()` hook | ||||
|     on open then restores the stdlib's default on close. | ||||
| 
 | ||||
|     The hook state validation is done via `assert`s inside the | ||||
|     invoked script with only `breakpoint()` (not `tractor.pause()`) | ||||
|     calls used. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('restore_builtin_breakpoint') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     try: | ||||
|         assert_before( | ||||
|             child, | ||||
|             [ | ||||
|                 _pause_msg, | ||||
|                 "<Task '__main__.main'", | ||||
|                 "('root'", | ||||
|                 "first bp, tractor hook set", | ||||
|             ] | ||||
|         ) | ||||
|     # XXX if the above raises `AssertionError`, without sending | ||||
|     # the final 'continue' cmd to the REPL-active sub-process, | ||||
|     # we'll hang waiting for that pexpect instance to terminate.. | ||||
|     finally: | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "last bp, stdlib hook restored", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # since the stdlib hook was already restored there should be NO | ||||
|     # `tractor` `log.pdb()` content from console! | ||||
|     assert not in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ], | ||||
|     ) | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| _to_raise = Exception('Triggering a crash') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'to_raise', | ||||
|     [ | ||||
|         None, | ||||
|         _to_raise, | ||||
|         RuntimeError('Never crash handle this!'), | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_on_exit', | ||||
|     [ | ||||
|         True, | ||||
|         [type(_to_raise)], | ||||
|         False, | ||||
|     ] | ||||
| ) | ||||
| def test_crash_handler_cms( | ||||
|     debug_mode: bool, | ||||
|     to_raise: Exception, | ||||
|     raise_on_exit: bool|list[Exception], | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `.devx.open_crash_handler()` API(s) by also | ||||
|     (conveniently enough) testing its `repl_fixture: ContextManager` | ||||
|     param support which, for this suite, allows us to avoid | ||||
|     a `pexpect`-style test since we use the fixture to avoid actually | ||||
|     entering `PdbpREPL.interact()` :smirk: | ||||
| 
 | ||||
|     ''' | ||||
|     import tractor | ||||
|     # import trio | ||||
| 
 | ||||
|     # state flags | ||||
|     repl_acquired: bool = False | ||||
|     repl_released: bool = False | ||||
| 
 | ||||
|     @cm | ||||
|     def block_repl_ux( | ||||
|         repl: tractor.devx.debug.PdbREPL, | ||||
|         maybe_bxerr: ( | ||||
|             tractor.devx._debug.BoxedMaybeException | ||||
|             |None | ||||
|         ) = None, | ||||
|         enter_repl: bool = True, | ||||
| 
 | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Set pre/post-REPL state vars and bypass actual console | ||||
|         interaction. | ||||
| 
 | ||||
|         ''' | ||||
|         nonlocal repl_acquired, repl_released | ||||
| 
 | ||||
|         # task: trio.Task = trio.lowlevel.current_task() | ||||
|         # print(f'pre-REPL active_task={task.name}') | ||||
| 
 | ||||
|         print('pre-REPL') | ||||
|         repl_acquired = True | ||||
|         yield False  # never actually .interact() | ||||
|         print('post-REPL') | ||||
|         repl_released = True | ||||
| 
 | ||||
|     try: | ||||
|         # TODO, with runtime's `debug_mode` setting | ||||
|         # -[ ] need to open runtime tho obvi.. | ||||
|         # | ||||
|         # with tractor.devx.maybe_open_crash_handler( | ||||
|         #     pdb=True, | ||||
| 
 | ||||
|         with tractor.devx.open_crash_handler( | ||||
|             raise_on_exit=raise_on_exit, | ||||
|             repl_fixture=block_repl_ux | ||||
|         ) as bxerr: | ||||
|             if to_raise is not None: | ||||
|                 raise to_raise | ||||
| 
 | ||||
|     except Exception as _exc: | ||||
|         exc = _exc | ||||
|         if ( | ||||
|             raise_on_exit is True | ||||
|             or | ||||
|             type(to_raise) in raise_on_exit | ||||
|         ): | ||||
|             assert ( | ||||
|                 exc | ||||
|                 is | ||||
|                 to_raise | ||||
|                 is | ||||
|                 bxerr.value | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|             raise | ||||
|     else: | ||||
|         assert ( | ||||
|             to_raise is None | ||||
|             or | ||||
|             not raise_on_exit | ||||
|             or | ||||
|             type(to_raise) not in raise_on_exit | ||||
|         ) | ||||
|         assert bxerr.value is to_raise | ||||
| 
 | ||||
|     assert bxerr.raise_on_exit == raise_on_exit | ||||
| 
 | ||||
|     if to_raise is not None: | ||||
|         assert repl_acquired | ||||
|         assert repl_released | ||||
|  | @ -1,4 +0,0 @@ | |||
| ''' | ||||
| `tractor.ipc` subsystem(s)/unit testing suites. | ||||
| 
 | ||||
| ''' | ||||
|  | @ -1,114 +0,0 @@ | |||
| ''' | ||||
| Unit-ish tests for specific IPC transport protocol backends. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from pathlib import Path | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     _state, | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def bindspace_dir_str() -> str: | ||||
| 
 | ||||
|     rt_dir: Path = tractor._state.get_rt_dir() | ||||
|     bs_dir: Path = rt_dir / 'doggy' | ||||
|     bs_dir_str: str = str(bs_dir) | ||||
|     assert not bs_dir.is_dir() | ||||
| 
 | ||||
|     yield bs_dir_str | ||||
| 
 | ||||
|     # delete it on suite teardown. | ||||
|     # ?TODO? should we support this internally | ||||
|     # or is leaking it ok? | ||||
|     if bs_dir.is_dir(): | ||||
|         bs_dir.rmdir() | ||||
| 
 | ||||
| 
 | ||||
| def test_uds_bindspace_created_implicitly( | ||||
|     debug_mode: bool, | ||||
|     bindspace_dir_str: str, | ||||
| ): | ||||
|     registry_addr: tuple = ( | ||||
|         f'{bindspace_dir_str}', | ||||
|         'registry@doggy.sock', | ||||
|     ) | ||||
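|     # ^NOTE, a UDS addr here is a (bindspace-dir, sock-filename) | ||||
|     # pair vs. the (host, port) tuple used for tcp. | ||||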
|     bs_dir_str: str = registry_addr[0] | ||||
| 
 | ||||
|     # XXX, ensure bindspace-dir DNE beforehand! | ||||
|     assert not Path(bs_dir_str).is_dir() | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=['uds'], | ||||
|             registry_addrs=[registry_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as _an: | ||||
| 
 | ||||
|             # XXX MUST be created implicitly by | ||||
|             # `.ipc._uds.start_listener()`! | ||||
|             assert Path(bs_dir_str).is_dir() | ||||
| 
 | ||||
|             root: Actor = tractor.current_actor() | ||||
|             assert root.is_registrar | ||||
| 
 | ||||
|             assert registry_addr in root.reg_addrs | ||||
|             assert ( | ||||
|                 registry_addr | ||||
|                 in | ||||
|                 _state._runtime_vars['_registry_addrs'] | ||||
|             ) | ||||
|             assert ( | ||||
|                 _addr.wrap_address(registry_addr) | ||||
|                 in | ||||
|                 root.registry_addrs | ||||
|             ) | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_uds_double_listen_raises_connerr( | ||||
|     debug_mode: bool, | ||||
|     bindspace_dir_str: str, | ||||
| ): | ||||
|     registry_addr: tuple = ( | ||||
|         f'{bindspace_dir_str}', | ||||
|         'registry@doggy.sock', | ||||
|     ) | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=['uds'], | ||||
|             registry_addrs=[registry_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as _an: | ||||
| 
 | ||||
|             # runtime up | ||||
|             root: Actor = tractor.current_actor() | ||||
| 
 | ||||
|             from tractor.ipc._uds import ( | ||||
|                 start_listener, | ||||
|                 UDSAddress, | ||||
|             ) | ||||
|             ya_bound_addr: UDSAddress = root.registry_addrs[0] | ||||
|             try: | ||||
|                 await start_listener( | ||||
|                     addr=ya_bound_addr, | ||||
|                 ) | ||||
|             except ConnectionError as connerr: | ||||
|                 assert type(src_exc := connerr.__context__) is OSError | ||||
|                 assert 'Address already in use' in src_exc.args | ||||
|                 # complete, exit test. | ||||
| 
 | ||||
|             else: | ||||
|                 pytest.fail('It dint raise a connerr !?') | ||||
| 
 | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,95 +0,0 @@ | |||
| ''' | ||||
| Verify the `enable_transports` param drives various | ||||
| per-root/sub-actor IPC endpoint/server settings. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     Portal, | ||||
|     ipc, | ||||
|     msg, | ||||
|     _state, | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| @tractor.context | ||||
| async def chk_tpts( | ||||
|     ctx: tractor.Context, | ||||
|     tpt_proto_key: str, | ||||
| ): | ||||
|     rtvars = _state._runtime_vars | ||||
|     assert ( | ||||
|         tpt_proto_key | ||||
|         in | ||||
|         rtvars['_enable_tpts'] | ||||
|     ) | ||||
|     actor: Actor = tractor.current_actor() | ||||
|     spec: msg.types.SpawnSpec = actor._spawn_spec | ||||
|     assert spec._runtime_vars == rtvars | ||||
| 
 | ||||
|     # ensure individual IPC ep-addr types | ||||
|     serv: ipc._server.Server = actor.ipc_server | ||||
|     addr: ipc._types.Address | ||||
|     for addr in serv.addrs: | ||||
|         assert addr.proto_key == tpt_proto_key | ||||
| 
 | ||||
|     # Actor delegate-props enforcement | ||||
|     assert ( | ||||
|         actor.accept_addrs | ||||
|         == | ||||
|         serv.accept_addrs | ||||
|     ) | ||||
| 
 | ||||
|     await ctx.started(serv.accept_addrs) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, parametrize over mis-matched-proto-typed `registry_addrs` | ||||
| # since it seems to work in `piker`, but it's not exactly clear | ||||
| # whether both tcp & uds are being deployed in that case? | ||||
| # | ||||
| @pytest.mark.parametrize( | ||||
|     'tpt_proto_key', | ||||
|     ['tcp', 'uds'], | ||||
|     ids=lambda item: f'ipc_tpt={item!r}' | ||||
| ) | ||||
| def test_root_passes_tpt_to_sub( | ||||
|     tpt_proto_key: str, | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=[tpt_proto_key], | ||||
|             registry_addrs=[reg_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
| 
 | ||||
|             assert ( | ||||
|                 tpt_proto_key | ||||
|                 in | ||||
|                 _state._runtime_vars['_enable_tpts'] | ||||
|             ) | ||||
| 
 | ||||
|             ptl: Portal = await an.start_actor( | ||||
|                 name='sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ptl.open_context( | ||||
|                 chk_tpts, | ||||
|                 tpt_proto_key=tpt_proto_key, | ||||
|             ) as (ctx, accept_addrs): | ||||
| 
 | ||||
|                 uw_addr: tuple | ||||
|                 for uw_addr in accept_addrs: | ||||
|                     addr = _addr.wrap_address(uw_addr) | ||||
|                     assert addr.is_valid | ||||
| 
 | ||||
|             # shutdown sub-actor(s) | ||||
|             await an.cancel() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,72 +0,0 @@ | |||
| ''' | ||||
| High-level `.ipc._server` unit tests. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| from tractor import ( | ||||
|     devx, | ||||
|     ipc, | ||||
|     log, | ||||
| ) | ||||
| from tractor._testing.addr import ( | ||||
|     get_rando_addr, | ||||
| ) | ||||
| # TODO, use/check-roundtripping with some of these wrapper types? | ||||
| # | ||||
| # from .._addr import Address | ||||
| # from ._chan import Channel | ||||
| # from ._transport import MsgTransport | ||||
| # from ._uds import UDSAddress | ||||
| # from ._tcp import TCPAddress | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     '_tpt_proto', | ||||
|     ['uds', 'tcp'] | ||||
| ) | ||||
| def test_basic_ipc_server( | ||||
|     _tpt_proto: str, | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
| ): | ||||
| 
 | ||||
|     # so we see the socket-listener reporting on console | ||||
|     log.get_console_log("INFO") | ||||
| 
 | ||||
|     rando_addr: tuple = get_rando_addr( | ||||
|         tpt_proto=_tpt_proto, | ||||
|     ) | ||||
|     async def main(): | ||||
|         async with ipc._server.open_ipc_server() as server: | ||||
| 
 | ||||
|             assert ( | ||||
|                 server._parent_tn | ||||
|                 and | ||||
|                 server._parent_tn is server._stream_handler_tn | ||||
|             ) | ||||
|             assert server._no_more_peers.is_set() | ||||
| 
 | ||||
|             eps: list[ipc._server.Endpoint] = await server.listen_on( | ||||
|                 accept_addrs=[rando_addr], | ||||
|                 stream_handler_nursery=None, | ||||
|             ) | ||||
|             assert ( | ||||
|                 len(eps) == 1 | ||||
|                 and | ||||
|                 (ep := eps[0])._listener | ||||
|                 and | ||||
|                 not ep.peer_tpts | ||||
|             ) | ||||
| 
 | ||||
|             server._parent_tn.cancel_scope.cancel() | ||||
| 
 | ||||
|         # !TODO! actually make a bg-task connection from a client | ||||
|         # using `ipc._chan._connect_chan()` | ||||
| 
 | ||||
|     with devx.maybe_open_crash_handler( | ||||
|         pdb=debug_mode, | ||||
|     ): | ||||
|         trio.run(main) | ||||
|  | @ -3,6 +3,7 @@ Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la | |||
| cancelacion?.. | ||||
| 
 | ||||
| ''' | ||||
| import itertools | ||||
| from functools import partial | ||||
| from types import ModuleType | ||||
| 
 | ||||
|  | @ -10,12 +11,8 @@ import pytest | |||
| from _pytest.pathlib import import_path | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     TransportClosed, | ||||
| ) | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
|     break_ipc, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -77,7 +74,6 @@ def test_ipc_channel_break_during_stream( | |||
|     spawn_backend: str, | ||||
|     ipc_break: dict|None, | ||||
|     pre_aclose_msgstream: bool, | ||||
|     tpt_proto: str, | ||||
| ): | ||||
|     ''' | ||||
|     Ensure we can have an IPC channel break its connection during | ||||
|  | @ -89,37 +85,29 @@ def test_ipc_channel_break_during_stream( | |||
| 
 | ||||
|     ''' | ||||
|     if spawn_backend != 'trio': | ||||
|         if debug_mode: | ||||
|             pytest.skip('`debug_mode` only supported on `trio` spawner') | ||||
|     #     if debug_mode: | ||||
|     #         pytest.skip('`debug_mode` only supported on `trio` spawner') | ||||
| 
 | ||||
|         # non-`trio` spawners should never hit the hang condition that | ||||
|         # requires the user to do ctl-c to cancel the actor tree. | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = TransportClosed | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
| 
 | ||||
|     mod: ModuleType = import_path( | ||||
|         examples_dir() / 'advanced_faults' | ||||
|         / 'ipc_failure_during_stream.py', | ||||
|         examples_dir() / 'advanced_faults' / 'ipc_failure_during_stream.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
| 
 | ||||
|     # by def we expect KBI from user after a simulated "hang | ||||
|     # period" wherein the user eventually hits ctl-c to kill the | ||||
|     # root-actor tree. | ||||
|     expect_final_exc: BaseException = KeyboardInterrupt | ||||
|     expect_final_cause: BaseException|None = None | ||||
| 
 | ||||
|     if ( | ||||
|         # only expect EoC if trans is broken on the child side, | ||||
|         ipc_break['break_child_ipc_after'] is not False | ||||
|         # AND we tell the child to call `MsgStream.aclose()`. | ||||
|         and pre_aclose_msgstream | ||||
|     ): | ||||
|         # expect_final_exc = trio.EndOfChannel | ||||
|         # ^XXX NOPE! XXX^ since now `.open_stream()` absorbs this | ||||
|         # gracefully! | ||||
|         expect_final_exc = KeyboardInterrupt | ||||
|         expect_final_exc = trio.EndOfChannel | ||||
| 
 | ||||
|     # NOTE when ONLY the child breaks or it breaks BEFORE the | ||||
|     # parent we expect the parent to get a closed resource error | ||||
|  | @ -132,28 +120,11 @@ def test_ipc_channel_break_during_stream( | |||
|         and | ||||
|         ipc_break['break_parent_ipc_after'] is False | ||||
|     ): | ||||
|         # NOTE: we DO NOT expect this any more since | ||||
|         # the child side's channel will be broken silently | ||||
|         # and nothing on the parent side will indicate this! | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
| 
 | ||||
|         # NOTE: child will send a 'stop' msg before it breaks | ||||
|         # the transport channel BUT, that will be absorbed by the | ||||
|         # `ctx.open_stream()` block and thus the `.open_context()` | ||||
|         # should hang, after which the test script simulates | ||||
|         # a user sending ctl-c by raising a KBI. | ||||
|         # if child calls `MsgStream.aclose()` then expect EoC. | ||||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
|             if tpt_proto == 'uds': | ||||
|                 expect_final_exc = TransportClosed | ||||
|                 expect_final_cause = trio.BrokenResourceError | ||||
| 
 | ||||
|             # XXX OLD XXX | ||||
|             # if child calls `MsgStream.aclose()` then expect EoC. | ||||
|             # ^ XXX not any more ^ since eoc is always absorbed | ||||
|             # gracefully and NOT bubbled to the `.open_context()` | ||||
|             # block! | ||||
|             # expect_final_exc = trio.EndOfChannel | ||||
|             expect_final_exc = trio.EndOfChannel | ||||
| 
 | ||||
|     # BOTH but, CHILD breaks FIRST | ||||
|     elif ( | ||||
|  | @ -163,14 +134,14 @@ def test_ipc_channel_break_during_stream( | |||
|             > ipc_break['break_child_ipc_after'] | ||||
|         ) | ||||
|     ): | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
| 
 | ||||
|         # child will send a 'stop' msg before it breaks | ||||
|         # the transport channel. | ||||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
|             expect_final_exc = trio.EndOfChannel | ||||
| 
 | ||||
|             if tpt_proto == 'uds': | ||||
|                 expect_final_exc = TransportClosed | ||||
|                 expect_final_cause = trio.BrokenResourceError | ||||
| 
 | ||||
|     # NOTE when the parent IPC side dies (even if the child does as well | ||||
|     # NOTE when the parent IPC side dies (even if the child's does as well | ||||
|     # but the child fails BEFORE the parent) we always expect the | ||||
|     # IPC layer to raise a closed-resource, NEVER do we expect | ||||
|     # a stop msg since the parent-side ctx apis will error out | ||||
|  | @ -182,20 +153,17 @@ def test_ipc_channel_break_during_stream( | |||
|         and | ||||
|         ipc_break['break_child_ipc_after'] is False | ||||
|     ): | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
|         expect_final_cause = trio.ClosedResourceError | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
| 
 | ||||
|     # BOTH but, PARENT breaks FIRST | ||||
|     elif ( | ||||
|         ipc_break['break_parent_ipc_after'] is not False | ||||
|         and ( | ||||
|             ipc_break['break_child_ipc_after'] | ||||
|             > | ||||
|             ipc_break['break_parent_ipc_after'] | ||||
|             > ipc_break['break_parent_ipc_after'] | ||||
|         ) | ||||
|     ): | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
|         expect_final_cause = trio.ClosedResourceError | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=( | ||||
|  | @ -211,12 +179,11 @@ def test_ipc_channel_break_during_stream( | |||
|                     start_method=spawn_backend, | ||||
|                     loglevel=loglevel, | ||||
|                     pre_close=pre_aclose_msgstream, | ||||
|                     tpt_proto=tpt_proto, | ||||
|                     **ipc_break, | ||||
|                 ) | ||||
|             ) | ||||
|         except KeyboardInterrupt as _kbi: | ||||
|             kbi = _kbi | ||||
|         except KeyboardInterrupt as kbi: | ||||
|             _err = kbi | ||||
|             if expect_final_exc is not KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Rxed unexpected KBI !?\n' | ||||
|  | @ -225,33 +192,16 @@ def test_ipc_channel_break_during_stream( | |||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|         except tractor.TransportClosed as _tc: | ||||
|             tc = _tc | ||||
|             if expect_final_exc is KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Unexpected transport failure !?\n' | ||||
|                     f'{repr(tc)}' | ||||
|                 ) | ||||
|             cause: Exception = tc.__cause__ | ||||
|             assert ( | ||||
|                 # type(cause) is trio.ClosedResourceError | ||||
|                 type(cause) is expect_final_cause | ||||
| 
 | ||||
|                 # TODO, should we expect a certain exc-message (per | ||||
|                 # tpt) as well?? | ||||
|                 # and | ||||
|                 # cause.args[0] == 'another task closed this fd' | ||||
|             ) | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|     # get raw instance from pytest wrapper | ||||
|     value = excinfo.value | ||||
|     if isinstance(value, ExceptionGroup): | ||||
|         excs = value.exceptions | ||||
|         assert len(excs) == 1 | ||||
|         final_exc = excs[0] | ||||
|         assert isinstance(final_exc, expect_final_exc) | ||||
|         value = next( | ||||
|             itertools.dropwhile( | ||||
|                 lambda exc: not isinstance(exc, expect_final_exc), | ||||
|                 value.exceptions, | ||||
|             ) | ||||
|         ) | ||||
|         assert value | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -260,50 +210,39 @@ async def break_ipc_after_started( | |||
| ) -> None: | ||||
|     await ctx.started() | ||||
|     async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|         # TODO: make a test which verifies the error | ||||
|         # for this, i.e. raises a `MsgTypeError` | ||||
|         # await ctx.chan.send(None) | ||||
| 
 | ||||
|         await break_ipc( | ||||
|             stream=stream, | ||||
|             pre_close=True, | ||||
|         ) | ||||
|         await stream.aclose() | ||||
|         await trio.sleep(0.2) | ||||
|         await ctx.chan.send(None) | ||||
|         print('child broke IPC and terminating') | ||||
| 
 | ||||
| 
 | ||||
| def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | ||||
|     ''' | ||||
|     Verify that if a subactor's IPC goes down just after bringing up | ||||
|     a stream the parent can trigger a SIGINT and the child will be | ||||
|     reaped out-of-IPC by the localhost process supervision machinery: | ||||
|     aka "zombie lord". | ||||
|     Verify that if a subactor's IPC goes down just after bringing up a stream | ||||
|     the parent can trigger a SIGINT and the child will be reaped out-of-IPC by | ||||
|     the localhost process supervision machinery: aka "zombie lord". | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 portal = await an.start_actor( | ||||
|                     'ipc_breaker', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|                 'ipc_breaker', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|                 with trio.move_on_after(1): | ||||
|                     async with ( | ||||
|                         portal.open_context( | ||||
|                             break_ipc_after_started | ||||
|                         ) as (ctx, sent), | ||||
|                     ): | ||||
|                         async with ctx.open_stream(): | ||||
|                             await trio.sleep(0.5) | ||||
|             with trio.move_on_after(1): | ||||
|                 async with ( | ||||
|                     portal.open_context( | ||||
|                         break_ipc_after_started | ||||
|                     ) as (ctx, sent), | ||||
|                 ): | ||||
|                     async with ctx.open_stream(): | ||||
|                         await trio.sleep(0.5) | ||||
| 
 | ||||
|                         print('parent waiting on context') | ||||
|                     print('parent waiting on context') | ||||
| 
 | ||||
|                 print( | ||||
|                     'parent exited context\n' | ||||
|                     'parent raising KBI..\n' | ||||
|                 ) | ||||
|                 raise KeyboardInterrupt | ||||
|             print('parent exited context') | ||||
|             raise KeyboardInterrupt | ||||
| 
 | ||||
|     with pytest.raises(KeyboardInterrupt): | ||||
|         trio.run(main) | ||||
|  |  | |||
|  | @ -307,13 +307,6 @@ async def inf_streamer( | |||
| 
 | ||||
|     async with ( | ||||
|         ctx.open_stream() as stream, | ||||
| 
 | ||||
|         # XXX TODO, INTERESTING CASE!! | ||||
|         # - if we don't collapse the eg then the embedded | ||||
|         # `trio.EndOfChannel` doesn't propagate directly to the above | ||||
|         # .open_stream() parent, resulting in it also raising instead | ||||
|         # of gracefully absorbing as normal.. so how to handle? | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         async def close_stream_on_sentinel(): | ||||
|  |  | |||
|  | @ -8,13 +8,17 @@ import platform | |||
| import time | ||||
| from itertools import repeat | ||||
| 
 | ||||
| from exceptiongroup import ( | ||||
|     BaseExceptionGroup, | ||||
|     ExceptionGroup, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from .conftest import no_windows | ||||
| from conftest import no_windows | ||||
| 
 | ||||
| 
 | ||||
| def is_win(): | ||||
|  | @ -77,7 +81,7 @@ def test_remote_error(reg_addr, args_err): | |||
|                 # of this actor nursery. | ||||
|                 await portal.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.boxed_type == errtype | ||||
|                 assert err.type == errtype | ||||
|                 print("Look Maa that actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|  | @ -86,33 +90,20 @@ def test_remote_error(reg_addr, args_err): | |||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type == errtype | ||||
|         assert excinfo.value.type == errtype | ||||
| 
 | ||||
|     else: | ||||
|         # the root task will also error on the `Portal.result()` | ||||
|         # call so we expect an error from there AND the child. | ||||
|         # |_ tho seems like on new `trio` this doesn't always | ||||
|         #    happen? | ||||
|         with pytest.raises(( | ||||
|             BaseExceptionGroup, | ||||
|             tractor.RemoteActorError, | ||||
|         )) as excinfo: | ||||
|         # the root task will also error on the `.result()` call | ||||
|         # so we expect an error from there AND the child. | ||||
|         with pytest.raises(BaseExceptionGroup) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         # ensure boxed errors are `errtype` | ||||
|         err: BaseException = excinfo.value | ||||
|         if isinstance(err, BaseExceptionGroup): | ||||
|             suberrs: list[BaseException] = err.exceptions | ||||
|         else: | ||||
|             suberrs: list[BaseException] = [err] | ||||
| 
 | ||||
|         for exc in suberrs: | ||||
|             assert exc.boxed_type == errtype | ||||
|         # ensure boxed errors | ||||
|         for exc in excinfo.value.exceptions: | ||||
|             assert exc.type == errtype | ||||
| 
 | ||||
| 
 | ||||
| def test_multierror( | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| def test_multierror(reg_addr): | ||||
|     ''' | ||||
|     Verify we raise a ``BaseExceptionGroup`` out of a nursery where | ||||
|     more than one actor errors. | ||||
|  | @ -130,7 +121,7 @@ def test_multierror( | |||
|             try: | ||||
|                 await portal2.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.boxed_type is AssertionError | ||||
|                 assert err.type == AssertionError | ||||
|                 print("Look Maa that first actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|  | @ -182,7 +173,7 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | |||
| 
 | ||||
|     for exc in exceptions: | ||||
|         assert isinstance(exc, tractor.RemoteActorError) | ||||
|         assert exc.boxed_type is AssertionError | ||||
|         assert exc.type == AssertionError | ||||
| 
 | ||||
| 
 | ||||
| async def do_nothing(): | ||||
|  | @ -236,10 +227,7 @@ async def stream_forever(): | |||
| async def test_cancel_infinite_streamer(start_method): | ||||
| 
 | ||||
|     # stream for at most 1 seconds | ||||
|     with ( | ||||
|         trio.fail_after(4), | ||||
|         trio.move_on_after(1) as cancel_scope | ||||
|     ): | ||||
|     with trio.move_on_after(1) as cancel_scope: | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|                 'donny', | ||||
|  | @ -287,32 +275,20 @@ async def test_cancel_infinite_streamer(start_method): | |||
|     ], | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_some_cancels_all( | ||||
|     num_actors_and_errs: tuple, | ||||
|     start_method: str, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Verify a subset of failed subactors causes all others in | ||||
| async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel): | ||||
|     """Verify a subset of failed subactors causes all others in | ||||
|     the nursery to be cancelled just like the strategy in trio. | ||||
| 
 | ||||
|     This is the first and only supervisory strategy at the moment. | ||||
| 
 | ||||
|     ''' | ||||
|     ( | ||||
|         num_actors, | ||||
|         first_err, | ||||
|         err_type, | ||||
|         ria_func, | ||||
|         da_func, | ||||
|     ) = num_actors_and_errs | ||||
|     """ | ||||
|     num_actors, first_err, err_type, ria_func, da_func = num_actors_and_errs | ||||
|     try: | ||||
|         async with tractor.open_nursery() as an: | ||||
|         async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|             # spawn the same number of daemon actors which should be cancelled | ||||
|             dactor_portals = [] | ||||
|             for i in range(num_actors): | ||||
|                 dactor_portals.append(await an.start_actor( | ||||
|                 dactor_portals.append(await n.start_actor( | ||||
|                     f'deamon_{i}', | ||||
|                     enable_modules=[__name__], | ||||
|                 )) | ||||
|  | @ -322,7 +298,7 @@ async def test_some_cancels_all( | |||
|             for i in range(num_actors): | ||||
|                 # start actor(s) that will fail immediately | ||||
|                 riactor_portals.append( | ||||
|                     await an.run_in_actor( | ||||
|                     await n.run_in_actor( | ||||
|                         func, | ||||
|                         name=f'actor_{i}', | ||||
|                         **kwargs | ||||
|  | @ -338,7 +314,7 @@ async def test_some_cancels_all( | |||
|                         await portal.run(func, **kwargs) | ||||
| 
 | ||||
|                     except tractor.RemoteActorError as err: | ||||
|                         assert err.boxed_type == err_type | ||||
|                         assert err.type == err_type | ||||
|                         # we only expect this first error to propagate | ||||
|                         # (all other daemons are cancelled before they | ||||
|                         # can be scheduled) | ||||
|  | @ -352,20 +328,19 @@ async def test_some_cancels_all( | |||
| 
 | ||||
|         # should error here with a ``RemoteActorError`` or ``MultiError`` | ||||
| 
 | ||||
|     except first_err as _err: | ||||
|         err = _err | ||||
|     except first_err as err: | ||||
|         if isinstance(err, BaseExceptionGroup): | ||||
|             assert len(err.exceptions) == num_actors | ||||
|             for exc in err.exceptions: | ||||
|                 if isinstance(exc, tractor.RemoteActorError): | ||||
|                     assert exc.boxed_type == err_type | ||||
|                     assert exc.type == err_type | ||||
|                 else: | ||||
|                     assert isinstance(exc, trio.Cancelled) | ||||
|         elif isinstance(err, tractor.RemoteActorError): | ||||
|             assert err.boxed_type == err_type | ||||
|             assert err.type == err_type | ||||
| 
 | ||||
|         assert an.cancelled is True | ||||
|         assert not an._children | ||||
|         assert n.cancelled is True | ||||
|         assert not n._children | ||||
|     else: | ||||
|         pytest.fail("Should have gotten a remote assertion error?") | ||||
| 
 | ||||
|  | @ -441,7 +416,7 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                     elif isinstance(subexc, tractor.RemoteActorError): | ||||
|                         # on windows it seems we can't exactly be sure wtf | ||||
|                         # will happen.. | ||||
|                         assert subexc.boxed_type in ( | ||||
|                         assert subexc.type in ( | ||||
|                             tractor.RemoteActorError, | ||||
|                             trio.Cancelled, | ||||
|                             BaseExceptionGroup, | ||||
|  | @ -451,7 +426,7 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                         for subsub in subexc.exceptions: | ||||
| 
 | ||||
|                             if subsub in (tractor.RemoteActorError,): | ||||
|                                 subsub = subsub.boxed_type | ||||
|                                 subsub = subsub.type | ||||
| 
 | ||||
|                             assert type(subsub) in ( | ||||
|                                 trio.Cancelled, | ||||
|  | @ -466,16 +441,16 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                     # we get back the (sent) cancel signal instead | ||||
|                     if is_win(): | ||||
|                         if isinstance(subexc, tractor.RemoteActorError): | ||||
|                             assert subexc.boxed_type in ( | ||||
|                             assert subexc.type in ( | ||||
|                                 BaseExceptionGroup, | ||||
|                                 tractor.RemoteActorError | ||||
|                             ) | ||||
|                         else: | ||||
|                             assert isinstance(subexc, BaseExceptionGroup) | ||||
|                     else: | ||||
|                         assert subexc.boxed_type is ExceptionGroup | ||||
|                         assert subexc.type is ExceptionGroup | ||||
|                 else: | ||||
|                     assert subexc.boxed_type in ( | ||||
|                     assert subexc.type in ( | ||||
|                         tractor.RemoteActorError, | ||||
|                         trio.Cancelled | ||||
|                     ) | ||||
|  | @ -520,9 +495,7 @@ def test_cancel_via_SIGINT_other_task( | |||
|     if is_win():  # smh | ||||
|         timeout += 1 | ||||
| 
 | ||||
|     async def spawn_and_sleep_forever( | ||||
|         task_status=trio.TASK_STATUS_IGNORED | ||||
|     ): | ||||
|     async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED): | ||||
|         async with tractor.open_nursery() as tn: | ||||
|             for i in range(3): | ||||
|                 await tn.run_in_actor( | ||||
|  | @ -535,15 +508,8 @@ def test_cancel_via_SIGINT_other_task( | |||
|     async def main(): | ||||
|         # should never timeout since SIGINT should cancel the current program | ||||
|         with trio.fail_after(timeout): | ||||
|             async with ( | ||||
| 
 | ||||
|                 # XXX ?TODO? why no work!? | ||||
|                 # tractor.trionics.collapse_eg(), | ||||
|                 trio.open_nursery( | ||||
|                     strict_exception_groups=False, | ||||
|                 ) as tn, | ||||
|             ): | ||||
|                 await tn.start(spawn_and_sleep_forever) | ||||
|             async with trio.open_nursery() as n: | ||||
|                 await n.start(spawn_and_sleep_forever) | ||||
|                 if 'mp' in spawn_backend: | ||||
|                     time.sleep(0.1) | ||||
|                 os.kill(pid, signal.SIGINT) | ||||
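Worth noting on the `strict_exception_groups=False` flag above: under strict (now default) semantics a `trio` nursery always re-raises child errors wrapped in an `ExceptionGroup`, while the legacy flag lets a lone error propagate bare. A hedged sketch of the difference (the flag exists in `trio` 0.22+ and is deprecated in newer releases):

```python
import trio

async def boom() -> None:
    raise KeyboardInterrupt

async def main():
    async with trio.open_nursery(
        strict_exception_groups=False,  # lone errors propagate unwrapped
    ) as n:
        n.start_soon(boom)
        await trio.sleep_forever()

try:
    trio.run(main)
except KeyboardInterrupt:
    # under strict semantics this would instead surface as
    # BaseExceptionGroup('...', [KeyboardInterrupt()])
    print('bare KBI, no group wrapper')
```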
|  | @ -554,123 +520,38 @@ def test_cancel_via_SIGINT_other_task( | |||
| 
 | ||||
| async def spin_for(period=3): | ||||
|     "Sync sleep." | ||||
|     print(f'sync sleeping in sub-sub for {period}\n') | ||||
|     time.sleep(period) | ||||
| 
 | ||||
| 
 | ||||
| async def spawn_sub_with_sync_blocking_task(): | ||||
|     async with tractor.open_nursery() as an: | ||||
|         print('starting sync blocking subactor..\n') | ||||
|         await an.run_in_actor( | ||||
| async def spawn(): | ||||
|     async with tractor.open_nursery() as tn: | ||||
|         await tn.run_in_actor( | ||||
|             spin_for, | ||||
|             name='sleeper', | ||||
|         ) | ||||
|         print('exiting first subactor layer..\n') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'man_cancel_outer', | ||||
|     [ | ||||
|         False,  # passes if delay != 2 | ||||
| 
 | ||||
|         # always causes an unexpected eg-w-embedded-assert-err? | ||||
|         pytest.param(True, | ||||
|              marks=pytest.mark.xfail( | ||||
|                  reason=( | ||||
|                     'always causes an unexpected eg-w-embedded-assert-err?' | ||||
|                 ) | ||||
|             ), | ||||
|         ), | ||||
|     ], | ||||
| ) | ||||
| @no_windows | ||||
| def test_cancel_while_childs_child_in_sync_sleep( | ||||
|     loglevel: str, | ||||
|     start_method: str, | ||||
|     spawn_backend: str, | ||||
|     debug_mode: bool, | ||||
|     reg_addr: tuple, | ||||
|     man_cancel_outer: bool, | ||||
|     loglevel, | ||||
|     start_method, | ||||
|     spawn_backend, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a child cancelled while executing sync code is torn | ||||
|     """Verify that a child cancelled while executing sync code is torn | ||||
|     down even when that cancellation is triggered by the parent | ||||
|     2 nurseries "up". | ||||
| 
 | ||||
|     Though the grandchild should stay blocking its actor runtime, its | ||||
|     parent should issue a "zombie reaper" to hard kill it after | ||||
|     sufficient timeout. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     if start_method == 'forkserver': | ||||
|         pytest.skip("Forksever sux hard at resuming from sync sleep...") | ||||
| 
 | ||||
|     async def main(): | ||||
|         # | ||||
|         # XXX BIG TODO NOTE XXX | ||||
|         # | ||||
|         # it seems there's a strange race that can happen | ||||
|         # where the fail-after will trigger outer scope | ||||
|         # .cancel() which then causes the inner scope to raise, | ||||
|         # | ||||
|         # BaseExceptionGroup('Exceptions from Trio nursery', [ | ||||
|         #   BaseExceptionGroup('Exceptions from Trio nursery', | ||||
|         #   [ | ||||
|         #       Cancelled(), | ||||
|         #       Cancelled(), | ||||
|         #   ] | ||||
|         #   ), | ||||
|         #   AssertionError('assert 0') | ||||
|         # ]) | ||||
|         # | ||||
|         # WHY THIS DOESN'T MAKE SENSE: | ||||
|         # --------------------------- | ||||
|         # - it should raise too-slow-error when too slow.. | ||||
|         #  * verified that using simple-cs and manually cancelling | ||||
|         #    you get same outcome -> indicates that the fail-after | ||||
|         #    can have its TooSlowError overridden! | ||||
|         #  |_ to check this it's easy, simply decrease the timeout | ||||
|         #     as per the var below. | ||||
|         # | ||||
|         # - when using the manual simple-cs the outcome is different | ||||
|         #   DESPITE the `assert 0` which means regardless of the | ||||
|         #   inner scope effectively failing in the same way, the | ||||
|         #   bubbling up **is NOT the same**. | ||||
|         # | ||||
|         # delays trigger diff outcomes.. | ||||
|         # --------------------------- | ||||
|         # as seen by uncommenting various lines below there is from | ||||
|         # my POV an unexpected outcome due to the delay=2 case. | ||||
|         # | ||||
|         # delay = 1  # no AssertionError in eg, TooSlowError raised. | ||||
|         # delay = 2  # is AssertionError in eg AND no TooSlowError !? | ||||
|         delay = 4  # is AssertionError in eg AND no _cs cancellation. | ||||
| 
 | ||||
|         with trio.fail_after(delay) as _cs: | ||||
|         # with trio.CancelScope() as cs: | ||||
|         # ^XXX^ can be used instead to see same outcome. | ||||
| 
 | ||||
|             async with ( | ||||
|                 # tractor.trionics.collapse_eg(),  # doesn't help | ||||
|                 tractor.open_nursery( | ||||
|                     hide_tb=False, | ||||
|                     debug_mode=debug_mode, | ||||
|                     registry_addrs=[reg_addr], | ||||
|                 ) as an, | ||||
|             ): | ||||
|                 await an.run_in_actor( | ||||
|                     spawn_sub_with_sync_blocking_task, | ||||
|                     name='sync_blocking_sub', | ||||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery() as tn: | ||||
|                 await tn.run_in_actor( | ||||
|                     spawn, | ||||
|                     name='spawn', | ||||
|                 ) | ||||
|                 await trio.sleep(1) | ||||
| 
 | ||||
|                 if man_cancel_outer: | ||||
|                     print('Cancelling manually in root') | ||||
|                     _cs.cancel() | ||||
| 
 | ||||
|                 # trigger exc-srced taskc down | ||||
|                 # the actor tree. | ||||
|                 print('RAISING IN ROOT') | ||||
|                 assert 0 | ||||
| 
 | ||||
|     with pytest.raises(AssertionError): | ||||
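The delay-dependent outcomes itemized in the big XXX note resist a tidy repro, but the baseline `trio.fail_after()` semantics it leans on can be sketched. Everything here is illustrative only, not a distillation of the race itself:

```python
import trio

async def main():
    # normal case: deadline expiry converts Cancelled -> TooSlowError
    try:
        with trio.fail_after(0.1):
            await trio.sleep(1)
    except trio.TooSlowError:
        print('deadline hit -> TooSlowError')

    # competing-error case: an exception raised inside the scope
    # exits it before the deadline, so no TooSlowError is ever seen
    try:
        with trio.fail_after(0.1):
            raise AssertionError('raised from inside the timed scope')
    except AssertionError as exc:
        print(f'inner error won: {exc}')

trio.run(main)
```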
|  | @ -720,12 +601,6 @@ def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon( | |||
|                     nurse.start_soon(delayed_kbi) | ||||
| 
 | ||||
|                     await p.run(do_nuthin) | ||||
| 
 | ||||
|         # need to explicitly re-raise the lone kbi..now | ||||
|         except* KeyboardInterrupt as kbi_eg: | ||||
|             assert (len(excs := kbi_eg.exceptions) == 1) | ||||
|             raise excs[0] | ||||
| 
 | ||||
|         finally: | ||||
|             duration = time.time() - start | ||||
|             if duration > timeout: | ||||
|  |  | |||
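The removed `except* KeyboardInterrupt` clause above unwraps a single-member group so callers see a plain `KeyboardInterrupt`. A standalone sketch of the catch side (Python 3.11+ syntax):

```python
try:
    raise BaseExceptionGroup('demo', [KeyboardInterrupt()])
except* KeyboardInterrupt as kbi_eg:
    # `except*` always binds a group, even for a single match
    excs = kbi_eg.exceptions
    assert len(excs) == 1
    print(f'matched lone {excs[0]!r} inside the group')
```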
|  | @ -6,15 +6,13 @@ sub-sub-actor daemons. | |||
| ''' | ||||
| from typing import Optional | ||||
| import asyncio | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import RemoteActorError | ||||
| from async_generator import aclosing | ||||
| 
 | ||||
| 
 | ||||
| async def aio_streamer( | ||||
|  | @ -95,8 +93,8 @@ async def trio_main( | |||
| 
 | ||||
|     # stash a "service nursery" as "actor local" (aka a Python global) | ||||
|     global _nursery | ||||
|     tn = _nursery | ||||
|     assert tn | ||||
|     n = _nursery | ||||
|     assert n | ||||
| 
 | ||||
|     async def consume_stream(): | ||||
|         async with wrapper_mngr() as stream: | ||||
|  | @ -104,10 +102,10 @@ async def trio_main( | |||
|                 print(msg) | ||||
| 
 | ||||
|     # run 2 tasks to ensure broadcaster chan use | ||||
|     tn.start_soon(consume_stream) | ||||
|     tn.start_soon(consume_stream) | ||||
|     n.start_soon(consume_stream) | ||||
|     n.start_soon(consume_stream) | ||||
| 
 | ||||
|     tn.start_soon(trio_sleep_and_err) | ||||
|     n.start_soon(trio_sleep_and_err) | ||||
| 
 | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
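The "service nursery as actor local" comment above relies on each `tractor` actor being its own process: module globals are therefore naturally per-actor state. A minimal sketch of the idiom (helper names are hypothetical):

```python
import trio

_nursery: trio.Nursery | None = None  # one instance per actor process

def stash_service_nursery(tn: trio.Nursery) -> None:
    global _nursery
    _nursery = tn

def spawn_service_task(fn) -> None:
    assert _nursery is not None, 'service nursery was never stashed!'
    _nursery.start_soon(fn)
```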
|  | @ -117,11 +115,8 @@ async def open_actor_local_nursery( | |||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     global _nursery | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn | ||||
|     ): | ||||
|         _nursery = tn | ||||
|     async with trio.open_nursery() as n: | ||||
|         _nursery = n | ||||
|         await ctx.started() | ||||
|         await trio.sleep(10) | ||||
|         # await trio.sleep(1) | ||||
|  | @ -135,7 +130,7 @@ async def open_actor_local_nursery( | |||
|         # never yields back.. aka a scenario where the | ||||
|         # ``tractor.context`` task IS NOT in the service n's cancel | ||||
|         # scope. | ||||
|         tn.cancel_scope.cancel() | ||||
|         n.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -160,7 +155,7 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | |||
|         async with tractor.open_nursery() as n: | ||||
|             p = await n.start_actor( | ||||
|                 'nursery_mngr', | ||||
|                 infect_asyncio=asyncio_mode,  # TODO, is this enabling debug mode? | ||||
|                 infect_asyncio=asyncio_mode, | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ( | ||||
|  | @ -174,4 +169,4 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | |||
| 
 | ||||
|     # verify boxed error | ||||
|     err = excinfo.value | ||||
|     assert err.boxed_type is NameError | ||||
|     assert isinstance(err.type(), NameError) | ||||
|  |  | |||
|  | @ -13,24 +13,26 @@ MESSAGE = 'tractoring at full speed' | |||
| def test_empty_mngrs_input_raises() -> None: | ||||
| 
 | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|         with trio.fail_after(1): | ||||
|             async with ( | ||||
|                 open_actor_cluster( | ||||
|                     modules=[__name__], | ||||
| 
 | ||||
|                     # NOTE: ensure we can passthrough runtime opts | ||||
|                     loglevel='cancel', | ||||
|                     debug_mode=False, | ||||
|                     loglevel='info', | ||||
|                     # debug_mode=True, | ||||
| 
 | ||||
|                 ) as portals, | ||||
| 
 | ||||
|                 gather_contexts(mngrs=()), | ||||
|                 gather_contexts( | ||||
|                     # NOTE: it's the use of inline-generator syntax | ||||
|                     # here that causes the empty input. | ||||
|                     mngrs=( | ||||
|                         p.open_context(worker) for p in portals.values() | ||||
|                     ), | ||||
|                 ), | ||||
|             ): | ||||
|                 # should fail before this? | ||||
|                 assert portals | ||||
| 
 | ||||
|                 # test should fail if we mk it here! | ||||
|                 assert 0, 'Should have raised val-err !?' | ||||
|                 assert 0 | ||||
| 
 | ||||
|     with pytest.raises(ValueError): | ||||
|         trio.run(main) | ||||
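The inline comment in the test pins down the failure trigger: the generator expression over `portals.values()` can yield zero managers, and `gather_contexts()` (apparently) rejects empty input with a `ValueError`. The emptiness itself is plain Python; a trivial sketch:

```python
portals: dict = {}  # imagine no sub-actors were spawned

# a genexpr is lazy, so 'emptiness' only surfaces when consumed ..
mngrs = (p for p in portals.values())
assert list(mngrs) == []  # .. which is what gather_contexts() detects
```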
|  |  | |||
|  | @ -6,7 +6,6 @@ sync-opening a ``tractor.Context`` beforehand. | |||
| 
 | ||||
| ''' | ||||
| from itertools import count | ||||
| import math | ||||
| import platform | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|  | @ -25,7 +24,6 @@ from tractor._exceptions import ( | |||
|     StreamOverrun, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| from tractor._state import current_ipc_ctx | ||||
| 
 | ||||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
|  | @ -38,9 +36,9 @@ from tractor._testing import ( | |||
| # - standard setup/teardown: | ||||
| #   ``Portal.open_context()`` starts a new | ||||
| #   remote task context in another actor. The target actor's task must | ||||
| #   call ``Context.started()`` to unblock this entry on the parent side. | ||||
| #   the child task executes until complete and returns a final value | ||||
| #   which is delivered to the parent side and retrieved via | ||||
| #   call ``Context.started()`` to unblock this entry on the caller side. | ||||
| #   the callee task executes until complete and returns a final value | ||||
| #   which is delivered to the caller side and retrieved via | ||||
| #   ``Context.result()``. | ||||
| 
 | ||||
| # - cancel termination: | ||||
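A condensed sketch of the setup/teardown flow the first bullet above describes, modeled on the `simple_setup_teardown` fixture in this module (names illustrative):

```python
import tractor

@tractor.context
async def child(ctx: tractor.Context, data: int) -> str:
    # unblocks the parent's `open_context()` entry
    await ctx.started(data + 1)
    return 'yo'  # delivered as the final result

async def parent():
    async with tractor.open_nursery() as an:
        portal = await an.start_actor('kid', enable_modules=[__name__])
        async with portal.open_context(child, data=10) as (ctx, first):
            assert first == 11
            assert await ctx.result() == 'yo'
        await portal.cancel_actor()
```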
|  | @ -145,8 +143,6 @@ async def simple_setup_teardown( | |||
|     global _state | ||||
|     _state = True | ||||
| 
 | ||||
|     assert current_ipc_ctx() is ctx | ||||
| 
 | ||||
|     # signal to parent that we're up | ||||
|     await ctx.started(data + 1) | ||||
| 
 | ||||
|  | @ -170,9 +166,9 @@ async def assert_state(value: bool): | |||
|     [False, ValueError, KeyboardInterrupt], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'child_blocks_forever', | ||||
|     'callee_blocks_forever', | ||||
|     [False, True], | ||||
|     ids=lambda item: f'child_blocks_forever={item}' | ||||
|     ids=lambda item: f'callee_blocks_forever={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'pointlessly_open_stream', | ||||
|  | @ -181,7 +177,7 @@ async def assert_state(value: bool): | |||
| ) | ||||
| def test_simple_context( | ||||
|     error_parent, | ||||
|     child_blocks_forever, | ||||
|     callee_blocks_forever, | ||||
|     pointlessly_open_stream, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|  | @ -204,13 +200,12 @@ def test_simple_context( | |||
|                         portal.open_context( | ||||
|                             simple_setup_teardown, | ||||
|                             data=10, | ||||
|                             block_forever=child_blocks_forever, | ||||
|                             block_forever=callee_blocks_forever, | ||||
|                         ) as (ctx, sent), | ||||
|                     ): | ||||
|                         assert current_ipc_ctx() is ctx | ||||
|                         assert sent == 11 | ||||
| 
 | ||||
|                         if child_blocks_forever: | ||||
|                         if callee_blocks_forever: | ||||
|                             await portal.run(assert_state, value=True) | ||||
|                         else: | ||||
|                             assert await ctx.result() == 'yo' | ||||
|  | @ -220,7 +215,7 @@ def test_simple_context( | |||
|                                 if error_parent: | ||||
|                                     raise error_parent | ||||
| 
 | ||||
|                                 if child_blocks_forever: | ||||
|                                 if callee_blocks_forever: | ||||
|                                     await ctx.cancel() | ||||
|                                 else: | ||||
|                                     # in this case the stream will send a | ||||
|  | @ -250,18 +245,18 @@ def test_simple_context( | |||
|             trio.run(main) | ||||
|         except error_parent: | ||||
|             pass | ||||
|         except BaseExceptionGroup as beg: | ||||
|         except trio.MultiError as me: | ||||
|             # XXX: on windows it seems we may have to expect the group error | ||||
|             from tractor.trionics import is_multi_cancelled | ||||
|             assert is_multi_cancelled(beg) | ||||
|             from tractor._exceptions import is_multi_cancelled | ||||
|             assert is_multi_cancelled(me) | ||||
|     else: | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'child_returns_early', | ||||
|     'callee_returns_early', | ||||
|     [True, False], | ||||
|     ids=lambda item: f'child_returns_early={item}' | ||||
|     ids=lambda item: f'callee_returns_early={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'cancel_method', | ||||
|  | @ -273,14 +268,14 @@ def test_simple_context( | |||
|     [True, False], | ||||
|     ids=lambda item: f'chk_ctx_result_before_exit={item}' | ||||
| ) | ||||
| def test_parent_cancels( | ||||
| def test_caller_cancels( | ||||
|     cancel_method: str, | ||||
|     chk_ctx_result_before_exit: bool, | ||||
|     child_returns_early: bool, | ||||
|     callee_returns_early: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that when the opening side of a context (aka the parent) | ||||
|     Verify that when the opening side of a context (aka the caller) | ||||
|     cancels that context, the ctx does not raise a cancelled when | ||||
|     either calling `.result()` or on context exit. | ||||
| 
 | ||||
|  | @ -294,7 +289,7 @@ def test_parent_cancels( | |||
| 
 | ||||
|         if ( | ||||
|             cancel_method == 'portal' | ||||
|             and not child_returns_early | ||||
|             and not callee_returns_early | ||||
|         ): | ||||
|             try: | ||||
|                 res = await ctx.result() | ||||
|  | @ -318,7 +313,7 @@ def test_parent_cancels( | |||
|                 pytest.fail(f'should not have raised ctxc\n{ctxc}') | ||||
| 
 | ||||
|         # we actually get a result | ||||
|         if child_returns_early: | ||||
|         if callee_returns_early: | ||||
|             assert res == 'yo' | ||||
|             assert ctx.outcome is res | ||||
|             assert ctx.maybe_error is None | ||||
|  | @ -362,14 +357,14 @@ def test_parent_cancels( | |||
|             ) | ||||
|             timeout: float = ( | ||||
|                 0.5 | ||||
|                 if not child_returns_early | ||||
|                 if not callee_returns_early | ||||
|                 else 2 | ||||
|             ) | ||||
|             with trio.fail_after(timeout): | ||||
|                 async with ( | ||||
|                     expect_ctxc( | ||||
|                         yay=( | ||||
|                             not child_returns_early | ||||
|                             not callee_returns_early | ||||
|                             and cancel_method == 'portal' | ||||
|                         ) | ||||
|                     ), | ||||
|  | @ -377,13 +372,13 @@ def test_parent_cancels( | |||
|                     portal.open_context( | ||||
|                         simple_setup_teardown, | ||||
|                         data=10, | ||||
|                         block_forever=not child_returns_early, | ||||
|                         block_forever=not callee_returns_early, | ||||
|                     ) as (ctx, sent), | ||||
|                 ): | ||||
| 
 | ||||
|                     if child_returns_early: | ||||
|                     if callee_returns_early: | ||||
|                         # ensure we block long enough before sending | ||||
|                         # a cancel such that the child has already | ||||
|                         # a cancel such that the callee has already | ||||
|                         # returned its result. | ||||
|                         await trio.sleep(0.5) | ||||
| 
 | ||||
|  | @ -421,7 +416,7 @@ def test_parent_cancels( | |||
|             #   which should in turn cause `ctx._scope` to | ||||
|             # catch any cancellation? | ||||
|             if ( | ||||
|                 not child_returns_early | ||||
|                 not callee_returns_early | ||||
|                 and cancel_method != 'portal' | ||||
|             ): | ||||
|                 assert not ctx._scope.cancelled_caught | ||||
|  | @ -430,11 +425,11 @@ def test_parent_cancels( | |||
| 
 | ||||
| 
 | ||||
| # basic stream terminations: | ||||
| # - child context closes without using stream | ||||
| # - parent context closes without using stream | ||||
| # - parent context calls `Context.cancel()` while streaming | ||||
| #   is ongoing resulting in child being cancelled | ||||
| # - child calls `Context.cancel()` while streaming and parent | ||||
| # - callee context closes without using stream | ||||
| # - caller context closes without using stream | ||||
| # - caller context calls `Context.cancel()` while streaming | ||||
| #   is ongoing resulting in callee being cancelled | ||||
| # - callee calls `Context.cancel()` while streaming and caller | ||||
| #   sees stream terminated in `RemoteActorError` | ||||
| 
 | ||||
| # TODO: future possible features | ||||
|  | @ -443,6 +438,7 @@ def test_parent_cancels( | |||
| 
 | ||||
| @tractor.context | ||||
| async def close_ctx_immediately( | ||||
| 
 | ||||
|     ctx: Context, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -453,24 +449,13 @@ async def close_ctx_immediately( | |||
|     async with ctx.open_stream(): | ||||
|         pass | ||||
| 
 | ||||
|     print('child returning!') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'parent_send_before_receive', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=lambda item: f'child_send_before_receive={item}' | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_child_exits_ctx_after_stream_open( | ||||
| async def test_callee_closes_ctx_after_stream_open( | ||||
|     debug_mode: bool, | ||||
|     parent_send_before_receive: bool, | ||||
| ): | ||||
|     ''' | ||||
|     child context closes without using stream. | ||||
|     callee context closes without using stream. | ||||
| 
 | ||||
|     This should result in a msg sequence | ||||
|     |_<root>_ | ||||
|  | @ -484,9 +469,6 @@ async def test_child_exits_ctx_after_stream_open( | |||
|     => {'stop': True, 'cid': <str>} | ||||
| 
 | ||||
|     ''' | ||||
|     timeout: float = ( | ||||
|         0.5 if not debug_mode else 999 | ||||
|     ) | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
|  | @ -495,7 +477,7 @@ async def test_child_exits_ctx_after_stream_open( | |||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         with trio.fail_after(timeout): | ||||
|         with trio.fail_after(0.5): | ||||
|             async with portal.open_context( | ||||
|                 close_ctx_immediately, | ||||
| 
 | ||||
|  | @ -507,56 +489,41 @@ async def test_child_exits_ctx_after_stream_open( | |||
| 
 | ||||
|                 with trio.fail_after(0.4): | ||||
|                     async with ctx.open_stream() as stream: | ||||
|                         if parent_send_before_receive: | ||||
|                             print('sending first msg from parent!') | ||||
|                             await stream.send('yo') | ||||
| 
 | ||||
|                         # should fall through since ``StopAsyncIteration`` | ||||
|                         # should be raised through translation of | ||||
|                         # a ``trio.EndOfChannel`` by | ||||
|                         # ``trio.abc.ReceiveChannel.__anext__()`` | ||||
|                         msg = 10 | ||||
|                         async for msg in stream: | ||||
|                         async for _ in stream: | ||||
|                             # trigger failure if we DO NOT | ||||
|                             # get an EOC! | ||||
|                             assert 0 | ||||
|                         else: | ||||
|                             # never should get anything new from | ||||
|                             # the underlying stream | ||||
|                             assert msg == 10 | ||||
| 
 | ||||
|                             # verify stream is now closed | ||||
|                             try: | ||||
|                                 with trio.fail_after(0.3): | ||||
|                                     print('parent trying to `.receive()` on EoC stream!') | ||||
|                                     await stream.receive() | ||||
|                                     assert 0, 'should have raised eoc!?' | ||||
|                             except trio.EndOfChannel: | ||||
|                                 print('parent got EoC as expected!') | ||||
|                                 pass | ||||
|                                 # raise | ||||
| 
 | ||||
|                 # TODO: should be just raise the closed resource err | ||||
|                 # directly here to enforce not allowing a re-open | ||||
|                 # of a stream to the context (at least until a time of | ||||
|                 # if/when we decide that's a good idea?) | ||||
|                 try: | ||||
|                     with trio.fail_after(timeout): | ||||
|                     with trio.fail_after(0.5): | ||||
|                         async with ctx.open_stream() as stream: | ||||
|                             pass | ||||
|                 except trio.ClosedResourceError: | ||||
|                     pass | ||||
| 
 | ||||
|                 # if ctx._rx_chan._state.data: | ||||
|                 #     await tractor.pause() | ||||
| 
 | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
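The comment about `trio.abc.ReceiveChannel.__anext__()` is the core of this test: async iteration translates `trio.EndOfChannel` into `StopAsyncIteration`, so the `async for` exits silently. The same translation on a plain memory channel:

```python
import trio

async def main():
    send, recv = trio.open_memory_channel(0)
    await send.aclose()      # rx side will now raise EndOfChannel

    async for _ in recv:     # __anext__ maps EndOfChannel to
        assert 0             # StopAsyncIteration; body never runs

    print('loop fell through cleanly on EOC')

trio.run(main)
```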
| 
 | ||||
| @tractor.context | ||||
| async def expect_cancelled( | ||||
|     ctx: Context, | ||||
|     send_before_receive: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|     global _state | ||||
|  | @ -566,10 +533,6 @@ async def expect_cancelled( | |||
| 
 | ||||
|     try: | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             if send_before_receive: | ||||
|                 await stream.send('yo') | ||||
| 
 | ||||
|             async for msg in stream: | ||||
|                 await stream.send(msg)  # echo server | ||||
| 
 | ||||
|  | @ -596,49 +559,26 @@ async def expect_cancelled( | |||
|         raise | ||||
| 
 | ||||
|     else: | ||||
|         assert 0, "child wasn't cancelled !?" | ||||
|         assert 0, "callee wasn't cancelled !?" | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'child_send_before_receive', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=lambda item: f'child_send_before_receive={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'rent_wait_for_msg', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=lambda item: f'rent_wait_for_msg={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'use_ctx_cancel_method', | ||||
|     [ | ||||
|         False, | ||||
|         'pre_stream', | ||||
|         'post_stream_open', | ||||
|         'post_stream_close', | ||||
|     ], | ||||
|     ids=lambda item: f'use_ctx_cancel_method={item}' | ||||
|     [False, True], | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_parent_exits_ctx_after_child_enters_stream( | ||||
|     use_ctx_cancel_method: bool|str, | ||||
| async def test_caller_closes_ctx_after_callee_opens_stream( | ||||
|     use_ctx_cancel_method: bool, | ||||
|     debug_mode: bool, | ||||
|     rent_wait_for_msg: bool, | ||||
|     child_send_before_receive: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Parent-side of IPC context closes without sending on `MsgStream`. | ||||
|     caller context closes without using/opening stream | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
| 
 | ||||
|         root: Actor = current_actor() | ||||
|         portal = await an.start_actor( | ||||
|             'ctx_cancelled', | ||||
|  | @ -647,52 +587,41 @@ async def test_parent_exits_ctx_after_child_enters_stream( | |||
| 
 | ||||
|         async with portal.open_context( | ||||
|             expect_cancelled, | ||||
|             send_before_receive=child_send_before_receive, | ||||
|         ) as (ctx, sent): | ||||
|             assert sent is None | ||||
| 
 | ||||
|             await portal.run(assert_state, value=True) | ||||
| 
 | ||||
|             # call `ctx.cancel()` explicitly | ||||
|             if use_ctx_cancel_method == 'pre_stream': | ||||
|             if use_ctx_cancel_method: | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|                 # NOTE: means the local side `ctx._scope` will | ||||
|                 # have been cancelled by an ctxc ack and thus | ||||
|                 # `._scope.cancelled_caught` should be set. | ||||
|                 async with ( | ||||
|                     expect_ctxc( | ||||
|                         # XXX: the cause is US since we call | ||||
|                         # `Context.cancel()` just above! | ||||
|                         yay=True, | ||||
| 
 | ||||
|                         # XXX: must be propagated to __aexit__ | ||||
|                         # and should be silently absorbed there | ||||
|                         # since we called `.cancel()` just above ;) | ||||
|                         reraise=True, | ||||
|                     ) as maybe_ctxc, | ||||
|                 ): | ||||
|                 try: | ||||
|                     async with ctx.open_stream() as stream: | ||||
|                         async for msg in stream: | ||||
|                             pass | ||||
| 
 | ||||
|                         if rent_wait_for_msg: | ||||
|                             async for msg in stream: | ||||
|                                 print(f'PARENT rx: {msg!r}\n') | ||||
|                                 break | ||||
|                 except tractor.ContextCancelled as ctxc: | ||||
|                     # XXX: the cause is US since we call | ||||
|                     # `Context.cancel()` just above! | ||||
|                     assert ( | ||||
|                         ctxc.canceller | ||||
|                         == | ||||
|                         current_actor().uid | ||||
|                         == | ||||
|                         root.uid | ||||
|                     ) | ||||
| 
 | ||||
|                         if use_ctx_cancel_method == 'post_stream_open': | ||||
|                             await ctx.cancel() | ||||
|                     # XXX: must be propagated to __aexit__ | ||||
|                     # and should be silently absorbed there | ||||
|                     # since we called `.cancel()` just above ;) | ||||
|                     raise | ||||
| 
 | ||||
|                     if use_ctx_cancel_method == 'post_stream_close': | ||||
|                         await ctx.cancel() | ||||
| 
 | ||||
|                 ctxc: tractor.ContextCancelled = maybe_ctxc.value | ||||
|                 assert ( | ||||
|                     ctxc.canceller | ||||
|                     == | ||||
|                     current_actor().uid | ||||
|                     == | ||||
|                     root.uid | ||||
|                 ) | ||||
|                 else: | ||||
|                     assert 0, "Should have context cancelled?" | ||||
| 
 | ||||
|                 # channel should still be up | ||||
|                 assert portal.channel.connected() | ||||
|  | @ -703,20 +632,13 @@ async def test_parent_exits_ctx_after_child_enters_stream( | |||
|                     value=False, | ||||
|                 ) | ||||
| 
 | ||||
|             # XXX CHILD-BLOCKS case, we SHOULD NOT exit from the | ||||
|             # `.open_context()` before the child has returned, | ||||
|             # errored or been cancelled! | ||||
|             else: | ||||
|                 try: | ||||
|                     with trio.fail_after( | ||||
|                         0.5  # if not debug_mode else 999 | ||||
|                     ): | ||||
|                         res = await ctx.wait_for_result() | ||||
|                         assert res is not tractor._context.Unresolved | ||||
|                     with trio.fail_after(0.2): | ||||
|                         await ctx.result() | ||||
|                         assert 0, "Callee should have blocked!?" | ||||
|                 except trio.TooSlowError: | ||||
|                     # NO-OP -> since already triggered by | ||||
|                     # `trio.fail_after()` above! | ||||
|                     # NO-OP -> since already called above | ||||
|                     await ctx.cancel() | ||||
| 
 | ||||
|         # NOTE: local scope should have absorbed the cancellation since | ||||
|  | @ -756,7 +678,7 @@ async def test_parent_exits_ctx_after_child_enters_stream( | |||
| 
 | ||||
| 
 | ||||
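The `expect_ctxc(yay=..., reraise=...)` helper used in the rewritten branch above comes from `tractor._testing`; its body isn't shown in this diff, but its contract reads as "optionally expect, then optionally re-raise, a `ContextCancelled`". A speculative reconstruction, purely for orientation:

```python
from contextlib import asynccontextmanager as acm

import pytest
import tractor

@acm
async def expect_ctxc(yay: bool, reraise: bool = False):
    '''
    Hypothetical shape: demand (or forbid) a ctxc from the body.

    '''
    if yay:
        with pytest.raises(tractor.ContextCancelled) as excinfo:
            yield excinfo  # caller reads `.value` after the block
        if reraise:
            raise excinfo.value
    else:
        yield None
```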
| @tractor_test | ||||
| async def test_multitask_parent_cancels_from_nonroot_task( | ||||
| async def test_multitask_caller_cancels_from_nonroot_task( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async with tractor.open_nursery( | ||||
|  | @ -808,6 +730,7 @@ async def test_multitask_parent_cancels_from_nonroot_task( | |||
| 
 | ||||
| @tractor.context | ||||
| async def cancel_self( | ||||
| 
 | ||||
|     ctx: Context, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -847,11 +770,11 @@ async def cancel_self( | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_child_cancels_before_started( | ||||
| async def test_callee_cancels_before_started( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Callee calls `Context.cancel()` while streaming and parent | ||||
|     Callee calls `Context.cancel()` while streaming and caller | ||||
|     sees stream terminated in `ContextCancelled`. | ||||
| 
 | ||||
|     ''' | ||||
|  | @ -872,12 +795,10 @@ async def test_child_cancels_before_started( | |||
| 
 | ||||
|         # raises a special cancel signal | ||||
|         except tractor.ContextCancelled as ce: | ||||
|             _ce = ce  # for debug on crash | ||||
|             ce.boxed_type == trio.Cancelled | ||||
|             ce.type == trio.Cancelled | ||||
| 
 | ||||
|             # the traceback should be informative | ||||
|             assert 'itself' in ce.tb_str | ||||
|             assert ce.tb_str == ce.msgdata['tb_str'] | ||||
|             assert 'itself' in ce.msgdata['tb_str'] | ||||
| 
 | ||||
|         # teardown the actor | ||||
|         await portal.cancel_actor() | ||||
|  | @ -898,13 +819,14 @@ async def never_open_stream( | |||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def keep_sending_from_child( | ||||
| async def keep_sending_from_callee( | ||||
| 
 | ||||
|     ctx:  Context, | ||||
|     msg_buffer_size: int|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Send endlessly on the child stream. | ||||
|     Send endlessly on the callee stream. | ||||
| 
 | ||||
|     ''' | ||||
|     await ctx.started() | ||||
|  | @ -912,7 +834,7 @@ async def keep_sending_from_child( | |||
|         msg_buffer_size=msg_buffer_size, | ||||
|     ) as stream: | ||||
|         for msg in count(): | ||||
|             print(f'child sending {msg}') | ||||
|             print(f'callee sending {msg}') | ||||
|             await stream.send(msg) | ||||
|             await trio.sleep(0.01) | ||||
| 
 | ||||
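For orientation on the overrun cases parametrized below: `msg_buffer_size` bounds how many in-flight msgs a stream buffers, and a sender that outruns that bound either blocks (backpressure) or, with tractor's `allow_overruns`, triggers a relayed `StreamOverrun`. A hedged sketch of the backpressure half using plain `trio` channels:

```python
import trio

async def main():
    send, recv = trio.open_memory_channel(max_buffer_size=1)

    async def fast_sender():
        for i in range(3):
            await send.send(i)   # blocks whenever the single slot is full
            print(f'sent {i}')
        await send.aclose()

    async with trio.open_nursery() as n:
        n.start_soon(fast_sender)
        async for msg in recv:
            await trio.sleep(0.1)  # deliberately slow consumer
            print(f'rx {msg}')

trio.run(main)
```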
|  | @ -920,13 +842,10 @@ async def keep_sending_from_child( | |||
| @pytest.mark.parametrize( | ||||
|     'overrun_by', | ||||
|     [ | ||||
|         ('parent', 1, never_open_stream), | ||||
|         ('child', 0, keep_sending_from_child), | ||||
|         ('caller', 1, never_open_stream), | ||||
|         ('callee', 0, keep_sending_from_callee), | ||||
|     ], | ||||
|     ids=[ | ||||
|          ('parent_1buf_never_open_stream'), | ||||
|          ('child_0buf_keep_sending_from_child'), | ||||
|     ] | ||||
|     ids='overrun_condition={}'.format, | ||||
| ) | ||||
| def test_one_end_stream_not_opened( | ||||
|     overrun_by: tuple[str, int, Callable], | ||||
|  | @ -950,50 +869,50 @@ def test_one_end_stream_not_opened( | |||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             with trio.fail_after(1): | ||||
|                 async with portal.open_context( | ||||
|                     entrypoint, | ||||
|                 ) as (ctx, sent): | ||||
|                     assert sent is None | ||||
|             async with portal.open_context( | ||||
|                 entrypoint, | ||||
|             ) as (ctx, sent): | ||||
|                 assert sent is None | ||||
| 
 | ||||
|                     if 'parent' in overrunner: | ||||
|                         async with ctx.open_stream() as stream: | ||||
|                 if 'caller' in overrunner: | ||||
| 
 | ||||
|                             # itersend +1 msg more than the buffer size | ||||
|                             # to cause the most basic overrun. | ||||
|                             for i in range(buf_size): | ||||
|                                 print(f'sending {i}') | ||||
|                                 await stream.send(i) | ||||
|                     async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|                             else: | ||||
|                                 # expect overrun error to be relayed back | ||||
|                                 # and this sleep interrupted | ||||
|                                 await trio.sleep_forever() | ||||
|                         # itersend +1 msg more than the buffer size | ||||
|                         # to cause the most basic overrun. | ||||
|                         for i in range(buf_size): | ||||
|                             print(f'sending {i}') | ||||
|                             await stream.send(i) | ||||
| 
 | ||||
|                     else: | ||||
|                         # child overruns parent case so we do nothing here | ||||
|                         await trio.sleep_forever() | ||||
|                         else: | ||||
|                             # expect overrun error to be relayed back | ||||
|                             # and this sleep interrupted | ||||
|                             await trio.sleep_forever() | ||||
| 
 | ||||
|                 else: | ||||
|                     # callee overruns caller case so we do nothing here | ||||
|                     await trio.sleep_forever() | ||||
| 
 | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
|     # 2 overrun cases and the no overrun case (which pushes right up to | ||||
|     # the msg limit) | ||||
|     if ( | ||||
|         overrunner == 'parent' | ||||
|         overrunner == 'caller' | ||||
|     ): | ||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type == StreamOverrun | ||||
|         assert excinfo.value.type == StreamOverrun | ||||
| 
 | ||||
|     elif overrunner == 'child': | ||||
|     elif overrunner == 'callee': | ||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         # TODO: embedded remote errors so that we can verify the source | ||||
|         # error? the child delivers an error which is an overrun | ||||
|         # error? the callee delivers an error which is an overrun | ||||
|         # wrapped in a remote actor error. | ||||
|         assert excinfo.value.boxed_type == tractor.RemoteActorError | ||||
|         assert excinfo.value.type == tractor.RemoteActorError | ||||
| 
 | ||||
|     else: | ||||
|         trio.run(main) | ||||
|  | @ -1001,7 +920,8 @@ def test_one_end_stream_not_opened( | |||
| 
 | ||||
| @tractor.context | ||||
| async def echo_back_sequence( | ||||
|     ctx: Context, | ||||
| 
 | ||||
|     ctx:  Context, | ||||
|     seq: list[int], | ||||
|     wait_for_cancel: bool, | ||||
|     allow_overruns_side: str, | ||||
|  | @ -1010,12 +930,12 @@ async def echo_back_sequence( | |||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Send endlessly on the child stream using a small buffer size | ||||
|     Send endlessly on the callee stream using a small buffer size | ||||
|     setting on the context to simulate backlogging that would normally | ||||
|     cause overruns. | ||||
| 
 | ||||
|     ''' | ||||
|     # NOTE: ensure that if the parent is expecting to cancel this task | ||||
|     # NOTE: ensure that if the caller is expecting to cancel this task | ||||
|     # that we stay echoing much longer than they are so we don't | ||||
|     # return early instead of receiving the cancel msg. | ||||
|     total_batches: int = ( | ||||
|  | @ -1024,7 +944,7 @@ async def echo_back_sequence( | |||
|     ) | ||||
| 
 | ||||
|     await ctx.started() | ||||
|     # await tractor.pause() | ||||
|     # await tractor.breakpoint() | ||||
|     async with ctx.open_stream( | ||||
|         msg_buffer_size=msg_buffer_size, | ||||
| 
 | ||||
|  | @ -1065,18 +985,18 @@ async def echo_back_sequence( | |||
|                 if be_slow: | ||||
|                     await trio.sleep(0.05) | ||||
| 
 | ||||
|                 print('child waiting on next') | ||||
|                 print('callee waiting on next') | ||||
| 
 | ||||
|             print(f'child echoing back latest batch\n{batch}') | ||||
|             print(f'callee echoing back latest batch\n{batch}') | ||||
|             for msg in batch: | ||||
|                 print(f'child sending msg\n{msg}') | ||||
|                 print(f'callee sending msg\n{msg}') | ||||
|                 await stream.send(msg) | ||||
| 
 | ||||
|     try: | ||||
|         return 'yo' | ||||
|     finally: | ||||
|         print( | ||||
|             'exiting child with context:\n' | ||||
|             'exiting callee with context:\n' | ||||
|             f'{pformat(ctx)}\n' | ||||
|         ) | ||||
| 
 | ||||
|  | @ -1130,68 +1050,59 @@ def test_maybe_allow_overruns_stream( | |||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.start_actor( | ||||
|                 'child_sends_forever', | ||||
|                 'callee_sends_forever', | ||||
|                 enable_modules=[__name__], | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=debug_mode, | ||||
|             ) | ||||
|             seq = list(range(10)) | ||||
|             async with portal.open_context( | ||||
|                 echo_back_sequence, | ||||
|                 seq=seq, | ||||
|                 wait_for_cancel=cancel_ctx, | ||||
|                 be_slow=(slow_side == 'child'), | ||||
|                 allow_overruns_side=allow_overruns_side, | ||||
| 
 | ||||
|             # stream-sequence batch info with send delay to determine | ||||
|             # approx timeout determining whether test has hung. | ||||
|             total_batches: int = 2 | ||||
|             num_items: int = 10 | ||||
|             seq = list(range(num_items)) | ||||
|             parent_send_delay: float = 0.16 | ||||
|             timeout: float = math.ceil( | ||||
|                 total_batches * num_items * parent_send_delay | ||||
|             ) | ||||
|             with trio.fail_after(timeout): | ||||
|                 async with portal.open_context( | ||||
|                     echo_back_sequence, | ||||
|                     seq=seq, | ||||
|                     wait_for_cancel=cancel_ctx, | ||||
|                     be_slow=(slow_side == 'child'), | ||||
|                     allow_overruns_side=allow_overruns_side, | ||||
|             ) as (ctx, sent): | ||||
|                 assert sent is None | ||||
| 
 | ||||
|                 ) as (ctx, sent): | ||||
|                     assert sent is None | ||||
|                 async with ctx.open_stream( | ||||
|                     msg_buffer_size=1 if slow_side == 'parent' else None, | ||||
|                     allow_overruns=(allow_overruns_side in {'parent', 'both'}), | ||||
|                 ) as stream: | ||||
| 
 | ||||
|                     async with ctx.open_stream( | ||||
|                         msg_buffer_size=1 if slow_side == 'parent' else None, | ||||
|                         allow_overruns=(allow_overruns_side in {'parent', 'both'}), | ||||
|                     ) as stream: | ||||
|                     total_batches: int = 2 | ||||
|                     for _ in range(total_batches): | ||||
|                         for msg in seq: | ||||
|                             # print(f'root tx {msg}') | ||||
|                             await stream.send(msg) | ||||
|                             if slow_side == 'parent': | ||||
|                                 # NOTE: we make the parent slightly | ||||
|                                 # slower, when it is slow, to make sure | ||||
|                                 # that in the overruns everywhere case | ||||
|                                 await trio.sleep(0.16) | ||||
| 
 | ||||
|                         for _ in range(total_batches): | ||||
|                             for msg in seq: | ||||
|                                 # print(f'root tx {msg}') | ||||
|                                 await stream.send(msg) | ||||
|                                 if slow_side == 'parent': | ||||
|                                     # NOTE: we make the parent slightly | ||||
|                                     # slower, when it is slow, to make sure | ||||
|                                     # that in the overruns everywhere case | ||||
|                                     await trio.sleep(parent_send_delay) | ||||
| 
 | ||||
|                             batch = [] | ||||
|                             async for msg in stream: | ||||
|                                 print(f'root rx {msg}') | ||||
|                                 batch.append(msg) | ||||
|                                 if batch == seq: | ||||
|                                     break | ||||
| 
 | ||||
|                     if cancel_ctx: | ||||
|                         # cancel the remote task | ||||
|                         print('Requesting `ctx.cancel()` in parent!') | ||||
|                         await ctx.cancel() | ||||
| 
 | ||||
|                 res: str|ContextCancelled = await ctx.result() | ||||
|                         batch = [] | ||||
|                         async for msg in stream: | ||||
|                             print(f'root rx {msg}') | ||||
|                             batch.append(msg) | ||||
|                             if batch == seq: | ||||
|                                 break | ||||
| 
 | ||||
|                 if cancel_ctx: | ||||
|                     assert isinstance(res, ContextCancelled) | ||||
|                     assert tuple(res.canceller) == current_actor().uid | ||||
|                     # cancel the remote task | ||||
|                     print('Requesting `ctx.cancel()` in parent!') | ||||
|                     await ctx.cancel() | ||||
| 
 | ||||
|                 else: | ||||
|                     print(f'RX ROOT SIDE RESULT {res}') | ||||
|                     assert res == 'yo' | ||||
|             res: str|ContextCancelled = await ctx.result() | ||||
| 
 | ||||
|             if cancel_ctx: | ||||
|                 assert isinstance(res, ContextCancelled) | ||||
|                 assert tuple(res.canceller) == current_actor().uid | ||||
| 
 | ||||
|             else: | ||||
|                 print(f'RX ROOT SIDE RESULT {res}') | ||||
|                 assert res == 'yo' | ||||
| 
 | ||||
|             # cancel the daemon | ||||
|             await portal.cancel_actor() | ||||
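For the record, the refactored hang-detection timeout above works out, under the diff's own parameters, to `math.ceil(2 * 10 * 0.16) = math.ceil(3.2) = 4` seconds: two batches of ten messages each, with the parent sleeping 0.16s per send in the slow-parent case.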
|  | @ -1220,7 +1131,7 @@ def test_maybe_allow_overruns_stream( | |||
|             # NOTE: i tried to isolate to a deterministic case here | ||||
|             # based on timing, but i was kinda wasted, and i don't | ||||
|             # think it's sane to catch them.. | ||||
|             assert err.boxed_type in ( | ||||
|             assert err.type in ( | ||||
|                 tractor.RemoteActorError, | ||||
|                 StreamOverrun, | ||||
|             ) | ||||
|  | @ -1228,12 +1139,11 @@ def test_maybe_allow_overruns_stream( | |||
|         elif ( | ||||
|             slow_side == 'child' | ||||
|         ): | ||||
|             assert err.boxed_type == StreamOverrun | ||||
|             assert err.type == StreamOverrun | ||||
| 
 | ||||
|         elif slow_side == 'parent': | ||||
|             assert err.boxed_type == tractor.RemoteActorError | ||||
|             assert 'StreamOverrun' in err.tb_str | ||||
|             assert err.tb_str == err.msgdata['tb_str'] | ||||
|             assert err.type == tractor.RemoteActorError | ||||
|             assert 'StreamOverrun' in err.msgdata['tb_str'] | ||||
| 
 | ||||
|     else: | ||||
|         # if this hits the logic blocks from above are not | ||||
|  |  | |||
										
											
File diff suppressed because it is too large
							|  | @ -7,11 +7,8 @@ import platform | |||
| from functools import partial | ||||
| import itertools | ||||
| 
 | ||||
| import psutil | ||||
| import pytest | ||||
| import subprocess | ||||
| import tractor | ||||
| from tractor.trionics import collapse_eg | ||||
| from tractor._testing import tractor_test | ||||
| import trio | ||||
| 
 | ||||
|  | @ -29,7 +26,7 @@ async def test_reg_then_unreg(reg_addr): | |||
|         portal = await n.start_actor('actor', enable_modules=[__name__]) | ||||
|         uid = portal.channel.uid | ||||
| 
 | ||||
|         async with tractor.get_registry(reg_addr) as aportal: | ||||
|         async with tractor.get_arbiter(*reg_addr) as aportal: | ||||
|             # this local actor should be the arbiter | ||||
|             assert actor is aportal.actor | ||||
| 
 | ||||
|  | @ -155,25 +152,15 @@ async def unpack_reg(actor_or_portal): | |||
| async def spawn_and_check_registry( | ||||
|     reg_addr: tuple, | ||||
|     use_signal: bool, | ||||
|     debug_mode: bool = False, | ||||
|     remote_arbiter: bool = False, | ||||
|     with_streaming: bool = False, | ||||
|     maybe_daemon: tuple[ | ||||
|         subprocess.Popen, | ||||
|         psutil.Process, | ||||
|     ]|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     if maybe_daemon: | ||||
|         popen, proc = maybe_daemon | ||||
|         # breakpoint() | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|         debug_mode=debug_mode, | ||||
|     ): | ||||
|         async with tractor.get_registry(reg_addr) as portal: | ||||
|         async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|             # runtime needs to be up to call this | ||||
|             actor = tractor.current_actor() | ||||
| 
 | ||||
|  | @ -189,30 +176,28 @@ async def spawn_and_check_registry( | |||
|                 extra = 2  # local root actor + remote arbiter | ||||
| 
 | ||||
|             # ensure current actor is registered | ||||
|             registry: dict = await get_reg() | ||||
|             registry = await get_reg() | ||||
|             assert actor.uid in registry | ||||
| 
 | ||||
|             try: | ||||
|                 async with tractor.open_nursery() as an: | ||||
|                     async with ( | ||||
|                         collapse_eg(), | ||||
|                         trio.open_nursery() as trion, | ||||
|                     ): | ||||
|                 async with tractor.open_nursery() as n: | ||||
|                     async with trio.open_nursery() as trion: | ||||
| 
 | ||||
|                         portals = {} | ||||
|                         for i in range(3): | ||||
|                             name = f'a{i}' | ||||
|                             if with_streaming: | ||||
|                                 portals[name] = await an.start_actor( | ||||
|                                 portals[name] = await n.start_actor( | ||||
|                                     name=name, enable_modules=[__name__]) | ||||
| 
 | ||||
|                             else:  # no streaming | ||||
|                                 portals[name] = await an.run_in_actor( | ||||
|                                 portals[name] = await n.run_in_actor( | ||||
|                                     trio.sleep_forever, name=name) | ||||
| 
 | ||||
|                         # wait on last actor to come up | ||||
|                         async with tractor.wait_for_actor(name): | ||||
|                             registry = await get_reg() | ||||
|                             for uid in an._children: | ||||
|                             for uid in n._children: | ||||
|                                 assert uid in registry | ||||
| 
 | ||||
|                         assert len(portals) + extra == len(registry) | ||||
|  | @ -245,7 +230,6 @@ async def spawn_and_check_registry( | |||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | ||||
| def test_subactors_unregister_on_cancel( | ||||
|     debug_mode: bool, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|  | @ -262,7 +246,6 @@ def test_subactors_unregister_on_cancel( | |||
|                 spawn_and_check_registry, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 debug_mode=debug_mode, | ||||
|                 remote_arbiter=False, | ||||
|                 with_streaming=with_streaming, | ||||
|             ), | ||||
|  | @ -272,8 +255,7 @@ def test_subactors_unregister_on_cancel( | |||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | ||||
| def test_subactors_unregister_on_cancel_remote_daemon( | ||||
|     daemon: subprocess.Popen, | ||||
|     debug_mode: bool, | ||||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|  | @ -289,13 +271,8 @@ def test_subactors_unregister_on_cancel_remote_daemon( | |||
|                 spawn_and_check_registry, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 debug_mode=debug_mode, | ||||
|                 remote_arbiter=True, | ||||
|                 with_streaming=with_streaming, | ||||
|                 maybe_daemon=( | ||||
|                     daemon, | ||||
|                     psutil.Process(daemon.pid) | ||||
|                 ), | ||||
|             ), | ||||
|         ) | ||||
| 
 | ||||
|  | @ -321,7 +298,7 @@ async def close_chans_before_nursery( | |||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|     ): | ||||
|         async with tractor.get_registry(reg_addr) as aportal: | ||||
|         async with tractor.get_arbiter(*reg_addr) as aportal: | ||||
|             try: | ||||
|                 get_reg = partial(unpack_reg, aportal) | ||||
| 
 | ||||
|  | @ -339,12 +316,9 @@ async def close_chans_before_nursery( | |||
|                         async with portal2.open_stream_from( | ||||
|                             stream_forever | ||||
|                         ) as agen2: | ||||
|                             async with ( | ||||
|                                 collapse_eg(), | ||||
|                                 trio.open_nursery() as tn, | ||||
|                             ): | ||||
|                                 tn.start_soon(streamer, agen1) | ||||
|                                 tn.start_soon(cancel, use_signal, .5) | ||||
|                             async with trio.open_nursery() as n: | ||||
|                                 n.start_soon(streamer, agen1) | ||||
|                                 n.start_soon(cancel, use_signal, .5) | ||||
|                                 try: | ||||
|                                     await streamer(agen2) | ||||
|                                 finally: | ||||
|  | @ -395,7 +369,7 @@ def test_close_channel_explicit( | |||
| 
 | ||||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| def test_close_channel_explicit_remote_arbiter( | ||||
|     daemon: subprocess.Popen, | ||||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|  |  | |||
|  | @ -19,7 +19,7 @@ from tractor._testing import ( | |||
| @pytest.fixture | ||||
| def run_example_in_subproc( | ||||
|     loglevel: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| 
 | ||||
|  | @ -66,9 +66,6 @@ def run_example_in_subproc( | |||
|         # due to backpressure!!! | ||||
|         proc = testdir.popen( | ||||
|             cmdargs, | ||||
|             stdin=subprocess.PIPE, | ||||
|             stdout=subprocess.PIPE, | ||||
|             stderr=subprocess.PIPE, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         assert not proc.returncode | ||||
|  | @ -84,37 +81,27 @@ def run_example_in_subproc( | |||
| 
 | ||||
|     # walk yields: (dirpath, dirnames, filenames) | ||||
|     [ | ||||
|         (p[0], f) | ||||
|         for p in os.walk(examples_dir()) | ||||
|         for f in p[2] | ||||
|         (p[0], f) for p in os.walk(examples_dir()) for f in p[2] | ||||
| 
 | ||||
|         if ( | ||||
|             '__' not in f | ||||
|             and f[0] != '_' | ||||
|             and 'debugging' not in p[0] | ||||
|             and 'integration' not in p[0] | ||||
|             and 'advanced_faults' not in p[0] | ||||
|             and 'multihost' not in p[0] | ||||
|             and 'trio' not in p[0] | ||||
|         ) | ||||
|         if '__' not in f | ||||
|         and f[0] != '_' | ||||
|         and 'debugging' not in p[0] | ||||
|         and 'integration' not in p[0] | ||||
|         and 'advanced_faults' not in p[0] | ||||
|     ], | ||||
| 
 | ||||
|     ids=lambda t: t[1], | ||||
| ) | ||||
| def test_example( | ||||
|     run_example_in_subproc, | ||||
|     example_script, | ||||
| ): | ||||
|     ''' | ||||
|     Load and run scripts from this repo's ``examples/`` dir as a user | ||||
| def test_example(run_example_in_subproc, example_script): | ||||
|     """Load and run scripts from this repo's ``examples/`` dir as a user | ||||
|     would copy and paste them into their editor. | ||||
| 
 | ||||
|     On windows a little more "finessing" is done to make | ||||
|     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the | ||||
|     test directory and invoke the script as a module with ``python -m | ||||
|     test_example``. | ||||
| 
 | ||||
|     ''' | ||||
|     ex_file: str = os.path.join(*example_script) | ||||
|     """ | ||||
|     ex_file = os.path.join(*example_script) | ||||
| 
 | ||||
|     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): | ||||
|         pytest.skip("2-way streaming example requires py3.9 async with syntax") | ||||
|  | @ -123,14 +110,10 @@ def test_example( | |||
|         code = ex.read() | ||||
| 
 | ||||
|         with run_example_in_subproc(code) as proc: | ||||
|             err = None | ||||
|             try: | ||||
|                 if not proc.poll(): | ||||
|                     _, err = proc.communicate(timeout=15) | ||||
| 
 | ||||
|             except subprocess.TimeoutExpired as e: | ||||
|                 proc.kill() | ||||
|                 err = e.stderr | ||||
|             proc.wait() | ||||
|             err, _ = proc.stderr.read(), proc.stdout.read() | ||||
|             # print(f'STDERR: {err}') | ||||
|             # print(f'STDOUT: {out}') | ||||
| 
 | ||||
|             # if we get some gnarly output let's aggregate and raise | ||||
|             if err: | ||||
|  | @ -144,8 +127,7 @@ def test_example( | |||
|                     # shouldn't eventually once we figure out what's | ||||
|                     # a better way to be explicit about aio side | ||||
|                     # cancels? | ||||
|                     and | ||||
|                     'asyncio.exceptions.CancelledError' not in last_error | ||||
|                     and 'asyncio.exceptions.CancelledError' not in last_error | ||||
|                 ): | ||||
|                     raise Exception(errmsg) | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,946 +0,0 @@ | |||
| ''' | ||||
| Low-level functional audits for our | ||||
| "capability based messaging"-spec feats. | ||||
| 
 | ||||
| B~) | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
|     # nullcontext, | ||||
| ) | ||||
| import importlib | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     # structs, | ||||
|     # msgpack, | ||||
|     Raw, | ||||
|     # Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     # _state, | ||||
|     MsgTypeError, | ||||
|     Context, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
|     _ctxvar_MsgCodec, | ||||
|     _exts, | ||||
| 
 | ||||
|     NamespacePath, | ||||
|     MsgCodec, | ||||
|     MsgDec, | ||||
|     mk_codec, | ||||
|     mk_dec, | ||||
|     apply_codec, | ||||
|     current_codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     log, | ||||
|     Started, | ||||
|     # _payload_msgs, | ||||
|     # PayloadMsg, | ||||
|     # mk_msg_spec, | ||||
| ) | ||||
| from tractor.msg._ops import ( | ||||
|     limit_plds, | ||||
| ) | ||||
| 
 | ||||
| def enc_nsp(obj: Any) -> Any: | ||||
|     actor: Actor = tractor.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     ) | ||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||
|     print(f'{uid} ENC HOOK') | ||||
| 
 | ||||
|     match obj: | ||||
|         # case NamespacePath()|str(): | ||||
|         case NamespacePath(): | ||||
|             encoded: str = str(obj) | ||||
|             print( | ||||
|                 f'----- ENCODING `NamespacePath` as `str` ------\n' | ||||
|                 f'|_obj:{type(obj)!r} = {obj!r}\n' | ||||
|                 f'|_encoded: str = {encoded!r}\n' | ||||
|             ) | ||||
|             # if type(obj) != NamespacePath: | ||||
|             #     breakpoint() | ||||
|             return encoded | ||||
|         case _: | ||||
|             logmsg: str = ( | ||||
|                 f'{uid}\n' | ||||
|                 'FAILED ENCODE\n' | ||||
|                 f'obj-> `{obj}: {type(obj)}`\n' | ||||
|             ) | ||||
|             raise NotImplementedError(logmsg) | ||||
| 
 | ||||
| 
 | ||||
| def dec_nsp( | ||||
|     obj_type: Type, | ||||
|     obj: Any, | ||||
| 
 | ||||
| ) -> Any: | ||||
|     # breakpoint() | ||||
|     actor: Actor = tractor.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     ) | ||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||
|     print( | ||||
|         f'{uid}\n' | ||||
|         'CUSTOM DECODE\n' | ||||
|         f'type-arg-> {obj_type}\n' | ||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n' | ||||
|     ) | ||||
|     nsp = None | ||||
|     # XXX, never happens right? | ||||
|     if obj_type is Raw: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     if ( | ||||
|         obj_type is NamespacePath | ||||
|         and isinstance(obj, str) | ||||
|         and ':' in obj | ||||
|     ): | ||||
|         nsp = NamespacePath(obj) | ||||
|         # TODO: we could build a generic handler using | ||||
|         # JUST matching the obj_type part? | ||||
|         # nsp = obj_type(obj) | ||||
| 
 | ||||
|     if nsp: | ||||
|         print(f'Returning NSP instance: {nsp}') | ||||
|         return nsp | ||||
| 
 | ||||
|     logmsg: str = ( | ||||
|         f'{uid}\n' | ||||
|         'FAILED DECODE\n' | ||||
|         f'type-> {obj_type}\n' | ||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n\n' | ||||
|         f'current codec:\n' | ||||
|         f'{current_codec()}\n' | ||||
|     ) | ||||
|     # TODO: figure out the ignore subsys for this! | ||||
|     # -[ ] option whether to defense-relay back the msg | ||||
|     #   inside an `Invalid`/`Ignore` | ||||
|     # -[ ] how to make this handling pluggable such that a | ||||
|     #   `Channel`/`MsgTransport` can intercept and process | ||||
|     #   back msgs either via exception handling or some other | ||||
|     #   signal? | ||||
|     log.warning(logmsg) | ||||
|     # NOTE: this delivers the invalid | ||||
|     # value up to `msgspec`'s decoding | ||||
|     # machinery for error raising. | ||||
|     return obj | ||||
|     # raise NotImplementedError(logmsg) | ||||
| 
 | ||||
| 
 | ||||
| def ex_func(*args): | ||||
|     ''' | ||||
|     A mod level func we can ref and load via our `NamespacePath` | ||||
|     python-object pointer `str` subtype. | ||||
| 
 | ||||
|     ''' | ||||
|     print(f'ex_func({args})') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'add_codec_hooks', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=['use_codec_hooks', 'no_codec_hooks'], | ||||
| ) | ||||
| def test_custom_extension_types( | ||||
|     debug_mode: bool, | ||||
|     add_codec_hooks: bool | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a `MsgCodec` (used for encoding all outbound IPC msgs | ||||
|     and decoding all inbound `PayloadMsg`s) and a paired `MsgDec` | ||||
|     (used for decoding the `PayloadMsg.pld: Raw` received within a given | ||||
|     task's ipc `Context` scope) can both send and receive "extension types" | ||||
|     as supported via custom converter hooks passed to `msgspec`. | ||||
| 
 | ||||
|     ''' | ||||
|     nsp_pld_dec: MsgDec = mk_dec( | ||||
|         spec=None,  # ONLY support the ext type | ||||
|         dec_hook=dec_nsp if add_codec_hooks else None, | ||||
|         ext_types=[NamespacePath], | ||||
|     ) | ||||
|     nsp_codec: MsgCodec = mk_codec( | ||||
|         # ipc_pld_spec=Raw,  # default! | ||||
| 
 | ||||
|         # NOTE XXX: the encode hook MUST be used no matter what since | ||||
|         # our `NamespacePath` is not one of the `Any`-native types nor | ||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||
|         # how to encode it unless we provide the custom hook. | ||||
|         # | ||||
|         # AGAIN that is, regardless of whether we spec an | ||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||
|         # how to enc `NamespacePath` (nsp), so we add a custom | ||||
|         # hook to do that ALWAYS. | ||||
|         enc_hook=enc_nsp if add_codec_hooks else None, | ||||
| 
 | ||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||
|         # `Decoder`? so it won't work in tandem with the | ||||
|         # `ipc_pld_spec` passed above? | ||||
|         ext_types=[NamespacePath], | ||||
| 
 | ||||
|         # TODO? is it useful to have the `.pld` decoded *prior* to | ||||
|         # the `PldRx`?? like perf or mem related? | ||||
|         # ext_dec=nsp_pld_dec, | ||||
|     ) | ||||
|     if add_codec_hooks: | ||||
|         assert nsp_codec.dec.dec_hook is None | ||||
| 
 | ||||
|         # TODO? if we pass `ext_dec` above? | ||||
|         # assert nsp_codec.dec.dec_hook is dec_nsp | ||||
| 
 | ||||
|         assert nsp_codec.enc.enc_hook is enc_nsp | ||||
| 
 | ||||
|     nsp = NamespacePath.from_ref(ex_func) | ||||
| 
 | ||||
|     try: | ||||
|         nsp_bytes: bytes = nsp_codec.encode(nsp) | ||||
|         nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes) | ||||
|         assert nsp_rt_sin_msg.load_ref() is ex_func | ||||
|     except TypeError: | ||||
|         # a failed encode is only expected when the hooks are missing | ||||
|         if add_codec_hooks: | ||||
|             raise | ||||
| 
 | ||||
|     try: | ||||
|         msg_bytes: bytes = nsp_codec.encode( | ||||
|             Started( | ||||
|                 cid='cid', | ||||
|                 pld=nsp, | ||||
|             ) | ||||
|         ) | ||||
|         # since the ext-type obj should also be set as the msg.pld | ||||
|         assert nsp_bytes in msg_bytes | ||||
|         started_rt: Started = nsp_codec.decode(msg_bytes) | ||||
|         pld: Raw = started_rt.pld | ||||
|         assert isinstance(pld, Raw) | ||||
|         nsp_rt: NamespacePath = nsp_pld_dec.decode(pld) | ||||
|         assert isinstance(nsp_rt, NamespacePath) | ||||
|         # in obj comparison terms they should be the same | ||||
|         assert nsp_rt == nsp | ||||
|         # ensure we've decoded to ext type! | ||||
|         assert nsp_rt.load_ref() is ex_func | ||||
| 
 | ||||
|     except TypeError: | ||||
|         # again, only expected when the hooks are missing | ||||
|         if add_codec_hooks: | ||||
|             raise | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_forever_in_sub( | ||||
|     ctx: Context, | ||||
| ) -> None: | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| def mk_custom_codec( | ||||
|     add_hooks: bool, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     MsgCodec,  # encode to send | ||||
|     MsgDec,  # pld receive-n-decode | ||||
| ]: | ||||
|     ''' | ||||
|     Create custom `msgpack` enc/dec-hooks and set a `Decoder` | ||||
|     which only loads `pld_spec` (like `NamespacePath`) types. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
|     # XXX NOTE XXX: despite defining `NamespacePath` as a type | ||||
|     # field on our `PayloadMsg.pld`, we still need an enc/dec_hook() pair | ||||
|     # to cast to/from that type on the wire. See the docs: | ||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
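|     # A minimal standalone sketch of such a hook pair (assuming just | ||||
|     # the public `msgspec` API, mirroring `enc_nsp()`/`dec_nsp()` above): | ||||
|     # | ||||
|     #   import msgspec | ||||
|     # | ||||
|     #   def enc_hook(obj):  # called for any non-natively-encodable type | ||||
|     #       if isinstance(obj, NamespacePath): | ||||
|     #           return str(obj) | ||||
|     #       raise NotImplementedError(type(obj)) | ||||
|     # | ||||
|     #   def dec_hook(obj_type, obj):  # inverse cast on decode | ||||
|     #       if obj_type is NamespacePath: | ||||
|     #           return NamespacePath(obj) | ||||
|     #       return obj | ||||
|     # | ||||
|     #   enc = msgspec.msgpack.Encoder(enc_hook=enc_hook) | ||||
|     #   dec = msgspec.msgpack.Decoder(NamespacePath, dec_hook=dec_hook) | ||||
|     #   assert dec.decode(enc.encode(nsp)) == nsp | ||||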
| 
 | ||||
|     # if pld_spec is Any: | ||||
|     #     pld_spec = Raw | ||||
| 
 | ||||
|     nsp_codec: MsgCodec = mk_codec( | ||||
|         # ipc_pld_spec=Raw,  # default! | ||||
| 
 | ||||
|         # NOTE XXX: the encode hook MUST be used no matter what since | ||||
|         # our `NamespacePath` is not one of the `Any`-native types nor | ||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||
|         # how to encode it unless we provide the custom hook. | ||||
|         # | ||||
|         # AGAIN that is, regardless of whether we spec an | ||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||
|         # how to enc `NamespacePath` (nsp), so we add a custom | ||||
|         # hook to do that ALWAYS. | ||||
|         enc_hook=enc_nsp if add_hooks else None, | ||||
| 
 | ||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||
|         # `Decoder`? so it won't work in tandem with the | ||||
|         # `ipc_pld_spec` passed above? | ||||
|         ext_types=[NamespacePath], | ||||
|     ) | ||||
|     # dec_hook=dec_nsp if add_hooks else None, | ||||
|     return nsp_codec | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'limit_plds_args', | ||||
|     [ | ||||
|         ( | ||||
|             {'dec_hook': None, 'ext_types': None}, | ||||
|             None, | ||||
|         ), | ||||
|         ( | ||||
|             {'dec_hook': dec_nsp, 'ext_types': None}, | ||||
|             TypeError, | ||||
|         ), | ||||
|         ( | ||||
|             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath]}, | ||||
|             None, | ||||
|         ), | ||||
|         ( | ||||
|             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]}, | ||||
|             None, | ||||
|         ), | ||||
|     ], | ||||
|     ids=[ | ||||
|         'no_hook_no_ext_types', | ||||
|         'only_hook', | ||||
|         'hook_and_ext_types', | ||||
|         'hook_and_ext_types_w_null', | ||||
|     ] | ||||
| ) | ||||
| def test_pld_limiting_usage( | ||||
|     limit_plds_args: tuple[dict, Exception|None], | ||||
| ): | ||||
|     ''' | ||||
|     Verify `dec_hook()` and `ext_types` need to either both be | ||||
|     provided or we raise an explanatory type-error. | ||||
| 
 | ||||
|     ''' | ||||
|     kwargs, maybe_err = limit_plds_args | ||||
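|     # i.e. a sketch of the rule under test (assuming an open ipc ctx | ||||
|     # scope; names from the parametrization above): | ||||
|     # | ||||
|     #   limit_plds(dec_hook=None, ext_types=None)        # ok | ||||
|     #   limit_plds(dec_hook=dec_nsp)                     # TypeError! | ||||
|     #   limit_plds(dec_hook=dec_nsp, | ||||
|     #              ext_types=[NamespacePath])            # ok | ||||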
|     async def main(): | ||||
|         async with tractor.open_nursery() as an:  # just to open runtime | ||||
| 
 | ||||
|             # XXX SHOULD NEVER WORK outside an ipc ctx scope! | ||||
|             try: | ||||
|                 with limit_plds(**kwargs): | ||||
|                     pass | ||||
|             except RuntimeError: | ||||
|                 pass | ||||
| 
 | ||||
|             p: tractor.Portal = await an.start_actor( | ||||
|                 'sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ( | ||||
|                 p.open_context( | ||||
|                     sleep_forever_in_sub | ||||
|                 ) as (ctx, first), | ||||
|             ): | ||||
|                 try: | ||||
|                     with limit_plds(**kwargs): | ||||
|                         pass | ||||
|                 except maybe_err as exc: | ||||
|                     assert type(exc) is maybe_err | ||||
| 
 | ||||
| 
 | ||||
| def chk_codec_applied( | ||||
|     expect_codec: MsgCodec|None, | ||||
|     enter_value: MsgCodec|None = None, | ||||
| 
 | ||||
| ) -> MsgCodec: | ||||
|     ''' | ||||
|     buncha sanity checks ensuring that the IPC channel's | ||||
|     context-vars are set to the expected codec and that our | ||||
|     ctx-var wrapper APIs match the same. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: play with tricycle again, bc this is supposed to work | ||||
|     # the way we want? | ||||
|     # | ||||
|     # TreeVar | ||||
|     # task: trio.Task = trio.lowlevel.current_task() | ||||
|     # curr_codec = _ctxvar_MsgCodec.get_in(task) | ||||
| 
 | ||||
|     # ContextVar | ||||
|     # task_ctx: Context = task.context | ||||
|     # assert _ctxvar_MsgCodec in task_ctx | ||||
|     # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec] | ||||
|     if expect_codec is None: | ||||
|         assert enter_value is None | ||||
|         return | ||||
| 
 | ||||
|     # NOTE: currently we use this! | ||||
|     # RunVar | ||||
|     curr_codec: MsgCodec = current_codec() | ||||
|     last_read_codec = _ctxvar_MsgCodec.get() | ||||
|     # assert curr_codec is last_read_codec | ||||
| 
 | ||||
|     assert ( | ||||
|         (same_codec := expect_codec) is | ||||
|         # returned from `mk_codec()` | ||||
| 
 | ||||
|         # yielded value from `apply_codec()` | ||||
| 
 | ||||
|         # read from current task's `contextvars.Context` | ||||
|         curr_codec is | ||||
|         last_read_codec | ||||
| 
 | ||||
|         # the default `msgspec` settings | ||||
|         is not _codec._def_msgspec_codec | ||||
|         is not _codec._def_tractor_codec | ||||
|     ) | ||||
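|     # ^NOTE (clarifying aside): python's chained comparisons fold the | ||||
|     # above into pairwise checks, i.e. roughly: | ||||
|     # | ||||
|     #   assert expect_codec is curr_codec | ||||
|     #   assert curr_codec is last_read_codec | ||||
|     #   assert last_read_codec is not _codec._def_msgspec_codec | ||||
|     #   assert _codec._def_msgspec_codec is not _codec._def_tractor_codec | ||||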
| 
 | ||||
|     if enter_value: | ||||
|         assert enter_value is same_codec | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def send_back_values( | ||||
|     ctx: Context, | ||||
|     rent_pld_spec_type_strs: list[str], | ||||
|     add_hooks: bool, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Set up a custom codec to load instances of `NamespacePath` | ||||
|     and ensure we can round trip a func ref with our parent. | ||||
| 
 | ||||
|     ''' | ||||
|     uid: tuple = tractor.current_actor().uid | ||||
| 
 | ||||
|     # init state in sub-actor should be default | ||||
|     chk_codec_applied( | ||||
|         expect_codec=_codec._def_tractor_codec, | ||||
|     ) | ||||
| 
 | ||||
|     # load pld spec from input str | ||||
|     rent_pld_spec = _exts.dec_type_union( | ||||
|         rent_pld_spec_type_strs, | ||||
|         mods=[ | ||||
|             importlib.import_module(__name__), | ||||
|         ], | ||||
|     ) | ||||
|     rent_pld_spec_types: set[Type] = _codec.unpack_spec_types( | ||||
|         rent_pld_spec, | ||||
|     ) | ||||
| 
 | ||||
|     # ONLY add ext-hooks if the rent specified a non-std type! | ||||
|     add_hooks: bool = ( | ||||
|         NamespacePath in rent_pld_spec_types | ||||
|         and | ||||
|         add_hooks | ||||
|     ) | ||||
| 
 | ||||
|     # same as on parent side config. | ||||
|     nsp_codec: MsgCodec|None = None | ||||
|     if add_hooks: | ||||
|         nsp_codec = mk_codec( | ||||
|             enc_hook=enc_nsp, | ||||
|             ext_types=[NamespacePath], | ||||
|         ) | ||||
| 
 | ||||
|     with ( | ||||
|         maybe_apply_codec(nsp_codec) as codec, | ||||
|         limit_plds( | ||||
|             rent_pld_spec, | ||||
|             dec_hook=dec_nsp if add_hooks else None, | ||||
|             ext_types=[NamespacePath]  if add_hooks else None, | ||||
|         ) as pld_dec, | ||||
|     ): | ||||
|         # ?XXX? SHOULD WE NOT be swapping the global codec since it | ||||
|         # breaks `Context.started()` roundtripping checks?? | ||||
|         chk_codec_applied( | ||||
|             expect_codec=nsp_codec, | ||||
|             enter_value=codec, | ||||
|         ) | ||||
| 
 | ||||
|         # ?TODO, mismatch case(s)? | ||||
|         # | ||||
|         # ensure pld spec matches on both sides | ||||
|         ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec | ||||
|         assert pld_dec is ctx_pld_dec | ||||
|         child_pld_spec: Type = pld_dec.spec | ||||
|         child_pld_spec_types: set[Type] = _codec.unpack_spec_types( | ||||
|             child_pld_spec, | ||||
|         ) | ||||
|         assert ( | ||||
|             child_pld_spec_types.issuperset( | ||||
|                 rent_pld_spec_types | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|         # ?TODO, try loop for each of the types in pld-superset? | ||||
|         # | ||||
|         # for send_value in [ | ||||
|         #     nsp, | ||||
|         #     str(nsp), | ||||
|         #     None, | ||||
|         # ]: | ||||
|         nsp = NamespacePath.from_ref(ex_func) | ||||
|         try: | ||||
|             print( | ||||
|                 f'{uid}: attempting to `.started({nsp})`\n' | ||||
|                 f'\n' | ||||
|                 f'rent_pld_spec: {rent_pld_spec}\n' | ||||
|                 f'child_pld_spec: {child_pld_spec}\n' | ||||
|                 f'codec: {codec}\n' | ||||
|             ) | ||||
|             # await tractor.pause() | ||||
|             await ctx.started(nsp) | ||||
| 
 | ||||
|         except tractor.MsgTypeError as _mte: | ||||
|             mte = _mte | ||||
| 
 | ||||
|             # false -ve case | ||||
|             if add_hooks: | ||||
|                 raise RuntimeError( | ||||
|                     f'EXPECTED to be able to `.started()` the value given the spec??\n\n' | ||||
|                     f'child_pld_spec -> {child_pld_spec}\n' | ||||
|                     f'value = {nsp}: {type(nsp)}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             # true -ve case | ||||
|             raise mte | ||||
| 
 | ||||
|         # TODO: maybe we should add our own wrapper error so as to | ||||
|         # be interchange-lib agnostic? | ||||
|         # -[ ] the error type is wtv is raised from the hook so we | ||||
|         #   could also require a type-class of errors for | ||||
|         #   indicating whether the hook-failure can be handled by | ||||
|         #   a nasty-dialog-unprot sub-sys? | ||||
|         except TypeError as typerr: | ||||
|             # false -ve | ||||
|             if add_hooks: | ||||
|                 raise RuntimeError('Should have been able to send `nsp`??') | ||||
| 
 | ||||
|             # true -ve | ||||
|             print('Failed to send `nsp` due to no ext hooks set!') | ||||
|             raise typerr | ||||
| 
 | ||||
|         # now try sending a set of valid and invalid plds to ensure | ||||
|         # the pld spec is respected. | ||||
|         sent: list[Any] = [] | ||||
|         async with ctx.open_stream() as ipc: | ||||
|             print( | ||||
|                 f'{uid}: streaming all pld types to rent..' | ||||
|             ) | ||||
| 
 | ||||
|             # for send_value, expect_send in iter_send_val_items: | ||||
|             for send_value in [ | ||||
|                 nsp, | ||||
|                 str(nsp), | ||||
|                 None, | ||||
|             ]: | ||||
|                 send_type: Type = type(send_value) | ||||
|                 print( | ||||
|                     f'{uid}: SENDING NEXT pld\n' | ||||
|                     f'send_type: {send_type}\n' | ||||
|                     f'send_value: {send_value}\n' | ||||
|                 ) | ||||
|                 try: | ||||
|                     await ipc.send(send_value) | ||||
|                     sent.append(send_value) | ||||
| 
 | ||||
|                 except ValidationError as valerr: | ||||
|                     print(f'{uid} FAILED TO SEND {send_value}!') | ||||
| 
 | ||||
|                     # false -ve | ||||
|                     if add_hooks: | ||||
|                         raise RuntimeError( | ||||
|                             f'EXPECTED to roundtrip value given spec:\n' | ||||
|                             f'rent_pld_spec -> {rent_pld_spec}\n' | ||||
|                             f'child_pld_spec -> {child_pld_spec}\n' | ||||
|                             f'value = {send_value}: {send_type}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                     # true -ve | ||||
|                     raise valerr | ||||
|                     # continue | ||||
| 
 | ||||
|             else: | ||||
|                 print( | ||||
|                     f'{uid}: finished sending all values\n' | ||||
|                     'Should be exiting stream block!\n' | ||||
|                 ) | ||||
| 
 | ||||
|         print(f'{uid}: exited streaming block!') | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None: | ||||
|     if codec is None: | ||||
|         yield None | ||||
|         return | ||||
| 
 | ||||
|     with apply_codec(codec) as codec: | ||||
|         yield codec | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'pld_spec', | ||||
|     [ | ||||
|         Any, | ||||
|         NamespacePath, | ||||
|         NamespacePath|None,  # the "maybe" spec Bo | ||||
|     ], | ||||
|     ids=[ | ||||
|         'any_type', | ||||
|         'only_nsp_ext', | ||||
|         'maybe_nsp_ext', | ||||
|     ] | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'add_hooks', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'use_codec_hooks', | ||||
|         'no_codec_hooks', | ||||
|     ], | ||||
| ) | ||||
| def test_ext_types_over_ipc( | ||||
|     debug_mode: bool, | ||||
|     pld_spec: Union[Type], | ||||
|     add_hooks: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Ensure we can support extension types converted using | ||||
|     `enc/dec_hook()`s passed to the `.msg.limit_plds()` API | ||||
|     and that sane errors happen when we try to do the same without | ||||
|     the codec hooks. | ||||
| 
 | ||||
|     ''' | ||||
|     pld_types: set[Type] = _codec.unpack_spec_types(pld_spec) | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         # sanity check the default pld-spec beforehand | ||||
|         chk_codec_applied( | ||||
|             expect_codec=_codec._def_tractor_codec, | ||||
|         ) | ||||
| 
 | ||||
|         # extension type we want to send as msg payload | ||||
|         nsp = NamespacePath.from_ref(ex_func) | ||||
| 
 | ||||
|         # ^NOTE, 2 cases: | ||||
|         # - codec hooks not added -> decode nsp as `str` | ||||
|         # - codec with hooks -> decode nsp as `NamespacePath` | ||||
|         nsp_codec: MsgCodec|None = None | ||||
|         if ( | ||||
|             NamespacePath in pld_types | ||||
|             and | ||||
|             add_hooks | ||||
|         ): | ||||
|             nsp_codec = mk_codec( | ||||
|                 enc_hook=enc_nsp, | ||||
|                 ext_types=[NamespacePath], | ||||
|             ) | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             p: tractor.Portal = await an.start_actor( | ||||
|                 'sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             with ( | ||||
|                 maybe_apply_codec(nsp_codec) as codec, | ||||
|             ): | ||||
|                 chk_codec_applied( | ||||
|                     expect_codec=nsp_codec, | ||||
|                     enter_value=codec, | ||||
|                 ) | ||||
|                 rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec) | ||||
| 
 | ||||
|                 # XXX should raise an mte (`MsgTypeError`) | ||||
|                 # when `add_hooks == False` bc the input | ||||
|                 # `expect_ipc_send` kwarg has a nsp which can't be | ||||
|                 # serialized! | ||||
|                 # | ||||
|                 # TODO:can we ensure this happens from the | ||||
|                 # `Return`-side (aka the sub) as well? | ||||
|                 try: | ||||
|                     ctx: tractor.Context | ||||
|                     ipc: tractor.MsgStream | ||||
|                     async with ( | ||||
| 
 | ||||
|                         # XXX should raise an mte (`MsgTypeError`) | ||||
|                         # when `add_hooks == False`.. | ||||
|                         p.open_context( | ||||
|                             send_back_values, | ||||
|                             # expect_debug=debug_mode, | ||||
|                             rent_pld_spec_type_strs=rent_pld_spec_type_strs, | ||||
|                             add_hooks=add_hooks, | ||||
|                             # expect_ipc_send=expect_ipc_send, | ||||
|                         ) as (ctx, first), | ||||
| 
 | ||||
|                         ctx.open_stream() as ipc, | ||||
|                     ): | ||||
|                         with ( | ||||
|                             limit_plds( | ||||
|                                 pld_spec, | ||||
|                                 dec_hook=dec_nsp if add_hooks else None, | ||||
|                                 ext_types=[NamespacePath]  if add_hooks else None, | ||||
|                             ) as pld_dec, | ||||
|                         ): | ||||
|                             ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec | ||||
|                             assert pld_dec is ctx_pld_dec | ||||
| 
 | ||||
|                             # if ( | ||||
|                             #     not add_hooks | ||||
|                             #     and | ||||
|                             #     NamespacePath in  | ||||
|                             # ): | ||||
|                             #     pytest.fail('ctx should fail to open without custom enc_hook!?') | ||||
| 
 | ||||
|                             await ipc.send(nsp) | ||||
|                             nsp_rt = await ipc.receive() | ||||
| 
 | ||||
|                             assert nsp_rt == nsp | ||||
|                             assert nsp_rt.load_ref() is ex_func | ||||
| 
 | ||||
|                 # this test passes bc we can go no further! | ||||
|                 except MsgTypeError as mte: | ||||
|                     # if not add_hooks: | ||||
|                     #     # teardown nursery | ||||
|                     #     await p.cancel_actor() | ||||
|                         # return | ||||
| 
 | ||||
|                     raise mte | ||||
| 
 | ||||
|             await p.cancel_actor() | ||||
| 
 | ||||
|     if ( | ||||
|         NamespacePath in pld_types | ||||
|         and | ||||
|         add_hooks | ||||
|     ): | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     else: | ||||
|         with pytest.raises( | ||||
|             expected_exception=tractor.RemoteActorError, | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         exc = excinfo.value | ||||
|         # bc `.started(nsp: NamespacePath)` will raise | ||||
|         assert exc.boxed_type is TypeError | ||||
| 
 | ||||
| 
 | ||||
| # def chk_pld_type( | ||||
| #     payload_spec: Type[Struct]|Any, | ||||
| #     pld: Any, | ||||
| 
 | ||||
| #     expect_roundtrip: bool|None = None, | ||||
| 
 | ||||
| # ) -> bool: | ||||
| 
 | ||||
| #     pld_val_type: Type = type(pld) | ||||
| 
 | ||||
| #     # TODO: verify that the overridden subtypes | ||||
| #     # DO NOT have modified type-annots from original! | ||||
| #     # 'Start',  .pld: FuncSpec | ||||
| #     # 'StartAck',  .pld: IpcCtxSpec | ||||
| #     # 'Stop',  .pld: UNSEt | ||||
| #     # 'Error',  .pld: ErrorData | ||||
| 
 | ||||
| #     codec: MsgCodec = mk_codec( | ||||
| #         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified | ||||
| #         # type union. | ||||
| #         ipc_pld_spec=payload_spec, | ||||
| #     ) | ||||
| 
 | ||||
| #     # make a one-off dec to compare with our `MsgCodec` instance | ||||
| #     # which does the below `mk_msg_spec()` call internally | ||||
| #     ipc_msg_spec: Union[Type[Struct]] | ||||
| #     msg_types: list[PayloadMsg[payload_spec]] | ||||
| #     ( | ||||
| #         ipc_msg_spec, | ||||
| #         msg_types, | ||||
| #     ) = mk_msg_spec( | ||||
| #         payload_type_union=payload_spec, | ||||
| #     ) | ||||
| #     _enc = msgpack.Encoder() | ||||
| #     _dec = msgpack.Decoder( | ||||
| #         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` | ||||
| #     ) | ||||
| 
 | ||||
| #     assert ( | ||||
| #         payload_spec | ||||
| #         == | ||||
| #         codec.pld_spec | ||||
| #     ) | ||||
| 
 | ||||
| #     # assert codec.dec == dec | ||||
| #     # | ||||
| #     # ^-XXX-^ not sure why these aren't "equal" but when cast | ||||
| #     # to `str` they seem to match ?? .. kk | ||||
| 
 | ||||
| #     assert ( | ||||
| #         str(ipc_msg_spec) | ||||
| #         == | ||||
| #         str(codec.msg_spec) | ||||
| #         == | ||||
| #         str(_dec.type) | ||||
| #         == | ||||
| #         str(codec.dec.type) | ||||
| #     ) | ||||
| 
 | ||||
| #     # verify the boxed-type for all variable payload-type msgs. | ||||
| #     if not msg_types: | ||||
| #         breakpoint() | ||||
| 
 | ||||
| #     roundtrip: bool|None = None | ||||
| #     pld_spec_msg_names: list[str] = [ | ||||
| #         td.__name__ for td in _payload_msgs | ||||
| #     ] | ||||
| #     for typedef in msg_types: | ||||
| 
 | ||||
| #         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names | ||||
| #         if skip_runtime_msg: | ||||
| #             continue | ||||
| 
 | ||||
| #         pld_field = structs.fields(typedef)[1] | ||||
| #         assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere? | ||||
| 
 | ||||
| #         kwargs: dict[str, Any] = { | ||||
| #             'cid': '666', | ||||
| #             'pld': pld, | ||||
| #         } | ||||
| #         enc_msg: PayloadMsg = typedef(**kwargs) | ||||
| 
 | ||||
| #         _wire_bytes: bytes = _enc.encode(enc_msg) | ||||
| #         wire_bytes: bytes = codec.enc.encode(enc_msg) | ||||
| #         assert _wire_bytes == wire_bytes | ||||
| 
 | ||||
| #         ve: ValidationError|None = None | ||||
| #         try: | ||||
| #             dec_msg = codec.dec.decode(wire_bytes) | ||||
| #             _dec_msg = _dec.decode(wire_bytes) | ||||
| 
 | ||||
| #             # decoded msg and thus payload should be exactly same! | ||||
| #             assert (roundtrip := ( | ||||
| #                 _dec_msg | ||||
| #                 == | ||||
| #                 dec_msg | ||||
| #                 == | ||||
| #                 enc_msg | ||||
| #             )) | ||||
| 
 | ||||
| #             if ( | ||||
| #                 expect_roundtrip is not None | ||||
| #                 and expect_roundtrip != roundtrip | ||||
| #             ): | ||||
| #                 breakpoint() | ||||
| 
 | ||||
| #             assert ( | ||||
| #                 pld | ||||
| #                 == | ||||
| #                 dec_msg.pld | ||||
| #                 == | ||||
| #                 enc_msg.pld | ||||
| #             ) | ||||
| #             # assert (roundtrip := (_dec_msg == enc_msg)) | ||||
| 
 | ||||
| #         except ValidationError as _ve: | ||||
| #             ve = _ve | ||||
| #             roundtrip: bool = False | ||||
| #             if pld_val_type is payload_spec: | ||||
| #                 raise ValueError( | ||||
| #                    'Got `ValidationError` despite type-var match!?\n' | ||||
| #                     f'pld_val_type: {pld_val_type}\n' | ||||
| #                     f'payload_type: {payload_spec}\n' | ||||
| #                 ) from ve | ||||
| 
 | ||||
| #             else: | ||||
| #                 # ow we good cuz the pld spec mismatched. | ||||
| #                 print( | ||||
| #                     'Got expected `ValidationError` since,\n' | ||||
| #                     f'{pld_val_type} is not {payload_spec}\n' | ||||
| #                 ) | ||||
| #         else: | ||||
| #             if ( | ||||
| #                 payload_spec is not Any | ||||
| #                 and | ||||
| #                 pld_val_type is not payload_spec | ||||
| #             ): | ||||
| #                 raise ValueError( | ||||
| #                    'DID NOT `ValidationError` despite expected type match!?\n' | ||||
| #                     f'pld_val_type: {pld_val_type}\n' | ||||
| #                     f'payload_type: {payload_spec}\n' | ||||
| #                 ) | ||||
| 
 | ||||
| #     # full code decode should always be attempted! | ||||
| #     if roundtrip is None: | ||||
| #         breakpoint() | ||||
| 
 | ||||
| #     return roundtrip | ||||
| 
 | ||||
| 
 | ||||
| # ?TODO? maybe remove since covered in the newer `test_pldrx_limiting` | ||||
| # via end-2-end testing of all this? | ||||
| # -[ ] IOW do we really NEED this lowlevel unit testing? | ||||
| # | ||||
| # def test_limit_msgspec( | ||||
| #     debug_mode: bool, | ||||
| # ): | ||||
| #     ''' | ||||
| #     Internals unit testing to verify that type-limiting an IPC ctx's | ||||
| #     msg spec with `Pldrx.limit_plds()` results in various | ||||
| #     encapsulated `msgspec` object settings and state. | ||||
| 
 | ||||
| #     ''' | ||||
| #     async def main(): | ||||
| #         async with tractor.open_root_actor( | ||||
| #             debug_mode=debug_mode, | ||||
| #         ): | ||||
| #             # ensure we can round-trip a boxing `PayloadMsg` | ||||
| #             assert chk_pld_type( | ||||
| #                 payload_spec=Any, | ||||
| #                 pld=None, | ||||
| #                 expect_roundtrip=True, | ||||
| #             ) | ||||
| 
 | ||||
| #             # verify that a mis-typed payload value won't decode | ||||
| #             assert not chk_pld_type( | ||||
| #                 payload_spec=int, | ||||
| #                 pld='doggy', | ||||
| #             ) | ||||
| 
 | ||||
| #             # parametrize the boxed `.pld` type as a custom-struct | ||||
| #             # and ensure that parametrization propagates | ||||
| #             # to all payload-msg-spec-able subtypes! | ||||
| #             class CustomPayload(Struct): | ||||
| #                 name: str | ||||
| #                 value: Any | ||||
| 
 | ||||
| #             assert not chk_pld_type( | ||||
| #                 payload_spec=CustomPayload, | ||||
| #                 pld='doggy', | ||||
| #             ) | ||||
| 
 | ||||
| #             assert chk_pld_type( | ||||
| #                 payload_spec=CustomPayload, | ||||
| #                 pld=CustomPayload(name='doggy', value='urmom') | ||||
| #             ) | ||||
| 
 | ||||
| #             # yah, we can `.pause_from_sync()` now! | ||||
| #             # breakpoint() | ||||
| 
 | ||||
| #     trio.run(main) | ||||
										
											
File diff suppressed because it is too large
							|  | @ -16,18 +16,17 @@ from tractor import (  # typing | |||
|     Portal, | ||||
|     Context, | ||||
|     ContextCancelled, | ||||
|     RemoteActorError, | ||||
| ) | ||||
| from tractor._testing import ( | ||||
|     # tractor_test, | ||||
|     expect_ctxc, | ||||
| ) | ||||
| 
 | ||||
| # XXX TODO cases: | ||||
| # - [ ] peer cancelled itself - so other peers should | ||||
| #   get errors reflecting that the peer was itself the .canceller? | ||||
| 
 | ||||
| # - [x] WE cancelled the peer and thus should not see any raised | ||||
| #   `ContextCancelled` as it should be reaped silently? | ||||
| #   => pretty sure `test_context_stream_semantics::test_caller_cancels()` | ||||
| #      already covers this case? | ||||
| 
 | ||||
| # - [x] INTER-PEER: some arbitrary remote peer cancels via | ||||
| #   Portal.cancel_actor(). | ||||
| #   => all other connected peers should get that cancel requesting peer's | ||||
|  | @ -40,11 +39,20 @@ from tractor._testing import ( | |||
| #   that also spawned a remote task task in that same peer-parent. | ||||
| 
 | ||||
| 
 | ||||
| # def test_self_cancel(): | ||||
| #     ''' | ||||
| #     2 cases: | ||||
| #     - calls `Actor.cancel()` locally in some task | ||||
| #     - calls LocalPortal.cancel_actor()` ? | ||||
| 
 | ||||
| #     ''' | ||||
| #     ... | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_stream_then_sleep_forever( | ||||
| async def sleep_forever( | ||||
|     ctx: Context, | ||||
|     expect_ctxc: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Sync the context, open a stream then just sleep. | ||||
|  | @ -54,10 +62,6 @@ async def open_stream_then_sleep_forever( | |||
|     ''' | ||||
|     try: | ||||
|         await ctx.started() | ||||
| 
 | ||||
|         # NOTE: the below means this child will send a `Stop` | ||||
|         # to it's parent-side task despite that side never | ||||
|         # opening a stream itself. | ||||
|         async with ctx.open_stream(): | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|  | @ -91,7 +95,7 @@ async def error_before_started( | |||
|     ''' | ||||
|     async with tractor.wait_for_actor('sleeper') as p2: | ||||
|         async with ( | ||||
|             p2.open_context(open_stream_then_sleep_forever) as (peer_ctx, first), | ||||
|             p2.open_context(sleep_forever) as (peer_ctx, first), | ||||
|             peer_ctx.open_stream(), | ||||
|         ): | ||||
|             # NOTE: this WAS inside an @acm body but i factored it | ||||
|  | @ -152,11 +156,10 @@ def test_do_not_swallow_error_before_started_by_remote_contextcancelled( | |||
|             ): | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
|     with pytest.raises(RemoteActorError) as excinfo: | ||||
|     with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     rae = excinfo.value | ||||
|     assert rae.boxed_type is TypeError | ||||
|     assert excinfo.value.type == TypeError | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -176,10 +179,6 @@ async def sleep_a_bit_then_cancel_peer( | |||
|         await trio.sleep(cancel_after) | ||||
|         await peer.cancel_actor() | ||||
| 
 | ||||
|         # such that we're cancelled by our rent ctx-task | ||||
|         await trio.sleep(3) | ||||
|         print('CANCELLER RETURNING!') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def stream_ints( | ||||
|  | @ -195,13 +194,9 @@ async def stream_ints( | |||
| @tractor.context | ||||
| async def stream_from_peer( | ||||
|     ctx: Context, | ||||
|     debug_mode: bool, | ||||
|     peer_name: str = 'sleeper', | ||||
| ) -> None: | ||||
| 
 | ||||
|     # sanity | ||||
|     assert tractor._state.debug_mode() == debug_mode | ||||
| 
 | ||||
|     peer: Portal | ||||
|     try: | ||||
|         async with ( | ||||
|  | @ -235,54 +230,20 @@ async def stream_from_peer( | |||
|                 assert msg is not None | ||||
|                 print(msg) | ||||
| 
 | ||||
|     # NOTE: cancellation of the (sleeper) peer should always cause | ||||
|     # a `ContextCancelled` raise in this streaming actor. | ||||
|     except ContextCancelled as _ctxc: | ||||
|         ctxc = _ctxc | ||||
|     # NOTE: cancellation of the (sleeper) peer should always | ||||
|     # cause a `ContextCancelled` raise in this streaming | ||||
|     # actor. | ||||
|     except ContextCancelled as ctxc: | ||||
|         ctxerr = ctxc | ||||
| 
 | ||||
|         # print("TRYING TO ENTER PAUSSE!!!") | ||||
|         # await tractor.pause(shield=True) | ||||
|         re: ContextCancelled = peer_ctx._remote_error | ||||
| 
 | ||||
|         # XXX YES XXX, remote error should be unpacked only once! | ||||
|         assert ( | ||||
|             re | ||||
|             is | ||||
|             peer_ctx.maybe_error | ||||
|             is | ||||
|             ctxc | ||||
|             is | ||||
|             peer_ctx._local_error | ||||
|         ) | ||||
|         # NOTE: these errors should all match! | ||||
|         #   ------ - ------ | ||||
|         # XXX [2024-05-03] XXX | ||||
|         #   ------ - ------ | ||||
|         # broke this due to a re-raise inside `.msg._ops.drain_to_final_msg()` | ||||
|         # where the `Error()` msg was directly raising the ctxc | ||||
|         # instead of just returning up to the caller inside | ||||
|         # `Context.return()` which would results in a diff instance of | ||||
|         # the same remote error bubbling out above vs what was | ||||
|         # already unpacked and set inside `Context. | ||||
|         assert ( | ||||
|             peer_ctx._remote_error.msgdata | ||||
|             == | ||||
|             ctxc.msgdata | ||||
|         ) | ||||
|         # ^-XXX-^ notice the data is of course the exact same.. so | ||||
|         # the above larger assert makes sense to also always be true! | ||||
| 
 | ||||
|         # XXX YES XXX, bc should be exact same msg instances | ||||
|         assert peer_ctx._remote_error._ipc_msg is ctxc._ipc_msg | ||||
| 
 | ||||
|         # XXX NO XXX, bc new one always created for property accesss | ||||
|         assert peer_ctx._remote_error.ipc_msg != ctxc.ipc_msg | ||||
|         assert peer_ctx._remote_error is ctxerr | ||||
|         assert peer_ctx._remote_error.msgdata == ctxerr.msgdata | ||||
| 
 | ||||
|         # the peer ctx is the canceller even though its canceller | ||||
|         # is the "canceller" XD | ||||
|         assert peer_name in peer_ctx.canceller | ||||
| 
 | ||||
|         assert "canceller" in ctxc.canceller | ||||
|         assert "canceller" in ctxerr.canceller | ||||
| 
 | ||||
|         # caller peer should not be the cancel requester | ||||
|         assert not ctx.cancel_called | ||||
|  | @ -306,13 +267,12 @@ async def stream_from_peer( | |||
| 
 | ||||
|         # TODO / NOTE `.canceller` won't have been set yet | ||||
|         # here because that machinery is inside | ||||
|         # `Portal.open_context().__aexit__()` BUT, if we had | ||||
|         # `.open_context().__aexit__()` BUT, if we had | ||||
|         # a way to know immediately (from the last | ||||
|         # checkpoint) that cancellation was due to | ||||
|         # a remote, we COULD assert this here..see, | ||||
|         # https://github.com/goodboy/tractor/issues/368 | ||||
|         # | ||||
|         # await tractor.pause() | ||||
|         # assert 'canceller' in ctx.canceller | ||||
| 
 | ||||
|         # root/parent actor task should NEVER HAVE cancelled us! | ||||
|  | @ -396,6 +356,7 @@ def test_peer_canceller( | |||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             # NOTE: to halt the peer tasks on ctxc, uncomment this. | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             canceller: Portal = await an.start_actor( | ||||
|  | @ -415,13 +376,12 @@ def test_peer_canceller( | |||
|             try: | ||||
|                 async with ( | ||||
|                     sleeper.open_context( | ||||
|                         open_stream_then_sleep_forever, | ||||
|                         sleep_forever, | ||||
|                         expect_ctxc=True, | ||||
|                     ) as (sleeper_ctx, sent), | ||||
| 
 | ||||
|                     just_caller.open_context( | ||||
|                         stream_from_peer, | ||||
|                         debug_mode=debug_mode, | ||||
|                     ) as (caller_ctx, sent), | ||||
| 
 | ||||
|                     canceller.open_context( | ||||
|  | @ -447,11 +407,10 @@ def test_peer_canceller( | |||
| 
 | ||||
|                     # should always raise since this root task does | ||||
|                     # not request the sleeper cancellation ;) | ||||
|                     except ContextCancelled as _ctxc: | ||||
|                         ctxc = _ctxc | ||||
|                     except ContextCancelled as ctxerr: | ||||
|                         print( | ||||
|                             'CAUGHT REMOTE CONTEXT CANCEL\n\n' | ||||
|                             f'{ctxc}\n' | ||||
|                             f'{ctxerr}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                         # canceller and caller peers should not | ||||
|  | @ -462,7 +421,7 @@ def test_peer_canceller( | |||
|                         # we were not the actor, our peer was | ||||
|                         assert not sleeper_ctx.cancel_acked | ||||
| 
 | ||||
|                         assert ctxc.canceller[0] == 'canceller' | ||||
|                         assert ctxerr.canceller[0] == 'canceller' | ||||
| 
 | ||||
|                         # XXX NOTE XXX: since THIS `ContextCancelled` | ||||
|                         # HAS NOT YET bubbled up to the | ||||
|  | @ -473,7 +432,7 @@ def test_peer_canceller( | |||
| 
 | ||||
|                         # CASE_1: error-during-ctxc-handling, | ||||
|                         if error_during_ctxerr_handling: | ||||
|                             raise RuntimeError('Simulated RTE re-raise during ctxc handling') | ||||
|                             raise RuntimeError('Simulated error during teardown') | ||||
| 
 | ||||
|                         # CASE_2: standard teardown inside `.open_context()` block | ||||
|                         raise | ||||
|  | @ -538,9 +497,6 @@ def test_peer_canceller( | |||
|                 #   should be cancelled by US. | ||||
|                 # | ||||
|                 if error_during_ctxerr_handling: | ||||
|                     print(f'loc_err: {_loc_err}\n') | ||||
|                     assert isinstance(loc_err, RuntimeError) | ||||
| 
 | ||||
|                     # since we do a rte reraise above, the | ||||
|                     # `.open_context()` error handling should have | ||||
|                     # raised a local rte, thus the internal | ||||
|  | @ -549,6 +505,9 @@ def test_peer_canceller( | |||
|                     # a `trio.Cancelled` due to a local | ||||
|                     # `._scope.cancel()` call. | ||||
|                     assert not sleeper_ctx._scope.cancelled_caught | ||||
| 
 | ||||
|                     assert isinstance(loc_err, RuntimeError) | ||||
|                     print(f'_loc_err: {_loc_err}\n') | ||||
|                     # assert sleeper_ctx._local_error is _loc_err | ||||
|                     assert not ( | ||||
|  | @ -585,12 +544,9 @@ def test_peer_canceller( | |||
| 
 | ||||
|                         else:  # the other 2 ctxs | ||||
|                             assert ( | ||||
|                                 isinstance(re, ContextCancelled) | ||||
|                                 and ( | ||||
|                                     re.canceller | ||||
|                                     == | ||||
|                                     canceller.channel.uid | ||||
|                                 ) | ||||
|                                 re.canceller | ||||
|                                 == | ||||
|                                 canceller.channel.uid | ||||
|                             ) | ||||
| 
 | ||||
|                     # since the sleeper errors while handling a | ||||
|  | @ -783,16 +739,14 @@ def test_peer_canceller( | |||
|         with pytest.raises(ContextCancelled) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type == ContextCancelled | ||||
|         assert excinfo.value.type == ContextCancelled | ||||
|         assert excinfo.value.canceller[0] == 'canceller' | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def basic_echo_server( | ||||
|     ctx: Context, | ||||
|     peer_name: str = 'wittle_bruv', | ||||
| 
 | ||||
|     err_after_imsg: int|None = None, | ||||
|     peer_name: str = 'stepbro', | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -820,31 +774,17 @@ async def basic_echo_server( | |||
|             # assert 0 | ||||
|             await ipc.send(resp) | ||||
| 
 | ||||
|             if ( | ||||
|                 err_after_imsg | ||||
|                 and | ||||
|                 i > err_after_imsg | ||||
|             ): | ||||
|                 raise RuntimeError( | ||||
|                     f'Simulated error in `{peer_name}`' | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def serve_subactors( | ||||
|     ctx: Context, | ||||
|     peer_name: str, | ||||
|     debug_mode: bool, | ||||
| 
 | ||||
| ) -> None: | ||||
|     async with open_nursery() as an: | ||||
| 
 | ||||
|         # sanity | ||||
|         assert tractor._state.debug_mode() == debug_mode | ||||
| 
 | ||||
|         await ctx.started(peer_name) | ||||
|         async with ctx.open_stream() as ipc: | ||||
|             async for msg in ipc: | ||||
|         async with ctx.open_stream() as reqs: | ||||
|             async for msg in reqs: | ||||
|                 peer_name: str = msg | ||||
|                 peer: Portal = await an.start_actor( | ||||
|                     name=peer_name, | ||||
|  | @ -855,9 +795,9 @@ async def serve_subactors( | |||
|                     f'{peer_name}\n' | ||||
|                     f'|_{peer}\n' | ||||
|                 ) | ||||
|                 await ipc.send(( | ||||
|                 await reqs.send(( | ||||
|                     peer.chan.uid, | ||||
|                     peer.chan.raddr.unwrap(), | ||||
|                     peer.chan.raddr, | ||||
|                 )) | ||||
| 
 | ||||
|         print('Spawner exiting spawn serve loop!') | ||||
|  | @ -867,20 +807,14 @@ async def serve_subactors( | |||
| async def client_req_subactor( | ||||
|     ctx: Context, | ||||
|     peer_name: str, | ||||
|     debug_mode: bool, | ||||
| 
 | ||||
|     # used to simulate a user causing an error to be raised | ||||
|     # directly in thread (like a KBI) to better replicate the | ||||
|     # case where a `modden` CLI client would hang after requesting | ||||
|     # a `Context.cancel()` to `bigd`'s wks spawner. | ||||
|     reraise_on_cancel: str|None = None, | ||||
|     sub_err_after: int|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     # sanity | ||||
|     if debug_mode: | ||||
|         assert tractor._state.debug_mode() | ||||
| 
 | ||||
|     # TODO: other cases to do with sub lifetimes: | ||||
|     # -[ ] test that we can have the server spawn a sub | ||||
|     #   that lives longer than the ctx with this client. | ||||
|  | @ -902,7 +836,6 @@ async def client_req_subactor( | |||
|         spawner.open_context( | ||||
|             serve_subactors, | ||||
|             peer_name=peer_name, | ||||
|             debug_mode=debug_mode, | ||||
|         ) as (spawner_ctx, first), | ||||
|     ): | ||||
|         assert first == peer_name | ||||
|  | @ -924,7 +857,6 @@ async def client_req_subactor( | |||
|             await tell_little_bro( | ||||
|                 actor_name=sub_uid[0], | ||||
|                 caller='client', | ||||
|                 err_after=sub_err_after, | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: test different scope-layers of | ||||
|  | @ -936,7 +868,9 @@ async def client_req_subactor( | |||
|             # TODO: would be super nice to have a special injected | ||||
|             # cancel type here (maybe just our ctxc) but using | ||||
|             # some native mechanism in `trio` :p | ||||
|             except trio.Cancelled as err: | ||||
|             except ( | ||||
|                 trio.Cancelled | ||||
|             ) as err: | ||||
|                 _err = err | ||||
|                 if reraise_on_cancel: | ||||
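|                     # NB: lookup the requested builtin exc-type by | ||||
|                     # name; this assumes `__builtins__` presents as | ||||
|                     # a dict (as it does for imported modules). | ||||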
|                     errtype = globals()['__builtins__'][reraise_on_cancel] | ||||
|  | @ -963,10 +897,7 @@ async def client_req_subactor( | |||
| 
 | ||||
| async def tell_little_bro( | ||||
|     actor_name: str, | ||||
| 
 | ||||
|     caller: str = '', | ||||
|     err_after: float|None = None, | ||||
|     rng_seed: int = 50, | ||||
|     caller: str = '' | ||||
| ): | ||||
|     # contact target actor, do a stream dialog. | ||||
|     async with ( | ||||
|  | @ -975,20 +906,14 @@ async def tell_little_bro( | |||
|         ) as lb, | ||||
|         lb.open_context( | ||||
|             basic_echo_server, | ||||
| 
 | ||||
|             # XXX proxy any delayed err condition | ||||
|             err_after_imsg=( | ||||
|                 err_after * rng_seed | ||||
|                 if err_after is not None | ||||
|                 else None | ||||
|             ), | ||||
|         ) as (sub_ctx, first), | ||||
| 
 | ||||
|         sub_ctx.open_stream() as echo_ipc, | ||||
|         sub_ctx.open_stream( | ||||
|             basic_echo_server, | ||||
|         ) as echo_ipc, | ||||
|     ): | ||||
|         actor: Actor = current_actor() | ||||
|         uid: tuple = actor.uid | ||||
|         for i in range(rng_seed): | ||||
|         for i in range(100): | ||||
|             msg: tuple = ( | ||||
|                 uid, | ||||
|                 i, | ||||
|  | @ -1011,15 +936,10 @@ async def tell_little_bro( | |||
|     'raise_client_error', | ||||
|     [None, 'KeyboardInterrupt'], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_sub_spawn_error_after', | ||||
|     [None, 0.5], | ||||
| ) | ||||
| def test_peer_spawns_and_cancels_service_subactor( | ||||
|     debug_mode: bool, | ||||
|     raise_client_error: str, | ||||
|     reg_addr: tuple[str, int], | ||||
|     raise_sub_spawn_error_after: float|None, | ||||
| ): | ||||
|     # NOTE: this tests for the modden `mod wks open piker` bug | ||||
|     # discovered as part of implementing workspace ctx | ||||
|  | @ -1033,17 +953,6 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|     #   and the server's spawned child should cancel and terminate! | ||||
|     peer_name: str = 'little_bro' | ||||
| 
 | ||||
| 
 | ||||
|     def check_inner_rte(rae: RemoteActorError): | ||||
|         ''' | ||||
|         Validate the little_bro's relayed inception! | ||||
| 
 | ||||
|         ''' | ||||
|         assert rae.boxed_type is RemoteActorError | ||||
|         assert rae.src_type is RuntimeError | ||||
|         assert 'client' in rae.relay_uid | ||||
|         assert peer_name in rae.src_uid | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             # NOTE: set `debug_mode=True` to halt the peer tasks on ctxc. | ||||
|  | @ -1067,24 +976,14 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|                     server.open_context( | ||||
|                         serve_subactors, | ||||
|                         peer_name=peer_name, | ||||
|                         debug_mode=debug_mode, | ||||
| 
 | ||||
|                     ) as (spawn_ctx, first), | ||||
| 
 | ||||
|                     client.open_context( | ||||
|                         client_req_subactor, | ||||
|                         peer_name=peer_name, | ||||
|                         debug_mode=debug_mode, | ||||
|                         reraise_on_cancel=raise_client_error, | ||||
| 
 | ||||
|                         # trigger for error condition in sub | ||||
|                         # during streaming. | ||||
|                         sub_err_after=raise_sub_spawn_error_after, | ||||
| 
 | ||||
|                     ) as (client_ctx, client_says), | ||||
|                 ): | ||||
|                     root: Actor = current_actor() | ||||
|                     spawner_uid: tuple = spawn_ctx.chan.uid | ||||
|                     print( | ||||
|                         f'Server says: {first}\n' | ||||
|                         f'Client says: {client_says}\n' | ||||
|  | @ -1094,7 +993,6 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|                     # (grandchild of this root actor) "little_bro" | ||||
|                     # and ensure we can also use it as an echo | ||||
|                     # server. | ||||
|                     sub: Portal | ||||
|                     async with tractor.wait_for_actor( | ||||
|                         name=peer_name, | ||||
|                     ) as sub: | ||||
|  | @ -1106,150 +1004,56 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|                         f'.uid: {sub.actor.uid}\n' | ||||
|                         f'chan.raddr: {sub.chan.raddr}\n' | ||||
|                     ) | ||||
|                     await tell_little_bro( | ||||
|                         actor_name=peer_name, | ||||
|                         caller='root', | ||||
|                     ) | ||||
| 
 | ||||
|                     async with expect_ctxc( | ||||
|                         yay=raise_sub_spawn_error_after, | ||||
|                         reraise=False, | ||||
|                     ): | ||||
|                         await tell_little_bro( | ||||
|                             actor_name=peer_name, | ||||
|                             caller='root', | ||||
|                         ) | ||||
|                     # signal client to raise a KBI | ||||
|                     await client_ctx.cancel() | ||||
|                     print('root cancelled client, checking that sub-spawn is down') | ||||
| 
 | ||||
|                     if not raise_sub_spawn_error_after: | ||||
| 
 | ||||
|                         # signal client to cancel and maybe raise a KBI | ||||
|                         await client_ctx.cancel() | ||||
|                         print( | ||||
|                             '-> root cancelling client,\n' | ||||
|                             '-> root checking `client_ctx.result()`,\n' | ||||
|                             f'-> checking that sub-spawn {peer_name} is down\n' | ||||
|                         ) | ||||
| 
 | ||||
|                     try: | ||||
|                         res = await client_ctx.wait_for_result(hide_tb=False) | ||||
|                         # in remote (relayed inception) error | ||||
|                         # case, we should error on the line above! | ||||
|                         if raise_sub_spawn_error_after: | ||||
|                             pytest.fail( | ||||
|                                 'Never rxed proxied `RemoteActorError[RuntimeError]` !?' | ||||
|                             ) | ||||
| 
 | ||||
|                         assert isinstance(res, ContextCancelled) | ||||
|                         assert client_ctx.cancel_acked | ||||
|                         assert res.canceller == root.uid | ||||
|                         assert not raise_sub_spawn_error_after | ||||
| 
 | ||||
|                         # cancelling the spawner sub should | ||||
|                         # transitively cancel its sub, the little | ||||
|                         # bruv. | ||||
|                         print('root cancelling server/client sub-actors') | ||||
|                         await spawn_ctx.cancel() | ||||
|                         async with tractor.find_actor( | ||||
|                             name=peer_name, | ||||
|                         ) as sub: | ||||
|                             assert not sub | ||||
| 
 | ||||
|                     # XXX, only for tracing | ||||
|                     # except BaseException as _berr: | ||||
|                     #     berr = _berr | ||||
|                     #     await tractor.pause(shield=True) | ||||
|                     #     raise berr | ||||
| 
 | ||||
|                     except RemoteActorError as rae: | ||||
|                         _err = rae | ||||
|                         assert raise_sub_spawn_error_after | ||||
| 
 | ||||
|                         # since this is a "relayed error" via the client | ||||
|                         # sub-actor, it is expected to be | ||||
|                         # a `RemoteActorError` boxing another | ||||
|                         # `RemoteActorError` otherwise known as | ||||
|                         #  an "inception" (from `trio`'s parlance) | ||||
|                         # ((or maybe a "Matryoshka" and/or "matron" | ||||
|                         # in our own working parlance)) which | ||||
|                         # contains the source error from the | ||||
|                         # little_bro: a `RuntimeError`. | ||||
|                         # | ||||
|                         check_inner_rte(rae) | ||||
|                         assert rae.relay_uid == client.chan.uid | ||||
|                         assert rae.src_uid == sub.chan.uid | ||||
| 
 | ||||
|                         assert not client_ctx.cancel_acked | ||||
|                         assert ( | ||||
|                             client_ctx.maybe_error | ||||
|                             is client_ctx.outcome | ||||
|                             is rae | ||||
|                         ) | ||||
|                         raise | ||||
|                         # await tractor.pause() | ||||
|                     async with tractor.find_actor( | ||||
|                         name=peer_name, | ||||
|                     ) as sub: | ||||
|                         assert not sub | ||||
| 
 | ||||
|                     print('root cancelling server/client sub-actors') | ||||
| 
 | ||||
|                     # await tractor.pause() | ||||
|                     # await server.cancel_actor() | ||||
|                     res = await client_ctx.result(hide_tb=False) | ||||
|                     assert isinstance(res, ContextCancelled) | ||||
|                     assert client_ctx.cancel_acked | ||||
|                     assert res.canceller == current_actor().uid | ||||
| 
 | ||||
|             except RemoteActorError as rae: | ||||
|                 # XXX more-or-less same as above handler; | ||||
|                 # this is just making sure the error bubbles out | ||||
|                 # of the enclosing `.open_context()` block. | ||||
|                 _err = rae | ||||
|                 assert raise_sub_spawn_error_after | ||||
|                 raise | ||||
|                     await spawn_ctx.cancel() | ||||
|                     # await server.cancel_actor() | ||||
| 
 | ||||
|             # since we called `.cancel_actor()`, `.cancel_ack` | ||||
|             # will not be set on the ctx bc `ctx.cancel()` was not | ||||
|             # called directly for this context. | ||||
|             except ContextCancelled as ctxc: | ||||
|                 _ctxc = ctxc | ||||
|                 print( | ||||
|                     f'{root.uid} caught ctxc from ctx with {client_ctx.chan.uid}\n' | ||||
|                     f'{repr(ctxc)}\n' | ||||
|                 ) | ||||
| 
 | ||||
|                 if not raise_sub_spawn_error_after: | ||||
|                     assert ctxc.canceller == root.uid | ||||
|                 else: | ||||
|                     assert ctxc.canceller == spawner_uid | ||||
| 
 | ||||
|                 print('caught ctxc from contexts!') | ||||
|                 assert ctxc.canceller == current_actor().uid | ||||
|                 assert ctxc is spawn_ctx.outcome | ||||
|                 assert ctxc is spawn_ctx.maybe_error | ||||
|                 raise | ||||
| 
 | ||||
|             if raise_sub_spawn_error_after: | ||||
|                 pytest.fail( | ||||
|                     'context block(s) in PARENT never raised?!?' | ||||
|                 ) | ||||
|             # assert spawn_ctx.cancel_acked | ||||
|             assert spawn_ctx.cancel_acked | ||||
|             assert client_ctx.cancel_acked | ||||
| 
 | ||||
|             if not raise_sub_spawn_error_after: | ||||
|                 # assert spawn_ctx.cancel_acked | ||||
|                 assert spawn_ctx.cancel_acked | ||||
|                 assert client_ctx.cancel_acked | ||||
|             await client.cancel_actor() | ||||
|             await server.cancel_actor() | ||||
| 
 | ||||
|                 await client.cancel_actor() | ||||
|                 await server.cancel_actor() | ||||
|             # WOA WOA WOA! we need this to close..!!!?? | ||||
|             # that's super bad XD | ||||
| 
 | ||||
|                 # WOA WOA WOA! we need this to close..!!!?? | ||||
|                 # that's super bad XD | ||||
|             # TODO: why isn't this working!?!? | ||||
|             # we're now outside the `.open_context()` block so | ||||
|             # the internal `Context._scope: CancelScope` should be | ||||
|             # gracefully "closed" ;) | ||||
| 
 | ||||
|                 # TODO: why isn't this working!?!? | ||||
|                 # we're now outside the `.open_context()` block so | ||||
|                 # the internal `Context._scope: CancelScope` should be | ||||
|                 # gracefully "closed" ;) | ||||
|             # assert spawn_ctx.cancelled_caught | ||||
| 
 | ||||
|                 # assert spawn_ctx.cancelled_caught | ||||
| 
 | ||||
|     async def _main(): | ||||
|         with trio.fail_after( | ||||
|             3 if not debug_mode | ||||
|             else 999 | ||||
|         ): | ||||
|             await main() | ||||
| 
 | ||||
|     if raise_sub_spawn_error_after: | ||||
|         with pytest.raises(RemoteActorError) as excinfo: | ||||
|             trio.run(_main) | ||||
| 
 | ||||
|         rae: RemoteActorError = excinfo.value | ||||
|         check_inner_rte(rae) | ||||
| 
 | ||||
|     else: | ||||
|         trio.run(_main) | ||||
|     trio.run(main) | ||||
|  |  | |||
|  | @ -38,13 +38,10 @@ async def async_gen_stream(sequence): | |||
|     assert cs.cancelled_caught | ||||
| 
 | ||||
| 
 | ||||
| # TODO: deprecated; either remove entirely | ||||
| # or re-impl in terms of a `MsgStream` one-sided | ||||
| # wrapper, but at least remove `Portal.open_stream_from()` | ||||
| @tractor.stream | ||||
| async def context_stream( | ||||
|     ctx: tractor.Context, | ||||
|     sequence: list[int], | ||||
|     sequence | ||||
| ): | ||||
|     for i in sequence: | ||||
|         await ctx.send_yield(i) | ||||
|  | @ -235,16 +232,10 @@ async def cancel_after(wait, reg_addr): | |||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='module') | ||||
| def time_quad_ex( | ||||
|     reg_addr: tuple, | ||||
|     ci_env: bool, | ||||
|     spawn_backend: str, | ||||
| ): | ||||
| def time_quad_ex(reg_addr, ci_env, spawn_backend): | ||||
|     if spawn_backend == 'mp': | ||||
|         ''' | ||||
|         no idea but the mp *nix runs are flaking out here often... | ||||
| 
 | ||||
|         ''' | ||||
|         """no idea but the  mp *nix runs are flaking out here often... | ||||
|         """ | ||||
|         pytest.skip("Test is too flaky on mp in CI") | ||||
| 
 | ||||
|     timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4 | ||||
|  | @ -255,24 +246,12 @@ def time_quad_ex( | |||
|     return results, diff | ||||
| 
 | ||||
| 
 | ||||
| def test_a_quadruple_example( | ||||
|     time_quad_ex: tuple, | ||||
|     ci_env: bool, | ||||
|     spawn_backend: str, | ||||
| ): | ||||
|     ''' | ||||
|     This also serves as a kind of "we'd like to be this fast test". | ||||
| def test_a_quadruple_example(time_quad_ex, ci_env, spawn_backend): | ||||
|     """This also serves as a kind of "we'd like to be this fast test".""" | ||||
| 
 | ||||
|     ''' | ||||
|     results, diff = time_quad_ex | ||||
|     assert results | ||||
|     this_fast = ( | ||||
|         6 if platform.system() in ( | ||||
|             'Windows', | ||||
|             'Darwin', | ||||
|         ) | ||||
|         else 3 | ||||
|     ) | ||||
|     this_fast = 6 if platform.system() in ('Windows', 'Darwin') else 3 | ||||
|     assert diff < this_fast | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -38,7 +38,7 @@ async def test_self_is_registered_localportal(reg_addr): | |||
|     "Verify waiting on the arbiter to register itself using a local portal." | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|     async with tractor.get_registry(reg_addr) as portal: | ||||
|     async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|         assert isinstance(portal, tractor._portal.LocalPortal) | ||||
| 
 | ||||
|         with trio.fail_after(0.2): | ||||
|  |  | |||
|  | @ -10,7 +10,7 @@ import tractor | |||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from .conftest import ( | ||||
| from conftest import ( | ||||
|     sig_prog, | ||||
|     _INT_SIGNAL, | ||||
|     _INT_RETURN_CODE, | ||||
|  | @ -32,7 +32,7 @@ def test_abort_on_sigint(daemon): | |||
| @tractor_test | ||||
| async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||
|     assert not tractor.current_actor().is_arbiter | ||||
|     async with tractor.get_registry(reg_addr) as portal: | ||||
|     async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
|     time.sleep(0.1) | ||||
|  | @ -41,7 +41,7 @@ async def test_cancel_remote_arbiter(daemon, reg_addr): | |||
| 
 | ||||
|     # no arbiter socket should exist | ||||
|     with pytest.raises(OSError): | ||||
|         async with tractor.get_registry(reg_addr) as portal: | ||||
|         async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|             pass | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,239 +0,0 @@ | |||
| ''' | ||||
| Define the details of inter-actor "out-of-band" (OoB) cancel | ||||
| semantics, that is, how cancellation works when a cancel request comes | ||||
| from a different concurrency (primitive's) "layer" than where the | ||||
| eventual `trio.Task` actually raises a signal. | ||||
| 
 | ||||
| ''' | ||||
| from functools import partial | ||||
| # from contextlib import asynccontextmanager as acm | ||||
| # import itertools | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import (  # typing | ||||
|     ActorNursery, | ||||
|     Portal, | ||||
|     Context, | ||||
|     # ContextCancelled, | ||||
|     # RemoteActorError, | ||||
| ) | ||||
| # from tractor._testing import ( | ||||
| #     tractor_test, | ||||
| #     expect_ctxc, | ||||
| # ) | ||||
| 
 | ||||
| # XXX TODO cases: | ||||
| # - [ ] peer cancelled itself - so other peers should | ||||
| #   get errors reflecting that the peer was itself the .canceller? | ||||
| 
 | ||||
| # def test_self_cancel(): | ||||
| #     ''' | ||||
| #     2 cases: | ||||
| #     - calls `Actor.cancel()` locally in some task | ||||
| #     - calls LocalPortal.cancel_actor()` ? | ||||
| # | ||||
| # things to ensure! | ||||
| # -[ ] the ctxc raised in a child should ideally show the tb of the | ||||
| #     underlying `Cancelled` checkpoint, i.e. | ||||
| #     `raise scope_error from ctxc`? | ||||
| # | ||||
| # -[ ] a self-cancelled context, if not allowed to block on | ||||
| #     `ctx.result()` at some point will hang since the `ctx._scope` | ||||
| #     is never `.cancel_called`; cases for this include, | ||||
| #     - an `open_ctx()` which never starts before being OoB | ||||
| #       actor-cancelled. | ||||
| #       |_ parent task will be blocked in `.open_context()` for the | ||||
| #         `Started` msg, and when the OoB ctxc arrives `ctx._scope` | ||||
| #         will never have been signalled.. | ||||
| 
 | ||||
| #     ''' | ||||
| #     ... | ||||
| 
 | ||||
| # TODO, sanity test against the case in `/examples/trio/lockacquire_not_unmasked.py` | ||||
| # but with the `Lock.acquire()` from a `@context` to ensure the | ||||
| # implicit ignore-case-non-unmasking. | ||||
| # | ||||
| # @tractor.context | ||||
| # async def acquire_actor_global_lock( | ||||
| #     ctx: tractor.Context, | ||||
| #     ignore_special_cases: bool, | ||||
| # ): | ||||
| 
 | ||||
| #     async with maybe_unmask_excs( | ||||
| #         ignore_special_cases=ignore_special_cases, | ||||
| #     ): | ||||
| #         await ctx.started('locked') | ||||
| 
 | ||||
| #     # block til cancelled | ||||
| #     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_forever( | ||||
|     ctx: tractor.Context, | ||||
|     # ignore_special_cases: bool, | ||||
|     do_started: bool, | ||||
| ): | ||||
| 
 | ||||
|     # async with maybe_unmask_excs( | ||||
|     #     ignore_special_cases=ignore_special_cases, | ||||
|     # ): | ||||
|     #     await ctx.started('locked') | ||||
|     if do_started: | ||||
|         await ctx.started() | ||||
| 
 | ||||
|     # block til cancelled | ||||
|     print('sleepin on child-side..') | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'cancel_ctx', | ||||
|     [True, False], | ||||
| ) | ||||
| def test_cancel_ctx_with_parent_side_entered_in_bg_task( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     cancel_ctx: bool, | ||||
| ): | ||||
|     ''' | ||||
|     The most "basic" out-of-band-task self-cancellation case where | ||||
|     `Portal.open_context()` is entered in a bg task and the | ||||
|     parent-task (of the containing nursery) calls `Context.cancel()` | ||||
|     without the child knowing; the `Context._scope` should be | ||||
|     `.cancel_called` when the IPC ctx's child-side relays | ||||
|     a `ContextCancelled` with a `.canceller` set to the parent | ||||
|     actor('s task). | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after( | ||||
|             2 if not debug_mode else 999, | ||||
|         ): | ||||
|             an: ActorNursery | ||||
|             async with ( | ||||
|                 tractor.open_nursery( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel='devx', | ||||
|                     enable_stack_on_sig=True, | ||||
|                 ) as an, | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 ptl: Portal = await an.start_actor( | ||||
|                     'sub', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
| 
 | ||||
|                 async def _open_ctx_async( | ||||
|                     do_started: bool = True, | ||||
|                     task_status=trio.TASK_STATUS_IGNORED, | ||||
|                 ): | ||||
|                     # do we expect to never enter the | ||||
|                     # `.open_context()` below. | ||||
|                     if not do_started: | ||||
|                         task_status.started() | ||||
| 
 | ||||
|                     async with ptl.open_context( | ||||
|                         sleep_forever, | ||||
|                         do_started=do_started, | ||||
|                     ) as (ctx, first): | ||||
|                         task_status.started(ctx) | ||||
|                         await trio.sleep_forever() | ||||
| 
 | ||||
|                 # XXX, this is the key OoB part! | ||||
|                 # | ||||
|                 # - start the `.open_context()` in a bg task which | ||||
|                 #   blocks inside the embedded scope-body, | ||||
|                 # | ||||
|                 # -  when we call `Context.cancel()` it **is | ||||
|                 #   not** from the same task which eventually runs | ||||
|                 #   `.__aexit__()`, | ||||
|                 # | ||||
|                 # - since the bg "opener" task will be in | ||||
|                 #   a `trio.sleep_forever()`, it must be interrupted | ||||
|                 #   by the `ContextCancelled` delivered from the | ||||
|                 #   child-side; `Context._scope: CancelScope` MUST | ||||
|                 #   be `.cancel_called`! | ||||
|                 # | ||||
|                 print('ASYNC opening IPC context in subtask..') | ||||
|                 maybe_ctx: Context|None = await tn.start(partial( | ||||
|                     _open_ctx_async, | ||||
|                 )) | ||||
| 
 | ||||
|                 if ( | ||||
|                     maybe_ctx | ||||
|                     and | ||||
|                     cancel_ctx | ||||
|                 ): | ||||
|                     print('cancelling first IPC ctx!') | ||||
|                     await maybe_ctx.cancel() | ||||
| 
 | ||||
|                 # XXX, note that despite `maybe_context.cancel()` | ||||
|                 # being called above, it's the parent (bg) task | ||||
|                 # which was originally never interrupted in | ||||
|                 # the `ctx._scope` body due to missing case logic in | ||||
|                 # `ctx._maybe_cancel_and_set_remote_error()`. | ||||
|                 # | ||||
|                 # It didn't matter that the subactor process was | ||||
|                 # already terminated and reaped, nothing was | ||||
|                 # cancelling the ctx-parent task's scope! | ||||
|                 # | ||||
|                 print('cancelling subactor!') | ||||
|                 await ptl.cancel_actor() | ||||
| 
 | ||||
|                 if maybe_ctx: | ||||
|                     try: | ||||
|                         await maybe_ctx.wait_for_result() | ||||
|                     except tractor.ContextCancelled as ctxc: | ||||
|                         assert not cancel_ctx | ||||
|                         assert ( | ||||
|                             ctxc.canceller | ||||
|                             == | ||||
|                             tractor.current_actor().aid.uid | ||||
|                         ) | ||||
|                         # don't re-raise since it'll trigger | ||||
|                         # an EG from the above tn. | ||||
| 
 | ||||
|     if cancel_ctx: | ||||
|         # graceful self-cancel | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     else: | ||||
|         # ctx parent task should see OoB ctxc due to | ||||
|         # `ptl.cancel_actor()`. | ||||
|         with pytest.raises(tractor.ContextCancelled) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert 'root' in excinfo.value.canceller[0] | ||||
| 
 | ||||
| 
 | ||||
| # def test_parent_actor_cancels_subactor_with_gt1_ctxs_open_to_it( | ||||
| #     debug_mode: bool, | ||||
| #     loglevel: str, | ||||
| # ): | ||||
| #     ''' | ||||
| #     Demos OoB cancellation from the perspective of a ctx opened with | ||||
| #     a child subactor where the parent cancels the child at the "actor | ||||
| #     layer" using `Portal.cancel_actor()` and thus the | ||||
| #     `ContextCancelled.canceller` received by the ctx's parent-side | ||||
| #     task will appear to be a "self cancellation" even though that | ||||
| #     specific task itself was not cancelled and thus | ||||
| #     `Context.cancel_called ==False`. | ||||
| #     ''' | ||||
|                 # TODO, do we have an existing implied ctx | ||||
|                 # cancel test like this? | ||||
|                 # with trio.move_on_after(0.5):# as cs: | ||||
|                 #     await _open_ctx_async( | ||||
|                 #         do_started=False, | ||||
|                 #     ) | ||||
| 
 | ||||
| 
 | ||||
|                 # in-line ctx scope should definitely raise | ||||
|                 # a ctxc with `.canceller = 'root'` | ||||
|                 # async with ptl.open_context( | ||||
|                 #     sleep_forever, | ||||
|                 #     do_started=True, | ||||
|                 # ) as pair: | ||||
| 
 | ||||
|  | @ -1,364 +0,0 @@ | |||
| ''' | ||||
| Audit sub-sys APIs from `.msg._ops` | ||||
| mostly for ensuring correct `contextvars` | ||||
| related settings around IPC contexts. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     Struct, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Context, | ||||
|     MsgTypeError, | ||||
|     current_ipc_ctx, | ||||
|     Portal, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _ops as msgops, | ||||
|     Return, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     log, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class PldMsg( | ||||
|     Struct, | ||||
| 
 | ||||
|     # TODO: with multiple structs in-spec we need to tag them! | ||||
|     # -[ ] offer a built-in `PldMsg` type to inherit from which takes | ||||
|     #      care of these details? | ||||
|     # | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag=True, | ||||
|     # tag_field='msg_type', | ||||
| ): | ||||
|     field: str | ||||
| 
 | ||||
| 
 | ||||
| maybe_msg_spec = PldMsg|None | ||||
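As an aside on the tagged-union TODO above: per the linked msgspec docs, tagging is what lets multiple `Struct` types share one pld-spec union. A minimal sketch (illustrative only, not part of this suite):

    from msgspec import Struct, json

    class Login(Struct, tag=True):   # tag defaults to the class name
        user: str

    class Logout(Struct, tag=True):
        user: str

    # a decoder over the union dispatches on the implicit
    # 'type' tag-field embedded in each encoded msg.
    dec = json.Decoder(Login | Logout)
    msg = dec.decode(b'{"type": "Login", "user": "bruv"}')
    assert isinstance(msg, Login)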
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_expect_raises( | ||||
|     raises: type[BaseException]|None = None, | ||||
|     ensure_in_message: list[str]|None = None, | ||||
|     post_mortem: bool = False, | ||||
|     timeout: int = 3, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Async wrapper for ensuring errors propagate from the inner scope. | ||||
| 
 | ||||
|     ''' | ||||
|     if tractor._state.debug_mode(): | ||||
|         timeout += 999 | ||||
| 
 | ||||
|     with trio.fail_after(timeout): | ||||
|         try: | ||||
|             yield | ||||
|         except BaseException as _inner_err: | ||||
|             inner_err = _inner_err | ||||
|             # wasn't-expected to error.. | ||||
|             if raises is None: | ||||
|                 raise | ||||
| 
 | ||||
|             else: | ||||
|                 assert type(inner_err) is raises | ||||
| 
 | ||||
|                 # maybe check for error txt content | ||||
|                 if ensure_in_message: | ||||
|                     part: str | ||||
|                     err_repr: str = repr(inner_err) | ||||
|                     for part in ensure_in_message: | ||||
|                         # if a part never shows up in the error's | ||||
|                         # repr, then we're missing a match. | ||||
|                         if part not in err_repr: | ||||
|                             raise ValueError( | ||||
|                                 'Failed to find error message content?\n\n' | ||||
|                                 f'expected: {ensure_in_message!r}\n' | ||||
|                                 f'part: {part!r}\n\n' | ||||
|                                 f'{inner_err.args}' | ||||
|                             ) | ||||
| 
 | ||||
|                 if post_mortem: | ||||
|                     await tractor.post_mortem() | ||||
| 
 | ||||
|         else: | ||||
|             if raises: | ||||
|                 raise RuntimeError( | ||||
|                     f'Expected a {raises.__name__!r} to be raised?' | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context( | ||||
|     pld_spec=maybe_msg_spec, | ||||
| ) | ||||
| async def child( | ||||
|     ctx: Context, | ||||
|     started_value: int|PldMsg|None, | ||||
|     return_value: str|None, | ||||
|     validate_pld_spec: bool, | ||||
|     raise_on_started_mte: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Call ``Context.started()`` more than once (an error). | ||||
| 
 | ||||
|     ''' | ||||
|     expect_started_mte: bool = started_value == 10 | ||||
| 
 | ||||
|     # sanity check that child RPC context is the current one | ||||
|     curr_ctx: Context = current_ipc_ctx() | ||||
|     assert ctx is curr_ctx | ||||
| 
 | ||||
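|     # grab the child-side payload-receiver and its currently | ||||
|     # applied payload-decoder; the sanity checks below ensure it | ||||
|     # mirrors the ep-fn's declared `pld_spec`. | ||||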
|     rx: msgops.PldRx = ctx._pld_rx | ||||
|     curr_pldec: _codec.MsgDec = rx.pld_dec | ||||
| 
 | ||||
|     ctx_meta: dict = getattr( | ||||
|         child, | ||||
|         '_tractor_context_meta', | ||||
|         None, | ||||
|     ) | ||||
|     if ctx_meta: | ||||
|         assert ( | ||||
|             ctx_meta['pld_spec'] | ||||
|             is curr_pldec.spec | ||||
|             is curr_pldec.pld_spec | ||||
|         ) | ||||
| 
 | ||||
|     # 2 cases: handle send-side and recv-only validation | ||||
|     # - when `raise_on_started_mte == True`, send validate | ||||
|     # - else, parent-recv-side only validation | ||||
|     mte: MsgTypeError|None = None | ||||
|     try: | ||||
|         await ctx.started( | ||||
|             value=started_value, | ||||
|             validate_pld_spec=validate_pld_spec, | ||||
|         ) | ||||
| 
 | ||||
|     except MsgTypeError as _mte: | ||||
|         mte = _mte | ||||
|         log.exception('`started()` raised an MTE!\n') | ||||
|         if not expect_started_mte: | ||||
|             raise RuntimeError( | ||||
|                 'Child-ctx-task SHOULD NOT HAVE raised an MTE for\n\n' | ||||
|                 f'{started_value!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|         boxed_div: str = '------ - ------' | ||||
|         assert boxed_div not in mte._message | ||||
|         assert boxed_div not in mte.tb_str | ||||
|         assert boxed_div not in repr(mte) | ||||
|         assert boxed_div not in str(mte) | ||||
|         mte_repr: str = repr(mte) | ||||
|         for line in mte.message.splitlines(): | ||||
|             assert line in mte_repr | ||||
| 
 | ||||
|         # since this is a *local error* there should be no | ||||
|         # boxed traceback content! | ||||
|         assert not mte.tb_str | ||||
| 
 | ||||
|         # propagate to parent? | ||||
|         if raise_on_started_mte: | ||||
|             raise | ||||
| 
 | ||||
|     # no-send-side-error fallthrough | ||||
|     if ( | ||||
|         validate_pld_spec | ||||
|         and | ||||
|         expect_started_mte | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             'Child-ctx-task SHOULD HAVE raised an MTE for\n\n' | ||||
|             f'{started_value!r}\n' | ||||
|         ) | ||||
| 
 | ||||
|     assert ( | ||||
|         not expect_started_mte | ||||
|         or | ||||
|         not validate_pld_spec | ||||
|     ) | ||||
| 
 | ||||
|     # if wait_for_parent_to_cancel: | ||||
|     #     ... | ||||
|     # | ||||
|     # ^-TODO-^ logic for diff validation policies on each side: | ||||
|     # | ||||
|     # -[ ] ensure that if we don't validate on the send | ||||
|     #   side, that we are eventually error-cancelled by our | ||||
|     #   parent due to the bad `Started` payload! | ||||
|     # -[ ] the boxed error should be srced from the parent's | ||||
|     #   runtime NOT ours! | ||||
|     # -[ ] we should still error on bad `return_value`s | ||||
|     #   despite the parent not yet error-cancelling us? | ||||
|     #   |_ how do we want the parent side to look in that | ||||
|     #     case? | ||||
|     #     -[ ] maybe the equiv of "during handling of the | ||||
|     #       above error another occurred" for the case where | ||||
|     #       the parent sends a MTE to this child and while | ||||
|     #       waiting for the child to terminate it gets back | ||||
|     #       the MTE for this case? | ||||
|     # | ||||
| 
 | ||||
|     # XXX should always fail on recv side since we can't | ||||
|     # really do much else besides terminate and relay the | ||||
|     # msg-type-error from this RPC task ;) | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'return_value', | ||||
|     [ | ||||
|         'yo', | ||||
|         None, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'return[invalid-"yo"]', | ||||
|         'return[valid-None]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'started_value', | ||||
|     [ | ||||
|         10, | ||||
|         PldMsg(field='yo'), | ||||
|     ], | ||||
|     ids=[ | ||||
|         'Started[invalid-10]', | ||||
|         'Started[valid-PldMsg]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'pld_check_started_value', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'check-started-pld', | ||||
|         'no-started-pld-validate', | ||||
|     ], | ||||
| ) | ||||
| def test_basic_payload_spec( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     return_value: str|None, | ||||
|     started_value: int|PldMsg, | ||||
|     pld_check_started_value: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Validate the most basic `PldRx` msg-type-spec semantics around | ||||
|     an IPC `Context` endpoint start, started-sync, and final return | ||||
|     value depending on set payload types and the currently applied | ||||
|     pld-spec. | ||||
| 
 | ||||
|     ''' | ||||
|     invalid_return: bool = return_value == 'yo' | ||||
|     invalid_started: bool = started_value == 10 | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             loglevel=loglevel, | ||||
|         ) as an: | ||||
|             p: Portal = await an.start_actor( | ||||
|                 'child', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             # since not opened yet. | ||||
|             assert current_ipc_ctx() is None | ||||
| 
 | ||||
|             if invalid_started: | ||||
|                 msg_type_str: str = 'Started' | ||||
|                 bad_value: int = 10 | ||||
|             elif invalid_return: | ||||
|                 msg_type_str: str = 'Return' | ||||
|                 bad_value: str = 'yo' | ||||
|             else: | ||||
|                 # XXX but should never be used below then.. | ||||
|                 msg_type_str: str = '' | ||||
|                 bad_value: str = '' | ||||
| 
 | ||||
|             maybe_mte: MsgTypeError|None = None | ||||
|             should_raise: type[Exception]|None = ( | ||||
|                 MsgTypeError if ( | ||||
|                     invalid_return | ||||
|                     or | ||||
|                     invalid_started | ||||
|                 ) else None | ||||
|             ) | ||||
|             async with ( | ||||
|                 maybe_expect_raises( | ||||
|                     raises=should_raise, | ||||
|                     ensure_in_message=[ | ||||
|                         f"invalid `{msg_type_str}` msg payload", | ||||
|                         f'{bad_value}', | ||||
|                         f'has type {type(bad_value)!r}', | ||||
|                         'not match type-spec', | ||||
|                         f'`{msg_type_str}.pld: PldMsg|NoneType`', | ||||
|                     ], | ||||
|                     # only for debug | ||||
|                     # post_mortem=True, | ||||
|                 ), | ||||
|                 p.open_context( | ||||
|                     child, | ||||
|                     return_value=return_value, | ||||
|                     started_value=started_value, | ||||
|                     validate_pld_spec=pld_check_started_value, | ||||
|                 ) as (ctx, first), | ||||
|             ): | ||||
|                 # now opened with 'child' sub | ||||
|                 assert current_ipc_ctx() is ctx | ||||
| 
 | ||||
|                 assert type(first) is PldMsg | ||||
|                 assert first.field == 'yo' | ||||
| 
 | ||||
|                 try: | ||||
|                     res: None|PldMsg = await ctx.result(hide_tb=False) | ||||
|                     assert res is None | ||||
|                 except MsgTypeError as mte: | ||||
|                     maybe_mte = mte | ||||
|                     if not invalid_return: | ||||
|                         raise | ||||
| 
 | ||||
|                     # expected this invalid `Return.pld` so audit | ||||
|                     # the error state + meta-data | ||||
|                     assert mte.expected_msg_type is Return | ||||
|                     assert mte.cid == ctx.cid | ||||
|                     mte_repr: str = repr(mte) | ||||
|                     for line in mte.message.splitlines(): | ||||
|                         assert line in mte_repr | ||||
| 
 | ||||
|                     assert mte.tb_str | ||||
|                     # await tractor.pause(shield=True) | ||||
| 
 | ||||
|                     # verify expected remote mte deats | ||||
|                     assert ctx._local_error is None | ||||
|                     assert ( | ||||
|                         mte is | ||||
|                         ctx._remote_error is | ||||
|                         ctx.maybe_error is | ||||
|                         ctx.outcome | ||||
|                     ) | ||||
| 
 | ||||
|             if should_raise is None: | ||||
|                 assert maybe_mte is None | ||||
| 
 | ||||
|             await p.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,237 +0,0 @@ | |||
| ''' | ||||
| Special case testing for issues not (dis)covered in the primary | ||||
| `Context` related functional/scenario suites. | ||||
| 
 | ||||
| **NOTE: this mod is a WIP** space for handling | ||||
| odd/rare/undiscovered/not-yet-revealed faults which either | ||||
| loudly (ideal case) break our supervision protocol | ||||
| or (worst case) result in distributed sys hangs. | ||||
| 
 | ||||
| Suites here further try to clarify (if [partially] ill-defined) and | ||||
| verify our edge case semantics for inter-actor-relayed-exceptions | ||||
| including, | ||||
| 
 | ||||
| - lowlevel: what remote obj-data is interchanged for IPC and what | ||||
|   native-obj form is expected from unpacking in the new | ||||
|   mem-domain. | ||||
| 
 | ||||
| - which kinds of `RemoteActorError` (and its derivs) are expected by which | ||||
|   (types of) peers (parent, child, sibling, etc) with what | ||||
|   particular meta-data set such as, | ||||
| 
 | ||||
|   - `.src_uid`: the original (maybe) peer who raised. | ||||
|   - `.relay_uid`: the next-hop-peer who sent it. | ||||
|   - `.relay_path`: the sequence of peer actor hops. | ||||
|   - `.is_inception`: a predicate that denotes multi-hop remote errors. | ||||
| 
 | ||||
| - when should `ExceptionGroup`s be relayed from a particular | ||||
|   remote endpoint, they should never be caused by implicit `._rpc` | ||||
|   nursery machinery! | ||||
| 
 | ||||
| - various special `trio` edge cases around its cancellation semantics | ||||
|   and how we (currently) leverage `trio.Cancelled` as a signal for | ||||
|   whether a `Context` task should raise `ContextCancelled` (ctxc). | ||||
| 
 | ||||
| ''' | ||||
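To ground the meta-data fields listed above, a rough sketch of how a parent-side task might introspect a relayed "inception" error (assuming the accessor names as listed; not an excerpt from these suites):

    import tractor

    async def audit_relayed_error(portal: tractor.Portal) -> None:
        try:
            await portal.result()
        except tractor.RemoteActorError as rae:
            # the original raiser vs. the next-hop peer which
            # relayed the boxed error to us.
            print(f'src actor: {rae.src_uid}')
            print(f'relayed via: {rae.relay_uid}')
            print(f'boxed native type: {rae.boxed_type}')
            raise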
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import (  # typing | ||||
|     ActorNursery, | ||||
|     Portal, | ||||
|     Context, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_n_chkpt_in_finally( | ||||
|     ctx: Context, | ||||
|     sleep_n_raise: bool, | ||||
| 
 | ||||
|     chld_raise_delay: float, | ||||
|     chld_finally_delay: float, | ||||
| 
 | ||||
|     rent_cancels: bool, | ||||
|     rent_ctxc_delay: float, | ||||
| 
 | ||||
|     expect_exc: str|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Sync, open a tn, then wait for cancel, run a chkpt inside | ||||
|     the user's `finally:` teardown. | ||||
| 
 | ||||
|     This covers a footgun case that `trio` core doesn't seem to care about | ||||
|     wherein an exc can be masked by a `trio.Cancelled` raised inside a tn-embedded | ||||
|     `finally:`. | ||||
| 
 | ||||
|     Also see `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` | ||||
|     for the nitty-gritty details. | ||||
| 
 | ||||
|     Since a `@context` endpoint fn can also contain code like this, | ||||
|     **and** bc we currently have no easy way other then | ||||
|     `trio.Cancelled` to signal cancellation on each side of an IPC `Context`, | ||||
|     the footgun issue can compound itself as demonstrated in this suite.. | ||||
| 
 | ||||
|     Here are some edge cases codified with our WIP "sclang" syntax | ||||
|     (note the parent(rent)/child(chld) naming here is just | ||||
|     pragmatism; generally most of these cases can occur | ||||
|     regardless of the distributed-task's supervision hierarchy), | ||||
| 
 | ||||
|     - rent c)=> chld.raises-then-taskc-in-finally | ||||
|      |_ chld's body raises an `exc: BaseException`. | ||||
|       _ in its `finally:` block it runs a chkpt | ||||
|         which raises a taskc (`trio.Cancelled`) which | ||||
|         masks `exc`, instead raising the taskc up to the first tn. | ||||
|       _ the embedded/chld tn captures the masking taskc and then | ||||
|         raises it up to the ._rpc-ep-tn instead of `exc`. | ||||
|       _ the rent thinks the child ctxc-ed instead of errored.. | ||||
| 
 | ||||
|     ''' | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     if expect_exc: | ||||
|         expect_exc: type[BaseException] = tractor._exceptions.get_err_type( | ||||
|             type_name=expect_exc, | ||||
|         ) | ||||
| 
 | ||||
|     berr: BaseException|None = None | ||||
|     try: | ||||
|         if not sleep_n_raise: | ||||
|             await trio.sleep_forever() | ||||
|         elif sleep_n_raise: | ||||
| 
 | ||||
|             # XXX this sleep is less then the sleep the parent | ||||
|             # does before calling `ctx.cancel()` | ||||
|             await trio.sleep(chld_raise_delay) | ||||
| 
 | ||||
|             # XXX this will be masked by a taskc raised in | ||||
|             # the `finally:` if this fn doesn't terminate | ||||
|             # before any ctxc-req arrives AND a checkpoint is hit | ||||
|             # in that `finally:`. | ||||
|             raise RuntimeError('my app krurshed..') | ||||
| 
 | ||||
|     except BaseException as _berr: | ||||
|         berr = _berr | ||||
| 
 | ||||
|         # TODO: it'd sure be nice to be able to inject our own | ||||
|         # `ContextCancelled` here instead of of `trio.Cancelled` | ||||
|         # so that our runtime can expect it and this "user code" | ||||
|         # would be able to tell the diff between a generic trio | ||||
|         # cancel and a tractor runtime-IPC cancel. | ||||
|         if expect_exc: | ||||
|             if not isinstance( | ||||
|                 berr, | ||||
|                 expect_exc, | ||||
|             ): | ||||
|                 raise ValueError( | ||||
|                     f'Unexpected exc type ??\n' | ||||
|                     f'{berr!r}\n' | ||||
|                     f'\n' | ||||
|                     f'Expected a {expect_exc!r}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         raise berr | ||||
| 
 | ||||
|     # simulate what user code might try even though | ||||
|     # it's a known boo-boo.. | ||||
|     finally: | ||||
|         # maybe wait for rent ctxc to arrive | ||||
|         with trio.CancelScope(shield=True): | ||||
|             await trio.sleep(chld_finally_delay) | ||||
| 
 | ||||
|         # !!XXX this will raise `trio.Cancelled` which | ||||
|         # will mask the RTE from above!!! | ||||
|         # | ||||
|         # YES, it's the same case as our extant | ||||
|         # `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` | ||||
|         try: | ||||
|             await trio.lowlevel.checkpoint() | ||||
|         except trio.Cancelled as taskc: | ||||
|             if (scope_err := taskc.__context__): | ||||
|                 print( | ||||
|                     f'XXX MASKED REMOTE ERROR XXX\n' | ||||
|                     f'ENDPOINT exception -> {scope_err!r}\n' | ||||
|                     f'will be masked by -> {taskc!r}\n' | ||||
|                 ) | ||||
|                 # await tractor.pause(shield=True) | ||||
| 
 | ||||
|             raise taskc | ||||
| 
 | ||||
| 
 | ||||
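The masking footgun described above can be reproduced with plain `trio`, no `tractor` machinery involved; a minimal standalone sketch where a checkpoint inside a `finally:` swallows an in-flight RTE once the enclosing scope has a pending cancel:

    import trio

    async def rte_then_chkpt_in_finally():
        try:
            raise RuntimeError('original error')
        finally:
            # with a cancel pending, this checkpoint raises
            # `trio.Cancelled`, masking the RTE which then only
            # survives as the taskc's `.__context__`!
            await trio.lowlevel.checkpoint()

    async def main():
        with trio.CancelScope() as cs:
            cs.cancel()  # ensure a cancel is pending for this scope
            try:
                await rte_then_chkpt_in_finally()
            except trio.Cancelled as taskc:
                assert isinstance(taskc.__context__, RuntimeError)
                raise  # absorbed by the (cancelled) scope

        assert cs.cancelled_caught

    trio.run(main)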
| @pytest.mark.parametrize( | ||||
|     'chld_callspec', | ||||
|     [ | ||||
|         dict( | ||||
|             sleep_n_raise=None, | ||||
|             chld_raise_delay=0.1, | ||||
|             chld_finally_delay=0.1, | ||||
|             expect_exc='Cancelled', | ||||
|             rent_cancels=True, | ||||
|             rent_ctxc_delay=0.1, | ||||
|         ), | ||||
|         dict( | ||||
|             sleep_n_raise='RuntimeError', | ||||
|             chld_raise_delay=0.1, | ||||
|             chld_finally_delay=1, | ||||
|             expect_exc='RuntimeError', | ||||
|             rent_cancels=False, | ||||
|             rent_ctxc_delay=0.1, | ||||
|         ), | ||||
|     ], | ||||
|     ids=lambda item: f'chld_callspec={item!r}' | ||||
| ) | ||||
| def test_unmasked_remote_exc( | ||||
|     debug_mode: bool, | ||||
|     chld_callspec: dict, | ||||
|     tpt_proto: str, | ||||
| ): | ||||
|     expect_exc_str: str|None = chld_callspec['sleep_n_raise'] | ||||
|     rent_ctxc_delay: float|None = chld_callspec['rent_ctxc_delay'] | ||||
| 
 | ||||
|     async def main(): | ||||
|         an: ActorNursery | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             enable_transports=[tpt_proto], | ||||
|         ) as an: | ||||
|             ptl: Portal = await an.start_actor( | ||||
|                 'cancellee', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             ctx: Context | ||||
|             async with ( | ||||
|                 ptl.open_context( | ||||
|                     sleep_n_chkpt_in_finally, | ||||
|                     **chld_callspec, | ||||
|                 ) as (ctx, sent), | ||||
|             ): | ||||
|                 assert not sent | ||||
|                 await trio.sleep(rent_ctxc_delay) | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|                 # recv error or result from chld | ||||
|                 ctxc: ContextCancelled = await ctx.wait_for_result() | ||||
|                 assert ( | ||||
|                     ctxc is ctx.outcome | ||||
|                     and | ||||
|                     isinstance(ctxc, ContextCancelled) | ||||
|                 ) | ||||
| 
 | ||||
|             # always gracefully terminate the sub in non-error cases | ||||
|             await an.cancel() | ||||
| 
 | ||||
|     if expect_exc_str: | ||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( | ||||
|             type_name=expect_exc_str, | ||||
|         ) | ||||
|         with pytest.raises( | ||||
|             expected_exception=tractor.RemoteActorError, | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         rae = excinfo.value | ||||
|         assert expect_exc == rae.boxed_type | ||||
| 
 | ||||
|     else: | ||||
|         trio.run(main) | ||||
|  | @ -1,6 +1,5 @@ | |||
| ''' | ||||
| Suites for our `.trionics.maybe_open_context()` multi-task | ||||
| shared-cached `@acm` API. | ||||
| Async context manager cache api testing: ``trionics.maybe_open_context()`` | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
|  | @ -10,15 +9,6 @@ from typing import Awaitable | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.trionics import ( | ||||
|     maybe_open_context, | ||||
| ) | ||||
| from tractor.log import ( | ||||
|     get_console_log, | ||||
|     get_logger, | ||||
| ) | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| _resource: int = 0 | ||||
|  | @ -62,7 +52,7 @@ def test_resource_only_entered_once(key_on): | |||
|                 # different task names per task will be used | ||||
|                 kwargs = {'task_name': name} | ||||
| 
 | ||||
|             async with maybe_open_context( | ||||
|             async with tractor.trionics.maybe_open_context( | ||||
|                 maybe_increment_counter, | ||||
|                 kwargs=kwargs, | ||||
|                 key=key, | ||||
|  | @ -82,13 +72,11 @@ def test_resource_only_entered_once(key_on): | |||
|         with trio.move_on_after(0.5): | ||||
|             async with ( | ||||
|                 tractor.open_root_actor(), | ||||
|                 trio.open_nursery() as tn, | ||||
|                 trio.open_nursery() as n, | ||||
|             ): | ||||
| 
 | ||||
|                 for i in range(10): | ||||
|                     tn.start_soon( | ||||
|                         enter_cached_mngr, | ||||
|                         f'task_{i}', | ||||
|                     ) | ||||
|                     n.start_soon(enter_cached_mngr, f'task_{i}') | ||||
|                     await trio.sleep(0.001) | ||||
| 
 | ||||
|     trio.run(main) | ||||
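For reference, the caching contract exercised above in its simplest form; a sketch (with a hypothetical `costly_resource()` allocator) where only the first entrant runs the allocator and every peer gets a cache hit:

    from contextlib import asynccontextmanager as acm
    import trio
    import tractor

    @acm
    async def costly_resource():
        print('allocating (should print exactly once)')
        yield object()

    async def enter(name: str):
        async with tractor.trionics.maybe_open_context(
            acm_func=costly_resource,
        ) as (cache_hit, res):
            print(f'{name}: cache_hit={cache_hit}')
            await trio.sleep(0.1)  # hold the resource open a bit

    async def main():
        async with (
            tractor.open_root_actor(),
            trio.open_nursery() as tn,
        ):
            for i in range(3):
                tn.start_soon(enter, f'task_{i}')

    trio.run(main)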
|  | @ -110,55 +98,27 @@ async def streamer( | |||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_stream() -> Awaitable[ | ||||
|     tuple[ | ||||
|         tractor.ActorNursery, | ||||
|         tractor.MsgStream, | ||||
|     ] | ||||
| ]: | ||||
|     try: | ||||
|         async with tractor.open_nursery() as an: | ||||
|             portal = await an.start_actor( | ||||
|                 'streamer', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             try: | ||||
|                 async with ( | ||||
|                     portal.open_context(streamer) as (ctx, first), | ||||
|                     ctx.open_stream() as stream, | ||||
|                 ): | ||||
|                     print('Entered open_stream() caller') | ||||
|                     yield an, stream | ||||
|                     print('Exited open_stream() caller') | ||||
| async def open_stream() -> Awaitable[tractor.MsgStream]: | ||||
| 
 | ||||
|             finally: | ||||
|                 print( | ||||
|                     'Cancelling streamer with,\n' | ||||
|                     '=> `Portal.cancel_actor()`' | ||||
|                 ) | ||||
|                 await portal.cancel_actor() | ||||
|                 print('Cancelled streamer') | ||||
|     async with tractor.open_nursery() as tn: | ||||
|         portal = await tn.start_actor('streamer', enable_modules=[__name__]) | ||||
|         async with ( | ||||
|             portal.open_context(streamer) as (ctx, first), | ||||
|             ctx.open_stream() as stream, | ||||
|         ): | ||||
|             yield stream | ||||
| 
 | ||||
|     except Exception as err: | ||||
|         print( | ||||
|             f'`open_stream()` errored?\n' | ||||
|             f'{err!r}\n' | ||||
|         ) | ||||
|         await tractor.pause(shield=True) | ||||
|         raise err | ||||
|         await portal.cancel_actor() | ||||
|     print('CANCELLED STREAMER') | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_stream(taskname: str): | ||||
|     async with maybe_open_context( | ||||
|     async with tractor.trionics.maybe_open_context( | ||||
|         # NOTE: all secondary tasks should cache hit on the same key | ||||
|         acm_func=open_stream, | ||||
|     ) as ( | ||||
|         cache_hit, | ||||
|         (an, stream) | ||||
|     ): | ||||
|         # when the actor + portal + ctx + stream has already been | ||||
|         # allocated we want to just bcast to this task. | ||||
|     ) as (cache_hit, stream): | ||||
| 
 | ||||
|         if cache_hit: | ||||
|             print(f'{taskname} loaded from cache') | ||||
| 
 | ||||
|  | @ -166,77 +126,27 @@ async def maybe_open_stream(taskname: str): | |||
|             # if this feed is already allocated by the first | ||||
|             # task that entered | ||||
|             async with stream.subscribe() as bstream: | ||||
|                 yield an, bstream | ||||
|                 print( | ||||
|                     f'cached task exited\n' | ||||
|                     f')>\n' | ||||
|                     f' |_{taskname}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             # we should always unreg the "cloned" bcrc for this | ||||
|             # consumer-task | ||||
|             assert id(bstream) not in bstream._state.subs | ||||
| 
 | ||||
|                 yield bstream | ||||
|         else: | ||||
|             # yield the actual stream | ||||
|             try: | ||||
|                 yield an, stream | ||||
|             finally: | ||||
|                 print( | ||||
|                     f'NON-cached task exited\n' | ||||
|                     f')>\n' | ||||
|                     f' |_{taskname}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         first_bstream = stream._broadcaster | ||||
|         bcrx_state = first_bstream._state | ||||
|         subs: dict[int, int] = bcrx_state.subs | ||||
|         if len(subs) == 1: | ||||
|             assert id(first_bstream) in subs | ||||
|             # ^^TODO! the bcrx should always de-allocate all subs, | ||||
|             # including the implicit first one allocated on entry | ||||
|             # by the first subscribing peer task, no? | ||||
|             # | ||||
|             # -[ ] adjust `MsgStream.subscribe()` to do this mgmt! | ||||
|             #  |_ allows reverting `MsgStream.receive()` to the | ||||
|             #    non-bcaster method. | ||||
|             #  |_ we can decide whether to reset `._broadcaster`? | ||||
|             # | ||||
|             # await tractor.pause(shield=True) | ||||
|             yield stream | ||||
| 
 | ||||
| 
 | ||||
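The branch on `cache_hit` above encodes the fan-out contract: the first entrant owns the real `MsgStream`, every later one attaches a broadcast clone. Condensed into a hypothetical `consume()` helper using only the API shown here:

    async def consume(cache_hit: bool, stream: tractor.MsgStream):
        if cache_hit:
            # secondary task: attach a cloned broadcast-receiver so
            # every consumer sees each msg without stealing from peers.
            async with stream.subscribe() as bstream:
                async for msg in bstream:
                    print(f'bcast rx: {msg}')
        else:
            # first entrant: read the underlying stream directly.
            async for msg in stream:
                print(f'direct rx: {msg}')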
| def test_open_local_sub_to_stream( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_open_local_sub_to_stream(): | ||||
|     ''' | ||||
|     Verify a single inter-actor stream can be fanned-out and shared to | ||||
|     N local tasks using `trionics.maybe_open_context()`. | ||||
|     N local tasks using ``trionics.maybe_open_context()``. | ||||
| 
 | ||||
|     ''' | ||||
|     timeout: float = 3.6 | ||||
|     if platform.system() == "Windows": | ||||
|         timeout: float = 10 | ||||
| 
 | ||||
|     if debug_mode: | ||||
|         timeout = 999 | ||||
|         print(f'IN debug_mode, setting large timeout={timeout!r}..') | ||||
|     timeout: float = 3.6 if platform.system() != "Windows" else 10 | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         full = list(range(1000)) | ||||
|         an: tractor.ActorNursery|None = None | ||||
|         num_tasks: int = 10 | ||||
| 
 | ||||
|         async def get_sub_and_pull(taskname: str): | ||||
| 
 | ||||
|             nonlocal an | ||||
| 
 | ||||
|             stream: tractor.MsgStream | ||||
|             async with ( | ||||
|                 maybe_open_stream(taskname) as ( | ||||
|                     an, | ||||
|                     stream, | ||||
|                 ), | ||||
|                 maybe_open_stream(taskname) as stream, | ||||
|             ): | ||||
|                 if '0' in taskname: | ||||
|                     assert isinstance(stream, tractor.MsgStream) | ||||
|  | @ -248,159 +158,24 @@ def test_open_local_sub_to_stream( | |||
| 
 | ||||
|                 first = await stream.receive() | ||||
|                 print(f'{taskname} started with value {first}') | ||||
|                 seq: list[int] = [] | ||||
|                 seq = [] | ||||
|                 async for msg in stream: | ||||
|                     seq.append(msg) | ||||
| 
 | ||||
|                 assert set(seq).issubset(set(full)) | ||||
| 
 | ||||
|             # end of @acm block | ||||
|             print(f'{taskname} finished') | ||||
| 
 | ||||
|         root: tractor.Actor | ||||
|         with trio.fail_after(timeout) as cs: | ||||
|         with trio.fail_after(timeout): | ||||
|             # TODO: turns out this isn't multi-task entrant XD | ||||
|             # We probably need an idempotent entry semantic? | ||||
|             async with tractor.open_root_actor( | ||||
|                 debug_mode=debug_mode, | ||||
|                 # maybe_enable_greenback=True, | ||||
|                 # | ||||
|                 # ^TODO? doesn't seem to mk breakpoint() usage work | ||||
|                 # bc each bg task needs to open a portal?? | ||||
|                 # - [ ] we should consider making this part of | ||||
|                 #      our taskman defaults? | ||||
|                 #   |_see https://github.com/goodboy/tractor/pull/363 | ||||
|                 # | ||||
|             ) as root: | ||||
|                 assert root.is_registrar | ||||
| 
 | ||||
|             async with tractor.open_root_actor(): | ||||
|                 async with ( | ||||
|                     trio.open_nursery() as tn, | ||||
|                     trio.open_nursery() as nurse, | ||||
|                 ): | ||||
|                     for i in range(num_tasks): | ||||
|                         tn.start_soon( | ||||
|                             get_sub_and_pull, | ||||
|                             f'task_{i}', | ||||
|                         ) | ||||
|                     for i in range(10): | ||||
|                         nurse.start_soon(get_sub_and_pull, f'task_{i}') | ||||
|                         await trio.sleep(0.001) | ||||
| 
 | ||||
|                 print('all consumer tasks finished!') | ||||
| 
 | ||||
|                 # ?XXX, ensure actor-nursery is shutdown or we might | ||||
|                 # hang here due to a minor task deadlock/race-condition? | ||||
|                 # | ||||
|                 # - seems that all we need is a checkpoint to ensure | ||||
|                 #   the last suspended task, which is inside | ||||
|                 #   `.maybe_open_context()`, can do the | ||||
|                 #   `Portal.cancel_actor()` call? | ||||
|                 # | ||||
|                 # - if that bg task isn't resumed, then this block's | ||||
|                 #   timeout might hit before that? | ||||
|                 # | ||||
|                 if root.ipc_server.has_peers(): | ||||
|                     await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|                     # alt approach, cancel the entire `an` | ||||
|                     # await tractor.pause() | ||||
|                     # await an.cancel() | ||||
| 
 | ||||
|             # end of runtime scope | ||||
|             print('root actor terminated.') | ||||
| 
 | ||||
|         if cs.cancelled_caught: | ||||
|             pytest.fail( | ||||
|                 'Should NOT time out in `open_root_actor()` ?' | ||||
|             ) | ||||
| 
 | ||||
|         print('exiting main.') | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def cancel_outer_cs( | ||||
|     cs: trio.CancelScope|None = None, | ||||
|     delay: float = 0, | ||||
| ): | ||||
|     # on the first task, delay long enough to block | ||||
|     # the 2nd task, then cancel it mid-sleep so that | ||||
|     # the tn.start() inside the key-err handler block | ||||
|     # is cancelled and would previously corrupt the | ||||
|     # mutex state. | ||||
|     log.info(f'task entering sleep({delay})') | ||||
|     await trio.sleep(delay) | ||||
|     if cs: | ||||
|         log.info('task calling cs.cancel()') | ||||
|         cs.cancel() | ||||
|     trio.lowlevel.checkpoint() | ||||
|     yield | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| def test_lock_not_corrupted_on_fast_cancel( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that if the caching-task (the first to enter | ||||
|     `maybe_open_context()`) is cancelled mid-cache-miss, the embedded | ||||
|     mutex can never be left in a corrupted state. | ||||
| 
 | ||||
|     That is, the lock is always eventually released ensuring a peer | ||||
|     (cache-hitting) task will never, | ||||
| 
 | ||||
|     - be left to inf-block/hang on the `lock.acquire()`. | ||||
|     - try to release the lock when still owned by the caching-task | ||||
|       due to it having erroneously exited without calling | ||||
|       `lock.release()`. | ||||
| 
 | ||||
| 
 | ||||
|     ''' | ||||
|     delay: float = 1. | ||||
| 
 | ||||
|     async def use_moc( | ||||
|         cs: trio.CancelScope|None, | ||||
|         delay: float, | ||||
|     ): | ||||
|         log.info('task entering moc') | ||||
|         async with maybe_open_context( | ||||
|             cancel_outer_cs, | ||||
|             kwargs={ | ||||
|                 'cs': cs, | ||||
|                 'delay': delay, | ||||
|             }, | ||||
|         ) as (cache_hit, _null): | ||||
|             if cache_hit: | ||||
|                 log.info('2nd task entered') | ||||
|             else: | ||||
|                 log.info('1st task entered') | ||||
| 
 | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|     async def main(): | ||||
|         with trio.fail_after(delay + 2): | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 get_console_log('info') | ||||
|                 log.info('yo starting') | ||||
|                 cs = tn.cancel_scope | ||||
|                 tn.start_soon( | ||||
|                     use_moc, | ||||
|                     cs, | ||||
|                     delay, | ||||
|                     name='child', | ||||
|                 ) | ||||
|                 with trio.CancelScope() as rent_cs: | ||||
|                     await use_moc( | ||||
|                         cs=rent_cs, | ||||
|                         delay=delay, | ||||
|                     ) | ||||
| 
 | ||||
|                 print('all consumer tasks finished') | ||||
| 
 | ||||
|     trio.run(main) | ||||
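The invariant under test, stated in plain `trio` with a hypothetical cache-fill fn: a cancellation landing anywhere inside the critical section must still release the mutex, else peers hang in `.acquire()` or hit a mis-owned `.release()`:

    import trio

    async def fill_once(lock: trio.Lock, cache: dict):
        await lock.acquire()
        try:
            if 'resource' not in cache:
                # a `trio.Cancelled` raised here must NOT leave the
                # lock held by a task that never exits this block.
                await trio.sleep(1)  # simulated costly allocation
                cache['resource'] = object()
        finally:
            lock.release()  # always runs, even on cancellation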
|  |  | |||
|  | @ -1,211 +0,0 @@ | |||
| import time | ||||
| 
 | ||||
| import trio | ||||
| import pytest | ||||
| 
 | ||||
| import tractor | ||||
| from tractor.ipc._ringbuf import ( | ||||
|     open_ringbuf, | ||||
|     RBToken, | ||||
|     RingBuffSender, | ||||
|     RingBuffReceiver | ||||
| ) | ||||
| from tractor._testing.samples import ( | ||||
|     generate_sample_messages, | ||||
| ) | ||||
| 
 | ||||
| # in case you don't want to melt your cores, keep dis skip enabled! | ||||
| pytestmark = pytest.mark.skip | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_read_shm( | ||||
|     ctx: tractor.Context, | ||||
|     msg_amount: int, | ||||
|     token: RBToken, | ||||
|     total_bytes: int, | ||||
| ) -> None: | ||||
|     recvd_bytes = 0 | ||||
|     await ctx.started() | ||||
|     start_ts = time.time() | ||||
|     async with RingBuffReceiver(token) as receiver: | ||||
|         while recvd_bytes < total_bytes: | ||||
|             msg = await receiver.receive_some() | ||||
|             recvd_bytes += len(msg) | ||||
| 
 | ||||
|         # make sure we don't hold any memoryviews | ||||
|         # before the ctx manager aclose() | ||||
|         msg = None | ||||
| 
 | ||||
|     end_ts = time.time() | ||||
|     elapsed = end_ts - start_ts | ||||
|     elapsed_ms = int(elapsed * 1000) | ||||
| 
 | ||||
|     print(f'\n\telapsed ms: {elapsed_ms}') | ||||
|     print(f'\tmsg/sec: {int(msg_amount / elapsed):,}') | ||||
|     print(f'\tbytes/sec: {int(recvd_bytes / elapsed):,}') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_write_shm( | ||||
|     ctx: tractor.Context, | ||||
|     msg_amount: int, | ||||
|     rand_min: int, | ||||
|     rand_max: int, | ||||
|     token: RBToken, | ||||
| ) -> None: | ||||
|     msgs, total_bytes = generate_sample_messages( | ||||
|         msg_amount, | ||||
|         rand_min=rand_min, | ||||
|         rand_max=rand_max, | ||||
|     ) | ||||
|     await ctx.started(total_bytes) | ||||
|     async with RingBuffSender(token) as sender: | ||||
|         for msg in msgs: | ||||
|             await sender.send_all(msg) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'msg_amount,rand_min,rand_max,buf_size', | ||||
|     [ | ||||
|         # simple case, fixed payloads, large buffer | ||||
|         (100_000, 0, 0, 10 * 1024), | ||||
| 
 | ||||
|         # guaranteed wrap around on every write | ||||
|         (100, 10 * 1024, 20 * 1024, 10 * 1024), | ||||
| 
 | ||||
|         # large payload size, but large buffer | ||||
|         (10_000, 256 * 1024, 512 * 1024, 10 * 1024 * 1024) | ||||
|     ], | ||||
|     ids=[ | ||||
|         'fixed_payloads_large_buffer', | ||||
|         'wrap_around_every_write', | ||||
|         'large_payloads_large_buffer', | ||||
|     ] | ||||
| ) | ||||
| def test_ringbuf( | ||||
|     msg_amount: int, | ||||
|     rand_min: int, | ||||
|     rand_max: int, | ||||
|     buf_size: int | ||||
| ): | ||||
|     async def main(): | ||||
|         with open_ringbuf( | ||||
|             'test_ringbuf', | ||||
|             buf_size=buf_size | ||||
|         ) as token: | ||||
|             proc_kwargs = { | ||||
|                 'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|             } | ||||
| 
 | ||||
|             common_kwargs = { | ||||
|                 'msg_amount': msg_amount, | ||||
|                 'token': token, | ||||
|             } | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 send_p = await an.start_actor( | ||||
|                     'ring_sender', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs=proc_kwargs | ||||
|                 ) | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_receiver', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs=proc_kwargs | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     send_p.open_context( | ||||
|                         child_write_shm, | ||||
|                         rand_min=rand_min, | ||||
|                         rand_max=rand_max, | ||||
|                         **common_kwargs | ||||
|                     ) as (sctx, total_bytes), | ||||
|                     recv_p.open_context( | ||||
|                         child_read_shm, | ||||
|                         **common_kwargs, | ||||
|                         total_bytes=total_bytes, | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await recv_p.result() | ||||
| 
 | ||||
|                 await send_p.cancel_actor() | ||||
|                 await recv_p.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
|     trio.run(main) | ||||
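For orientation, a minimal single-process sketch of the ring-buffer API removed here. The cancel suites below attach a sender and receiver to the same `RBToken` across processes; attaching both endpoints in one process is an assumption of this sketch:

    import trio
    from tractor.ipc._ringbuf import (
        open_ringbuf,
        RingBuffSender,
        RingBuffReceiver,
    )

    async def main():
        with open_ringbuf('demo_ring', buf_size=4096) as token:
            async with (
                RingBuffSender(token) as tx,
                RingBuffReceiver(token) as rx,
            ):
                # payload is well under `buf_size` so no wrap-wait
                await tx.send_all(b'ping')
                msg = await rx.receive_some()
                assert bytes(msg) == b'ping'

    trio.run(main)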
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_blocked_receiver( | ||||
|     ctx: tractor.Context, | ||||
|     token: RBToken | ||||
| ): | ||||
|     async with RingBuffReceiver(token) as receiver: | ||||
|         await ctx.started() | ||||
|         await receiver.receive_some() | ||||
| 
 | ||||
| 
 | ||||
| def test_ring_reader_cancel(): | ||||
|     async def main(): | ||||
|         with open_ringbuf('test_ring_cancel_reader') as token: | ||||
|             async with ( | ||||
|                 tractor.open_nursery() as an, | ||||
|                 RingBuffSender(token) as _sender, | ||||
|             ): | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_blocked_receiver', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs={ | ||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|                     } | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     recv_p.open_context( | ||||
|                         child_blocked_receiver, | ||||
|                         token=token | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await trio.sleep(1) | ||||
|                     await an.cancel() | ||||
| 
 | ||||
| 
 | ||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_blocked_sender( | ||||
|     ctx: tractor.Context, | ||||
|     token: RBToken | ||||
| ): | ||||
|     async with RingBuffSender(token) as sender: | ||||
|         await ctx.started() | ||||
|         await sender.send_all(b'this will wrap') | ||||
| 
 | ||||
| 
 | ||||
| def test_ring_sender_cancel(): | ||||
|     async def main(): | ||||
|         with open_ringbuf( | ||||
|             'test_ring_cancel_sender', | ||||
|             buf_size=1 | ||||
|         ) as token: | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_blocked_sender', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs={ | ||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|                     } | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     recv_p.open_context( | ||||
|                         child_blocked_sender, | ||||
|                         token=token | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await trio.sleep(1) | ||||
|                     await an.cancel() | ||||
| 
 | ||||
| 
 | ||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): | ||||
|         trio.run(main) | ||||
|  | @ -1,240 +0,0 @@ | |||
| ''' | ||||
| Special attention cases for using "infect `asyncio`" mode from a root | ||||
| actor; i.e. not using a std `trio.run()` bootstrap. | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| from functools import partial | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     to_asyncio, | ||||
| ) | ||||
| from tests.test_infected_asyncio import ( | ||||
|     aio_echo_server, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_error_mid_stream', | ||||
|     [ | ||||
|         False, | ||||
|         Exception, | ||||
|         KeyboardInterrupt, | ||||
|     ], | ||||
|     ids='raise_error={}'.format, | ||||
| ) | ||||
| def test_infected_root_actor( | ||||
|     raise_error_mid_stream: bool|Exception, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify you can run the `tractor` runtime with `Actor.is_infected_aio() == True` | ||||
|     in the root actor. | ||||
| 
 | ||||
|     ''' | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2 if not debug_mode else 999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|                 to_asyncio.open_channel_from( | ||||
|                     aio_echo_server, | ||||
|                 ) as (first, chan), | ||||
|             ): | ||||
|                 assert first == 'start' | ||||
| 
 | ||||
|                 for i in range(1000): | ||||
|                     await chan.send(i) | ||||
|                     out = await chan.receive() | ||||
|                     assert out == i | ||||
|                     print(f'asyncio echoing {i}') | ||||
| 
 | ||||
|                     if ( | ||||
|                         raise_error_mid_stream | ||||
|                         and | ||||
|                         i == 500 | ||||
|                     ): | ||||
|                         raise raise_error_mid_stream | ||||
| 
 | ||||
|                     if out is None: | ||||
|                         try: | ||||
|                             out = await chan.receive() | ||||
|                         except trio.EndOfChannel: | ||||
|                             break | ||||
|                         else: | ||||
|                             raise RuntimeError( | ||||
|                                 'aio channel never stopped?' | ||||
|                             ) | ||||
| 
 | ||||
|     if raise_error_mid_stream: | ||||
|         with pytest.raises(raise_error_mid_stream): | ||||
|             tractor.to_asyncio.run_as_asyncio_guest( | ||||
|                 trio_main=_trio_main, | ||||
|             ) | ||||
|     else: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
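The `aio_echo_server` imported above isn't reproduced in this diff; a hypothetical compatible sketch, given the `to_asyncio.open_channel_from()` calling convention visible in `sync_and_err()` just below (the first value pushed becomes `first` on the `trio` side):

    import asyncio
    import trio

    async def aio_echo_server(
        to_trio: trio.MemorySendChannel,
        from_trio: asyncio.Queue,
    ) -> None:
        # handshake value delivered as `first` to the trio side
        to_trio.send_nowait('start')
        while True:
            msg = await from_trio.get()
            # echo each value back to the `trio` parent task
            to_trio.send_nowait(msg)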
| 
 | ||||
| 
 | ||||
| async def sync_and_err( | ||||
|     # just signature placeholders for compat with | ||||
|     # ``to_asyncio.open_channel_from()`` | ||||
|     to_trio: trio.MemorySendChannel, | ||||
|     from_trio: asyncio.Queue, | ||||
|     ev: asyncio.Event, | ||||
| 
 | ||||
| ): | ||||
|     if to_trio: | ||||
|         to_trio.send_nowait('start') | ||||
| 
 | ||||
|     await ev.wait() | ||||
|     raise RuntimeError('asyncio-side') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'aio_err_trigger', | ||||
|     [ | ||||
|         'before_start_point', | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ], | ||||
|     ids='aio_err_triggered={}'.format | ||||
| ) | ||||
| def test_trio_prestarted_task_bubbles( | ||||
|     aio_err_trigger: str, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def pre_started_err( | ||||
|         raise_err: bool = False, | ||||
|         pre_sleep: float|None = None, | ||||
|         aio_trigger: asyncio.Event|None = None, | ||||
|         task_status=trio.TASK_STATUS_IGNORED, | ||||
|     ): | ||||
|         ''' | ||||
|         Maybe raise an error before `.started()`, then sleep. | ||||
| 
 | ||||
|         ''' | ||||
|         if pre_sleep is not None: | ||||
|             print(f'Sleeping from trio for {pre_sleep!r}s !') | ||||
|             await trio.sleep(pre_sleep) | ||||
| 
 | ||||
|         # signal aio-task to raise JUST AFTER this task | ||||
|         # starts but has not yet `.started()` | ||||
|         if aio_trigger: | ||||
|             print('Signalling aio-task to raise from `trio`!!') | ||||
|             aio_trigger.set() | ||||
| 
 | ||||
|         if raise_err: | ||||
|             print('Raising from trio!') | ||||
|             raise TypeError('trio-side') | ||||
| 
 | ||||
|         task_status.started() | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2 if not debug_mode else 999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             aio_ev = asyncio.Event() | ||||
| 
 | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=False, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|             ): | ||||
|                 # TODO, tests for this with 3.13 egs? | ||||
|                 # from tractor.devx import open_crash_handler | ||||
|                 # with open_crash_handler(): | ||||
|                 async with ( | ||||
|                     # where we'll start a sub-task that errors BEFORE | ||||
|                     # calling `.started()` such that the error should | ||||
|                     # bubble before the guest run terminates! | ||||
|                     trio.open_nursery() as tn, | ||||
| 
 | ||||
|                     # THEN start an infect task which should error just | ||||
|                     # after the trio-side's task does. | ||||
|                     to_asyncio.open_channel_from( | ||||
|                         partial( | ||||
|                             sync_and_err, | ||||
|                             ev=aio_ev, | ||||
|                         ) | ||||
|                     ) as (first, chan), | ||||
|                 ): | ||||
| 
 | ||||
|                     for i in range(5): | ||||
|                         pre_sleep: float|None = None | ||||
|                         last_iter: bool = (i == 4) | ||||
| 
 | ||||
|                         # TODO, missing cases? | ||||
|                         # -[ ] error as well on | ||||
|                         #    'after_start_point' case as well for | ||||
|                         #    another case? | ||||
|                         raise_err: bool = False | ||||
| 
 | ||||
|                         if last_iter: | ||||
|                             raise_err: bool = True | ||||
| 
 | ||||
|                             # trigger aio task to error on next loop | ||||
|                             # tick/checkpoint | ||||
|                             if aio_err_trigger == 'before_start_point': | ||||
|                                 aio_ev.set() | ||||
| 
 | ||||
|                             pre_sleep: float = 0 | ||||
| 
 | ||||
|                         await tn.start( | ||||
|                             pre_started_err, | ||||
|                             raise_err, | ||||
|                             pre_sleep, | ||||
|                             (aio_ev if ( | ||||
|                                     aio_err_trigger == 'after_trio_task_starts' | ||||
|                                     and | ||||
|                                     last_iter | ||||
|                                 ) else None | ||||
|                             ), | ||||
|                         ) | ||||
| 
 | ||||
|                         if ( | ||||
|                             aio_err_trigger == 'after_start_point' | ||||
|                             and | ||||
|                             last_iter | ||||
|                         ): | ||||
|                             aio_ev.set() | ||||
| 
 | ||||
|     # ensure the trio-task's error bubbled despite the aio-side | ||||
|     # having (maybe) errored first. | ||||
|     if aio_err_trigger in ( | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ): | ||||
|         patt: str = 'trio-side' | ||||
|         expect_exc = TypeError | ||||
| 
 | ||||
|     # when aio errors BEFORE (last) trio task is scheduled, we should | ||||
|     # never see anything but the aio-side. | ||||
|     else: | ||||
|         patt: str = 'asyncio-side' | ||||
|         expect_exc = RuntimeError | ||||
| 
 | ||||
|     with pytest.raises(expect_exc) as excinfo: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
|     caught_exc = excinfo.value | ||||
|     assert patt in caught_exc.args | ||||
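The pre-`.started()` window exploited above comes straight from `trio`'s `Nursery.start()` protocol; a minimal reminder sketch of that protocol:

    import trio

    async def server(task_status=trio.TASK_STATUS_IGNORED):
        # errors raised HERE (before `.started()`) propagate out
        # of the parent's `await tn.start(...)` call directly.
        await trio.sleep(0.1)  # setup work
        task_status.started('ready')
        await trio.sleep_forever()

    async def main():
        async with trio.open_nursery() as tn:
            first = await tn.start(server)
            assert first == 'ready'
            tn.cancel_scope.cancel()

    trio.run(main)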
|  | @ -1,108 +0,0 @@ | |||
| ''' | ||||
| Runtime boot/init sanity. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor._exceptions import RuntimeFailure | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_new_root_in_sub( | ||||
|     ctx: tractor.Context, | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with tractor.open_root_actor(): | ||||
|         pass | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'open_root_in', | ||||
|     ['root', 'sub'], | ||||
|     ids='open_2nd_root_in={}'.format, | ||||
| ) | ||||
| def test_only_one_root_actor( | ||||
|     open_root_in: str, | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     Verify we specially fail whenever more than one root actor | ||||
|     is attempted to be opened within an already-opened tree. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
| 
 | ||||
|             if open_root_in == 'root': | ||||
|                 async with tractor.open_root_actor( | ||||
|                     registry_addrs=[reg_addr], | ||||
|                 ): | ||||
|                     pass | ||||
| 
 | ||||
|             ptl: tractor.Portal = await an.start_actor( | ||||
|                 name='bad_rooty_boi', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             async with ptl.open_context( | ||||
|                 open_new_root_in_sub, | ||||
|             ) as (ctx, first): | ||||
|                 pass | ||||
| 
 | ||||
|     if open_root_in == 'root': | ||||
|         with pytest.raises( | ||||
|             RuntimeFailure | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|     else: | ||||
|         with pytest.raises( | ||||
|             tractor.RemoteActorError, | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type is RuntimeFailure | ||||
| 
 | ||||
| 
 | ||||
| def test_implicit_root_via_first_nursery( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     The first `ActorNursery` open should implicitly call | ||||
|     `_root.open_root_actor()`. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
|             assert an._implicit_runtime_started | ||||
|             assert tractor.current_actor().aid.name == 'root' | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_runtime_vars_unset( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     Ensure any `._state._runtime_vars` are restored to default values | ||||
|     after the root actor-runtime exits! | ||||
| 
 | ||||
|     ''' | ||||
|     assert not tractor._state._runtime_vars['_debug_mode'] | ||||
|     async def main(): | ||||
|         assert not tractor._state._runtime_vars['_debug_mode'] | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|         ): | ||||
|             assert tractor._state._runtime_vars['_debug_mode'] | ||||
| 
 | ||||
|         # after runtime closure, should be reverted! | ||||
|         assert not tractor._state._runtime_vars['_debug_mode'] | ||||
| 
 | ||||
|     trio.run(main) | ||||
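Condensed, the invariant these suites enforce (assuming, per the 'root' param case above, that a nested `open_root_actor()` raises `RuntimeFailure` on entry):

    import trio
    import tractor
    from tractor._exceptions import RuntimeFailure

    async def main():
        async with tractor.open_root_actor():
            try:
                async with tractor.open_root_actor():
                    pass
            except RuntimeFailure:
                print('2nd root refused, as expected')

    trio.run(main)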
|  | @ -36,7 +36,7 @@ async def sleep_back_actor( | |||
|                 if not exposed_mods: | ||||
|                     expect = tractor.ModuleNotExposed | ||||
| 
 | ||||
|                 assert err.boxed_type is expect | ||||
|                 assert err.type is expect | ||||
|                 raise | ||||
|     else: | ||||
|         await trio.sleep(float('inf')) | ||||
|  | @ -150,4 +150,4 @@ def test_rpc_errors( | |||
|             )) | ||||
| 
 | ||||
|         if getattr(value, 'type', None): | ||||
|             assert value.boxed_type is inside_err | ||||
|             assert value.type is inside_err | ||||
|  |  | |||
|  | @ -64,8 +64,7 @@ async def test_lifetime_stack_wipes_tmpfile( | |||
| 
 | ||||
|     except ( | ||||
|         tractor.RemoteActorError, | ||||
|         # tractor.BaseExceptionGroup, | ||||
|         BaseExceptionGroup, | ||||
|         tractor.BaseExceptionGroup, | ||||
|     ): | ||||
|         pass | ||||
| 
 | ||||
|  |  | |||
|  | @ -8,7 +8,7 @@ import uuid | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.ipc._shm import ( | ||||
| from tractor._shm import ( | ||||
|     open_shm_list, | ||||
|     attach_shm_list, | ||||
| ) | ||||
|  |  | |||
|  | @ -2,10 +2,7 @@ | |||
| Spawning basics | ||||
| 
 | ||||
| """ | ||||
| from functools import partial | ||||
| from typing import ( | ||||
|     Any, | ||||
| ) | ||||
| from typing import Optional | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
|  | @ -13,99 +10,76 @@ import tractor | |||
| 
 | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| data_to_pass_down = { | ||||
|     'doggy': 10, | ||||
|     'kitty': 4, | ||||
| } | ||||
| data_to_pass_down = {'doggy': 10, 'kitty': 4} | ||||
| 
 | ||||
| 
 | ||||
| async def spawn( | ||||
|     should_be_root: bool, | ||||
|     is_arbiter: bool, | ||||
|     data: dict, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
|     debug_mode: bool = False, | ||||
| ): | ||||
|     namespaces = [__name__] | ||||
| 
 | ||||
|     await trio.sleep(0.1) | ||||
|     actor = tractor.current_actor(err_on_no_runtime=False) | ||||
| 
 | ||||
|     if should_be_root: | ||||
|         assert actor is None  # no runtime yet | ||||
|         async with ( | ||||
|             tractor.open_root_actor( | ||||
|                 arbiter_addr=reg_addr, | ||||
|             ), | ||||
|             tractor.open_nursery() as an, | ||||
|         ): | ||||
|             # now runtime exists | ||||
|             actor: tractor.Actor = tractor.current_actor() | ||||
|             assert actor.is_arbiter == should_be_root | ||||
|     async with tractor.open_root_actor( | ||||
|         arbiter_addr=reg_addr, | ||||
|     ): | ||||
| 
 | ||||
|             # spawns subproc here | ||||
|             portal: tractor.Portal = await an.run_in_actor( | ||||
|                 fn=spawn, | ||||
|         actor = tractor.current_actor() | ||||
|         assert actor.is_arbiter == is_arbiter | ||||
|         data = data_to_pass_down | ||||
| 
 | ||||
|                 # spawning args | ||||
|                 name='sub-actor', | ||||
|                 enable_modules=[__name__], | ||||
|         if actor.is_arbiter: | ||||
| 
 | ||||
|                 # passed to a subactor-recursive RPC invoke | ||||
|                 # of this same `spawn()` fn. | ||||
|                 should_be_root=False, | ||||
|                 data=data_to_pass_down, | ||||
|                 reg_addr=reg_addr, | ||||
|             ) | ||||
|             async with tractor.open_nursery() as nursery: | ||||
| 
 | ||||
|             assert len(an._children) == 1 | ||||
|             assert ( | ||||
|                 portal.channel.uid | ||||
|                 in | ||||
|                 tractor.current_actor().ipc_server._peers | ||||
|             ) | ||||
|                 # forks here | ||||
|                 portal = await nursery.run_in_actor( | ||||
|                     spawn, | ||||
|                     is_arbiter=False, | ||||
|                     name='sub-actor', | ||||
|                     data=data, | ||||
|                     reg_addr=reg_addr, | ||||
|                     enable_modules=namespaces, | ||||
|                 ) | ||||
| 
 | ||||
|             # get result from child subactor | ||||
|             result = await portal.result() | ||||
|             assert result == 10 | ||||
|             return result | ||||
|     else: | ||||
|         assert actor.is_arbiter == should_be_root | ||||
|         return 10 | ||||
|                 assert len(nursery._children) == 1 | ||||
|                 assert portal.channel.uid in tractor.current_actor()._peers | ||||
|                 # be sure we can still get the result | ||||
|                 result = await portal.result() | ||||
|                 assert result == 10 | ||||
|                 return result | ||||
|         else: | ||||
|             return 10 | ||||
| 
 | ||||
| 
 | ||||
| def test_run_in_actor_same_func_in_child( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool, | ||||
| def test_local_arbiter_subactor_global_state( | ||||
|     reg_addr, | ||||
| ): | ||||
|     result = trio.run( | ||||
|         partial( | ||||
|             spawn, | ||||
|             should_be_root=True, | ||||
|             data=data_to_pass_down, | ||||
|             reg_addr=reg_addr, | ||||
|             debug_mode=debug_mode, | ||||
|         ) | ||||
|         spawn, | ||||
|         True, | ||||
|         data_to_pass_down, | ||||
|         reg_addr, | ||||
|     ) | ||||
|     assert result == 10 | ||||
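One reason for the `functools.partial` in the rewritten test: `trio.run()` forwards only positional args, so kwargs must be pre-bound. The general pattern:

    from functools import partial
    import trio

    async def main(x: int, *, scale: float = 1.0) -> float:
        await trio.sleep(0)
        return x * scale

    # `trio.run()` only passes positionals; pre-bind kwargs instead.
    result = trio.run(partial(main, 10, scale=2.5))
    assert result == 25.0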
| 
 | ||||
| 
 | ||||
| async def movie_theatre_question(): | ||||
|     ''' | ||||
|     A question asked in a dark theatre, in a tangent | ||||
|     """A question asked in a dark theatre, in a tangent | ||||
|     (errr, I mean different) process. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     return 'have you ever seen a portal?' | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_movie_theatre_convo(start_method): | ||||
|     ''' | ||||
|     The main ``tractor`` routine. | ||||
|     """The main ``tractor`` routine. | ||||
|     """ | ||||
|     async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery(debug_mode=True) as an: | ||||
| 
 | ||||
|         portal = await an.start_actor( | ||||
|         portal = await n.start_actor( | ||||
|             'frank', | ||||
|             # enable the actor to run funcs from this current module | ||||
|             enable_modules=[__name__], | ||||
|  | @ -121,9 +95,7 @@ async def test_movie_theatre_convo(start_method): | |||
|         await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| async def cellar_door( | ||||
|     return_value: str|None, | ||||
| ): | ||||
| async def cellar_door(return_value: Optional[str]): | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
|  | @ -133,19 +105,17 @@ async def cellar_door( | |||
| ) | ||||
| @tractor_test | ||||
| async def test_most_beautiful_word( | ||||
|     start_method: str, | ||||
|     return_value: Any, | ||||
|     debug_mode: bool, | ||||
|     start_method, | ||||
|     return_value | ||||
| ): | ||||
|     ''' | ||||
|     The main ``tractor`` routine. | ||||
| 
 | ||||
|     ''' | ||||
|     with trio.fail_after(1): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.run_in_actor( | ||||
|         async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|             portal = await n.run_in_actor( | ||||
|                 cellar_door, | ||||
|                 return_value=return_value, | ||||
|                 name='some_linguist', | ||||
|  |  | |||
|  | @ -2,9 +2,7 @@ | |||
| Broadcast channels for fan-out to local tasks. | ||||
| 
 | ||||
| """ | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from contextlib import asynccontextmanager | ||||
| from functools import partial | ||||
| from itertools import cycle | ||||
| import time | ||||
|  | @ -17,7 +15,6 @@ import tractor | |||
| from tractor.trionics import ( | ||||
|     broadcast_receiver, | ||||
|     Lagged, | ||||
|     collapse_eg, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -65,7 +62,7 @@ async def ensure_sequence( | |||
|                 break | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| @asynccontextmanager | ||||
| async def open_sequence_streamer( | ||||
| 
 | ||||
|     sequence: list[int], | ||||
|  | @ -77,9 +74,9 @@ async def open_sequence_streamer( | |||
|     async with tractor.open_nursery( | ||||
|         arbiter_addr=reg_addr, | ||||
|         start_method=start_method, | ||||
|     ) as an: | ||||
|     ) as tn: | ||||
| 
 | ||||
|         portal = await an.start_actor( | ||||
|         portal = await tn.start_actor( | ||||
|             'sequence_echoer', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
|  | @ -158,12 +155,9 @@ def test_consumer_and_parent_maybe_lag( | |||
|         ) as stream: | ||||
| 
 | ||||
|             try: | ||||
|                 async with ( | ||||
|                     collapse_eg(), | ||||
|                     trio.open_nursery() as tn, | ||||
|                 ): | ||||
|                 async with trio.open_nursery() as n: | ||||
| 
 | ||||
|                     tn.start_soon( | ||||
|                     n.start_soon( | ||||
|                         ensure_sequence, | ||||
|                         stream, | ||||
|                         sequence.copy(), | ||||
|  | @ -236,8 +230,8 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
| 
 | ||||
|         ) as stream: | ||||
| 
 | ||||
|             async with trio.open_nursery() as tn: | ||||
|                 tn.start_soon( | ||||
|             async with trio.open_nursery() as n: | ||||
|                 n.start_soon( | ||||
|                     ensure_sequence, | ||||
|                     stream, | ||||
|                     sequence.copy(), | ||||
|  | @ -259,7 +253,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
|                         continue | ||||
| 
 | ||||
|                 print('cancelling faster subtask') | ||||
|                 tn.cancel_scope.cancel() | ||||
|                 n.cancel_scope.cancel() | ||||
| 
 | ||||
|             try: | ||||
|                 value = await stream.receive() | ||||
|  | @ -277,7 +271,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
|                         # the faster subtask was cancelled | ||||
|                         break | ||||
| 
 | ||||
|                 # await tractor.pause() | ||||
|                 # await tractor.breakpoint() | ||||
|                 # await stream.receive() | ||||
|                 print(f'final value: {value}') | ||||
| 
 | ||||
|  | @ -377,13 +371,13 @@ def test_ensure_slow_consumers_lag_out( | |||
|                                     f'on {lags}:{value}') | ||||
|                                 return | ||||
| 
 | ||||
|             async with trio.open_nursery() as tn: | ||||
|             async with trio.open_nursery() as nursery: | ||||
| 
 | ||||
|                 for i in range(1, num_laggers): | ||||
| 
 | ||||
|                     task_name = f'sub_{i}' | ||||
|                     laggers[task_name] = 0 | ||||
|                     tn.start_soon( | ||||
|                     nursery.start_soon( | ||||
|                         partial( | ||||
|                             sub_and_print, | ||||
|                             delay=i*0.001, | ||||
|  | @ -503,7 +497,6 @@ def test_no_raise_on_lag(): | |||
|                 # internals when the no raise flag is set. | ||||
|                 loglevel='warning', | ||||
|             ), | ||||
|             collapse_eg(), | ||||
|             trio.open_nursery() as n, | ||||
|         ): | ||||
|             n.start_soon(slow) | ||||
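For context on the laggard behavior poked at throughout this suite, a condensed sketch (assuming `broadcast_receiver(rx, max_buffer_size)` per the import above: a subscriber overrun by more msgs than its buffer gets `Lagged` on its next receive):

    import trio
    from tractor.trionics import broadcast_receiver, Lagged

    async def main():
        tx, rx = trio.open_memory_channel(8)
        fast = broadcast_receiver(rx, max_buffer_size=2)

        async with fast.subscribe() as slow:
            for i in range(4):
                await tx.send(i)
                await fast.receive()  # fast consumer stays current

            try:
                await slow.receive()  # overrun by > 2 msgs by now
            except Lagged:
                print('slow subscriber lagged out!')

    trio.run(main)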
|  |  | |||
|  | @ -3,21 +3,9 @@ Reminders for oddities in `trio` that we need to stay aware of and/or | |||
| want to see changed. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from types import ModuleType | ||||
| 
 | ||||
| from functools import partial | ||||
| 
 | ||||
| import pytest | ||||
| from _pytest import pathlib | ||||
| from tractor.trionics import collapse_eg | ||||
| import trio | ||||
| from trio import TaskStatus | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
| ) | ||||
| from trio_typing import TaskStatus | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -72,7 +60,6 @@ def test_stashed_child_nursery(use_start_soon): | |||
|     async def main(): | ||||
| 
 | ||||
|         async with ( | ||||
|             collapse_eg(), | ||||
|             trio.open_nursery() as pn, | ||||
|         ): | ||||
|             cn = await pn.start(mk_child_nursery) | ||||
|  | @ -93,210 +80,3 @@ def test_stashed_child_nursery(use_start_soon): | |||
| 
 | ||||
|     with pytest.raises(NameError): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     ('unmask_from_canc', 'canc_from_finally'), | ||||
|     [ | ||||
|         (True, False), | ||||
|         (True, True), | ||||
|         pytest.param(False, True, | ||||
|                      marks=pytest.mark.xfail(reason="never raises!") | ||||
|         ), | ||||
|     ], | ||||
|     # TODO, ask ronny how to impl this .. XD | ||||
|     # ids='unmask_from_canc={0}, canc_from_finally={1}',#.format, | ||||
| ) | ||||
| def test_acm_embedded_nursery_propagates_enter_err( | ||||
|     canc_from_finally: bool, | ||||
|     unmask_from_canc: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Demo how a masking `trio.Cancelled` could be handled by unmasking | ||||
|     from the `.__context__` field when a user (by accident) re-raises | ||||
|     from a `finally:`. | ||||
| 
 | ||||
|     ''' | ||||
|     import tractor | ||||
| 
 | ||||
|     @acm | ||||
|     async def wraps_tn_that_always_cancels(): | ||||
|         async with ( | ||||
|             trio.open_nursery() as tn, | ||||
|             tractor.trionics.maybe_raise_from_masking_exc( | ||||
|                 unmask_from=( | ||||
|                     (trio.Cancelled,) if unmask_from_canc | ||||
|                     else () | ||||
|                 ), | ||||
|             ) | ||||
|         ): | ||||
|             try: | ||||
|                 yield tn | ||||
|             finally: | ||||
|                 if canc_from_finally: | ||||
|                     tn.cancel_scope.cancel() | ||||
|                     await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|     async def _main(): | ||||
|         with tractor.devx.maybe_open_crash_handler( | ||||
|             pdb=debug_mode, | ||||
|         ) as bxerr: | ||||
|             assert not bxerr.value | ||||
| 
 | ||||
|             async with ( | ||||
|                 wraps_tn_that_always_cancels() as tn, | ||||
|             ): | ||||
|                 assert not tn.cancel_scope.cancel_called | ||||
|                 assert 0 | ||||
| 
 | ||||
|         if debug_mode: | ||||
|             assert ( | ||||
|                 (err := bxerr.value) | ||||
|                 and | ||||
|                 type(err) is AssertionError | ||||
|             ) | ||||
| 
 | ||||
|     with pytest.raises(ExceptionGroup) as excinfo: | ||||
|         trio.run(_main) | ||||
| 
 | ||||
|     eg: ExceptionGroup = excinfo.value | ||||
|     assert_eg, rest_eg = eg.split(AssertionError) | ||||
| 
 | ||||
|     assert len(assert_eg.exceptions) == 1 | ||||
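The same helper in isolation; a sketch (assuming, per its use above, that `maybe_raise_from_masking_exc()` re-raises a masked error found on `Cancelled.__context__`):

    import trio
    import tractor

    async def main():
        async with (
            trio.open_nursery() as tn,
            tractor.trionics.maybe_raise_from_masking_exc(
                unmask_from=(trio.Cancelled,),
            ),
        ):
            try:
                raise RuntimeError('enter err')
            finally:
                # cancel + checkpoint would normally swallow the RTE
                # behind a `trio.Cancelled`..
                tn.cancel_scope.cancel()
                await trio.lowlevel.checkpoint()

    # ..but with unmasking on, the RTE (possibly eg-wrapped by the
    # nursery) should surface instead of a bare cancel.
    trio.run(main)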
| 
 | ||||
| 
 | ||||
| def test_gatherctxs_with_memchan_breaks_multicancelled( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Demo how using an `async with sndchan` inside | ||||
|     a `.trionics.gather_contexts()` task will break a strict-eg-tn's | ||||
|     multi-cancelled absorption.. | ||||
| 
 | ||||
|     ''' | ||||
|     from tractor import ( | ||||
|         trionics, | ||||
|     ) | ||||
| 
 | ||||
|     @acm | ||||
|     async def open_memchan() -> trio.abc.ReceiveChannel: | ||||
| 
 | ||||
|         task: trio.Task = trio.lowlevel.current_task() | ||||
|         print( | ||||
|             f'Opening {task!r}\n' | ||||
|         ) | ||||
| 
 | ||||
|         # 1 to force eager sending | ||||
|         send, recv = trio.open_memory_channel(16) | ||||
| 
 | ||||
|         try: | ||||
|             async with send: | ||||
|                 yield recv | ||||
|         finally: | ||||
|             print( | ||||
|                 f'Closed {task!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with ( | ||||
|             # XXX should ensure ONLY the KBI | ||||
|             # is relayed upward | ||||
|             collapse_eg(), | ||||
|             trio.open_nursery(), # as tn, | ||||
| 
 | ||||
|             trionics.gather_contexts([ | ||||
|                 open_memchan(), | ||||
|                 open_memchan(), | ||||
|             ]) as recv_chans, | ||||
|         ): | ||||
|             assert len(recv_chans) == 2 | ||||
| 
 | ||||
|             await trio.sleep(1) | ||||
|             raise KeyboardInterrupt | ||||
|             # tn.cancel_scope.cancel() | ||||
| 
 | ||||
|     with pytest.raises(KeyboardInterrupt): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_unmasked', [ | ||||
|         True, | ||||
|         pytest.param( | ||||
|             False, | ||||
|             marks=pytest.mark.xfail( | ||||
|                 reason="see examples/trio/send_chan_aclose_masks.py" | ||||
|             ) | ||||
|         ), | ||||
|     ] | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'child_errors_mid_stream', | ||||
|     [True, False], | ||||
| ) | ||||
| def test_unmask_aclose_as_checkpoint_on_aexit( | ||||
|     raise_unmasked: bool, | ||||
|     child_errors_mid_stream: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that our unmasker util works over the common case where | ||||
|     a mem-chan's `.aclose()` is included in an `@acm` stack and, | ||||
|     being currently a checkpoint, can `trio.Cancelled`-mask an | ||||
|     embedded exception from user code, resulting in a silent | ||||
|     failure which appears like graceful cancellation. | ||||
| 
 | ||||
|     This test suite is mostly implemented as an example script so it | ||||
|     could more easily be shared with `trio`-core peeps as | ||||
|     a `tractor`-less minimum reproducing example. | ||||
| 
 | ||||
|     ''' | ||||
|     mod: ModuleType = pathlib.import_path( | ||||
|         examples_dir() | ||||
|         / 'trio' | ||||
|         / 'send_chan_aclose_masks_beg.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
|     with pytest.raises(RuntimeError): | ||||
|         trio.run(partial( | ||||
|             mod.main, | ||||
|             raise_unmasked=raise_unmasked, | ||||
|             child_errors_mid_stream=child_errors_mid_stream, | ||||
|         )) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'ignore_special_cases', [ | ||||
|         True, | ||||
|         pytest.param( | ||||
|             False, | ||||
|             marks=pytest.mark.xfail( | ||||
|                 reason="see examples/trio/lockacquire_not_umasked.py" | ||||
|             ) | ||||
|         ), | ||||
|     ] | ||||
| ) | ||||
| def test_cancelled_lockacquire_in_ipctx_not_unmasked( | ||||
|     ignore_special_cases: bool, | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     mod: ModuleType = pathlib.import_path( | ||||
|         examples_dir() | ||||
|         / 'trio' | ||||
|         / 'lockacquire_not_unmasked.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
|     async def _main(): | ||||
|         with trio.fail_after(2): | ||||
|             await mod.main( | ||||
|                 ignore_special_cases=ignore_special_cases, | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=debug_mode, | ||||
|             ) | ||||
| 
 | ||||
|     trio.run(_main) | ||||
|  |  | |||
|  | @ -18,6 +18,7 @@ | |||
| tractor: structured concurrent ``trio``-"actors". | ||||
| 
 | ||||
| """ | ||||
| from exceptiongroup import BaseExceptionGroup as BaseExceptionGroup | ||||
| 
 | ||||
| from ._clustering import ( | ||||
|     open_actor_cluster as open_actor_cluster, | ||||
|  | @ -31,7 +32,7 @@ from ._streaming import ( | |||
|     stream as stream, | ||||
| ) | ||||
| from ._discovery import ( | ||||
|     get_registry as get_registry, | ||||
|     get_arbiter as get_arbiter, | ||||
|     find_actor as find_actor, | ||||
|     wait_for_actor as wait_for_actor, | ||||
|     query_actor as query_actor, | ||||
|  | @ -43,15 +44,11 @@ from ._supervise import ( | |||
| from ._state import ( | ||||
|     current_actor as current_actor, | ||||
|     is_root_process as is_root_process, | ||||
|     current_ipc_ctx as current_ipc_ctx, | ||||
|     debug_mode as debug_mode | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     ContextCancelled as ContextCancelled, | ||||
|     ModuleNotExposed as ModuleNotExposed, | ||||
|     MsgTypeError as MsgTypeError, | ||||
|     RemoteActorError as RemoteActorError, | ||||
|     TransportClosed as TransportClosed, | ||||
|     ModuleNotExposed as ModuleNotExposed, | ||||
|     ContextCancelled as ContextCancelled, | ||||
| ) | ||||
| from .devx import ( | ||||
|     breakpoint as breakpoint, | ||||
|  | @ -64,7 +61,6 @@ from ._root import ( | |||
|     run_daemon as run_daemon, | ||||
|     open_root_actor as open_root_actor, | ||||
| ) | ||||
| from .ipc import Channel as Channel | ||||
| from ._ipc import Channel as Channel | ||||
| from ._portal import Portal as Portal | ||||
| from ._runtime import Actor as Actor | ||||
| # from . import hilevel as hilevel | ||||
|  |  | |||
tractor/_addr.py (282 lines)
							|  | @ -1,282 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| from __future__ import annotations | ||||
| from uuid import uuid4 | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Protocol, | ||||
|     ClassVar, | ||||
|     Type, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from bidict import bidict | ||||
| from trio import ( | ||||
|     SocketListener, | ||||
| ) | ||||
| 
 | ||||
| from .log import get_logger | ||||
| from ._state import ( | ||||
|     _def_tpt_proto, | ||||
| ) | ||||
| from .ipc._tcp import TCPAddress | ||||
| from .ipc._uds import UDSAddress | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, maybe breakout the netns key to a struct? | ||||
| # class NetNs(Struct)[str, int]: | ||||
| #     ... | ||||
| 
 | ||||
| # TODO, can't we just use a type alias | ||||
| # for this? namely just some `tuple[str, int, str, str]`? | ||||
| # | ||||
| # -[ ] would also just be simpler to keep this as SockAddr[tuple] | ||||
| #     or something, implying it's just a simple pair of values which can | ||||
| #     presumably be mapped to all transports? | ||||
| # -[ ] `pydoc socket.socket.getsockname()` delivers a 4-tuple for | ||||
| #     ipv6 `(hostaddr, port, flowinfo, scope_id)`.. so how should we | ||||
| #     handle that? | ||||
| # -[ ] as a further alternative to this wrap()/unwrap() approach we | ||||
| #     could just implement `enc/dec_hook()`s for the `Address`-types | ||||
| #     and just deal with our internal objs directly and always and | ||||
| #     leave it to the codec layer to figure out marshalling? | ||||
| #    |_ would mean only one spot to do the `.unwrap()` (which we may | ||||
| #       end up needing to call from the hook()s anyway?) | ||||
| # -[x] rename to `UnwrappedAddress[Descriptor]` ?? | ||||
| #    seems like the right name as per, | ||||
| #    https://www.geeksforgeeks.org/introduction-to-address-descriptor/ | ||||
| # | ||||
| UnwrappedAddress = ( | ||||
|     # tcp/udp/uds | ||||
|     tuple[ | ||||
|         str,  # host/domain(tcp), filesys-dir(uds) | ||||
|         int|str,  # port/path(uds) | ||||
|     ] | ||||
|     # ?TODO? should we also include another 2 fields from | ||||
|     # our `Aid` msg such that we include the runtime `Actor.uid` | ||||
|     # of `.name` and `.uuid`? | ||||
|     # - would ensure uniqueness across entire net? | ||||
|     # - allows for easier runtime-level filtering of "actors by | ||||
|     #   service name" | ||||
| ) | ||||
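
Illustrative unwrapped forms per transport, matching the field comments above (the concrete values are assumptions for demonstration only):

    tcp_addr: UnwrappedAddress = ('127.0.0.1', 1616)  # (host, port)
    uds_addr: UnwrappedAddress = ('/tmp/sock-dir', 'registry.sock')  # (filesys-dir, path)
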
| 
 | ||||
| 
 | ||||
| # TODO, maybe rename to `SocketAddress`? | ||||
| class Address(Protocol): | ||||
|     proto_key: ClassVar[str] | ||||
|     unwrapped_type: ClassVar[UnwrappedAddress] | ||||
| 
 | ||||
|     # TODO, i feel like an `.is_bound()` is a better thing to | ||||
|     # support? | ||||
|     # Like, what use does this have besides a noop, and if it's not | ||||
|     # valid why aren't we erroring on creation/use? | ||||
|     @property | ||||
|     def is_valid(self) -> bool: | ||||
|         ... | ||||
| 
 | ||||
|     # TODO, maybe `.netns` is a better name? | ||||
|     @property | ||||
|     def namespace(self) -> tuple[str, int]|None: | ||||
|         ''' | ||||
|         The if-available, OS-specific "network namespace" key. | ||||
| 
 | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     @property | ||||
|     def bindspace(self) -> str: | ||||
|         ''' | ||||
|         Deliver the socket address' "bindable space" from | ||||
|         a `socket.socket.bind()` and thus from the perspective of | ||||
|         specific transport protocol domain. | ||||
| 
 | ||||
|         I.e. for most (layer-4) network-socket protocols this is | ||||
|         normally the ipv4/6 address, for UDS this is normally | ||||
|         a filesystem (sub-directory). | ||||
| 
 | ||||
|         For (distributed) network protocols this is normally the routing | ||||
|         layer's domain/(ip-)address, though it might also include | ||||
|         a "network namespace" key different from the default. | ||||
| 
 | ||||
|         For local-host-only transports this is either an explicit | ||||
|         namespace (with types defined by the OS: netns, Cgroup, IPC, | ||||
|         pid, etc. on linux) or failing that the sub-directory in the | ||||
|         filesys in which socket/shm files are located *under*. | ||||
| 
 | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_addr(cls, addr: UnwrappedAddress) -> Address: | ||||
|         ... | ||||
| 
 | ||||
|     def unwrap(self) -> UnwrappedAddress: | ||||
|         ''' | ||||
|         Deliver the underlying minimum field set in | ||||
|         a primitive python data type-structure. | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     @classmethod | ||||
|     def get_random( | ||||
|         cls, | ||||
|         current_actor: Actor, | ||||
|         bindspace: str|None = None, | ||||
|     ) -> Address: | ||||
|         ... | ||||
| 
 | ||||
|     # TODO, this should be something like a `.get_def_registar_addr()` | ||||
|     # or similar since, | ||||
|     # - it should be a **host singleton** (not root/tree singleton) | ||||
|     # - we **only need this value** when one isn't provided to the | ||||
|     #   runtime at boot and we want to implicitly provide a host-wide | ||||
|     #   registrar. | ||||
|     # - each rooted-actor-tree should likely have its own | ||||
|     #   micro-registry (likely the root being it), also see | ||||
|     @classmethod | ||||
|     def get_root(cls) -> Address: | ||||
|         ... | ||||
| 
 | ||||
|     def __repr__(self) -> str: | ||||
|         ... | ||||
| 
 | ||||
|     def __eq__(self, other) -> bool: | ||||
|         ... | ||||
| 
 | ||||
|     async def open_listener( | ||||
|         self, | ||||
|         **kwargs, | ||||
|     ) -> SocketListener: | ||||
|         ... | ||||
| 
 | ||||
|     async def close_listener(self): | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| _address_types: bidict[str, Type[Address]] = { | ||||
|     'tcp': TCPAddress, | ||||
|     'uds': UDSAddress | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| # TODO! really these are discovery sys default addrs ONLY useful for | ||||
| # when none is provided to a root actor on first boot. | ||||
| _default_lo_addrs: dict[ | ||||
|     str, | ||||
|     UnwrappedAddress | ||||
| ] = { | ||||
|     'tcp': TCPAddress.get_root().unwrap(), | ||||
|     'uds': UDSAddress.get_root().unwrap(), | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def get_address_cls(name: str) -> Type[Address]: | ||||
|     return _address_types[name] | ||||
| 
 | ||||
| 
 | ||||
| def is_wrapped_addr(addr: Any) -> bool: | ||||
|     return type(addr) in _address_types.values() | ||||
| 
 | ||||
| 
 | ||||
| def mk_uuid() -> str: | ||||
|     ''' | ||||
|     Encapsulate creation of a uuid4 as `str` as used | ||||
|     for creating `Actor.uid: tuple[str, str]` and/or | ||||
|     `.msg.types.Aid`. | ||||
| 
 | ||||
|     ''' | ||||
|     return str(uuid4()) | ||||
| 
 | ||||
| 
 | ||||
| def wrap_address( | ||||
|     addr: UnwrappedAddress | ||||
| ) -> Address: | ||||
|     ''' | ||||
|     Wrap an `UnwrappedAddress` as an `Address`-type based | ||||
|     on matching builtin python data-structures which we use | ||||
|     ad hoc for each. | ||||
| 
 | ||||
|     XXX NOTE, care must be taken to ensure | ||||
|     `UnwrappedAddress` cases are **definitely unique** otherwise the | ||||
|     wrong transport backend may be loaded and will break many | ||||
|     low-level things in our runtime in a not-fun-to-debug way! | ||||
| 
 | ||||
|     XD | ||||
| 
 | ||||
|     ''' | ||||
|     if is_wrapped_addr(addr): | ||||
|         return addr | ||||
| 
 | ||||
|     cls: Type|None = None | ||||
|     # if 'sock' in addr[0]: | ||||
|     #     import pdbp; pdbp.set_trace() | ||||
|     match addr: | ||||
| 
 | ||||
|         # classic network socket-address as tuple/list | ||||
|         case ( | ||||
|             (str(), int()) | ||||
|             | | ||||
|             [str(), int()] | ||||
|         ): | ||||
|             cls = TCPAddress | ||||
| 
 | ||||
|         case ( | ||||
|             # (str()|Path(), str()|Path()), | ||||
|             # ^TODO? uhh why doesn't this work!? | ||||
| 
 | ||||
|             (_, filename) | ||||
|         ) if type(filename) is str: | ||||
|             cls = UDSAddress | ||||
| 
 | ||||
|         # likely an unset UDS or TCP reg address as defaulted in | ||||
|         # `_state._runtime_vars['_root_mailbox']` | ||||
|         # | ||||
|         # TODO? figure out when/if we even need this? | ||||
|         case ( | ||||
|             None | ||||
|             | | ||||
|             [None, None] | ||||
|         ): | ||||
|             cls: Type[Address] = get_address_cls(_def_tpt_proto) | ||||
|             addr: UnwrappedAddress = cls.get_root().unwrap() | ||||
| 
 | ||||
|         case _: | ||||
|             # import pdbp; pdbp.set_trace() | ||||
|             raise TypeError( | ||||
|                 f'Can not wrap unwrapped-address ??\n' | ||||
|                 f'type(addr): {type(addr)!r}\n' | ||||
|                 f'addr: {addr!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|     return cls.from_addr(addr) | ||||
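
Assumed usage of the structural dispatch above: the second field's type (int port vs. str path) selects the transport's `Address` subtype, while `None` falls back to the default transport's root address.

    wrap_address(('127.0.0.1', 1616))          # -> TCPAddress
    wrap_address(('/tmp/sock-dir', 'a.sock'))  # -> UDSAddress
    wrap_address(None)                         # -> default proto's root addr
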
| 
 | ||||
| 
 | ||||
| def default_lo_addrs( | ||||
|     transports: list[str], | ||||
| ) -> list[Type[Address]]: | ||||
|     ''' | ||||
|     Return the default, host-singleton, registry address | ||||
|     for an input transport key set. | ||||
| 
 | ||||
|     ''' | ||||
|     return [ | ||||
|         _default_lo_addrs[transport] | ||||
|         for transport in transports | ||||
|     ] | ||||
|  | @ -31,16 +31,11 @@ def parse_uid(arg): | |||
|     return str(name), str(uuid)  # ensures str encoding | ||||
| 
 | ||||
| def parse_ipaddr(arg): | ||||
|     try: | ||||
|         return literal_eval(arg) | ||||
| 
 | ||||
|     except (ValueError, SyntaxError): | ||||
|         # UDS: try to interpret as a straight up str | ||||
|         return arg | ||||
|     host, port = literal_eval(arg) | ||||
|     return (str(host), int(port)) | ||||
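
That is, the spawning parent is expected to pass the address as a Python literal which `literal_eval()` round-trips, eg. (value assumed):

    parse_ipaddr("('127.0.0.1', 1616)")  # -> ('127.0.0.1', 1616)
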
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     __tracebackhide__: bool = True | ||||
| 
 | ||||
|     parser = argparse.ArgumentParser() | ||||
|     parser.add_argument("--uid", type=parse_uid) | ||||
|  | @ -50,8 +45,8 @@ if __name__ == "__main__": | |||
|     args = parser.parse_args() | ||||
| 
 | ||||
|     subactor = Actor( | ||||
|         name=args.uid[0], | ||||
|         uuid=args.uid[1], | ||||
|         args.uid[0], | ||||
|         uid=args.uid[1], | ||||
|         loglevel=args.loglevel, | ||||
|         spawn_method="trio" | ||||
|     ) | ||||
|  |  | |||
|  | @ -19,13 +19,10 @@ Actor cluster helpers. | |||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from multiprocessing import cpu_count | ||||
| from typing import ( | ||||
|     AsyncGenerator, | ||||
| ) | ||||
| from typing import AsyncGenerator, Optional | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
|  | @ -55,17 +52,10 @@ async def open_actor_cluster( | |||
|         raise ValueError( | ||||
|             f'Number of names is {len(names)} but count is {count}') | ||||
| 
 | ||||
|     async with ( | ||||
|         # tractor.trionics.collapse_eg(), | ||||
|         tractor.open_nursery( | ||||
|             **runtime_kwargs, | ||||
|         ) as an | ||||
|     ): | ||||
|         async with ( | ||||
|             # tractor.trionics.collapse_eg(), | ||||
|             trio.open_nursery() as tn, | ||||
|             tractor.trionics.maybe_raise_from_masking_exc() | ||||
|         ): | ||||
|     async with tractor.open_nursery( | ||||
|         **runtime_kwargs, | ||||
|     ) as an: | ||||
|         async with trio.open_nursery() as n: | ||||
|             uid = tractor.current_actor().uid | ||||
| 
 | ||||
|             async def _start(name: str) -> None: | ||||
|  | @ -76,8 +66,9 @@ async def open_actor_cluster( | |||
|                 ) | ||||
| 
 | ||||
|             for name in names: | ||||
|                 tn.start_soon(_start, name) | ||||
|                 n.start_soon(_start, name) | ||||
| 
 | ||||
|         assert len(portals) == count | ||||
|         yield portals | ||||
| 
 | ||||
|         await an.cancel(hard_kill=hard_kill) | ||||
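
A hedged usage sketch of the cluster helper above; besides `names` and `count` (both visible in this hunk) any other argument names are assumptions:

    async def main():
        async with open_actor_cluster(
            names=['worker0', 'worker1'],
            count=2,  # must match len(names) per the check above
        ) as portals:
            # one portal per spawned subactor
            assert len(portals) == 2
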
|  |  | |||
tractor/_context.py (2122 lines): file diff suppressed because it is too large
							|  | @ -26,18 +26,10 @@ from typing import ( | |||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import warnings | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| from .trionics import ( | ||||
|     gather_contexts, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from .ipc import _connect_chan, Channel | ||||
| from ._addr import ( | ||||
|     UnwrappedAddress, | ||||
|     Address, | ||||
|     wrap_address | ||||
| ) | ||||
| from .trionics import gather_contexts | ||||
| from ._ipc import _connect_chan, Channel | ||||
| from ._portal import ( | ||||
|     Portal, | ||||
|     open_portal, | ||||
|  | @ -46,50 +38,61 @@ from ._portal import ( | |||
| from ._state import ( | ||||
|     current_actor, | ||||
|     _runtime_vars, | ||||
|     _def_tpt_proto, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_registry( | ||||
|     addr: UnwrappedAddress|None = None, | ||||
|     host: str, | ||||
|     port: int, | ||||
| 
 | ||||
| ) -> AsyncGenerator[ | ||||
|     Portal | LocalPortal | None, | ||||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Return a portal instance connected to a local or remote | ||||
|     registry-service actor; if a connection already exists re-use it | ||||
|     (presumably to call a `.register_actor()` registry runtime RPC | ||||
|     ep). | ||||
|     arbiter. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
|     actor = current_actor() | ||||
| 
 | ||||
|     if not actor: | ||||
|         raise RuntimeError("No actor instance has been defined yet?") | ||||
| 
 | ||||
|     if actor.is_registrar: | ||||
|         # we're already the arbiter | ||||
|         # (likely a re-entrant call from the arbiter actor) | ||||
|         yield LocalPortal( | ||||
|             actor, | ||||
|             Channel(transport=None) | ||||
|             # ^XXX, we DO NOT actually provide nor connect an | ||||
|             # underlying transport since this is merely an API shim. | ||||
|             Channel((host, port)) | ||||
|         ) | ||||
|     else: | ||||
|         # TODO: try to look pre-existing connection from | ||||
|         # `Server._peers` and use it instead? | ||||
|         async with ( | ||||
|             _connect_chan(addr) as chan, | ||||
|             _connect_chan(host, port) as chan, | ||||
|             open_portal(chan) as regstr_ptl, | ||||
|         ): | ||||
|             yield regstr_ptl | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| # TODO: deprecate and this remove _arbiter form! | ||||
| @acm | ||||
| async def get_arbiter(*args, **kwargs): | ||||
|     warnings.warn( | ||||
|         '`tractor.get_arbiter()` is now deprecated!\n' | ||||
|         'Use `.get_registry()` instead!', | ||||
|         DeprecationWarning, | ||||
|         stacklevel=2, | ||||
|     ) | ||||
|     async with get_registry(*args, **kwargs) as to_yield: | ||||
|         yield to_yield | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_root( | ||||
|     **kwargs, | ||||
|  | @ -97,68 +100,32 @@ async def get_root( | |||
| 
 | ||||
|     # TODO: rename mailbox to `_root_maddr` when we finally | ||||
|     # add and impl libp2p multi-addrs? | ||||
|     addr = _runtime_vars['_root_mailbox'] | ||||
|     host, port = _runtime_vars['_root_mailbox'] | ||||
|     assert host is not None | ||||
| 
 | ||||
|     async with ( | ||||
|         _connect_chan(addr) as chan, | ||||
|         _connect_chan(host, port) as chan, | ||||
|         open_portal(chan, **kwargs) as portal, | ||||
|     ): | ||||
|         yield portal | ||||
| 
 | ||||
| 
 | ||||
| def get_peer_by_name( | ||||
|     name: str, | ||||
|     # uuid: str|None = None, | ||||
| 
 | ||||
| ) -> list[Channel]|None:  # at least 1 | ||||
|     ''' | ||||
|     Scan for an existing connection (set) to a named actor | ||||
|     and return any channels from `Server._peers: dict`. | ||||
| 
 | ||||
|     This is an optimization method over querying the registrar for | ||||
|     the same info. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
|     to_scan: dict[tuple, list[Channel]] = actor.ipc_server._peers.copy() | ||||
| 
 | ||||
|     # TODO: is this ever needed? creates a duplicate channel on actor._peers | ||||
|     # when multiple find_actor calls are made to the same actor from a single ctx | ||||
|     # which causes actor exit to hang waiting forever on | ||||
|     # `actor._no_more_peers.wait()` in `_runtime.async_main` | ||||
| 
 | ||||
|     # pchan: Channel|None = actor._parent_chan | ||||
|     # if pchan and pchan.uid not in to_scan: | ||||
|     #     to_scan[pchan.uid].append(pchan) | ||||
| 
 | ||||
|     for aid, chans in to_scan.items(): | ||||
|         _, peer_name = aid | ||||
|         if name == peer_name: | ||||
|             if not chans: | ||||
|                 log.warning( | ||||
|                     f'No IPC chans for matching peer {peer_name}\n' | ||||
|                 ) | ||||
|                 continue | ||||
|             return chans | ||||
| 
 | ||||
|     return None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def query_actor( | ||||
|     name: str, | ||||
|     regaddr: UnwrappedAddress|None = None, | ||||
|     arbiter_sockaddr: tuple[str, int] | None = None, | ||||
|     regaddr: tuple[str, int] | None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[ | ||||
|     UnwrappedAddress|None, | ||||
|     tuple[str, int] | None, | ||||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Lookup a transport address (by actor name) via querying a registrar | ||||
|     listening @ `regaddr`. | ||||
|     Make a transport address lookup for an actor name to a specific | ||||
|     registrar. | ||||
| 
 | ||||
|     Returns the transport protocol (socket) address or `None` if no | ||||
|     entry under that name exists. | ||||
|     Returns the (socket) address or ``None`` if no entry under that | ||||
|     name exists for the given registrar listening @ `regaddr`. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
|  | @ -170,48 +137,33 @@ async def query_actor( | |||
|             'The current actor IS the registry!?' | ||||
|         ) | ||||
| 
 | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers: | ||||
|         yield maybe_peers[0].raddr | ||||
|         return | ||||
|     if arbiter_sockaddr is not None: | ||||
|         warnings.warn( | ||||
|             '`tractor.query_actor(arbiter_sockaddr=<blah>)` is deprecated.\n' | ||||
|             'Use `registry_addrs: list[tuple]` instead!', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         regaddr: tuple[str, int] = arbiter_sockaddr | ||||
| 
 | ||||
|     reg_portal: Portal | ||||
|     regaddr: Address = wrap_address(regaddr) or actor.reg_addrs[0] | ||||
|     async with get_registry(regaddr) as reg_portal: | ||||
|     regaddr: tuple[str, int] = regaddr or actor.reg_addrs[0] | ||||
|     async with get_registry(*regaddr) as reg_portal: | ||||
|         # TODO: return portals to all available actors - for now | ||||
|         # just the last one that registered | ||||
|         addr: UnwrappedAddress = await reg_portal.run_from_ns( | ||||
|         sockaddr: tuple[str, int] = await reg_portal.run_from_ns( | ||||
|             'self', | ||||
|             'find_actor', | ||||
|             name=name, | ||||
|         ) | ||||
|         yield addr | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_portal( | ||||
|     addr: UnwrappedAddress, | ||||
|     name: str, | ||||
| ): | ||||
|     async with query_actor( | ||||
|         name=name, | ||||
|         regaddr=addr, | ||||
|     ) as addr: | ||||
|         pass | ||||
| 
 | ||||
|     if addr: | ||||
|         async with _connect_chan(addr) as chan: | ||||
|             async with open_portal(chan) as portal: | ||||
|                 yield portal | ||||
|     else: | ||||
|         yield None | ||||
|         yield sockaddr | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def find_actor( | ||||
|     name: str, | ||||
|     registry_addrs: list[UnwrappedAddress]|None = None, | ||||
|     enable_transports: list[str] = [_def_tpt_proto], | ||||
|     arbiter_sockaddr: tuple[str, int]|None = None, | ||||
|     registry_addrs: list[tuple[str, int]]|None = None, | ||||
| 
 | ||||
|     only_first: bool = True, | ||||
|     raise_on_none: bool = False, | ||||
|  | @ -227,40 +179,51 @@ async def find_actor( | |||
|     known to the arbiter. | ||||
| 
 | ||||
|     ''' | ||||
|     # optimization path, use any pre-existing peer channel | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers and only_first: | ||||
|         async with open_portal(maybe_peers[0]) as peer_portal: | ||||
|             yield peer_portal | ||||
|             return | ||||
|     if arbiter_sockaddr is not None: | ||||
|         warnings.warn( | ||||
|             '`tractor.find_actor(arbiter_sockaddr=<blah>)` is deprecated.\n' | ||||
|             'Use `registry_addrs: list[tuple]` instead!', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         registry_addrs: list[tuple[str, int]] = [arbiter_sockaddr] | ||||
| 
 | ||||
|     @acm | ||||
|     async def maybe_open_portal_from_reg_addr( | ||||
|         addr: tuple[str, int], | ||||
|     ): | ||||
|         async with query_actor( | ||||
|             name=name, | ||||
|             regaddr=addr, | ||||
|         ) as sockaddr: | ||||
|             if sockaddr: | ||||
|                 async with _connect_chan(*sockaddr) as chan: | ||||
|                     async with open_portal(chan) as portal: | ||||
|                         yield portal | ||||
|             else: | ||||
|                 yield None | ||||
| 
 | ||||
|     if not registry_addrs: | ||||
|         # XXX NOTE: make sure to dynamically read the value on | ||||
|         # every call since something may change it globally (eg. | ||||
|         # like in our discovery test suite)! | ||||
|         from ._addr import default_lo_addrs | ||||
|         from . import _root | ||||
|         registry_addrs = ( | ||||
|             _runtime_vars['_registry_addrs'] | ||||
|             or | ||||
|             default_lo_addrs(enable_transports) | ||||
|             _root._default_lo_addrs | ||||
|         ) | ||||
| 
 | ||||
|     maybe_portals: list[ | ||||
|         AsyncContextManager[UnwrappedAddress] | ||||
|         AsyncContextManager[tuple[str, int]] | ||||
|     ] = list( | ||||
|         maybe_open_portal( | ||||
|             addr=addr, | ||||
|             name=name, | ||||
|         ) | ||||
|         maybe_open_portal_from_reg_addr(addr) | ||||
|         for addr in registry_addrs | ||||
|     ) | ||||
|     portals: list[Portal] | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         gather_contexts( | ||||
|             mngrs=maybe_portals, | ||||
|         ) as portals, | ||||
|     ): | ||||
| 
 | ||||
|     async with gather_contexts( | ||||
|         mngrs=maybe_portals, | ||||
|     ) as portals: | ||||
|         # log.runtime( | ||||
|         #     'Gathered portals:\n' | ||||
|         #     f'{portals}' | ||||
|  | @ -291,33 +254,33 @@ async def find_actor( | |||
| @acm | ||||
| async def wait_for_actor( | ||||
|     name: str, | ||||
|     registry_addr: UnwrappedAddress | None = None, | ||||
|     arbiter_sockaddr: tuple[str, int] | None = None, | ||||
|     registry_addr: tuple[str, int] | None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[Portal, None]: | ||||
|     ''' | ||||
|     Wait on at least one peer actor to register `name` with the | ||||
|     registrar, yield a `Portal` to the first registree. | ||||
|     Wait on an actor to register with the arbiter. | ||||
| 
 | ||||
|     A portal to the first registered actor is returned. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
| 
 | ||||
|     # optimization path, use any pre-existing peer channel | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers: | ||||
|         async with open_portal(maybe_peers[0]) as peer_portal: | ||||
|             yield peer_portal | ||||
|             return | ||||
|     if arbiter_sockaddr is not None: | ||||
|         warnings.warn( | ||||
|             '`tractor.wait_for_actor(arbiter_sockaddr=<foo>)` is deprecated.\n' | ||||
|             'Use `registry_addr: tuple` instead!', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         registry_addr: tuple[str, int] = arbiter_sockaddr | ||||
| 
 | ||||
|     regaddr: UnwrappedAddress = ( | ||||
|         registry_addr | ||||
|         or | ||||
|         actor.reg_addrs[0] | ||||
|     ) | ||||
|     # TODO: use `.trionics.gather_contexts()` like | ||||
|     # above in `find_actor()` as well? | ||||
|     reg_portal: Portal | ||||
|     async with get_registry(regaddr) as reg_portal: | ||||
|         addrs = await reg_portal.run_from_ns( | ||||
|     regaddr: tuple[str, int] = registry_addr or actor.reg_addrs[0] | ||||
|     async with get_registry(*regaddr) as reg_portal: | ||||
|         sockaddrs = await reg_portal.run_from_ns( | ||||
|             'self', | ||||
|             'wait_for_actor', | ||||
|             name=name, | ||||
|  | @ -325,8 +288,8 @@ async def wait_for_actor( | |||
| 
 | ||||
|         # get latest registered addr by default? | ||||
|         # TODO: offer multi-portal yields in multi-homed case? | ||||
|         addr: UnwrappedAddress = addrs[-1] | ||||
|         sockaddr: tuple[str, int] = sockaddrs[-1] | ||||
| 
 | ||||
|         async with _connect_chan(addr) as chan: | ||||
|         async with _connect_chan(*sockaddr) as chan: | ||||
|             async with open_portal(chan) as portal: | ||||
|                 yield portal | ||||
|  |  | |||
|  | @ -20,8 +20,6 @@ Sub-process entry points. | |||
| """ | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| import multiprocessing as mp | ||||
| # import os | ||||
| from typing import ( | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
|  | @ -34,13 +32,7 @@ from .log import ( | |||
|     get_logger, | ||||
| ) | ||||
| from . import _state | ||||
| from .devx import ( | ||||
|     _frame_stack, | ||||
|     pformat, | ||||
| ) | ||||
| # from .msg import pretty_struct | ||||
| from .to_asyncio import run_as_asyncio_guest | ||||
| from ._addr import UnwrappedAddress | ||||
| from ._runtime import ( | ||||
|     async_main, | ||||
|     Actor, | ||||
|  | @ -56,35 +48,33 @@ log = get_logger(__name__) | |||
| def _mp_main( | ||||
| 
 | ||||
|     actor: Actor, | ||||
|     accept_addrs: list[UnwrappedAddress], | ||||
|     accept_addrs: list[tuple[str, int]], | ||||
|     forkserver_info: tuple[Any, Any, Any, Any, Any], | ||||
|     start_method: SpawnMethodKey, | ||||
|     parent_addr: UnwrappedAddress | None = None, | ||||
|     parent_addr: tuple[str, int] | None = None, | ||||
|     infect_asyncio: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     The routine called *after fork* which invokes a fresh `trio.run()` | ||||
|     The routine called *after fork* which invokes a fresh ``trio.run`` | ||||
| 
 | ||||
|     ''' | ||||
|     actor._forkserver_info = forkserver_info | ||||
|     from ._spawn import try_set_start_method | ||||
|     spawn_ctx: mp.context.BaseContext = try_set_start_method(start_method) | ||||
|     assert spawn_ctx | ||||
|     spawn_ctx = try_set_start_method(start_method) | ||||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         log.info( | ||||
|             f'Setting loglevel for {actor.uid} to {actor.loglevel}' | ||||
|         ) | ||||
|             f"Setting loglevel for {actor.uid} to {actor.loglevel}") | ||||
|         get_console_log(actor.loglevel) | ||||
| 
 | ||||
|     # TODO: use scops headers like for `trio` below! | ||||
|     # (well after we libify it maybe..) | ||||
|     assert spawn_ctx | ||||
|     log.info( | ||||
|         f'Started new {spawn_ctx.current_process()} for {actor.uid}' | ||||
|     #     f"parent_addr is {parent_addr}" | ||||
|     ) | ||||
|     _state._current_actor: Actor = actor | ||||
|         f"Started new {spawn_ctx.current_process()} for {actor.uid}") | ||||
| 
 | ||||
|     _state._current_actor = actor | ||||
| 
 | ||||
|     log.debug(f"parent_addr is {parent_addr}") | ||||
|     trio_main = partial( | ||||
|         async_main, | ||||
|         actor=actor, | ||||
|  | @ -101,15 +91,14 @@ def _mp_main( | |||
|         pass  # handle it the same way trio does? | ||||
| 
 | ||||
|     finally: | ||||
|         log.info( | ||||
|             f'`mp`-subactor {actor.uid} exited' | ||||
|         ) | ||||
|         log.info(f"Actor {actor.uid} terminated") | ||||
| 
 | ||||
| 
 | ||||
| def _trio_main( | ||||
| 
 | ||||
|     actor: Actor, | ||||
|     *, | ||||
|     parent_addr: UnwrappedAddress|None = None, | ||||
|     parent_addr: tuple[str, int] | None = None, | ||||
|     infect_asyncio: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -117,8 +106,6 @@ def _trio_main( | |||
|     Entry point for a `trio_run_in_process` subactor. | ||||
| 
 | ||||
|     ''' | ||||
|     _frame_stack.hide_runtime_frames() | ||||
| 
 | ||||
|     _state._current_actor = actor | ||||
|     trio_main = partial( | ||||
|         async_main, | ||||
|  | @ -128,55 +115,36 @@ def _trio_main( | |||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         get_console_log(actor.loglevel) | ||||
|         import os | ||||
|         actor_info: str = ( | ||||
|             f'|_{actor}\n' | ||||
|             f'  uid: {actor.uid}\n' | ||||
|             f'  pid: {os.getpid()}\n' | ||||
|             f'  parent_addr: {parent_addr}\n' | ||||
|             f'  loglevel: {actor.loglevel}\n' | ||||
|         ) | ||||
|         log.info( | ||||
|             f'Starting `trio` subactor from parent @ ' | ||||
|             f'{parent_addr}\n' | ||||
|             'Started new trio process:\n' | ||||
|             + | ||||
|             pformat.nest_from_op( | ||||
|                 input_op='>(',  # see syntax ideas above | ||||
|                 text=f'{actor}', | ||||
|             ) | ||||
|             actor_info | ||||
|         ) | ||||
|     logmeth = log.info | ||||
|     exit_status: str = ( | ||||
|         'Subactor exited\n' | ||||
|         + | ||||
|         pformat.nest_from_op( | ||||
|             input_op=')>',  # like a "closed-to-play"-icon from super perspective | ||||
|             text=f'{actor}', | ||||
|             nest_indent=1, | ||||
|         ) | ||||
|     ) | ||||
| 
 | ||||
|     try: | ||||
|         if infect_asyncio: | ||||
|             actor._infected_aio = True | ||||
|             run_as_asyncio_guest(trio_main) | ||||
|         else: | ||||
|             trio.run(trio_main) | ||||
| 
 | ||||
|     except KeyboardInterrupt: | ||||
|         logmeth = log.cancel | ||||
|         exit_status: str = ( | ||||
|             'Actor received KBI (aka an OS-cancel)\n' | ||||
|         log.cancel( | ||||
|             'Actor received KBI\n' | ||||
|             + | ||||
|             pformat.nest_from_op( | ||||
|                 input_op='c)>',  # closed due to cancel (see above) | ||||
|                 text=f'{actor}', | ||||
|             ) | ||||
|             actor_info | ||||
|         ) | ||||
|     except BaseException as err: | ||||
|         logmeth = log.error | ||||
|         exit_status: str = ( | ||||
|             'Main actor task exited due to crash?\n' | ||||
|             + | ||||
|             pformat.nest_from_op( | ||||
|                 input_op='x)>',  # closed by error | ||||
|                 text=f'{actor}', | ||||
|             ) | ||||
|         ) | ||||
|         # NOTE since we raise a tb will already be shown on the | ||||
|         # console, thus we do NOT use `.exception()` above. | ||||
|         raise err | ||||
| 
 | ||||
|     finally: | ||||
|         logmeth(exit_status) | ||||
|         log.info( | ||||
|             'Actor terminated\n' | ||||
|             + | ||||
|             actor_info | ||||
|         ) | ||||
|  |  | |||
(file diff suppressed because it is too large)
							|  | @ -0,0 +1,532 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Inter-process comms abstractions | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from collections.abc import ( | ||||
|     AsyncGenerator, | ||||
|     AsyncIterator, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import platform | ||||
| from pprint import pformat | ||||
| import struct | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     runtime_checkable, | ||||
|     Protocol, | ||||
|     Type, | ||||
|     TypeVar, | ||||
| ) | ||||
| 
 | ||||
| import msgspec | ||||
| from tricycle import BufferedReceiveStream | ||||
| import trio | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| from tractor._exceptions import TransportClosed | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _is_windows = platform.system() == 'Windows' | ||||
| 
 | ||||
| 
 | ||||
| def get_stream_addrs(stream: trio.SocketStream) -> tuple: | ||||
|     # should both be IP sockets | ||||
|     lsockname = stream.socket.getsockname() | ||||
|     rsockname = stream.socket.getpeername() | ||||
|     return ( | ||||
|         tuple(lsockname[:2]), | ||||
|         tuple(rsockname[:2]), | ||||
|     ) | ||||
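
The `[:2]` slices normalize both socket families to `(host, port)` pairs; eg. (values assumed) an ipv6 `getsockname()` returns a 4-tuple:

    ('::1', 1616, 0, 0)[:2]  # -> ('::1', 1616)
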
| 
 | ||||
| 
 | ||||
| MsgType = TypeVar("MsgType") | ||||
| 
 | ||||
| # TODO: consider using a generic def and indexing with our eventual | ||||
| # msg definition/types? | ||||
| # - https://docs.python.org/3/library/typing.html#typing.Protocol | ||||
| # - https://jcristharif.com/msgspec/usage.html#structs | ||||
| 
 | ||||
| 
 | ||||
| @runtime_checkable | ||||
| class MsgTransport(Protocol[MsgType]): | ||||
| 
 | ||||
|     stream: trio.SocketStream | ||||
|     drained: list[MsgType] | ||||
| 
 | ||||
|     def __init__(self, stream: trio.SocketStream) -> None: | ||||
|         ... | ||||
| 
 | ||||
|     # XXX: should this instead be called `.sendall()`? | ||||
|     async def send(self, msg: MsgType) -> None: | ||||
|         ... | ||||
| 
 | ||||
|     async def recv(self) -> MsgType: | ||||
|         ... | ||||
| 
 | ||||
|     def __aiter__(self) -> MsgType: | ||||
|         ... | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         ... | ||||
| 
 | ||||
|     # defining this sync otherwise it causes a mypy error because it | ||||
|     # can't figure out it's a generator i guess?..? | ||||
|     def drain(self) -> AsyncIterator[dict]: | ||||
|         ... | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]: | ||||
|         ... | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> tuple[str, int]: | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| # TODO: not sure why we have to inherit here, but it seems to be an | ||||
| # issue with ``get_msg_transport()`` returning a ``Type[Protocol]``; | ||||
| # probably should make a `mypy` issue? | ||||
| class MsgpackTCPStream(MsgTransport): | ||||
|     ''' | ||||
|     A ``trio.SocketStream`` delivering ``msgpack`` formatted data | ||||
|     using the ``msgspec`` codec lib. | ||||
| 
 | ||||
|     ''' | ||||
|     layer_key: int = 4 | ||||
|     name_key: str = 'tcp' | ||||
| 
 | ||||
|     # TODO: better naming for this? | ||||
|     # -[ ] check how libp2p does naming for such things? | ||||
|     codec_key: str = 'msgpack' | ||||
| 
 | ||||
|     def __init__( | ||||
|         self, | ||||
|         stream: trio.SocketStream, | ||||
|         prefix_size: int = 4, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self.stream = stream | ||||
|         assert self.stream.socket | ||||
| 
 | ||||
|         # should both be IP sockets | ||||
|         self._laddr, self._raddr = get_stream_addrs(stream) | ||||
| 
 | ||||
|         # create read loop instance | ||||
|         self._agen = self._iter_packets() | ||||
|         self._send_lock = trio.StrictFIFOLock() | ||||
| 
 | ||||
|         # public i guess? | ||||
|         self.drained: list[dict] = [] | ||||
| 
 | ||||
|         self.recv_stream = BufferedReceiveStream(transport_stream=stream) | ||||
|         self.prefix_size = prefix_size | ||||
| 
 | ||||
|         # TODO: struct aware messaging coders | ||||
|         self.encode = msgspec.msgpack.Encoder().encode | ||||
|         self.decode = msgspec.msgpack.Decoder().decode  # dict[str, Any]) | ||||
| 
 | ||||
|     async def _iter_packets(self) -> AsyncGenerator[dict, None]: | ||||
|         ''' | ||||
|         Yield packets from the underlying stream. | ||||
| 
 | ||||
|         ''' | ||||
|         import msgspec  # noqa | ||||
|         decodes_failed: int = 0 | ||||
| 
 | ||||
|         while True: | ||||
|             try: | ||||
|                 header = await self.recv_stream.receive_exactly(4) | ||||
| 
 | ||||
|             except ( | ||||
|                 ValueError, | ||||
|                 ConnectionResetError, | ||||
| 
 | ||||
|                 # not sure entirely why we need this but without it we | ||||
|                 # seem to be getting racy failures here on | ||||
|                 # arbiter/registry name subs.. | ||||
|                 trio.BrokenResourceError, | ||||
|             ): | ||||
|                 raise TransportClosed( | ||||
|                     f'transport {self} was already closed prior to read' | ||||
|                 ) | ||||
| 
 | ||||
|             if header == b'': | ||||
|                 raise TransportClosed( | ||||
|                     f'transport {self} was already closed prior to read' | ||||
|                 ) | ||||
| 
 | ||||
|             size, = struct.unpack("<I", header) | ||||
| 
 | ||||
|             log.transport(f'received header {size}')  # type: ignore | ||||
| 
 | ||||
|             msg_bytes = await self.recv_stream.receive_exactly(size) | ||||
| 
 | ||||
|             log.transport(f"received {msg_bytes}")  # type: ignore | ||||
|             try: | ||||
|                 yield self.decode(msg_bytes) | ||||
|             except ( | ||||
|                 msgspec.DecodeError, | ||||
|                 UnicodeDecodeError, | ||||
|             ): | ||||
|                 if decodes_failed < 4: | ||||
|                     # ignore decoding errors for now and assume they have to | ||||
|                     # do with a channel drop - hope that receiving from the | ||||
|                     # channel will raise an expected error and bubble up. | ||||
|                     try: | ||||
|                         msg_str: str | bytes = msg_bytes.decode() | ||||
|                     except UnicodeDecodeError: | ||||
|                         msg_str = msg_bytes | ||||
| 
 | ||||
|                     log.error( | ||||
|                         '`msgspec` failed to decode!?\n' | ||||
|                         'dumping bytes:\n' | ||||
|                         f'{msg_str!r}' | ||||
|                     ) | ||||
|                     decodes_failed += 1 | ||||
|                 else: | ||||
|                     raise | ||||
| 
 | ||||
|     async def send( | ||||
|         self, | ||||
|         msg: Any, | ||||
| 
 | ||||
|         # hide_tb: bool = False, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a msgpack coded blob-as-msg over TCP. | ||||
| 
 | ||||
|         ''' | ||||
|         # __tracebackhide__: bool = hide_tb | ||||
|         async with self._send_lock: | ||||
| 
 | ||||
|             bytes_data: bytes = self.encode(msg) | ||||
| 
 | ||||
|             # supposedly the fastest says, | ||||
|             # https://stackoverflow.com/a/54027962 | ||||
|             size: bytes = struct.pack("<I", len(bytes_data)) | ||||
| 
 | ||||
|             return await self.stream.send_all(size + bytes_data) | ||||
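
The resulting wire format is a 4-byte little-endian length prefix followed by the msgpack payload; a sketch of building one frame by hand (msg contents assumed):

    import struct
    import msgspec

    payload: bytes = msgspec.msgpack.Encoder().encode({'cmd': 'ping'})
    frame: bytes = struct.pack('<I', len(payload)) + payload
    # the receive side (see `_iter_packets()` above) reads exactly
    # 4 header bytes, unpacks `size`, then reads `size` payload bytes.
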
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]: | ||||
|         return self._laddr | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> tuple[str, int]: | ||||
|         return self._raddr | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         return await self._agen.asend(None) | ||||
| 
 | ||||
|     async def drain(self) -> AsyncIterator[dict]: | ||||
|         ''' | ||||
|         Drain the stream's remaining messages sent from | ||||
|         the far end until the connection is closed by | ||||
|         the peer. | ||||
| 
 | ||||
|         ''' | ||||
|         try: | ||||
|             async for msg in self._iter_packets(): | ||||
|                 self.drained.append(msg) | ||||
|         except TransportClosed: | ||||
|             for msg in self.drained: | ||||
|                 yield msg | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._agen | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self.stream.socket.fileno() != -1 | ||||
| 
 | ||||
| 
 | ||||
| def get_msg_transport( | ||||
| 
 | ||||
|     key: tuple[str, str], | ||||
| 
 | ||||
| ) -> Type[MsgTransport]: | ||||
| 
 | ||||
|     return { | ||||
|         ('msgpack', 'tcp'): MsgpackTCPStream, | ||||
|     }[key] | ||||
| 
 | ||||
| 
 | ||||
| class Channel: | ||||
|     ''' | ||||
|     An inter-process channel for communication between (remote) actors. | ||||
| 
 | ||||
|     Wraps a ``MsgStream``: transport + encoding IPC connection. | ||||
| 
 | ||||
|     Currently we only support ``trio.SocketStream`` for transport | ||||
|     (aka TCP) and the ``msgpack`` interchange format via the ``msgspec`` | ||||
|     codec library. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
| 
 | ||||
|         self, | ||||
|         destaddr: tuple[str, int]|None, | ||||
| 
 | ||||
|         msg_transport_type_key: tuple[str, str] = ('msgpack', 'tcp'), | ||||
| 
 | ||||
|         # TODO: optional reconnection support? | ||||
|         # auto_reconnect: bool = False, | ||||
|         # on_reconnect: typing.Callable[..., typing.Awaitable] = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         # self._recon_seq = on_reconnect | ||||
|         # self._autorecon = auto_reconnect | ||||
| 
 | ||||
|         self._destaddr = destaddr | ||||
|         self._transport_key = msg_transport_type_key | ||||
| 
 | ||||
|         # Either created in ``.connect()`` or passed in by | ||||
|         # user in ``.from_stream()``. | ||||
|         self._stream: trio.SocketStream|None = None | ||||
|         self._transport: MsgTransport|None = None | ||||
| 
 | ||||
|         # set after handshake - always uid of far end | ||||
|         self.uid: tuple[str, str]|None = None | ||||
| 
 | ||||
|         self._agen = self._aiter_recv() | ||||
|         self._exc: Exception|None = None  # set if far end actor errors | ||||
|         self._closed: bool = False | ||||
| 
 | ||||
|         # flag set by ``Portal.cancel_actor()`` indicating remote | ||||
|         # (possibly peer) cancellation of the far end actor | ||||
|         # runtime. | ||||
|         self._cancel_called: bool = False | ||||
| 
 | ||||
|     @property | ||||
|     def msgstream(self) -> MsgTransport: | ||||
|         log.info('`Channel.msgstream` is an old name, use `._transport`') | ||||
|         return self._transport | ||||
| 
 | ||||
|     @property | ||||
|     def transport(self) -> MsgTransport: | ||||
|         return self._transport | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_stream( | ||||
|         cls, | ||||
|         stream: trio.SocketStream, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> Channel: | ||||
| 
 | ||||
|         src, dst = get_stream_addrs(stream) | ||||
|         chan = Channel( | ||||
|             destaddr=dst, | ||||
|             **kwargs, | ||||
|         ) | ||||
| 
 | ||||
|         # set immediately here from provided instance | ||||
|         chan._stream: trio.SocketStream = stream | ||||
|         chan.set_msg_transport(stream) | ||||
|         return chan | ||||
| 
 | ||||
|     def set_msg_transport( | ||||
|         self, | ||||
|         stream: trio.SocketStream, | ||||
|         type_key: tuple[str, str]|None = None, | ||||
| 
 | ||||
|     ) -> MsgTransport: | ||||
|         type_key = type_key or self._transport_key | ||||
|         self._transport = get_msg_transport(type_key)(stream) | ||||
|         return self._transport | ||||
| 
 | ||||
|     def __repr__(self) -> str: | ||||
|         if not self._transport: | ||||
|             return '<Channel with inactive transport?>' | ||||
| 
 | ||||
|         return repr( | ||||
|             self._transport.stream.socket._sock | ||||
|         ).replace(  # type: ignore | ||||
|             "socket.socket", | ||||
|             "Channel", | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]|None: | ||||
|         return self._transport.laddr if self._transport else None | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> tuple[str, int]|None: | ||||
|         return self._transport.raddr if self._transport else None | ||||
| 
 | ||||
|     async def connect( | ||||
|         self, | ||||
|         destaddr: tuple[Any, ...] | None = None, | ||||
|         **kwargs | ||||
| 
 | ||||
|     ) -> MsgTransport: | ||||
| 
 | ||||
|         if self.connected(): | ||||
|             raise RuntimeError("channel is already connected?") | ||||
| 
 | ||||
|         destaddr = destaddr or self._destaddr | ||||
|         assert isinstance(destaddr, tuple) | ||||
| 
 | ||||
|         stream = await trio.open_tcp_stream( | ||||
|             *destaddr, | ||||
|             **kwargs | ||||
|         ) | ||||
|         transport = self.set_msg_transport(stream) | ||||
| 
 | ||||
|         log.transport( | ||||
|             f'Opened channel[{type(transport)}]: {self.laddr} -> {self.raddr}' | ||||
|         ) | ||||
|         return transport | ||||
| 
 | ||||
|     async def send( | ||||
|         self, | ||||
|         payload: Any, | ||||
| 
 | ||||
|         # hide_tb: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a coded msg-blob over the transport. | ||||
| 
 | ||||
|         ''' | ||||
|         # __tracebackhide__: bool = hide_tb | ||||
|         log.transport( | ||||
|             '=> send IPC msg:\n\n' | ||||
|             f'{pformat(payload)}\n' | ||||
|         )  # type: ignore | ||||
|         assert self._transport | ||||
| 
 | ||||
|         await self._transport.send( | ||||
|             payload, | ||||
|             # hide_tb=hide_tb, | ||||
|         ) | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         assert self._transport | ||||
|         return await self._transport.recv() | ||||
| 
 | ||||
|         # try: | ||||
|         #     return await self._transport.recv() | ||||
|         # except trio.BrokenResourceError: | ||||
|         #     if self._autorecon: | ||||
|         #         await self._reconnect() | ||||
|         #         return await self.recv() | ||||
|         #     raise | ||||
| 
 | ||||
|     async def aclose(self) -> None: | ||||
| 
 | ||||
|         log.transport( | ||||
|             f'Closing channel to {self.uid} ' | ||||
|             f'{self.laddr} -> {self.raddr}' | ||||
|         ) | ||||
|         assert self._transport | ||||
|         await self._transport.stream.aclose() | ||||
|         self._closed = True | ||||
| 
 | ||||
|     async def __aenter__(self): | ||||
|         await self.connect() | ||||
|         return self | ||||
| 
 | ||||
|     async def __aexit__(self, *args): | ||||
|         await self.aclose()  # takes no args (see signature above) | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._agen | ||||
| 
 | ||||
|     # async def _reconnect(self) -> None: | ||||
|     #     """Handle connection failures by polling until a reconnect can be | ||||
|     #     established. | ||||
|     #     """ | ||||
|     #     down = False | ||||
|     #     while True: | ||||
|     #         try: | ||||
|     #             with trio.move_on_after(3) as cancel_scope: | ||||
|     #                 await self.connect() | ||||
|     #             cancelled = cancel_scope.cancelled_caught | ||||
|     #             if cancelled: | ||||
|     #                 log.transport( | ||||
|     #                     "Reconnect timed out after 3 seconds, retrying...") | ||||
|     #                 continue | ||||
|     #             else: | ||||
|     #                 log.transport("Stream connection re-established!") | ||||
| 
 | ||||
|     #                 # TODO: run any reconnection sequence | ||||
|     #                 # on_recon = self._recon_seq | ||||
|     #                 # if on_recon: | ||||
|     #                 #     await on_recon(self) | ||||
| 
 | ||||
|     #                 break | ||||
|     #         except (OSError, ConnectionRefusedError): | ||||
|     #             if not down: | ||||
|     #                 down = True | ||||
|     #                 log.transport( | ||||
|     #                     f"Connection to {self.raddr} went down, waiting" | ||||
|     #                     " for re-establishment") | ||||
|     #             await trio.sleep(1) | ||||
| 
 | ||||
|     async def _aiter_recv( | ||||
|         self | ||||
|     ) -> AsyncGenerator[Any, None]: | ||||
|         ''' | ||||
|         Async iterate items from underlying stream. | ||||
| 
 | ||||
|         ''' | ||||
|         assert self._transport | ||||
|         while True: | ||||
|             try: | ||||
|                 async for item in self._transport: | ||||
|                     yield item | ||||
|                     # sent = yield item | ||||
|                     # if sent is not None: | ||||
|                     #     # optimization, passing None through all the | ||||
|                     #     # time is pointless | ||||
|                     #     await self._transport.send(sent) | ||||
|             except trio.BrokenResourceError: | ||||
| 
 | ||||
|                 # if not self._autorecon: | ||||
|                 raise | ||||
| 
 | ||||
|             await self.aclose() | ||||
| 
 | ||||
|             # if self._autorecon:  # attempt reconnect | ||||
|             #     await self._reconnect() | ||||
|             #     continue | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self._transport.connected() if self._transport else False | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def _connect_chan( | ||||
|     host: str, | ||||
|     port: int | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[Channel, None]: | ||||
|     ''' | ||||
|     Create and connect a channel with disconnect on context manager | ||||
|     teardown. | ||||
| 
 | ||||
|     ''' | ||||
|     chan = Channel((host, port)) | ||||
|     await chan.connect() | ||||
|     yield chan | ||||
|     await chan.aclose() | ||||
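| # NOTE: a rough usage sketch of the above; the addr and payload used | ||||
| # here are hypothetical: | ||||
| # | ||||
| #   async with _connect_chan('127.0.0.1', 1616) as chan: | ||||
| #       await chan.send('ping')      # some serializable payload | ||||
| #       async for msg in chan:       # drives `._aiter_recv()` | ||||
| #           break | ||||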
|  | @ -31,7 +31,7 @@ from typing import ( | |||
|     Any, | ||||
|     Callable, | ||||
|     AsyncGenerator, | ||||
|     TYPE_CHECKING, | ||||
|     # Type, | ||||
| ) | ||||
| from functools import partial | ||||
| from dataclasses import dataclass | ||||
|  | @ -39,24 +39,16 @@ import warnings | |||
| 
 | ||||
| import trio | ||||
| 
 | ||||
| from .trionics import ( | ||||
|     maybe_open_nursery, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from .trionics import maybe_open_nursery | ||||
| from ._state import ( | ||||
|     current_actor, | ||||
| ) | ||||
| from .ipc import Channel | ||||
| from ._ipc import Channel | ||||
| from .log import get_logger | ||||
| from .msg import ( | ||||
|     # Error, | ||||
|     PayloadMsg, | ||||
|     NamespacePath, | ||||
|     Return, | ||||
| ) | ||||
| from .msg import NamespacePath | ||||
| from ._exceptions import ( | ||||
|     unpack_error, | ||||
|     NoResult, | ||||
|     TransportClosed, | ||||
| ) | ||||
| from ._context import ( | ||||
|     Context, | ||||
|  | @ -66,12 +58,41 @@ from ._streaming import ( | |||
|     MsgStream, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: rename to `unwrap_result()` and use | ||||
| # `._raise_from_no_key_in_msg()` (after tweak to | ||||
| # accept a `chan: Channel` arg) in key block! | ||||
| def _unwrap_msg( | ||||
|     msg: dict[str, Any], | ||||
|     channel: Channel, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
| ) -> Any: | ||||
|     ''' | ||||
|     Unwrap a final result from a `{return: <Any>}` IPC msg. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|     try: | ||||
|         return msg['return'] | ||||
|     except KeyError as ke: | ||||
| 
 | ||||
|         # internal error should never get here | ||||
|         assert msg.get('cid'), ( | ||||
|             "Received internal error at portal?" | ||||
|         ) | ||||
| 
 | ||||
|         raise unpack_error( | ||||
|             msg, | ||||
|             channel | ||||
|         ) from ke | ||||
| 
 | ||||
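| # NOTE: a behavior sketch for `_unwrap_msg()`; the values are made up: | ||||
| # | ||||
| #   _unwrap_msg({'cid': '<cid>', 'return': 42}, chan)    # -> 42 | ||||
| #   _unwrap_msg({'cid': '<cid>', 'error': {...}}, chan)  # raises the boxed | ||||
| #   # remote error via `unpack_error()`, chained from the `KeyError` | ||||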
| 
 | ||||
| class Portal: | ||||
|     ''' | ||||
|     A 'portal' to a memory-domain-separated `Actor`. | ||||
|  | @ -95,34 +116,17 @@ class Portal: | |||
|     # connected (peer) actors. | ||||
|     cancel_timeout: float = 0.5 | ||||
| 
 | ||||
|     def __init__( | ||||
|         self, | ||||
|         channel: Channel, | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self._chan: Channel = channel | ||||
|     def __init__(self, channel: Channel) -> None: | ||||
|         self.chan = channel | ||||
|         # during the portal's lifetime | ||||
|         self._final_result_pld: Any|None = None | ||||
|         self._final_result_msg: PayloadMsg|None = None | ||||
|         self._result_msg: dict|None = None | ||||
| 
 | ||||
|         # When set to a ``Context`` (when _submit_for_result is called) | ||||
|         # it is expected that ``result()`` will be awaited at some | ||||
|         # point. | ||||
|         self._expect_result_ctx: Context|None = None | ||||
|         self._expect_result: Context | None = None | ||||
|         self._streams: set[MsgStream] = set() | ||||
| 
 | ||||
|         # TODO, this should be PRIVATE (and never used publicly)! since it's just | ||||
|         # a cached ref to the local runtime instead of calling | ||||
|         # `current_actor()` everywhere.. XD | ||||
|         self.actor: Actor = current_actor() | ||||
| 
 | ||||
|     @property | ||||
|     def chan(self) -> Channel: | ||||
|         ''' | ||||
|         Ref to this ctx's underlying `tractor.ipc.Channel`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._chan | ||||
|         self.actor = current_actor() | ||||
| 
 | ||||
|     @property | ||||
|     def channel(self) -> Channel: | ||||
|  | @ -136,8 +140,6 @@ class Portal: | |||
|         ) | ||||
|         return self.chan | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def _submit_for_result( | ||||
|         self, | ||||
|         ns: str, | ||||
|  | @ -145,34 +147,32 @@ class Portal: | |||
|         **kwargs | ||||
|     ) -> None: | ||||
| 
 | ||||
|         if self._expect_result_ctx is not None: | ||||
|             raise RuntimeError( | ||||
|                 'A pending main result has already been submitted' | ||||
|             ) | ||||
| 
 | ||||
|         self._expect_result_ctx: Context = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             nsf=NamespacePath(f'{ns}:{func}'), | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         assert self._expect_result is None, ( | ||||
|             "A pending main result has already been submitted" | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: we should deprecate this API right? since if we remove | ||||
|     # `.run_in_actor()` (and instead move it to a `.highlevel` | ||||
|     # wrapper api (around a single `.open_context()` call) we don't | ||||
|     # really have any notion of a "main" remote task any more? | ||||
|     # | ||||
|     # @api_frame | ||||
|     async def wait_for_result( | ||||
|         self._expect_result = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             nsf=NamespacePath(f'{ns}:{func}'), | ||||
|             kwargs=kwargs | ||||
|         ) | ||||
| 
 | ||||
|     async def _return_once( | ||||
|         self, | ||||
|         hide_tb: bool = True, | ||||
|     ) -> Any: | ||||
|         ctx: Context, | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
| 
 | ||||
|         assert ctx._remote_func_type == 'asyncfunc'  # single response | ||||
|         msg: dict = await ctx._recv_chan.receive() | ||||
|         return msg | ||||
| 
 | ||||
|     async def result(self) -> Any: | ||||
|         ''' | ||||
|         Return the final result delivered by a `Return`-msg from the | ||||
|         remote peer actor's "main" task's `return` statement. | ||||
|         Return the result(s) from the remote actor's "main" task. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         # __tracebackhide__ = True | ||||
|         # Check for non-rpc errors slapped on the | ||||
|         # channel for which we always raise | ||||
|         exc = self.channel._exc | ||||
|  | @ -180,66 +180,32 @@ class Portal: | |||
|             raise exc | ||||
| 
 | ||||
|         # not expecting a "main" result | ||||
|         if self._expect_result_ctx is None: | ||||
|             peer_id: str = f'{self.channel.aid.reprol()!r}' | ||||
|         if self._expect_result is None: | ||||
|             log.warning( | ||||
|                 f'Portal to peer {peer_id} will not deliver a final result?\n' | ||||
|                 f'\n' | ||||
|                 f'Context.result() can only be called by the parent of ' | ||||
|                 f'a sub-actor when it was spawned with ' | ||||
|                 f'`ActorNursery.run_in_actor()`' | ||||
|                 f'\n' | ||||
|                 f'Further, this `ActorNursery`-method-API will be ' | ||||
|                 f'deprecated in the near future!\n' | ||||
|             ) | ||||
|                 f"Portal for {self.channel.uid} not expecting a final" | ||||
|                 " result?\nresult() should only be called if subactor" | ||||
|                 " was spawned with `ActorNursery.run_in_actor()`") | ||||
|             return NoResult | ||||
| 
 | ||||
|         # expecting a "main" result | ||||
|         assert self._expect_result_ctx | ||||
|         assert self._expect_result | ||||
| 
 | ||||
|         if self._final_result_msg is None: | ||||
|             try: | ||||
|                 ( | ||||
|                     self._final_result_msg, | ||||
|                     self._final_result_pld, | ||||
|                 ) = await self._expect_result_ctx._pld_rx.recv_msg( | ||||
|                     ipc=self._expect_result_ctx, | ||||
|                     expect_msg=Return, | ||||
|                 ) | ||||
|             except BaseException as err: | ||||
|                 # TODO: wrap this into `@api_frame` optionally with | ||||
|                 # some kinda filtering mechanism like log levels? | ||||
|                 __tracebackhide__: bool = False | ||||
|                 raise err | ||||
|         if self._result_msg is None: | ||||
|             self._result_msg = await self._return_once( | ||||
|                 self._expect_result | ||||
|             ) | ||||
| 
 | ||||
|         return self._final_result_pld | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def result( | ||||
|         self, | ||||
|         *args, | ||||
|         **kwargs, | ||||
|     ) -> Any|Exception: | ||||
|         typname: str = type(self).__name__ | ||||
|         log.warning( | ||||
|             f'`{typname}.result()` is DEPRECATED!\n' | ||||
|             f'\n' | ||||
|             f'Use `{typname}.wait_for_result()` instead!\n' | ||||
|         ) | ||||
|         return await self.wait_for_result( | ||||
|             *args, | ||||
|             **kwargs, | ||||
|         return _unwrap_msg( | ||||
|             self._result_msg, | ||||
|             self.channel, | ||||
|         ) | ||||
| 
 | ||||
|     async def _cancel_streams(self): | ||||
|         # terminate all locally running async generator | ||||
|         # IPC calls | ||||
|         if self._streams: | ||||
|             peer_id: str = f'{self.channel.aid.reprol()!r}' | ||||
|             report: str = ( | ||||
|                 f'Cancelling all msg-streams with {peer_id}\n' | ||||
|             ) | ||||
|             log.cancel( | ||||
|                 f"Cancelling all streams with {self.channel.uid}") | ||||
|             for stream in self._streams.copy(): | ||||
|                 try: | ||||
|                     await stream.aclose() | ||||
|  | @ -248,18 +214,10 @@ class Portal: | |||
|                     # (unless of course at some point down the road we | ||||
|                     # won't expect this to always be the case or need to | ||||
|                     # detect it for respawning purposes?) | ||||
|                     report += ( | ||||
|                         f'->) {stream!r} already closed\n' | ||||
|                     ) | ||||
| 
 | ||||
|             log.cancel(report) | ||||
|                     log.debug(f"{stream} was already closed.") | ||||
| 
 | ||||
|     async def aclose(self): | ||||
|         log.debug( | ||||
|             f'Closing portal\n' | ||||
|             f'>}}\n' | ||||
|             f'|_{self}\n' | ||||
|         ) | ||||
|         log.debug(f"Closing {self}") | ||||
|         # TODO: once we move to implementing our own `ReceiveChannel` | ||||
|         # (including remote task cancellation inside its `.aclose()`) | ||||
|         # we'll need to .aclose all those channels here | ||||
|  | @ -282,25 +240,23 @@ class Portal: | |||
|         purpose. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         chan: Channel = self.channel | ||||
|         peer_id: str = f'{self.channel.aid.reprol()!r}' | ||||
|         if not chan.connected(): | ||||
|             log.runtime( | ||||
|                 f'Peer {peer_id} is already disconnected\n' | ||||
|                 '-> skipping cancel request..\n' | ||||
|                 'This channel is already closed, skipping cancel request..' | ||||
|             ) | ||||
|             return False | ||||
| 
 | ||||
|         reminfo: str = ( | ||||
|             f'`Portal.cancel_actor()` => {self.channel.uid}\n' | ||||
|             f' |_{chan}\n' | ||||
|         ) | ||||
|         log.cancel( | ||||
|             f'Sending actor-runtime-cancel-req to peer\n' | ||||
|             f'\n' | ||||
|             f'c)=> {peer_id}\n' | ||||
|             f'Sending runtime `.cancel()` request to peer\n\n' | ||||
|             f'{reminfo}' | ||||
|         ) | ||||
| 
 | ||||
|         # XXX the one spot we set it? | ||||
|         chan._cancel_called: bool = True | ||||
|         self.channel._cancel_called: bool = True | ||||
|         try: | ||||
|             # send cancel cmd - might not get response | ||||
|             # XXX: sure would be nice to make this work with | ||||
|  | @ -321,47 +277,24 @@ class Portal: | |||
|                 # may timeout and we never get an ack (obvi racy) | ||||
|                 # but that doesn't mean it wasn't cancelled. | ||||
|                 log.debug( | ||||
|                     f'May have failed to cancel peer?\n' | ||||
|                     f'\n' | ||||
|                     f'c)=?> {peer_id}\n' | ||||
|                     'May have failed to cancel peer?\n' | ||||
|                     f'{reminfo}' | ||||
|                 ) | ||||
| 
 | ||||
|             # if we get here some weird cancellation case happened | ||||
|             return False | ||||
| 
 | ||||
|         except ( | ||||
|             # XXX, should never really get raised unless we aren't | ||||
|             # wrapping them in the below type by mistake? | ||||
|             # | ||||
|             # Leaving the catch here for now until we're very sure | ||||
|             # all the cases (for various tpt protos) have indeed been | ||||
|             # re-wrapped ;p | ||||
|             trio.ClosedResourceError, | ||||
|             trio.BrokenResourceError, | ||||
| 
 | ||||
|             TransportClosed, | ||||
|         ) as tpt_err: | ||||
|             ipc_borked_report: str = ( | ||||
|                 f'IPC for actor already closed/broken?\n\n' | ||||
|                 f'\n' | ||||
|                 f'c)=x> {peer_id}\n' | ||||
|         ): | ||||
|             log.debug( | ||||
|                 'IPC chan for actor already closed or broken?\n\n' | ||||
|                 f'{self.channel.uid}\n' | ||||
|                 f' |_{self.channel}\n' | ||||
|             ) | ||||
|             match tpt_err: | ||||
|                 case TransportClosed(): | ||||
|                     log.debug(ipc_borked_report) | ||||
|                 case _: | ||||
|                     ipc_borked_report += ( | ||||
|                         f'\n' | ||||
|                         f'Unhandled low-level transport-closed/error during\n' | ||||
|                         f'`Portal.cancel_actor()` request?\n' | ||||
|                         f'<{type(tpt_err).__name__}( {tpt_err} )>\n' | ||||
|                     ) | ||||
|                     log.warning(ipc_borked_report) | ||||
| 
 | ||||
|             return False | ||||
| 
 | ||||
|     # TODO: do we still need this for low level `Actor`-runtime | ||||
|     # method calls or can we also remove it? | ||||
|     async def run_from_ns( | ||||
|         self, | ||||
|         namespace_path: str, | ||||
|  | @ -384,23 +317,21 @@ class Portal: | |||
|           internals! | ||||
| 
 | ||||
|         ''' | ||||
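|         # usage sketch, a runtime-internal style call where the | ||||
|         # target fn-name is hypothetical: | ||||
|         #   await portal.run_from_ns('self', 'cancel_task', cid=cid) | ||||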
|         __runtimeframe__: int = 1  # noqa | ||||
|         nsf = NamespacePath( | ||||
|             f'{namespace_path}:{function_name}' | ||||
|         ) | ||||
|         ctx: Context = await self.actor.start_remote_task( | ||||
|         ctx = await self.actor.start_remote_task( | ||||
|             chan=self.channel, | ||||
|             nsf=nsf, | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         return await ctx._pld_rx.recv_pld( | ||||
|             ipc=ctx, | ||||
|             expect_msg=Return, | ||||
|         ctx._portal = self | ||||
|         msg = await self._return_once(ctx) | ||||
|         return _unwrap_msg( | ||||
|             msg, | ||||
|             self.channel, | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def run( | ||||
|         self, | ||||
|         func: str, | ||||
|  | @ -416,8 +347,6 @@ class Portal: | |||
|         remote rpc task or a local async generator instance. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         if isinstance(func, str): | ||||
|             warnings.warn( | ||||
|                 "`Portal.run(namespace: str, funcname: str)` is now" | ||||
|  | @ -448,15 +377,13 @@ class Portal: | |||
|             self.channel, | ||||
|             nsf=nsf, | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         return await ctx._pld_rx.recv_pld( | ||||
|             ipc=ctx, | ||||
|             expect_msg=Return, | ||||
|         ctx._portal = self | ||||
|         return _unwrap_msg( | ||||
|             await self._return_once(ctx), | ||||
|             self.channel, | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     @acm | ||||
|     async def open_stream_from( | ||||
|         self, | ||||
|  | @ -464,14 +391,6 @@ class Portal: | |||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> AsyncGenerator[MsgStream, None]: | ||||
|         ''' | ||||
|         Legacy one-way streaming API. | ||||
| 
 | ||||
|         TODO: re-impl on top of `Portal.open_context()` + an async gen | ||||
|         around `Context.open_stream()`. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         if not inspect.isasyncgenfunction(async_gen_func): | ||||
|             if not ( | ||||
|  | @ -485,8 +404,8 @@ class Portal: | |||
|             self.channel, | ||||
|             nsf=NamespacePath.from_ref(async_gen_func), | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         ctx._portal = self | ||||
| 
 | ||||
|         # ensure receive-only stream entrypoint | ||||
|         assert ctx._remote_func_type == 'asyncgen' | ||||
|  | @ -495,13 +414,13 @@ class Portal: | |||
|             # deliver receive only stream | ||||
|             async with MsgStream( | ||||
|                 ctx=ctx, | ||||
|                 rx_chan=ctx._rx_chan, | ||||
|             ) as stream: | ||||
|                 self._streams.add(stream) | ||||
|                 ctx._stream = stream | ||||
|                 yield stream | ||||
|                 rx_chan=ctx._recv_chan, | ||||
|             ) as rchan: | ||||
|                 self._streams.add(rchan) | ||||
|                 yield rchan | ||||
| 
 | ||||
|         finally: | ||||
| 
 | ||||
|             # cancel the far end task on consumer close | ||||
|             # NOTE: this is a special case since we assume that if using | ||||
|             # this ``.open_stream_from()`` api, the stream is a one-shot | ||||
|  | @ -513,17 +432,14 @@ class Portal: | |||
|                 with trio.CancelScope(shield=True): | ||||
|                     await ctx.cancel() | ||||
| 
 | ||||
|             except trio.ClosedResourceError as cre: | ||||
|             except trio.ClosedResourceError: | ||||
|                 # if the far end terminates before we send a cancel the | ||||
|                 # underlying transport-channel may already be closed. | ||||
|                 log.cancel( | ||||
|                     f'Context.cancel() -> {cre!r}\n' | ||||
|                     f'cid: {ctx.cid!r} already closed?\n' | ||||
|                 ) | ||||
|                 log.cancel(f'Context {ctx} was already closed?') | ||||
| 
 | ||||
|             # XXX: should this always be done? | ||||
|             # await recv_chan.aclose() | ||||
|             self._streams.remove(stream) | ||||
|             self._streams.remove(rchan) | ||||
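|     # NOTE: a rough usage sketch; `fetch_ticks` is a stand-in for any | ||||
|     # remotely-invocable async-gen func: | ||||
|     # | ||||
|     #   async with portal.open_stream_from(fetch_ticks, n=100) as stream: | ||||
|     #       async for tick in stream: | ||||
|     #           ... | ||||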
| 
 | ||||
|     # NOTE: impl is found in the `._context` mod to make | ||||
|     # reading/grokking the details simpler code-org-wise. This | ||||
|  | @ -556,12 +472,8 @@ class LocalPortal: | |||
|         return its result. | ||||
| 
 | ||||
|         ''' | ||||
|         obj = ( | ||||
|             self.actor | ||||
|             if ns == 'self' | ||||
|             else importlib.import_module(ns) | ||||
|         ) | ||||
|         func: Callable = getattr(obj, func_name) | ||||
|         obj = self.actor if ns == 'self' else importlib.import_module(ns) | ||||
|         func = getattr(obj, func_name) | ||||
|         return await func(**kwargs) | ||||
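|     # NOTE: `ns='self'` dispatches to the wrapped `Actor` instance while | ||||
|     # any other value is treated as an importable module path, e.g. the | ||||
|     # (hypothetical) call: | ||||
|     #   await local_portal.run_from_ns('some.mod', 'an_async_fn', x=1) | ||||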
| 
 | ||||
| 
 | ||||
|  | @ -569,7 +481,7 @@ class LocalPortal: | |||
| async def open_portal( | ||||
| 
 | ||||
|     channel: Channel, | ||||
|     tn: trio.Nursery|None = None, | ||||
|     nursery: trio.Nursery|None = None, | ||||
|     start_msg_loop: bool = True, | ||||
|     shield: bool = False, | ||||
| 
 | ||||
|  | @ -577,39 +489,31 @@ async def open_portal( | |||
|     ''' | ||||
|     Open a ``Portal`` through the provided ``channel``. | ||||
| 
 | ||||
|     Spawns a background task to handle RPC processing, normally | ||||
|     done by the actor-runtime implicitly via a call to | ||||
|     `._rpc.process_messages()`, just after connection establishment. | ||||
|     Spawns a background task to handle message processing (normally | ||||
|     done by the actor-runtime implicitly). | ||||
| 
 | ||||
|     ''' | ||||
|     actor = current_actor() | ||||
|     assert actor | ||||
|     was_connected: bool = False | ||||
|     was_connected = False | ||||
| 
 | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         maybe_open_nursery( | ||||
|             tn, | ||||
|             shield=shield, | ||||
|         ) as tn, | ||||
|     ): | ||||
|     async with maybe_open_nursery(nursery, shield=shield) as nursery: | ||||
| 
 | ||||
|         if not channel.connected(): | ||||
|             await channel.connect() | ||||
|             was_connected = True | ||||
| 
 | ||||
|         if channel.aid is None: | ||||
|             await channel._do_handshake( | ||||
|                 aid=actor.aid, | ||||
|             ) | ||||
|         if channel.uid is None: | ||||
|             await actor._do_handshake(channel) | ||||
| 
 | ||||
|         msg_loop_cs: trio.CancelScope|None = None | ||||
|         if start_msg_loop: | ||||
|             from . import _rpc | ||||
|             msg_loop_cs = await tn.start( | ||||
|             from ._runtime import process_messages | ||||
|             msg_loop_cs = await nursery.start( | ||||
|                 partial( | ||||
|                     _rpc.process_messages, | ||||
|                     chan=channel, | ||||
|                     process_messages, | ||||
|                     actor, | ||||
|                     channel, | ||||
|                     # if the local task is cancelled we want to keep | ||||
|                     # the msg loop running until our block ends | ||||
|                     shield=True, | ||||
|  | @ -622,10 +526,12 @@ async def open_portal( | |||
|             await portal.aclose() | ||||
| 
 | ||||
|             if was_connected: | ||||
|                 await channel.aclose() | ||||
|                 # gracefully signal remote channel-msg loop | ||||
|                 await channel.send(None) | ||||
|                 # await channel.aclose() | ||||
| 
 | ||||
|             # cancel background msg loop task | ||||
|             if msg_loop_cs is not None: | ||||
|             if msg_loop_cs: | ||||
|                 msg_loop_cs.cancel() | ||||
| 
 | ||||
|             tn.cancel_scope.cancel() | ||||
|             nursery.cancel_scope.cancel() | ||||
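| # NOTE: a rough usage sketch tying the above together; the addr and | ||||
| # target names are hypothetical: | ||||
| # | ||||
| #   async with _connect_chan('127.0.0.1', 1616) as chan: | ||||
| #       async with open_portal(chan) as portal: | ||||
| #           await portal.run_from_ns('some.mod', 'an_async_fn') | ||||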
826 tractor/_root.py
							|  | @ -18,142 +18,55 @@ | |||
| Root actor runtime ignition(s). | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from contextlib import asynccontextmanager | ||||
| from functools import partial | ||||
| import importlib | ||||
| import inspect | ||||
| import logging | ||||
| import os | ||||
| import signal | ||||
| import sys | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
| ) | ||||
| import os | ||||
| import warnings | ||||
| 
 | ||||
| 
 | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| import trio | ||||
| 
 | ||||
| from . import _runtime | ||||
| from .devx import ( | ||||
|     debug, | ||||
|     _frame_stack, | ||||
|     pformat as _pformat, | ||||
| from ._runtime import ( | ||||
|     Actor, | ||||
|     Arbiter, | ||||
|     async_main, | ||||
| ) | ||||
| from .devx import _debug | ||||
| from . import _spawn | ||||
| from . import _state | ||||
| from . import log | ||||
| from .ipc import ( | ||||
|     _connect_chan, | ||||
| ) | ||||
| from ._addr import ( | ||||
|     Address, | ||||
|     UnwrappedAddress, | ||||
|     default_lo_addrs, | ||||
|     mk_uuid, | ||||
|     wrap_address, | ||||
| ) | ||||
| from .trionics import ( | ||||
|     is_multi_cancelled, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     RuntimeFailure, | ||||
| ) | ||||
| from ._ipc import _connect_chan | ||||
| from ._exceptions import is_multi_cancelled | ||||
| 
 | ||||
| 
 | ||||
| # set at startup and after forks | ||||
| _default_host: str = '127.0.0.1' | ||||
| _default_port: int = 1616 | ||||
| 
 | ||||
| # default registry always on localhost | ||||
| _default_lo_addrs: list[tuple[str, int]] = [( | ||||
|     _default_host, | ||||
|     _default_port, | ||||
| )] | ||||
| 
 | ||||
| 
 | ||||
| logger = log.get_logger('tractor') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: stick this in a `@acm` defined in `devx.debug`? | ||||
| # -[ ] also maybe consider making this a `wrapt`-deco to | ||||
| #     save an indent level? | ||||
| # | ||||
| @acm | ||||
| async def maybe_block_bp( | ||||
|     debug_mode: bool, | ||||
|     maybe_enable_greenback: bool, | ||||
| ) -> bool: | ||||
|     # Override the global debugger hook to make it play nice with | ||||
|     # ``trio``, see much discussion in: | ||||
|     # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018 | ||||
|     builtin_bp_handler: Callable = sys.breakpointhook | ||||
|     orig_bp_path: str|None = os.environ.get( | ||||
|         'PYTHONBREAKPOINT', | ||||
|         None, | ||||
|     ) | ||||
|     bp_blocked: bool | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and maybe_enable_greenback | ||||
|         and ( | ||||
|             maybe_mod := await debug.maybe_init_greenback( | ||||
|                 raise_not_found=False, | ||||
|             ) | ||||
|         ) | ||||
|     ): | ||||
|         logger.info( | ||||
|             f'Found `greenback` installed @ {maybe_mod}\n' | ||||
|             f'Enabling `tractor.pause_from_sync()` support!\n' | ||||
|         ) | ||||
|         os.environ['PYTHONBREAKPOINT'] = ( | ||||
|             'tractor.devx.debug._sync_pause_from_builtin' | ||||
|         ) | ||||
|         _state._runtime_vars['use_greenback'] = True | ||||
|         bp_blocked = False | ||||
| 
 | ||||
|     else: | ||||
|         # TODO: disable `breakpoint()` by default (without | ||||
|         # `greenback`) since it will break any multi-actor | ||||
|         # usage by a clobbered TTY's stdstreams! | ||||
|         def block_bps(*args, **kwargs): | ||||
|             raise RuntimeError( | ||||
|                 'Trying to use `breakpoint()` eh?\n\n' | ||||
|                 'Welp, `tractor` blocks `breakpoint()` built-in calls by default!\n' | ||||
|                 'If you need to use it please install `greenback` and set ' | ||||
|                 '`debug_mode=True` when opening the runtime ' | ||||
|                 '(either via `.open_nursery()` or `open_root_actor()`)\n' | ||||
|             ) | ||||
| 
 | ||||
|         sys.breakpointhook = block_bps | ||||
|         # lol ok, | ||||
|         # https://docs.python.org/3/library/sys.html#sys.breakpointhook | ||||
|         os.environ['PYTHONBREAKPOINT'] = "0" | ||||
|         bp_blocked = True | ||||
| 
 | ||||
|     try: | ||||
|         yield bp_blocked | ||||
|     finally: | ||||
|         # restore any prior built-in `breakpoint()` hook state | ||||
|         if builtin_bp_handler is not None: | ||||
|             sys.breakpointhook = builtin_bp_handler | ||||
| 
 | ||||
|         if orig_bp_path is not None: | ||||
|             os.environ['PYTHONBREAKPOINT'] = orig_bp_path | ||||
| 
 | ||||
|         else: | ||||
|             # clear env back to having no entry | ||||
|             os.environ.pop('PYTHONBREAKPOINT', None) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| @asynccontextmanager | ||||
| async def open_root_actor( | ||||
| 
 | ||||
|     *, | ||||
|     # defaults are above | ||||
|     registry_addrs: list[UnwrappedAddress]|None = None, | ||||
|     registry_addrs: list[tuple[str, int]]|None = None, | ||||
| 
 | ||||
|     # defaults are above | ||||
|     arbiter_addr: tuple[UnwrappedAddress]|None = None, | ||||
| 
 | ||||
|     enable_transports: list[ | ||||
|         # TODO, this should eventually be the pairs as | ||||
|         # defined by (codec, proto) as on `MsgTransport`. | ||||
|         _state.TransportProtocolKey, | ||||
|     ]|None = None, | ||||
|     arbiter_addr: tuple[str, int]|None = None, | ||||
| 
 | ||||
|     name: str|None = 'root', | ||||
| 
 | ||||
|  | @ -164,10 +77,6 @@ async def open_root_actor( | |||
| 
 | ||||
|     # enables the multi-process debugger support | ||||
|     debug_mode: bool = False, | ||||
|     maybe_enable_greenback: bool = False,  # `.pause_from_sync()/breakpoint()` support | ||||
|     # ^XXX NOTE^ the perf implications of use, | ||||
|     # https://greenback.readthedocs.io/en/latest/principle.html#performance | ||||
|     enable_stack_on_sig: bool = False, | ||||
| 
 | ||||
|     # internal logging | ||||
|     loglevel: str|None = None, | ||||
|  | @ -179,448 +88,298 @@ async def open_root_actor( | |||
|     # and that this call creates it. | ||||
|     ensure_registry: bool = False, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
|     # XXX, proxied directly to `.devx.debug._maybe_enter_pm()` | ||||
|     # for REPL-entry logic. | ||||
|     debug_filter: Callable[ | ||||
|         [BaseException|BaseExceptionGroup], | ||||
|         bool, | ||||
|     ] = lambda err: not is_multi_cancelled(err), | ||||
| 
 | ||||
|     # TODO, a way for actors to augment passing derived | ||||
|     # read-only state to sublayers? | ||||
|     # extra_rt_vars: dict|None = None, | ||||
| 
 | ||||
| ) -> _runtime.Actor: | ||||
| ) -> Actor: | ||||
|     ''' | ||||
|     Initialize the `tractor` runtime by starting a "root actor" in | ||||
|     a parent-most Python process. | ||||
| 
 | ||||
|     All (disjoint) actor-process-trees-as-programs are created via | ||||
|     this entrypoint. | ||||
|     Runtime init entry point for ``tractor``. | ||||
| 
 | ||||
|     ''' | ||||
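|     # NOTE: a rough usage sketch; sub-actor spawning elided: | ||||
|     # | ||||
|     #   async def main(): | ||||
|     #       async with open_root_actor(name='root'): | ||||
|     #           ...  # spawn sub-actors, open portals, etc. | ||||
|     # | ||||
|     #   trio.run(main) | ||||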
|     # XXX NEVER allow nested actor-trees! | ||||
|     if already_actor := _state.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     # Override the global debugger hook to make it play nice with | ||||
|     # ``trio``, see much discussion in: | ||||
|     # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018 | ||||
|     builtin_bp_handler = sys.breakpointhook | ||||
|     orig_bp_path: str | None = os.environ.get('PYTHONBREAKPOINT', None) | ||||
|     os.environ['PYTHONBREAKPOINT'] = 'tractor.devx._debug.pause_from_sync' | ||||
| 
 | ||||
|     # attempt to retrieve ``trio``'s sigint handler and stash it | ||||
|     # on our debugger lock state. | ||||
|     _debug.Lock._trio_handler = signal.getsignal(signal.SIGINT) | ||||
| 
 | ||||
|     # mark top most level process as root actor | ||||
|     _state._runtime_vars['_is_root'] = True | ||||
| 
 | ||||
|     # caps based rpc list | ||||
|     enable_modules = ( | ||||
|         enable_modules | ||||
|         or | ||||
|         [] | ||||
|     ) | ||||
| 
 | ||||
|     if rpc_module_paths: | ||||
|         warnings.warn( | ||||
|             "`rpc_module_paths` is now deprecated, use " | ||||
|             " `enable_modules` instead.", | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         enable_modules.extend(rpc_module_paths) | ||||
| 
 | ||||
|     if start_method is not None: | ||||
|         _spawn.try_set_start_method(start_method) | ||||
| 
 | ||||
|     if arbiter_addr is not None: | ||||
|         warnings.warn( | ||||
|             '`arbiter_addr` is now deprecated\n' | ||||
|             'Use `registry_addrs: list[tuple]` instead..', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         registry_addrs = [arbiter_addr] | ||||
| 
 | ||||
|     registry_addrs: list[tuple[str, int]] = ( | ||||
|         registry_addrs | ||||
|         or | ||||
|         _default_lo_addrs | ||||
|     ) | ||||
|     assert registry_addrs | ||||
| 
 | ||||
|     loglevel = ( | ||||
|         loglevel | ||||
|         or log._default_loglevel | ||||
|     ).upper() | ||||
| 
 | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and _spawn._spawn_method == 'trio' | ||||
|     ): | ||||
|         rtvs: dict[str, Any] = _state._runtime_vars | ||||
|         root_mailbox: list[str, int] = rtvs['_root_mailbox'] | ||||
|         registry_addrs: list[list[str, int]] = rtvs['_registry_addrs'] | ||||
|         raise RuntimeFailure( | ||||
|             f'A current actor already exists !?\n' | ||||
|             f'({already_actor})\n' | ||||
|             f'\n' | ||||
|             f'You can NOT open a second root actor from within ' | ||||
|             f'an existing tree and the current root of this ' | ||||
|             f'already exists !!\n' | ||||
|             f'\n' | ||||
|             f'_root_mailbox: {root_mailbox!r}\n' | ||||
|             f'_registry_addrs: {registry_addrs!r}\n' | ||||
|         _state._runtime_vars['_debug_mode'] = True | ||||
| 
 | ||||
|         # expose internal debug module to every actor allowing for | ||||
|         # use of ``await tractor.pause()`` | ||||
|         enable_modules.append('tractor.devx._debug') | ||||
| 
 | ||||
|         # if debug mode gets enabled *at least* use that level of | ||||
|         # logging for some informative console prompts. | ||||
|         if ( | ||||
|             logging.getLevelName( | ||||
|                 # lul, need the upper case for the -> int map? | ||||
|                 # sweet "dynamic function behaviour" stdlib... | ||||
|                 loglevel, | ||||
|             ) > logging.getLevelName('PDB') | ||||
|         ): | ||||
|             loglevel = 'PDB' | ||||
| 
 | ||||
|     elif debug_mode: | ||||
|         raise RuntimeError( | ||||
|             "Debug mode is only supported for the `trio` backend!" | ||||
|         ) | ||||
| 
 | ||||
|     async with maybe_block_bp( | ||||
|         debug_mode=debug_mode, | ||||
|         maybe_enable_greenback=maybe_enable_greenback, | ||||
|     ): | ||||
|         if enable_transports is None: | ||||
|             enable_transports: list[str] = _state.current_ipc_protos() | ||||
|         else: | ||||
|             _state._runtime_vars['_enable_tpts'] = enable_transports | ||||
|     assert loglevel | ||||
|     _log = log.get_console_log(loglevel) | ||||
|     assert _log | ||||
| 
 | ||||
|         # TODO! support multi-tpts per actor! | ||||
|         # Bo | ||||
|         if not len(enable_transports) == 1: | ||||
|             raise RuntimeError( | ||||
|                 f'No multi-tpt support yet!\n' | ||||
|                 f'enable_transports={enable_transports!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|         _frame_stack.hide_runtime_frames() | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # attempt to retrieve ``trio``'s sigint handler and stash it | ||||
|         # on our debugger lock state. | ||||
|         debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT) | ||||
| 
 | ||||
|         # mark top most level process as root actor | ||||
|         _state._runtime_vars['_is_root'] = True | ||||
| 
 | ||||
|         # caps based rpc list | ||||
|         enable_modules = ( | ||||
|             enable_modules | ||||
|             or | ||||
|             [] | ||||
|         ) | ||||
| 
 | ||||
|         if rpc_module_paths: | ||||
|             warnings.warn( | ||||
|                 "`rpc_module_paths` is now deprecated, use " | ||||
|                 " `enable_modules` instead.", | ||||
|                 DeprecationWarning, | ||||
|                 stacklevel=2, | ||||
|             ) | ||||
|             enable_modules.extend(rpc_module_paths) | ||||
| 
 | ||||
|         if start_method is not None: | ||||
|             _spawn.try_set_start_method(start_method) | ||||
| 
 | ||||
|         # TODO! remove this ASAP! | ||||
|         if arbiter_addr is not None: | ||||
|             warnings.warn( | ||||
|                 '`arbiter_addr` is now deprecated\n' | ||||
|                 'Use `registry_addrs: list[tuple]` instead..', | ||||
|                 DeprecationWarning, | ||||
|                 stacklevel=2, | ||||
|             ) | ||||
|             uw_reg_addrs = [arbiter_addr] | ||||
| 
 | ||||
|         uw_reg_addrs = registry_addrs | ||||
|         if not uw_reg_addrs: | ||||
|             uw_reg_addrs: list[UnwrappedAddress] = default_lo_addrs( | ||||
|                 enable_transports | ||||
|             ) | ||||
| 
 | ||||
|         # must exist by now since all below code is dependent | ||||
|         assert uw_reg_addrs | ||||
|         registry_addrs: list[Address] = [ | ||||
|             wrap_address(uw_addr) | ||||
|             for uw_addr in uw_reg_addrs | ||||
|         ] | ||||
| 
 | ||||
|         loglevel = ( | ||||
|             loglevel | ||||
|             or log._default_loglevel | ||||
|         ).upper() | ||||
| 
 | ||||
|         if ( | ||||
|             debug_mode | ||||
|             and | ||||
|             _spawn._spawn_method == 'trio' | ||||
|         ): | ||||
|             _state._runtime_vars['_debug_mode'] = True | ||||
| 
 | ||||
|             # expose internal debug module to every actor allowing for | ||||
|             # use of ``await tractor.pause()`` | ||||
|             enable_modules.append('tractor.devx.debug._tty_lock') | ||||
| 
 | ||||
|             # if debug mode gets enabled *at least* use that level of | ||||
|             # logging for some informative console prompts. | ||||
|             if ( | ||||
|                 logging.getLevelName( | ||||
|                     # lul, need the upper case for the -> int map? | ||||
|                     # sweet "dynamic function behaviour" stdlib... | ||||
|                     loglevel, | ||||
|                 ) > logging.getLevelName('PDB') | ||||
|             ): | ||||
|                 loglevel = 'PDB' | ||||
| 
 | ||||
| 
 | ||||
|         elif debug_mode: | ||||
|             raise RuntimeError( | ||||
|                 "Debug mode is only supported for the `trio` backend!" | ||||
|             ) | ||||
| 
 | ||||
|         assert loglevel | ||||
|         _log = log.get_console_log(loglevel) | ||||
|         assert _log | ||||
| 
 | ||||
|         # TODO: factor this into `.devx._stackscope`!! | ||||
|         if ( | ||||
|             debug_mode | ||||
|             and | ||||
|             enable_stack_on_sig | ||||
|         ): | ||||
|             from .devx._stackscope import enable_stack_on_sig | ||||
|             enable_stack_on_sig() | ||||
| 
 | ||||
|         # closed into below ping task-func | ||||
|         ponged_addrs: list[Address] = [] | ||||
| 
 | ||||
|         async def ping_tpt_socket( | ||||
|             addr: Address, | ||||
|             timeout: float = 1, | ||||
|         ) -> None: | ||||
|             ''' | ||||
|             Attempt temporary connection to see if a registry is | ||||
|             listening at the requested address by a transport layer | ||||
|             ping. | ||||
| 
 | ||||
|             If a connection can't be made quickly we assume no | ||||
|             server is listening at that addr. | ||||
| 
 | ||||
|             ''' | ||||
|             try: | ||||
|                 # TODO: this connect-and-bail forces us to have to | ||||
|                 # carefully rewrap TCP 104-connection-reset errors as | ||||
|                 # EOF so as to avoid propagating cancel-causing errors | ||||
|                 # to the channel-msg loop machinery. Likely it would | ||||
|                 # be better to eventually have a "discovery" protocol | ||||
|                 # with basic handshake instead? | ||||
|                 with trio.move_on_after(timeout): | ||||
|                     async with _connect_chan(addr.unwrap()): | ||||
|                         ponged_addrs.append(addr) | ||||
| 
 | ||||
|             except OSError: | ||||
|                 # ?TODO, make this a "discovery" log level? | ||||
|                 logger.info( | ||||
|                     f'No root-actor registry found @ {addr!r}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         # !TODO, this is basically just another (abstract) | ||||
|         # happy-eyeballs, so we should try to formalize it somewhere | ||||
|         # in a `.[_]discovery` ya? | ||||
|         # | ||||
|         async with trio.open_nursery() as tn: | ||||
|             for uw_addr in uw_reg_addrs: | ||||
|                 addr: Address = wrap_address(uw_addr) | ||||
|                 tn.start_soon( | ||||
|                     ping_tpt_socket, | ||||
|                     addr, | ||||
|                 ) | ||||
| 
 | ||||
|         trans_bind_addrs: list[UnwrappedAddress] = [] | ||||
| 
 | ||||
|         # Create a new local root-actor instance which IS NOT THE | ||||
|         # REGISTRAR | ||||
|         if ponged_addrs: | ||||
|             if ensure_registry: | ||||
|                 raise RuntimeError( | ||||
|                 f'Failed to open `{name}`@{ponged_addrs}: ' | ||||
|                     'registry socket(s) already bound' | ||||
|                 ) | ||||
| 
 | ||||
|             # we were able to connect to an arbiter | ||||
|             logger.info( | ||||
|                 f'Registry(s) seem(s) to exist @ {ponged_addrs}' | ||||
|             ) | ||||
| 
 | ||||
|             actor = _runtime.Actor( | ||||
|                 name=name or 'anonymous', | ||||
|                 uuid=mk_uuid(), | ||||
|                 registry_addrs=ponged_addrs, | ||||
|                 loglevel=loglevel, | ||||
|                 enable_modules=enable_modules, | ||||
|             ) | ||||
|             # **DO NOT** use the registry_addrs as the | ||||
|             # ipc-transport-server's bind-addrs as this is | ||||
|             # a new NON-registrar, ROOT-actor. | ||||
|             # | ||||
|             # XXX INSTEAD, bind random addrs using the same tpt | ||||
|             # proto. | ||||
|             for addr in ponged_addrs: | ||||
|                 trans_bind_addrs.append( | ||||
|                     addr.get_random( | ||||
|                         bindspace=addr.bindspace, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|         # Start this local actor as the "registrar", aka a regular | ||||
|         # actor who manages the local registry of "mailboxes" of | ||||
|         # other process-tree-local sub-actors. | ||||
|         else: | ||||
|             # NOTE that if the current actor IS THE REGISTRAR, the | ||||
|             # following init steps are taken: | ||||
|             # - the transport layer server is bound to each addr | ||||
|             #   pair defined in provided registry_addrs, or the default. | ||||
|             trans_bind_addrs = uw_reg_addrs | ||||
| 
 | ||||
|             # - it is normally desirable for any registrar to stay up | ||||
|             #   indefinitely until either all registered (child/sub) | ||||
|             #   actors are terminated (via SC supervision) or, | ||||
|             #   a re-election process has taken place. | ||||
|             # NOTE: all of ^ which is not implemented yet - see: | ||||
|             # https://github.com/goodboy/tractor/issues/216 | ||||
|             # https://github.com/goodboy/tractor/pull/348 | ||||
|             # https://github.com/goodboy/tractor/issues/296 | ||||
| 
 | ||||
|             # TODO: rename as `RootActor` or is that even necessary? | ||||
|             actor = _runtime.Arbiter( | ||||
|                 name=name or 'registrar', | ||||
|                 uuid=mk_uuid(), | ||||
|                 registry_addrs=registry_addrs, | ||||
|                 loglevel=loglevel, | ||||
|                 enable_modules=enable_modules, | ||||
|             ) | ||||
|             # XXX, in case the root actor runtime was actually run from | ||||
|         # `tractor.to_asyncio.run_as_asyncio_guest()` and not | ||||
|             # `.trio.run()`. | ||||
|             actor._infected_aio = _state._runtime_vars['_is_infected_aio'] | ||||
| 
 | ||||
|         # NOTE, only set the loopback addr for the | ||||
|         # process-tree-global "root" mailbox since all sub-actors | ||||
|         # should be able to speak to their root actor over that | ||||
|         # channel. | ||||
|         raddrs: list[Address] = _state._runtime_vars['_root_addrs'] | ||||
|         raddrs.extend(trans_bind_addrs) | ||||
|         # TODO, remove once we have also removed all usage; | ||||
|         # eventually all (root-)registry apis should expect > 1 addr. | ||||
|         _state._runtime_vars['_root_mailbox'] = raddrs[0] | ||||
| 
 | ||||
|         # Start up main task set via core actor-runtime nurseries. | ||||
|     # TODO: factor this into `.devx._stackscope`!! | ||||
|     if debug_mode: | ||||
|         try: | ||||
|             # assign process-local actor | ||||
|             _state._current_actor = actor | ||||
| 
 | ||||
|             # start local channel-server and fake the portal API | ||||
|             # NOTE: this won't block since we provide the nursery | ||||
|             report: str = f'Starting actor-runtime for {actor.aid.reprol()!r}\n' | ||||
|             if reg_addrs := actor.registry_addrs: | ||||
|                 report += ( | ||||
|                     '-> Opening new registry @ ' | ||||
|                     + | ||||
|                     '\n'.join( | ||||
|                         f'{addr}' for addr in reg_addrs | ||||
|                     ) | ||||
|                 ) | ||||
|             logger.info(f'{report}\n') | ||||
| 
 | ||||
|             # start runtime in a bg sub-task, yield to caller. | ||||
|             async with ( | ||||
|                 collapse_eg(), | ||||
|                 trio.open_nursery() as root_tn, | ||||
| 
 | ||||
|                 # ?TODO? finally-footgun below? | ||||
|                 # -> see note on why shielding. | ||||
|                 # maybe_raise_from_masking_exc(), | ||||
|             ): | ||||
|                 actor._root_tn = root_tn | ||||
|                 # `_runtime.async_main()` creates an internal nursery | ||||
|                 # and blocks here until any underlying actor(-process) | ||||
|                 # tree has terminated thereby conducting so called | ||||
|                 # "end-to-end" structured concurrency throughout an | ||||
|                 # entire hierarchical python sub-process set; all | ||||
|                 # "actor runtime" primitives are SC-compat and thus all | ||||
|                 # transitively spawned actors/processes must be as | ||||
|                 # well. | ||||
|                 await root_tn.start( | ||||
|                     partial( | ||||
|                         _runtime.async_main, | ||||
|                         actor, | ||||
|                         accept_addrs=trans_bind_addrs, | ||||
|                         parent_addr=None | ||||
|                     ) | ||||
|                 ) | ||||
|                 try: | ||||
|                     yield actor | ||||
|                 except ( | ||||
|                     Exception, | ||||
|                     BaseExceptionGroup, | ||||
|                 ) as err: | ||||
| 
 | ||||
|                     # TODO, begin to handle the subsubactor-with- | ||||
|                     # crashed-grandparent cases.. | ||||
|                     # | ||||
|                     # was_locked: bool = await debug.maybe_wait_for_debugger( | ||||
|                     #     child_in_debug=True, | ||||
|                     # ) | ||||
|                     # XXX NOTE XXX see equiv note inside | ||||
|                     # `._runtime.Actor._stream_handler()` where in the | ||||
|                     # non-root or root-that-opened-this-manually case we | ||||
|                     # wait for the local actor-nursery to exit before | ||||
|                     # exiting the transport channel handler. | ||||
|                     entered: bool = await debug._maybe_enter_pm( | ||||
|                         err, | ||||
|                         api_frame=inspect.currentframe(), | ||||
|                         debug_filter=debug_filter, | ||||
| 
 | ||||
|                         # XXX NOTE, required to debug root-actor | ||||
|                         # crashes under cancellation conditions; so | ||||
|                         # most of them! | ||||
|                         shield=root_tn.cancel_scope.cancel_called, | ||||
|                     ) | ||||
| 
 | ||||
|                     if ( | ||||
|                         not entered | ||||
|                         and | ||||
|                         not is_multi_cancelled( | ||||
|                             err, | ||||
|                         ) | ||||
|                     ): | ||||
|                         logger.exception( | ||||
|                             'Root actor crashed\n' | ||||
|                             f'>x)\n' | ||||
|                             f' |_{actor}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                     # ALWAYS re-raise any error bubbled up from the | ||||
|                     # runtime! | ||||
|                     raise | ||||
| 
 | ||||
|                 finally: | ||||
|                     # NOTE/TODO?, not sure if we'll ever need this but it's | ||||
|                     # possibly better for even more determinism? | ||||
|                     # logger.cancel( | ||||
|                     #     f'Waiting on {len(nurseries)} nurseries in root..') | ||||
|                     # nurseries = actor._actoruid2nursery.values() | ||||
|                     # async with trio.open_nursery() as tempn: | ||||
|                     #     for an in nurseries: | ||||
|                     #         tempn.start_soon(an.exited.wait) | ||||
| 
 | ||||
|                     op_nested_actor_repr: str = _pformat.nest_from_op( | ||||
|                         input_op='>) ', | ||||
|                         text=actor.pformat(), | ||||
|                         nest_prefix='|_', | ||||
|                     ) | ||||
|                     logger.info( | ||||
|                         f'Closing down root actor\n' | ||||
|                         f'{op_nested_actor_repr}' | ||||
|                     ) | ||||
|                     # XXX, THIS IS A *finally-footgun*! | ||||
|                     # (also mentioned in with-block above) | ||||
|                     # -> though already shields internally it can | ||||
|                     # taskc here and mask underlying errors raised in | ||||
|                     # the try-block above? | ||||
|                     with trio.CancelScope(shield=True): | ||||
|                         await actor.cancel(None)  # self cancel | ||||
|         finally: | ||||
|             # revert all process-global runtime state | ||||
|             if ( | ||||
|                 debug_mode | ||||
|                 and | ||||
|                 _spawn._spawn_method == 'trio' | ||||
|             ): | ||||
|                 _state._runtime_vars['_debug_mode'] = False | ||||
| 
 | ||||
|             _state._current_actor = None | ||||
|             _state._last_actor_terminated = actor | ||||
| 
 | ||||
|             sclang_repr: str = _pformat.nest_from_op( | ||||
|                 input_op=')>', | ||||
|                 text=actor.pformat(), | ||||
|                 nest_prefix='|_', | ||||
|                 nest_indent=1, | ||||
|             logger.info('Enabling `stackscope` traces on SIGUSR1') | ||||
|             from .devx import enable_stack_on_sig | ||||
|             enable_stack_on_sig() | ||||
|         except ImportError: | ||||
|             logger.warning( | ||||
|                 '`stackscope` not installed for use in debug mode!' | ||||
|             ) | ||||
| 
 | ||||
|             logger.info( | ||||
|                 f'Root actor terminated\n' | ||||
|                 f'{sclang_repr}' | ||||
|     # closed into below ping task-func | ||||
|     ponged_addrs: list[tuple[str, int]] = [] | ||||
| 
 | ||||
|     async def ping_tpt_socket( | ||||
|         addr: tuple[str, int], | ||||
|         timeout: float = 1, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Attempt temporary connection to see if a registry is | ||||
|         listening at the requested address by a transport layer | ||||
|         ping. | ||||
| 
 | ||||
|         If a connection can't be made quickly we assume no | ||||
|         server is listening at that addr. | ||||
| 
 | ||||
|         ''' | ||||
|         try: | ||||
|             # TODO: this connect-and-bail forces us to have to | ||||
|             # carefully rewrap TCP 104-connection-reset errors as | ||||
|             # EOF so as to avoid propagating cancel-causing errors | ||||
|             # to the channel-msg loop machinery. Likely it would | ||||
|             # be better to eventually have a "discovery" protocol | ||||
|             # with basic handshake instead? | ||||
|             with trio.move_on_after(timeout): | ||||
|                 async with _connect_chan(*addr): | ||||
|                     ponged_addrs.append(addr) | ||||
| 
 | ||||
|         except OSError: | ||||
|             # TODO: make this a "discovery" log level? | ||||
|             logger.warning(f'No actor registry found @ {addr}') | ||||
| 
 | ||||
|     async with trio.open_nursery() as tn: | ||||
|         for addr in registry_addrs: | ||||
|             tn.start_soon( | ||||
|                 ping_tpt_socket, | ||||
|                 tuple(addr),  # TODO: just drop this requirement? | ||||
|             ) | ||||
| 
 | ||||
|     trans_bind_addrs: list[tuple[str, int]] = [] | ||||
| 
 | ||||
|     # Create a new local root-actor instance which IS NOT THE | ||||
|     # REGISTRAR | ||||
|     if ponged_addrs: | ||||
| 
 | ||||
|         if ensure_registry: | ||||
|             raise RuntimeError( | ||||
|                 f'Failed to open `{name}`@{ponged_addrs}: ' | ||||
|                 'registry socket(s) already bound' | ||||
|             ) | ||||
| 
 | ||||
|         # we were able to connect to an arbiter | ||||
|         logger.info( | ||||
|             f'Registry(s) seem(s) to exist @ {ponged_addrs}' | ||||
|         ) | ||||
| 
 | ||||
|         actor = Actor( | ||||
|             name=name or 'anonymous', | ||||
|             registry_addrs=ponged_addrs, | ||||
|             loglevel=loglevel, | ||||
|             enable_modules=enable_modules, | ||||
|         ) | ||||
|         # DO NOT use the registry_addrs as the transport server | ||||
|         # addrs for this new non-registrar, root-actor. | ||||
|         for host, port in ponged_addrs: | ||||
|             # NOTE: zero triggers dynamic OS port allocation | ||||
|             trans_bind_addrs.append((host, 0)) | ||||
| 
 | ||||
|     # Start this local actor as the "registrar", aka a regular | ||||
|     # actor who manages the local registry of "mailboxes" of | ||||
|     # other process-tree-local sub-actors. | ||||
|     else: | ||||
| 
 | ||||
|         # NOTE that if the current actor IS THE REGISTRAR, the | ||||
|         # following init steps are taken: | ||||
|         # - the tranport layer server is bound to each (host, port) | ||||
|         #   pair defined in provided registry_addrs, or the default. | ||||
|         trans_bind_addrs = registry_addrs | ||||
| 
 | ||||
|         # - it is normally desirable for any registrar to stay up | ||||
|         #   indefinitely until either all registered (child/sub) | ||||
|         #   actors are terminated (via SC supervision) or | ||||
|         #   a re-election process has taken place. | ||||
|         # NOTE: none of ^ is implemented yet - see: | ||||
|         # https://github.com/goodboy/tractor/issues/216 | ||||
|         # https://github.com/goodboy/tractor/pull/348 | ||||
|         # https://github.com/goodboy/tractor/issues/296 | ||||
| 
 | ||||
|         actor = Arbiter( | ||||
|             name or 'registrar', | ||||
|             registry_addrs=registry_addrs, | ||||
|             loglevel=loglevel, | ||||
|             enable_modules=enable_modules, | ||||
|         ) | ||||
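# ---------------------------------------------------------------
# [editor's note] A hedged sketch of the `(host, 0)` dynamic-port
# bind used above for the non-registrar branch: binding port 0
# asks the OS for a free ephemeral port, which can be read back
# off the bound socket. `trio`-only; names are illustrative.
# ---------------------------------------------------------------
import trio

async def bind_ephemeral_port(host: str = '127.0.0.1') -> int:
    listeners = await trio.open_tcp_listeners(0, host=host)
    # the OS-assigned port is recoverable from the bound socket
    port: int = listeners[0].socket.getsockname()[1]
    for listener in listeners:
        await listener.aclose()
    return port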
| 
 | ||||
|     # Start up main task set via core actor-runtime nurseries. | ||||
|     try: | ||||
|         # assign process-local actor | ||||
|         _state._current_actor = actor | ||||
| 
 | ||||
|         # start local channel-server and fake the portal API | ||||
|         # NOTE: this won't block since we provide the nursery | ||||
|         ml_addrs_str: str = '\n'.join( | ||||
|             f'@{addr}' for addr in trans_bind_addrs | ||||
|         ) | ||||
|         logger.info( | ||||
|             f'Starting local {actor.uid} on the following transport addrs:\n' | ||||
|             f'{ml_addrs_str}' | ||||
|         ) | ||||
| 
 | ||||
|         # start the actor runtime in a new task | ||||
|         async with trio.open_nursery() as nursery: | ||||
| 
 | ||||
|             # ``_runtime.async_main()`` creates an internal nursery | ||||
|             # and blocks here until any underlying actor(-process) | ||||
|             # tree has terminated, thereby conducting so-called | ||||
|             # "end-to-end" structured concurrency throughout an | ||||
|             # entire hierarchical python sub-process set; all | ||||
|             # "actor runtime" primitives are SC-compat and thus all | ||||
|             # transitively spawned actors/processes must be as | ||||
|             # well. | ||||
|             await nursery.start( | ||||
|                 partial( | ||||
|                     async_main, | ||||
|                     actor, | ||||
|                     accept_addrs=trans_bind_addrs, | ||||
|                     parent_addr=None | ||||
|                 ) | ||||
|             ) | ||||
|             try: | ||||
|                 yield actor | ||||
| 
 | ||||
|             except ( | ||||
|                 Exception, | ||||
|                 BaseExceptionGroup, | ||||
|             ) as err: | ||||
| 
 | ||||
|                 entered: bool = await _debug._maybe_enter_pm(err) | ||||
| 
 | ||||
|                 if ( | ||||
|                     not entered | ||||
|                     and not is_multi_cancelled(err) | ||||
|                 ): | ||||
|                     logger.exception('Root actor crashed:\n') | ||||
| 
 | ||||
|                 # ALWAYS re-raise any error bubbled up from the | ||||
|                 # runtime! | ||||
|                 raise | ||||
| 
 | ||||
|             finally: | ||||
|                 # NOTE: not sure if we'll ever need this but it's | ||||
|                 # possibly better for even more determinism? | ||||
|                 # logger.cancel( | ||||
|                 #     f'Waiting on {len(nurseries)} nurseries in root..') | ||||
|                 # nurseries = actor._actoruid2nursery.values() | ||||
|                 # async with trio.open_nursery() as tempn: | ||||
|                 #     for an in nurseries: | ||||
|                 #         tempn.start_soon(an.exited.wait) | ||||
| 
 | ||||
|                 logger.info( | ||||
|                     'Closing down root actor' | ||||
|                 ) | ||||
|                 await actor.cancel(None)  # self cancel | ||||
|     finally: | ||||
|         _state._current_actor = None | ||||
|         _state._last_actor_terminated = actor | ||||
| 
 | ||||
|         # restore built-in `breakpoint()` hook state | ||||
|         sys.breakpointhook = builtin_bp_handler | ||||
|         if orig_bp_path is not None: | ||||
|             os.environ['PYTHONBREAKPOINT'] = orig_bp_path | ||||
|         else: | ||||
|             # clear env back to having no entry | ||||
|             os.environ.pop('PYTHONBREAKPOINT') | ||||
| 
 | ||||
|         logger.runtime("Root actor terminated") | ||||
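# ---------------------------------------------------------------
# [editor's note] A hedged usage sketch for the `open_root_actor()`
# acm defined above; the `name`/`loglevel` values are illustrative.
# ---------------------------------------------------------------
import trio
import tractor

async def main() -> None:
    async with tractor.open_root_actor(
        name='demo_root',
        loglevel='info',
    ):
        # runtime is up: `current_actor()` now resolves
        actor = tractor.current_actor()
        print(f'runtime up as {actor.uid}')

if __name__ == '__main__':
    trio.run(main)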
| 
 | ||||
| 
 | ||||
| def run_daemon( | ||||
|     enable_modules: list[str], | ||||
| 
 | ||||
|     # runtime kwargs | ||||
|     name: str | None = 'root', | ||||
|     registry_addrs: list[UnwrappedAddress]|None = None, | ||||
|     registry_addrs: list[tuple[str, int]] = _default_lo_addrs, | ||||
| 
 | ||||
|     start_method: str | None = None, | ||||
|     debug_mode: bool = False, | ||||
| 
 | ||||
|     # TODO, support `infected_aio=True` mode by, | ||||
|     # - calling the appropriate entrypoint-func from `.to_asyncio` | ||||
|     # - maybe init-ing `greenback` as done above in | ||||
|     #   `open_root_actor()`. | ||||
| 
 | ||||
|     **kwargs | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Spawn a root (daemon) actor which will respond to RPC; the main | ||||
|     task simply starts the runtime and then blocks via embedded | ||||
|     `trio.sleep_forever()`. | ||||
|     Spawn daemon actor which will respond to RPC; the main task simply | ||||
|     starts the runtime and then sleeps forever. | ||||
| 
 | ||||
|     This is a very minimal convenience wrapper around starting | ||||
|     a "run-until-cancelled" root actor which can be started with a set | ||||
|  | @ -633,6 +392,7 @@ def run_daemon( | |||
|         importlib.import_module(path) | ||||
| 
 | ||||
|     async def _main(): | ||||
| 
 | ||||
|         async with open_root_actor( | ||||
|             registry_addrs=registry_addrs, | ||||
|             name=name, | ||||
|  |  |||
1174  tractor/_rpc.py  (file diff suppressed because it is too large; Load Diff)
1843  tractor/_runtime.py  (file diff suppressed because it is too large; Load Diff)
							|  | @ -23,24 +23,19 @@ considered optional within the context of this runtime-library. | |||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from multiprocessing import shared_memory as shm | ||||
| from multiprocessing.shared_memory import ( | ||||
|     # SharedMemory, | ||||
|     ShareableList, | ||||
| ) | ||||
| import platform | ||||
| from sys import byteorder | ||||
| import time | ||||
| from typing import Optional | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     Struct, | ||||
|     to_builtins | ||||
| from multiprocessing import shared_memory as shm | ||||
| from multiprocessing.shared_memory import ( | ||||
|     SharedMemory, | ||||
|     ShareableList, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import Struct | ||||
| import tractor | ||||
| 
 | ||||
| from tractor.ipc._mp_bs import disable_mantracker | ||||
| from tractor.log import get_logger | ||||
| from .log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| _USE_POSIX = getattr(shm, '_USE_POSIX', False) | ||||
|  | @ -51,10 +46,7 @@ if _USE_POSIX: | |||
| try: | ||||
|     import numpy as np | ||||
|     from numpy.lib import recfunctions as rfn | ||||
|     # TODO ruff complains with, | ||||
|     # warning| F401: `nptyping` imported but unused; consider using | ||||
|     # `importlib.util.find_spec` to test for availability | ||||
|     import nptyping  # noqa | ||||
|     import nptyping | ||||
| except ImportError: | ||||
|     pass | ||||
| 
 | ||||
|  | @ -62,7 +54,35 @@ except ImportError: | |||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| SharedMemory = disable_mantracker() | ||||
| def disable_mantracker(): | ||||
|     ''' | ||||
|     Disable all ``multiprocessing`` "resource tracking" machinery since | ||||
|     it's an absolute multi-threaded mess of non-SC madness. | ||||
| 
 | ||||
|     ''' | ||||
|     from multiprocessing import resource_tracker as mantracker | ||||
| 
 | ||||
|     # Tell the "resource tracker" thing to fuck off. | ||||
|     class ManTracker(mantracker.ResourceTracker): | ||||
|         def register(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def unregister(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def ensure_running(self): | ||||
|             pass | ||||
| 
 | ||||
|     # "know your land and know your prey" | ||||
|     # https://www.dailymotion.com/video/x6ozzco | ||||
|     mantracker._resource_tracker = ManTracker() | ||||
|     mantracker.register = mantracker._resource_tracker.register | ||||
|     mantracker.ensure_running = mantracker._resource_tracker.ensure_running | ||||
|     mantracker.unregister = mantracker._resource_tracker.unregister | ||||
|     mantracker.getfd = mantracker._resource_tracker.getfd | ||||
| 
 | ||||
| 
 | ||||
| disable_mantracker() | ||||
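# ---------------------------------------------------------------
# [editor's note] A hedged sketch of what the no-op tracker above
# implies: with `multiprocessing`'s resource tracker disabled, a
# `SharedMemory` segment is NOT auto-unlinked (or warned about) at
# interpreter exit, so its lifetime must be managed explicitly.
# The segment name/size here are illustrative.
# ---------------------------------------------------------------
from multiprocessing.shared_memory import SharedMemory

seg = SharedMemory(name='demo_seg', create=True, size=64)
seg.buf[:5] = b'hello'
# ... hand `seg.name` to another process, which attaches via
# SharedMemory(name='demo_seg') ...
seg.close()   # detach this process's mapping
seg.unlink()  # destroy the segment; exactly one owner should do this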
| 
 | ||||
| 
 | ||||
| class SharedInt: | ||||
|  | @ -122,7 +142,7 @@ class NDToken(Struct, frozen=True): | |||
|         ).descr | ||||
| 
 | ||||
|     def as_msg(self): | ||||
|         return to_builtins(self) | ||||
|         return self.to_dict() | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_msg(cls, msg: dict) -> NDToken: | ||||
|  | @ -790,23 +810,11 @@ def open_shm_list( | |||
|         readonly=readonly, | ||||
|     ) | ||||
| 
 | ||||
|     # TODO, factor into a @actor_fixture acm-API? | ||||
|     # -[ ] also `@maybe_actor_fixture()` which includes | ||||
|     #     the .current_actor() convenience check? | ||||
|     #   |_ or can that just be in the sin-maybe-version? | ||||
|     # | ||||
|     # "close" attached shm on actor teardown | ||||
|     try: | ||||
|         actor = tractor.current_actor() | ||||
| 
 | ||||
|         actor.lifetime_stack.callback(shml.shm.close) | ||||
| 
 | ||||
|         # XXX on 3.13+ we don't need to call this? | ||||
|         # -> bc we pass `track=False` for `SharedMemory`? | ||||
|         if ( | ||||
|             platform.python_version_tuple()[:-1] < ('3', '13') | ||||
|         ): | ||||
|             actor.lifetime_stack.callback(shml.shm.unlink) | ||||
|         actor.lifetime_stack.callback(shml.shm.unlink) | ||||
|     except RuntimeError: | ||||
|         log.warning('tractor runtime not active, skipping teardown steps') | ||||
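# ---------------------------------------------------------------
# [editor's note] A hedged sketch of the teardown-registration
# pattern above, with a plain `contextlib.ExitStack` standing in
# for the actor's `lifetime_stack`; callbacks fire LIFO on close.
# ---------------------------------------------------------------
from contextlib import ExitStack
from multiprocessing.shared_memory import SharedMemory

stack = ExitStack()
seg = SharedMemory(create=True, size=64)
stack.callback(seg.close)   # registered first -> runs last
stack.callback(seg.unlink)  # registered last -> runs first (LIFO)
# ... use `seg.buf` ...
stack.close()  # unlink, then close, mirroring actor teardown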
| 
 | ||||
|  | @ -31,41 +31,31 @@ from typing import ( | |||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| import trio | ||||
| from trio import TaskStatus | ||||
| from trio_typing import TaskStatus | ||||
| 
 | ||||
| from .devx import ( | ||||
|     debug, | ||||
|     pformat as _pformat | ||||
|     maybe_wait_for_debugger, | ||||
|     acquire_debug_lock, | ||||
| ) | ||||
| from tractor._state import ( | ||||
| from ._state import ( | ||||
|     current_actor, | ||||
|     is_main_process, | ||||
|     is_root_process, | ||||
|     debug_mode, | ||||
|     _runtime_vars, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| from tractor._addr import UnwrappedAddress | ||||
| from tractor._portal import Portal | ||||
| from tractor._runtime import Actor | ||||
| from tractor._entry import _mp_main | ||||
| from tractor._exceptions import ActorFailure | ||||
| from tractor.msg import ( | ||||
|     types as msgtypes, | ||||
|     pretty_struct, | ||||
| ) | ||||
| from .log import get_logger | ||||
| from ._portal import Portal | ||||
| from ._runtime import Actor | ||||
| from ._entry import _mp_main | ||||
| from ._exceptions import ActorFailure | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from .ipc import ( | ||||
|         _server, | ||||
|         Channel, | ||||
|     ) | ||||
|     from ._supervise import ActorNursery | ||||
|     ProcessType = TypeVar('ProcessType', mp.Process, trio.Process) | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger('tractor') | ||||
| 
 | ||||
| # placeholder for an mp start context if so using that backend | ||||
|  | @ -150,13 +140,11 @@ async def exhaust_portal( | |||
|     ''' | ||||
|     __tracebackhide__ = True | ||||
|     try: | ||||
|         log.debug( | ||||
|             f'Waiting on final result from {actor.uid}' | ||||
|         ) | ||||
|         log.debug(f"Waiting on final result from {actor.uid}") | ||||
| 
 | ||||
|         # XXX: streams should never be reaped here since they should | ||||
|         # always be established and shutdown using a context manager api | ||||
|         final: Any = await portal.wait_for_result() | ||||
|         final: Any = await portal.result() | ||||
| 
 | ||||
|     except ( | ||||
|         Exception, | ||||
|  | @ -170,7 +158,7 @@ async def exhaust_portal( | |||
|         # TODO: merge with above? | ||||
|         log.warning( | ||||
|             'Cancelled portal result waiter task:\n' | ||||
|             f'uid: {portal.channel.aid}\n' | ||||
|             f'uid: {portal.channel.uid}\n' | ||||
|             f'error: {err}\n' | ||||
|         ) | ||||
|         return err | ||||
|  | @ -178,7 +166,7 @@ async def exhaust_portal( | |||
|     else: | ||||
|         log.debug( | ||||
|             f'Returning final result from portal:\n' | ||||
|             f'uid: {portal.channel.aid}\n' | ||||
|             f'uid: {portal.channel.uid}\n' | ||||
|             f'result: {final}\n' | ||||
|         ) | ||||
|         return final | ||||
|  | @ -205,10 +193,7 @@ async def cancel_on_completion( | |||
|     # if this call errors we store the exception for later | ||||
|     # in ``errors`` which will be reraised inside | ||||
|     # an exception group and we still send out a cancel request | ||||
|     result: Any|Exception = await exhaust_portal( | ||||
|         portal, | ||||
|         actor, | ||||
|     ) | ||||
|     result: Any|Exception = await exhaust_portal(portal, actor) | ||||
|     if isinstance(result, Exception): | ||||
|         errors[actor.uid]: Exception = result | ||||
|         log.cancel( | ||||
|  | @ -230,8 +215,8 @@ async def cancel_on_completion( | |||
| 
 | ||||
| async def hard_kill( | ||||
|     proc: trio.Process, | ||||
| 
 | ||||
|     terminate_after: int = 1.6, | ||||
| 
 | ||||
|     # NOTE: for mucking with `.pause()`-ing inside the runtime | ||||
|     # whilst also hacking on it XD | ||||
|     # terminate_after: int = 99999, | ||||
|  | @ -253,9 +238,8 @@ async def hard_kill( | |||
| 
 | ||||
|     ''' | ||||
|     log.cancel( | ||||
|         'Terminating sub-proc\n' | ||||
|         f'>x)\n' | ||||
|         f' |_{proc}\n' | ||||
|         'Terminating sub-proc:\n' | ||||
|         f'|_{proc}\n' | ||||
|     ) | ||||
|     # NOTE: this timeout used to do nothing since we were shielding | ||||
|     # the ``.wait()`` inside ``new_proc()`` which will pretty much | ||||
|  | @ -297,34 +281,18 @@ async def hard_kill( | |||
|     # zombies (as a feature) we ask the OS to do send in the | ||||
|     # removal swad as the last resort. | ||||
|     if cs.cancelled_caught: | ||||
| 
 | ||||
|         # TODO? attempt at intermediary-rent-sub | ||||
|         # with child in debug lock? | ||||
|         # |_https://github.com/goodboy/tractor/issues/320 | ||||
|         # | ||||
|         # if not is_root_process(): | ||||
|         #     log.warning( | ||||
|         #         'Attempting to acquire debug-REPL-lock before zombie reap!' | ||||
|         #     ) | ||||
|         #     with trio.CancelScope(shield=True): | ||||
|         #         async with debug.acquire_debug_lock( | ||||
|         #             subactor_uid=current_actor().uid, | ||||
|         #         ) as _ctx: | ||||
|         #             log.warning( | ||||
|         #                 'Acquired debug lock, child ready to be killed ??\n' | ||||
|         #             ) | ||||
| 
 | ||||
|         # TODO: toss in the skynet-logo face as ascii art? | ||||
|         log.critical( | ||||
|             # 'Well, the #ZOMBIE_LORD_IS_HERE# to collect\n' | ||||
|             '#T-800 deployed to collect zombie B0\n' | ||||
|             f'>x)\n' | ||||
|             f' |_{proc}\n' | ||||
|             f'|\n' | ||||
|             f'|_{proc}\n' | ||||
|         ) | ||||
|         proc.kill() | ||||
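# ---------------------------------------------------------------
# [editor's note] A hedged, compressed sketch of the graduated
# "terminate, then kill" escalation implemented above for a
# `trio.Process`; the timeout value is illustrative.
# ---------------------------------------------------------------
import trio

async def reap(proc: trio.Process, timeout: float = 1.6) -> None:
    with trio.move_on_after(timeout) as cs:
        proc.terminate()  # polite SIGTERM first
        await proc.wait()
    if cs.cancelled_caught:
        # still alive after the grace period -> SIGKILL
        proc.kill()
        await proc.wait()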
| 
 | ||||
| 
 | ||||
| async def soft_kill( | ||||
| 
 | ||||
|     proc: ProcessType, | ||||
|     wait_func: Callable[ | ||||
|         [ProcessType], | ||||
|  | @ -344,31 +312,16 @@ async def soft_kill( | |||
|     see `.hard_kill()`). | ||||
| 
 | ||||
|     ''' | ||||
|     chan: Channel = portal.channel | ||||
|     peer_aid: msgtypes.Aid = chan.aid | ||||
|     uid: tuple[str, str] = portal.channel.uid | ||||
|     try: | ||||
|         log.cancel( | ||||
|             f'Soft killing sub-actor via portal request\n' | ||||
|             f'\n' | ||||
|             f'c)=> {peer_aid.reprol()}@[{chan.maddr}]\n' | ||||
|             f'   |_{proc}\n' | ||||
|             'Soft killing sub-actor via `Portal.cancel_actor()`\n' | ||||
|             f'|_{proc}\n' | ||||
|         ) | ||||
|         # wait on sub-proc to signal termination | ||||
|         await wait_func(proc) | ||||
| 
 | ||||
|     except trio.Cancelled: | ||||
|         with trio.CancelScope(shield=True): | ||||
|             await debug.maybe_wait_for_debugger( | ||||
|                 child_in_debug=_runtime_vars.get( | ||||
|                     '_debug_mode', False | ||||
|                 ), | ||||
|                 header_msg=( | ||||
|                     'Delaying `soft_kill()` subproc reaper while debugger locked..\n' | ||||
|                 ), | ||||
|                 # TODO: need a diff value then default? | ||||
|                 # poll_steps=9999999, | ||||
|             ) | ||||
| 
 | ||||
|         # if cancelled during a soft wait, cancel the child | ||||
|         # actor before entering the hard reap sequence | ||||
|         # below. This means we try to do a graceful teardown | ||||
|  | @ -399,7 +352,7 @@ async def soft_kill( | |||
|             if proc.poll() is None:  # type: ignore | ||||
|                 log.warning( | ||||
|                     'Subactor still alive after cancel request?\n\n' | ||||
|                     f'uid: {peer_aid}\n' | ||||
|                     f'uid: {uid}\n' | ||||
|                     f'|_{proc}\n' | ||||
|                 ) | ||||
|                 n.cancel_scope.cancel() | ||||
|  | @ -413,15 +366,14 @@ async def new_proc( | |||
|     errors: dict[tuple[str, str], Exception], | ||||
| 
 | ||||
|     # passed through to actor main | ||||
|     bind_addrs: list[UnwrappedAddress], | ||||
|     parent_addr: UnwrappedAddress, | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
| 
 | ||||
|     *, | ||||
| 
 | ||||
|     infect_asyncio: bool = False, | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, | ||||
|     proc_kwargs: dict[str, any] = {} | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -441,7 +393,6 @@ async def new_proc( | |||
|         _runtime_vars,  # run time vars | ||||
|         infect_asyncio=infect_asyncio, | ||||
|         task_status=task_status, | ||||
|         proc_kwargs=proc_kwargs | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -452,13 +403,12 @@ async def trio_proc( | |||
|     errors: dict[tuple[str, str], Exception], | ||||
| 
 | ||||
|     # passed through to actor main | ||||
|     bind_addrs: list[UnwrappedAddress], | ||||
|     parent_addr: UnwrappedAddress, | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
|     *, | ||||
|     infect_asyncio: bool = False, | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, | ||||
|     proc_kwargs: dict[str, any] = {} | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -480,9 +430,6 @@ async def trio_proc( | |||
|         # the OS; it otherwise can be passed via the parent channel if | ||||
|         # we prefer in the future (for privacy). | ||||
|         "--uid", | ||||
|         # TODO, how to pass this over "wire" encodings like | ||||
|         # cmdline args? | ||||
|         # -[ ] maybe we can add an `msgtypes.Aid.min_tuple()` ? | ||||
|         str(subactor.uid), | ||||
|         # Address the child must connect to on startup | ||||
|         "--parent_addr", | ||||
|  | @ -500,20 +447,19 @@ async def trio_proc( | |||
| 
 | ||||
|     cancelled_during_spawn: bool = False | ||||
|     proc: trio.Process|None = None | ||||
|     ipc_server: _server.Server = actor_nursery._actor.ipc_server | ||||
|     try: | ||||
|         try: | ||||
|             proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd, **proc_kwargs) | ||||
|             # TODO: needs ``trio_typing`` patch? | ||||
|             proc = await trio.lowlevel.open_process(spawn_cmd) | ||||
|             log.runtime( | ||||
|                 f'Started new child subproc\n' | ||||
|                 f'(>\n' | ||||
|                 f' |_{proc}\n' | ||||
|                 'Started new sub-proc\n' | ||||
|                 f'|_{proc}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # wait for actor to spawn and connect back to us | ||||
|             # channel should have handshake completed by the | ||||
|             # local actor by the time we get a ref to it | ||||
|             event, chan = await ipc_server.wait_for_peer( | ||||
|             event, chan = await actor_nursery._actor.wait_for_peer( | ||||
|                 subactor.uid | ||||
|             ) | ||||
| 
 | ||||
|  | @ -525,10 +471,10 @@ async def trio_proc( | |||
|                 with trio.CancelScope(shield=True): | ||||
|                     # don't clobber an ongoing pdb | ||||
|                     if is_root_process(): | ||||
|                         await debug.maybe_wait_for_debugger() | ||||
|                         await maybe_wait_for_debugger() | ||||
| 
 | ||||
|                     elif proc is not None: | ||||
|                         async with debug.acquire_debug_lock(subactor.uid): | ||||
|                         async with acquire_debug_lock(subactor.uid): | ||||
|                             # soft wait on the proc to terminate | ||||
|                             with trio.move_on_after(0.5): | ||||
|                                 await proc.wait() | ||||
|  | @ -544,25 +490,17 @@ async def trio_proc( | |||
|             portal, | ||||
|         ) | ||||
| 
 | ||||
|         # send a "spawning specification" which configures the | ||||
|         # initial runtime state of the child. | ||||
|         sspec = msgtypes.SpawnSpec( | ||||
|             _parent_main_data=subactor._parent_main_data, | ||||
|             enable_modules=subactor.enable_modules, | ||||
|             reg_addrs=subactor.reg_addrs, | ||||
|             bind_addrs=bind_addrs, | ||||
|             _runtime_vars=_runtime_vars, | ||||
|         ) | ||||
|         log.runtime( | ||||
|             f'Sending spawn spec to child\n' | ||||
|             f'{{}}=> {chan.aid.reprol()!r}\n' | ||||
|             f'\n' | ||||
|             f'{pretty_struct.pformat(sspec)}\n' | ||||
|         ) | ||||
|         await chan.send(sspec) | ||||
|         # send additional init params | ||||
|         await chan.send({ | ||||
|             '_parent_main_data': subactor._parent_main_data, | ||||
|             'enable_modules': subactor.enable_modules, | ||||
|             'reg_addrs': subactor.reg_addrs, | ||||
|             'bind_addrs': bind_addrs, | ||||
|             '_runtime_vars': _runtime_vars, | ||||
|         }) | ||||
| 
 | ||||
|         # track subactor in current nursery | ||||
|         curr_actor: Actor = current_actor() | ||||
|         curr_actor = current_actor() | ||||
|         curr_actor._actoruid2nursery[subactor.uid] = actor_nursery | ||||
| 
 | ||||
|         # resume caller at next checkpoint now that child is up | ||||
|  | @ -586,15 +524,15 @@ async def trio_proc( | |||
|             # condition. | ||||
|             await soft_kill( | ||||
|                 proc, | ||||
|                 trio.Process.wait,  # XXX, uses `pidfd_open()` below. | ||||
|                 trio.Process.wait, | ||||
|                 portal | ||||
|             ) | ||||
| 
 | ||||
|             # cancel result waiter that may have been spawned in | ||||
|             # tandem if not done already | ||||
|             log.cancel( | ||||
|                 'Cancelling portal result reaper task\n' | ||||
|                 f'c)> {subactor.aid.reprol()!r}\n' | ||||
|                 'Cancelling existing result waiter task for ' | ||||
|                 f'{subactor.uid}' | ||||
|             ) | ||||
|             nursery.cancel_scope.cancel() | ||||
| 
 | ||||
|  | @ -603,24 +541,17 @@ async def trio_proc( | |||
|         # allowed! Do this **after** cancellation/teardown to avoid | ||||
|         # killing the process too early. | ||||
|         if proc: | ||||
|             reap_repr: str = _pformat.nest_from_op( | ||||
|                 input_op='>x)', | ||||
|                 text=subactor.pformat(), | ||||
|             ) | ||||
|             log.cancel( | ||||
|                 f'Hard reap sequence starting for subactor\n' | ||||
|                 f'{reap_repr}' | ||||
|             ) | ||||
| 
 | ||||
|             log.cancel(f'Hard reap sequence starting for {subactor.uid}') | ||||
|             with trio.CancelScope(shield=True): | ||||
| 
 | ||||
|                 # don't clobber an ongoing pdb | ||||
|                 if cancelled_during_spawn: | ||||
|                     # Try again to avoid TTY clobbering. | ||||
|                     async with debug.acquire_debug_lock(subactor.uid): | ||||
|                     async with acquire_debug_lock(subactor.uid): | ||||
|                         with trio.move_on_after(0.5): | ||||
|                             await proc.wait() | ||||
| 
 | ||||
|                 await debug.maybe_wait_for_debugger( | ||||
|                 await maybe_wait_for_debugger( | ||||
|                     child_in_debug=_runtime_vars.get( | ||||
|                         '_debug_mode', False | ||||
|                     ), | ||||
|  | @ -649,7 +580,7 @@ async def trio_proc( | |||
|                 #     acquire the lock and get notified of who has it, | ||||
|                 #     check that uid against our known children? | ||||
|                 # this_uid: tuple[str, str] = current_actor().uid | ||||
|                 # await debug.acquire_debug_lock(this_uid) | ||||
|                 # await acquire_debug_lock(this_uid) | ||||
| 
 | ||||
|                 if proc.poll() is None: | ||||
|                     log.cancel(f"Attempting to hard kill {proc}") | ||||
|  | @ -671,13 +602,12 @@ async def mp_proc( | |||
|     subactor: Actor, | ||||
|     errors: dict[tuple[str, str], Exception], | ||||
|     # passed through to actor main | ||||
|     bind_addrs: list[UnwrappedAddress], | ||||
|     parent_addr: UnwrappedAddress, | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
|     *, | ||||
|     infect_asyncio: bool = False, | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, | ||||
|     proc_kwargs: dict[str, any] = {} | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -752,14 +682,12 @@ async def mp_proc( | |||
| 
 | ||||
|     log.runtime(f"Started {proc}") | ||||
| 
 | ||||
|     ipc_server: _server.Server = actor_nursery._actor.ipc_server | ||||
|     try: | ||||
|         # wait for actor to spawn and connect back to us | ||||
|         # channel should have handshake completed by the | ||||
|         # local actor by the time we get a ref to it | ||||
|         event, chan = await ipc_server.wait_for_peer( | ||||
|             subactor.uid, | ||||
|         ) | ||||
|         event, chan = await actor_nursery._actor.wait_for_peer( | ||||
|             subactor.uid) | ||||
| 
 | ||||
|         # XXX: monkey patch poll API to match the ``subprocess`` API.. | ||||
|         # not sure why they don't expose this but kk. | ||||
|  |  | |||
|  | @ -14,62 +14,27 @@ | |||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Per actor-process runtime state mgmt APIs. | ||||
| """ | ||||
| Per process state | ||||
| 
 | ||||
| ''' | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from contextvars import ( | ||||
|     ContextVar, | ||||
| ) | ||||
| import os | ||||
| from pathlib import Path | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Literal, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from trio.lowlevel import current_task | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
|     from ._context import Context | ||||
| 
 | ||||
| 
 | ||||
| # default IPC transport protocol settings | ||||
| TransportProtocolKey = Literal[ | ||||
|     'tcp', | ||||
|     'uds', | ||||
| ] | ||||
| _def_tpt_proto: TransportProtocolKey = 'tcp' | ||||
| 
 | ||||
| _current_actor: Actor|None = None  # type: ignore # noqa | ||||
| _last_actor_terminated: Actor|None = None | ||||
| 
 | ||||
| # TODO: mk this a `msgspec.Struct`! | ||||
| # -[ ] type out all fields obvi! | ||||
| # -[ ] (eventually) mk wire-ready for monitoring? | ||||
| _runtime_vars: dict[str, Any] = { | ||||
|     # root of actor-process tree info | ||||
|     '_is_root': False,  # bool | ||||
|     '_root_mailbox': (None, None),  # tuple[str|None, str|None] | ||||
|     '_root_addrs': [],  # list[UnwrappedAddress] | ||||
| 
 | ||||
|     # parent->chld ipc protocol caps | ||||
|     '_enable_tpts': [_def_tpt_proto], | ||||
| 
 | ||||
|     # registrar info | ||||
|     '_debug_mode': False, | ||||
|     '_is_root': False, | ||||
|     '_root_mailbox': (None, None), | ||||
|     '_registry_addrs': [], | ||||
| 
 | ||||
|     # `debug_mode: bool` settings | ||||
|     '_debug_mode': False,  # bool | ||||
|     'repl_fixture': False,  # |AbstractContextManager[bool] | ||||
|     # for `tractor.pause_from_sync()` & `breakpoint()` support | ||||
|     'use_greenback': False, | ||||
| 
 | ||||
|     # infected-`asyncio`-mode: `trio` running as guest. | ||||
|     '_is_infected_aio': False, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
|  | @ -94,10 +59,9 @@ def current_actor( | |||
|     ''' | ||||
|     if ( | ||||
|         err_on_no_runtime | ||||
|         and | ||||
|         _current_actor is None | ||||
|         and _current_actor is None | ||||
|     ): | ||||
|         msg: str = 'No local actor has been initialized yet?\n' | ||||
|         msg: str = 'No local actor has been initialized yet' | ||||
|         from ._exceptions import NoRuntime | ||||
| 
 | ||||
|         if last := last_actor(): | ||||
|  | @ -110,8 +74,8 @@ def current_actor( | |||
|         # this process. | ||||
|         else: | ||||
|             msg += ( | ||||
|                 # 'No last actor found?\n' | ||||
|                 '\nDid you forget to call one of,\n' | ||||
|                 'No last actor found?\n' | ||||
|                 'Did you forget to open one of:\n\n' | ||||
|                 '- `tractor.open_root_actor()`\n' | ||||
|                 '- `tractor.open_nursery()`\n' | ||||
|             ) | ||||
|  | @ -121,7 +85,7 @@ def current_actor( | |||
|     return _current_actor | ||||
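# ---------------------------------------------------------------
# [editor's note] A hedged usage sketch for `current_actor()` as
# defined above: outside an open runtime it raises `NoRuntime`,
# so callers that may run standalone can pass
# `err_on_no_runtime=False` and branch on `None` instead.
# ---------------------------------------------------------------
import tractor

actor = tractor.current_actor(err_on_no_runtime=False)
if actor is None:
    print('no tractor runtime in this process')
else:
    print(f'running inside actor: {actor.uid}')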
| 
 | ||||
| 
 | ||||
| def is_root_process() -> bool: | ||||
| def is_main_process() -> bool: | ||||
|     ''' | ||||
|     Bool determining if this actor is running in the top-most process. | ||||
| 
 | ||||
|  | @ -130,10 +94,7 @@ def is_root_process() -> bool: | |||
|     return mp.current_process().name == 'MainProcess' | ||||
| 
 | ||||
| 
 | ||||
| is_main_process = is_root_process | ||||
| 
 | ||||
| 
 | ||||
| def is_debug_mode() -> bool: | ||||
| def debug_mode() -> bool: | ||||
|     ''' | ||||
|     Bool determining if "debug mode" is on which enables | ||||
|     remote subactor pdb entry on crashes. | ||||
|  | @ -142,62 +103,5 @@ def is_debug_mode() -> bool: | |||
|     return bool(_runtime_vars['_debug_mode']) | ||||
| 
 | ||||
| 
 | ||||
| debug_mode = is_debug_mode | ||||
| 
 | ||||
| 
 | ||||
| def is_root_process() -> bool: | ||||
|     return _runtime_vars['_is_root'] | ||||
| 
 | ||||
| 
 | ||||
| _ctxvar_Context: ContextVar[Context] = ContextVar( | ||||
|     'ipc_context', | ||||
|     default=None, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def current_ipc_ctx( | ||||
|     error_on_not_set: bool = False, | ||||
| ) -> Context|None: | ||||
|     ctx: Context = _ctxvar_Context.get() | ||||
| 
 | ||||
|     if ( | ||||
|         not ctx | ||||
|         and error_on_not_set | ||||
|     ): | ||||
|         from ._exceptions import InternalError | ||||
|         raise InternalError( | ||||
|             'No IPC context has been allocated for this task yet?\n' | ||||
|             f'|_{current_task()}\n' | ||||
|         ) | ||||
|     return ctx | ||||
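# ---------------------------------------------------------------
# [editor's note] A hedged sketch of the `ContextVar` lookup
# pattern used by `current_ipc_ctx()` above: the runtime sets the
# var before scheduling user code and each task then reads its
# own value; the names here are illustrative.
# ---------------------------------------------------------------
from contextvars import ContextVar

_demo_var: ContextVar[str|None] = ContextVar('demo', default=None)

def runtime_sets_it(value: str) -> None:
    _demo_var.set(value)  # done by the runtime per task/ctx

def user_code_reads_it() -> str|None:
    return _demo_var.get()  # `None` if never set in this context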
| 
 | ||||
| 
 | ||||
| # std XDG (mutable) app state location | ||||
| _rtdir: Path = Path(os.environ['XDG_RUNTIME_DIR']) | ||||
| 
 | ||||
| 
 | ||||
| def get_rt_dir( | ||||
|     subdir: str = 'tractor' | ||||
| ) -> Path: | ||||
|     ''' | ||||
|     Return the user "runtime dir" where most userspace apps stick | ||||
|     their IPC and cache related system util-files; we take hold | ||||
|     of a `'XDG_RUNTIME_DIR'/tractor/` subdir by default. | ||||
| 
 | ||||
|     ''' | ||||
|     rtdir: Path = _rtdir / subdir | ||||
|     if not rtdir.is_dir(): | ||||
|         rtdir.mkdir() | ||||
|     return rtdir | ||||
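# ---------------------------------------------------------------
# [editor's note] A hedged consumer sketch for `get_rt_dir()`
# above: e.g. computing a UDS socket path under the per-user
# runtime dir; the socket filename is purely illustrative.
# ---------------------------------------------------------------
from pathlib import Path

sock_path: Path = get_rt_dir() / 'demo_registry.sock'
print(sock_path)  # e.g. /run/user/1000/tractor/demo_registry.sock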
| 
 | ||||
| 
 | ||||
| def current_ipc_protos() -> list[str]: | ||||
|     ''' | ||||
|     Return the list of IPC transport protocol keys currently | ||||
|     in use by this actor. | ||||
| 
 | ||||
|     The keys are as declared by `MsgTransport` and `Address` | ||||
|     concrete-backend sub-types defined throughout `tractor.ipc`. | ||||
| 
 | ||||
|     ''' | ||||
|     return _runtime_vars['_enable_tpts'] | ||||
|  |  | |||
|  | @ -26,7 +26,6 @@ import inspect | |||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncGenerator, | ||||
|     Callable, | ||||
|     AsyncIterator, | ||||
|     TYPE_CHECKING, | ||||
|  | @ -36,27 +35,17 @@ import warnings | |||
| import trio | ||||
| 
 | ||||
| from ._exceptions import ( | ||||
|     _raise_from_no_key_in_msg, | ||||
|     ContextCancelled, | ||||
|     RemoteActorError, | ||||
| ) | ||||
| from .log import get_logger | ||||
| from .trionics import ( | ||||
|     broadcast_receiver, | ||||
|     BroadcastReceiver, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     Error, | ||||
|     Return, | ||||
|     Stop, | ||||
|     MsgType, | ||||
|     PayloadT, | ||||
|     Yield, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
|     from ._context import Context | ||||
|     from .ipc import Channel | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -70,9 +59,10 @@ log = get_logger(__name__) | |||
| class MsgStream(trio.abc.Channel): | ||||
|     ''' | ||||
|     A bidirectional message stream for receiving logically sequenced | ||||
|     values over an inter-actor IPC `Channel`. | ||||
| 
 | ||||
|     values over an inter-actor IPC ``Channel``. | ||||
| 
 | ||||
|     This is the type returned to a local task which entered either | ||||
|     ``Portal.open_stream_from()`` or ``Context.open_stream()``. | ||||
| 
 | ||||
|     Termination rules: | ||||
| 
 | ||||
|  | @ -88,109 +78,46 @@ class MsgStream(trio.abc.Channel): | |||
|         self, | ||||
|         ctx: Context,  # typing: ignore # noqa | ||||
|         rx_chan: trio.MemoryReceiveChannel, | ||||
|         _broadcaster: BroadcastReceiver|None = None, | ||||
|         _broadcaster: BroadcastReceiver | None = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         self._ctx = ctx | ||||
|         self._rx_chan = rx_chan | ||||
|         self._broadcaster = _broadcaster | ||||
| 
 | ||||
|         # any actual IPC msg which is effectively an `EndOfStream` | ||||
|         self._stop_msg: bool|Stop = False | ||||
| 
 | ||||
|         # flag to denote end of stream | ||||
|         self._eoc: bool|trio.EndOfChannel = False | ||||
|         self._closed: bool|trio.ClosedResourceError = False | ||||
| 
 | ||||
|     @property | ||||
|     def ctx(self) -> Context: | ||||
|         ''' | ||||
|         A read-only ref to this stream's inter-actor-task `Context`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._ctx | ||||
| 
 | ||||
|     @property | ||||
|     def chan(self) -> Channel: | ||||
|         ''' | ||||
|         Ref to the containing `Context`'s transport `Channel`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._ctx.chan | ||||
| 
 | ||||
|     # TODO: could we make this a direct method bind to `PldRx`? | ||||
|     # -> receive_nowait = PldRx.recv_pld | ||||
|     # |_ means latter would have to accept `MsgStream`-as-`self`? | ||||
|     #  => should be fine as long as, | ||||
|     #  -[ ] both define `._rx_chan` | ||||
|     #  -[ ] .ctx is bound into `PldRx` using a `@cm`? | ||||
|     # | ||||
|     # delegate directly to underlying mem channel | ||||
|     def receive_nowait( | ||||
|         self, | ||||
|         expect_msg: MsgType = Yield, | ||||
|     ) -> PayloadT: | ||||
|         ctx: Context = self._ctx | ||||
|         ( | ||||
|             msg, | ||||
|             pld, | ||||
|         ) = ctx._pld_rx.recv_msg_nowait( | ||||
|             ipc=self, | ||||
|             expect_msg=expect_msg, | ||||
|         ) | ||||
|         allow_msg_keys: list[str] = ['yield'], | ||||
|     ): | ||||
|         msg: dict = self._rx_chan.receive_nowait() | ||||
|         for ( | ||||
|             i, | ||||
|             key, | ||||
|         ) in enumerate(allow_msg_keys): | ||||
|             try: | ||||
|                 return msg[key] | ||||
|             except KeyError as kerr: | ||||
|                 if i < (len(allow_msg_keys) - 1): | ||||
|                     continue | ||||
| 
 | ||||
|         # ?TODO, maybe factor this into a hyper-common `unwrap_pld()` | ||||
|         # | ||||
|         match msg: | ||||
| 
 | ||||
|             # XXX, these never seems to ever hit? cool? | ||||
|             case Stop(): | ||||
|                 log.cancel( | ||||
|                     f'Msg-stream was ended via stop msg\n' | ||||
|                     f'{msg}' | ||||
|                 _raise_from_no_key_in_msg( | ||||
|                     ctx=self._ctx, | ||||
|                     msg=msg, | ||||
|                     src_err=kerr, | ||||
|                     log=log, | ||||
|                     expect_key=key, | ||||
|                     stream=self, | ||||
|                 ) | ||||
|             case Error(): | ||||
|                 log.error( | ||||
|                     f'Msg-stream was ended via error msg\n' | ||||
|                     f'{msg}' | ||||
|                 ) | ||||
| 
 | ||||
|             # XXX NOTE, always set any final result on the ctx to | ||||
|             # avoid teardown race conditions where previously this msg | ||||
|             # would be consumed silently (by `.aclose()` doing its | ||||
|             # own "msg drain loop" but WITHOUT those `drained: lists[MsgType]` | ||||
|             # being post-close-processed! | ||||
|             # | ||||
|             # !!TODO, see the equiv todo-comment in `.receive()` | ||||
|             # around the `if drained:` where we should prolly | ||||
|             # ACTUALLY be doing this post-close processing?? | ||||
|             # | ||||
|             case Return(pld=pld): | ||||
|                 log.warning( | ||||
|                     f'Msg-stream final result msg for IPC ctx?\n' | ||||
|                     f'{msg}' | ||||
|                 ) | ||||
|                 # XXX TODO, this **should be covered** by higher | ||||
|                 # scoped runtime-side method calls such as | ||||
|                 # `Context._deliver_msg()`, so you should never | ||||
|                 # really see the warning above or else something | ||||
|                 # racy/out-of-order is likely going on between | ||||
|                 # actor-runtime-side push tasks and the user-app-side | ||||
|                 # consume tasks! | ||||
|                 # -[ ] figure out that set of race cases and fix! | ||||
|                 # -[ ] possibly return the `msg` given an input | ||||
|                 #     arg-flag is set so we can process the `Return` | ||||
|                 #     from the `.aclose()` caller? | ||||
|                 # | ||||
|                 # breakpoint()  # to debug this RACE CASE! | ||||
|                 ctx._result = pld | ||||
|                 ctx._outcome_msg = msg | ||||
| 
 | ||||
|         return pld | ||||
| 
 | ||||
|     async def receive( | ||||
|         self, | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|         hide_tb: bool = True, | ||||
|     ): | ||||
|         ''' | ||||
|         Receive a single msg from the IPC transport, the next in | ||||
|  | @ -200,16 +127,17 @@ class MsgStream(trio.abc.Channel): | |||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # NOTE FYI: `trio.ReceiveChannel` implements EOC handling as | ||||
|         # follows (aka uses it to gracefully exit async for loops): | ||||
|         # NOTE: `trio.ReceiveChannel` implements | ||||
|         # EOC handling as follows (aka uses it | ||||
|         # to gracefully exit async for loops): | ||||
|         # | ||||
|         # async def __anext__(self) -> ReceiveType: | ||||
|         #     try: | ||||
|         #         return await self.receive() | ||||
|         #     except trio.EndOfChannel: | ||||
|         #         raise StopAsyncIteration | ||||
|         # | ||||
|         # see `.aclose()` for notes on the old behaviour prior to | ||||
| 
 | ||||
|         # see ``.aclose()`` for notes on the old behaviour prior to | ||||
|         # introducing this | ||||
|         if self._eoc: | ||||
|             raise self._eoc | ||||
|  | @ -219,34 +147,71 @@ class MsgStream(trio.abc.Channel): | |||
| 
 | ||||
|         src_err: Exception|None = None  # orig tb | ||||
|         try: | ||||
|             ctx: Context = self._ctx | ||||
|             pld = await ctx._pld_rx.recv_pld( | ||||
|                 ipc=self, | ||||
|                 expect_msg=Yield, | ||||
|             ) | ||||
|             return pld | ||||
|             try: | ||||
|                 msg = await self._rx_chan.receive() | ||||
|                 return msg['yield'] | ||||
| 
 | ||||
|         # XXX: the stream terminates on either of: | ||||
|         # - `self._rx_chan.receive()` raising  after manual closure | ||||
|         #   by the rpc-runtime, | ||||
|         #   OR | ||||
|         # - via a `Stop`-msg received from remote peer task. | ||||
|         #   NOTE | ||||
|         #   |_ previously this was triggered by calling | ||||
|         #   `._rx_chan.aclose()` on the send side of the channel | ||||
|         #   inside `Actor._deliver_ctx_payload()`, but now the 'stop' | ||||
|         #   message handling gets delegated to `PldRx.recv_pld()` | ||||
|         #   internals. | ||||
|         except trio.EndOfChannel as eoc: | ||||
|             # a graceful stream finished signal | ||||
|             self._eoc = eoc | ||||
|             except KeyError as kerr: | ||||
|                 # log.exception('GOT KEYERROR') | ||||
|                 src_err = kerr | ||||
| 
 | ||||
|                 # NOTE: may raise any of the below error types | ||||
|                 # includg EoC when a 'stop' msg is found. | ||||
|                 _raise_from_no_key_in_msg( | ||||
|                     ctx=self._ctx, | ||||
|                     msg=msg, | ||||
|                     src_err=kerr, | ||||
|                     log=log, | ||||
|                     expect_key='yield', | ||||
|                     stream=self, | ||||
|                 ) | ||||
| 
 | ||||
|         # XXX: we close the stream on any of these error conditions: | ||||
|         except ( | ||||
|             # trio.ClosedResourceError,  # by self._rx_chan | ||||
|             trio.EndOfChannel,  # by self._rx_chan or `stop` msg from far end | ||||
|         ) as eoc: | ||||
|             # log.exception('GOT EOC') | ||||
|             src_err = eoc | ||||
|             self._eoc = eoc | ||||
| 
 | ||||
|         # a `ClosedResourceError` indicates that the internal feeder | ||||
|         # memory receive channel was closed likely by the runtime | ||||
|         # after the associated transport-channel disconnected or | ||||
|         # broke. | ||||
|         except trio.ClosedResourceError as cre:  # by self._rx_chan.receive() | ||||
|             # a ``ClosedResourceError`` indicates that the internal | ||||
|             # feeder memory receive channel was closed likely by the | ||||
|             # runtime after the associated transport-channel | ||||
|             # disconnected or broke. | ||||
| 
 | ||||
|             # an ``EndOfChannel`` indicates either the internal recv | ||||
|             # memchan exhausted **or** we raised it just above after | ||||
|             # receiving a `stop` message from the far end of the stream. | ||||
| 
 | ||||
|             # Previously this was triggered by calling ``.aclose()`` on | ||||
|             # the send side of the channel inside | ||||
|             # ``Actor._push_result()`` (should still be commented code | ||||
|             # there - which should eventually get removed), but now the | ||||
|             # 'stop' message handling has been put just above. | ||||
| 
 | ||||
|             # TODO: Locally, we want to close this stream gracefully, by | ||||
|             # terminating any local consumers tasks deterministically. | ||||
|             # Once we have broadcast support, we **don't** want to be | ||||
|             # closing this stream and not flushing a final value to | ||||
|             # remaining (clone) consumers who may not have been | ||||
|             # scheduled to receive it yet. | ||||
|             # try: | ||||
|             #     maybe_err_msg_or_res: dict = self._rx_chan.receive_nowait() | ||||
|             #     if maybe_err_msg_or_res: | ||||
|             #         log.warning( | ||||
|             #             'Discarding un-processed msg:\n' | ||||
|             #             f'{maybe_err_msg_or_res}' | ||||
|             #         ) | ||||
|             # except trio.WouldBlock: | ||||
|             #     # no queued msgs that might be another remote | ||||
|             #     # error, so just raise the original EoC | ||||
|             #     pass | ||||
| 
 | ||||
|             # raise eoc | ||||
| 
 | ||||
|         except trio.ClosedResourceError as cre:  # by self._rx_chan | ||||
|             # log.exception('GOT CRE') | ||||
|             src_err = cre | ||||
|             log.warning( | ||||
|                 '`Context._rx_chan` was already closed?' | ||||
|  | @ -257,60 +222,32 @@ class MsgStream(trio.abc.Channel): | |||
|         # terminated and signal this local iterator to stop | ||||
|         drained: list[Exception|dict] = await self.aclose() | ||||
|         if drained: | ||||
|         #  ^^^^^^^^TODO? pass these to the `._ctx._drained_msgs: | ||||
|         #  deque` and then iterate them as part of any | ||||
|         #  `.wait_for_result()` call? | ||||
|         # | ||||
|         # -[ ] move the match-case processing from | ||||
|         #     `.receive_nowait()` instead to right here, use it from | ||||
|         #     a for msg in drained:` post-proc loop? | ||||
|         # | ||||
|             # from .devx import pause | ||||
|             # await pause() | ||||
|             log.warning( | ||||
|                 'Drained context msgs during closure\n\n' | ||||
|                 'Drained context msgs during closure:\n' | ||||
|                 f'{drained}' | ||||
|             ) | ||||
|         # TODO: pass these to the `._ctx._drained_msgs: deque` | ||||
|         # and then iterate them as part of any `.result()` call? | ||||
| 
 | ||||
|         # NOTE XXX: if the context was cancelled or remote-errored | ||||
|         # but we received the stream close msg first, we | ||||
|         # probably want to instead raise the remote error | ||||
|         # over the end-of-stream connection error since likely | ||||
|         # the remote error was the source cause? | ||||
|         # ctx: Context = self._ctx | ||||
|         ctx.maybe_raise( | ||||
|             raise_ctxc_from_self_call=True, | ||||
|             from_src_exc=src_err, | ||||
|         ) | ||||
| 
 | ||||
|         # propagate any error but hide low-level frame details from | ||||
|         # the caller by default for console/debug-REPL noise | ||||
|         # reduction. | ||||
|         if ( | ||||
|             hide_tb | ||||
|             and ( | ||||
| 
 | ||||
|                 # XXX NOTE special conditions: don't reraise on | ||||
|                 # certain stream-specific internal error types like, | ||||
|                 # | ||||
|                 # - `trio.EoC` since we want to use the exact instance | ||||
|                 #   to ensure that it is the error that bubbles upward | ||||
|                 #   for silent absorption by `Context.open_stream()`. | ||||
|                 not self._eoc | ||||
| 
 | ||||
|                 # - `RemoteActorError` (or subtypes like ctxc) | ||||
|                 #    since we want to present the error as though it is | ||||
|                 #    "sourced" directly from this `.receive()` call and | ||||
|                 #    generally NOT include the stack frames raised from | ||||
|                 #    inside the `PldRx` and/or the transport stack | ||||
|                 #    layers. | ||||
|                 or isinstance(src_err, RemoteActorError) | ||||
|         ctx: Context = self._ctx | ||||
|         if re := ctx._remote_error: | ||||
|             ctx._maybe_raise_remote_err( | ||||
|                 re, | ||||
|                 raise_ctxc_from_self_call=True, | ||||
|             ) | ||||
|         ): | ||||
| 
 | ||||
|         # propagate any error but hide low-level frames from | ||||
|         # caller by default. | ||||
|         if hide_tb: | ||||
|             raise type(src_err)(*src_err.args) from src_err | ||||
|         else: | ||||
|             # for any non-graceful-EOC we want to NOT hide this frame | ||||
|             if not self._eoc: | ||||
|                 __tracebackhide__: bool = False | ||||
| 
 | ||||
|             raise src_err | ||||
| 
 | ||||
|     async def aclose(self) -> list[Exception|dict]: | ||||
|  | @ -327,6 +264,9 @@ class MsgStream(trio.abc.Channel): | |||
|          - more or less we try to maintain adherance to trio's `.aclose()` semantics: | ||||
|            https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||
|         ''' | ||||
| 
 | ||||
|         # rx_chan = self._rx_chan | ||||
| 
 | ||||
|         # XXX NOTE XXX | ||||
|         # it's SUPER IMPORTANT that we ensure we don't DOUBLE | ||||
|         # DRAIN msgs on closure so avoid getting stuck handing on | ||||
|  | @ -338,16 +278,14 @@ class MsgStream(trio.abc.Channel): | |||
|             # this stream has already been closed so silently succeed as | ||||
|             # per ``trio.AsyncResource`` semantics. | ||||
|             # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||
|             # import tractor | ||||
|             # await tractor.pause() | ||||
|             return [] | ||||
| 
 | ||||
|         ctx: Context = self._ctx | ||||
|         drained: list[Exception|dict] = [] | ||||
|         while not drained: | ||||
|             try: | ||||
|                 maybe_final_msg: Yield|Return = self.receive_nowait( | ||||
|                     expect_msg=Yield|Return, | ||||
|                 maybe_final_msg = self.receive_nowait( | ||||
|                     allow_msg_keys=['yield', 'return'], | ||||
|                 ) | ||||
|                 if maybe_final_msg: | ||||
|                     log.debug( | ||||
|  | @ -426,37 +364,19 @@ class MsgStream(trio.abc.Channel): | |||
|             self._closed = re | ||||
| 
 | ||||
|         # if caught_eoc: | ||||
|         #     # from .devx import debug | ||||
|         #     # await debug.pause() | ||||
|         #     # from .devx import _debug | ||||
|         #     # await _debug.pause() | ||||
|         #     with trio.CancelScope(shield=True): | ||||
|         #         await rx_chan.aclose() | ||||
| 
 | ||||
|         if not self._eoc: | ||||
|             this_side: str = self._ctx.side | ||||
|             peer_side: str = self._ctx.peer_side | ||||
|             message: str = ( | ||||
|                 f'Stream self-closed by {this_side!r}-side before EoC from {peer_side!r}\n' | ||||
|                 # } bc a stream is a "scope"/msging-phase inside an IPC | ||||
|                 f'c}}>\n' | ||||
|                 f'  |_{self}\n' | ||||
|             self._eoc: bool = trio.EndOfChannel( | ||||
|                 f'Context stream closed by {self._ctx.side}\n' | ||||
|                 f'|_{self}\n' | ||||
|             ) | ||||
|             if ( | ||||
|                 (rx_chan := self._rx_chan) | ||||
|                 and | ||||
|                 (stats := rx_chan.statistics()).tasks_waiting_receive | ||||
|             ): | ||||
|                 message += ( | ||||
|                     f'AND there is still reader tasks,\n' | ||||
|                     f'\n' | ||||
|                     f'{stats}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             log.cancel(message) | ||||
|             self._eoc = trio.EndOfChannel(message) | ||||
| 
 | ||||
|         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? | ||||
|         # => NO, DEFINITELY NOT! <= | ||||
|         # if we're a bi-dir `MsgStream` BECAUSE this same | ||||
|         # if we're a bi-dir ``MsgStream`` BECAUSE this same | ||||
|         # core-msg-loop mem recv-chan is used to deliver the | ||||
|         # potential final result from the surrounding inter-actor | ||||
|         # `Context` so we don't want to close it until that | ||||
|  | @ -494,11 +414,13 @@ class MsgStream(trio.abc.Channel): | |||
| 
 | ||||
|     @property | ||||
|     def closed(self) -> bool: | ||||
| 
 | ||||
|         rxc: bool = self._rx_chan._closed | ||||
|         _closed: bool|Exception = self._closed | ||||
|         _eoc: bool|trio.EndOfChannel = self._eoc | ||||
|         if rxc or _closed or _eoc: | ||||
|         if ( | ||||
|             (rxc := self._rx_chan._closed) | ||||
|             or | ||||
|             (_closed := self._closed) | ||||
|             or | ||||
|             (_eoc := self._eoc) | ||||
|         ): | ||||
|             log.runtime( | ||||
|                 f'`MsgStream` is already closed\n' | ||||
|                 f'{self}\n' | ||||
|  | @ -538,9 +460,6 @@ class MsgStream(trio.abc.Channel): | |||
|                 self, | ||||
|                 # use memory channel size by default | ||||
|                 self._rx_chan._state.max_buffer_size,  # type: ignore | ||||
| 
 | ||||
|                 # TODO: can remove this kwarg right since | ||||
|                 # by default behaviour is to do this anyway? | ||||
|                 receive_afunc=self.receive, | ||||
|             ) | ||||
| 
 | ||||
|  | @ -577,259 +496,30 @@ class MsgStream(trio.abc.Channel): | |||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # raise any already known error immediately | ||||
|         self._ctx.maybe_raise() | ||||
|         if self._eoc: | ||||
|             raise self._eoc | ||||
| 
 | ||||
|         if self._closed: | ||||
|             raise self._closed | ||||
| 
 | ||||
|         try: | ||||
|             await self._ctx.chan.send( | ||||
|                 payload=Yield( | ||||
|                     cid=self._ctx.cid, | ||||
|                     pld=data, | ||||
|                 ), | ||||
|                 payload={ | ||||
|                     'yield': data, | ||||
|                     'cid': self._ctx.cid, | ||||
|                 }, | ||||
|                 # hide_tb=hide_tb, | ||||
|             ) | ||||
|         except ( | ||||
|             trio.ClosedResourceError, | ||||
|             trio.BrokenResourceError, | ||||
|             BrokenPipeError, | ||||
|         ) as _trans_err: | ||||
|             trans_err = _trans_err | ||||
|             if ( | ||||
|                 hide_tb | ||||
|                 and | ||||
|                 self._ctx.chan._exc is trans_err | ||||
|                 # ^XXX, IOW, only if the channel is marked errored | ||||
|                 # for the same reason as whatever its underlying | ||||
|                 # transport raised, do we keep the full low-level tb | ||||
|                 # suppressed from the user. | ||||
|             ): | ||||
|         ) as trans_err: | ||||
|             if hide_tb: | ||||
|                 raise type(trans_err)( | ||||
|                     *trans_err.args | ||||
|                 ) from trans_err | ||||
|             else: | ||||
|                 raise | ||||
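For callers, the upshot of this `.send()` path is that transport failures surface as the underlying `trio`/OS error types (possibly re-raised with a truncated tb when `hide_tb` is set). A minimal caller-side sketch, assuming a `stream` already opened via the context API further below:

```python
import trio

async def pump(stream) -> None:
    # feed msgs until the peer ends the dialog or the transport dies
    try:
        for i in range(100):
            await stream.send(i)
    except (
        trio.ClosedResourceError,
        trio.BrokenResourceError,
        BrokenPipeError,
    ):
        # one side already tore down the IPC transport; treat it as
        # a normal end-of-dialog at the app layer.
        pass
```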
| 
 | ||||
|     # TODO: msg capability context api1 | ||||
|     # @acm | ||||
|     # async def enable_msg_caps( | ||||
|     #     self, | ||||
|     #     msg_subtypes: Union[ | ||||
|     #         list[list[Struct]], | ||||
|     #         Protocol,   # hypothetical type that wraps a msg set | ||||
|     #     ], | ||||
|     # ) -> tuple[Callable, Callable]:  # payload enc, dec pair | ||||
|     #     ... | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_stream_from_ctx( | ||||
|     ctx: Context, | ||||
|     allow_overruns: bool|None = False, | ||||
|     msg_buffer_size: int|None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[MsgStream, None]: | ||||
|     ''' | ||||
|     Open a `MsgStream`, a bi-directional msg transport dialog | ||||
|     connected to the cross-actor peer task for an IPC `Context`. | ||||
| 
 | ||||
|     This context manager must be entered in both the "parent" (task | ||||
|     which entered `Portal.open_context()`) and "child" (RPC task | ||||
|     which is decorated by `@context`) tasks for the stream to | ||||
|     logically be considered "open"; if one side begins sending to an | ||||
|     un-opened peer, depending on policy config, msgs will either be | ||||
|     queued until the other side opens and/or a `StreamOverrun` will | ||||
|     (eventually) be raised. | ||||
| 
 | ||||
|                          ------ - ------ | ||||
| 
 | ||||
|     Runtime semantics design: | ||||
| 
 | ||||
|     A `MsgStream` session adheres to "one-shot use" semantics, | ||||
|     meaning if you close the scope it **can not** be "re-opened". | ||||
| 
 | ||||
|     Instead you must re-establish a new surrounding RPC `Context` | ||||
|     (RTC: remote task context?) using `Portal.open_context()`. | ||||
| 
 | ||||
|     In the future this *design choice* may need to be changed but | ||||
|     currently there seems to be no obvious reason to support such | ||||
|     semantics.. | ||||
| 
 | ||||
|     - "pausing a stream" can be supported with a message implemented | ||||
|       by the `tractor` application dev. | ||||
| 
 | ||||
|     - any remote error will normally require a restart of the entire | ||||
|       `trio.Task`'s scope due to the nature of `trio`'s cancellation | ||||
|       (`CancelScope`) system and semantics (level triggered). | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = ctx._actor | ||||
| 
 | ||||
|     # If the surrounding context has been cancelled by some | ||||
|     # task with a handle to THIS, we error here immediately | ||||
|     # since it likely means the surrounding lexical-scope has | ||||
|     # errored, been `trio.Cancelled` or at the least | ||||
|     # `Context.cancel()` was called by some task. | ||||
|     if ctx._cancel_called: | ||||
| 
 | ||||
|         # XXX NOTE: ALWAYS RAISE any remote error here even if | ||||
|         # it's an expected `ContextCancelled` due to a local | ||||
|         # task having called `.cancel()`! | ||||
|         # | ||||
|         # WHY: we expect the error to always bubble up to the | ||||
|         # surrounding `Portal.open_context()` call and be | ||||
|         # absorbed there (silently) and we DO NOT want to | ||||
|         # actually try to stream - a cancel msg was already | ||||
|         # sent to the other side! | ||||
|         ctx.maybe_raise( | ||||
|             raise_ctxc_from_self_call=True, | ||||
|         ) | ||||
|         # NOTE: this is diff from calling | ||||
|         # `._maybe_raise_remote_err()` specifically | ||||
|         # because we want to raise a ctxc on any task entering this | ||||
|         # `.open_stream()` AFTER cancellation has already been | ||||
|         # requested; we DO NOT want to absorb any ctxc ACK silently! | ||||
|         # if ctx._remote_error: | ||||
|         #     raise ctx._remote_error | ||||
| 
 | ||||
|         # XXX NOTE: if no `ContextCancelled` has been responded | ||||
|         # back from the other side (yet), we raise a different | ||||
|         # runtime error indicating that this task's usage of | ||||
|         # `Context.cancel()` and then `.open_stream()` is WRONG! | ||||
|         task: str = trio.lowlevel.current_task().name | ||||
|         raise RuntimeError( | ||||
|             'Stream opened after `Context.cancel()` called..?\n' | ||||
|             f'task: {actor.uid[0]}:{task}\n' | ||||
|             f'{ctx}' | ||||
|         ) | ||||
| 
 | ||||
|     if ( | ||||
|         not ctx._portal | ||||
|         and not ctx._started_called | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             '`Context.started()` must be called before opening a stream' | ||||
|         ) | ||||
| 
 | ||||
|     # NOTE: in one way streaming this only happens on the | ||||
|     # parent-ctx-task side (on the side that calls | ||||
|     # `Actor.start_remote_task()`) so if you try to send | ||||
|     # a stop from the caller to the callee in the | ||||
|     # single-direction-stream case you'll get a lookup error | ||||
|     # currently. | ||||
|     prior_ctx: Context = ctx  # keep a ref for the sanity check below | ||||
|     ctx: Context = actor.get_context( | ||||
|         chan=ctx.chan, | ||||
|         cid=ctx.cid, | ||||
|         nsf=ctx._nsf, | ||||
|         # side=ctx.side, | ||||
| 
 | ||||
|         msg_buffer_size=msg_buffer_size, | ||||
|         allow_overruns=allow_overruns, | ||||
|     ) | ||||
|     ctx._allow_overruns: bool = allow_overruns | ||||
|     assert ctx is prior_ctx  # the runtime must return the same (cached) ctx | ||||
| 
 | ||||
|     # XXX: If the underlying channel feeder receive mem chan has | ||||
|     # been closed then likely client code has already exited | ||||
|     # a ``.open_stream()`` block prior or there was some other | ||||
|     # unanticipated error or cancellation from ``trio``. | ||||
| 
 | ||||
|     if ctx._rx_chan._closed: | ||||
|         raise trio.ClosedResourceError( | ||||
|             'The underlying channel for this stream was already closed!\n' | ||||
|         ) | ||||
| 
 | ||||
|     # NOTE: implicitly this will call `MsgStream.aclose()` on | ||||
|     # `.__aexit__()` due to stream's parent `Channel` type! | ||||
|     # | ||||
|     # XXX NOTE XXX: ensures the stream is "one-shot use", | ||||
|     # which specifically means that on exit, | ||||
|     # - signal ``trio.EndOfChannel``/``StopAsyncIteration`` to | ||||
|     #   the far end indicating that the caller exited | ||||
|     #   the streaming context purposefully by letting | ||||
|     #   the exit block exec. | ||||
|     # - this is diff from the cancel/error case where | ||||
|     #   a cancel request from this side or an error | ||||
|     #   should be sent to the far end indicating the | ||||
|     #   stream WAS NOT just closed normally/gracefully. | ||||
|     async with MsgStream( | ||||
|         ctx=ctx, | ||||
|         rx_chan=ctx._rx_chan, | ||||
|     ) as stream: | ||||
| 
 | ||||
|         # NOTE: we track all existing streams per portal for | ||||
|         # the purposes of attempting graceful closes on runtime | ||||
|         # cancel requests. | ||||
|         if ctx._portal: | ||||
|             ctx._portal._streams.add(stream) | ||||
| 
 | ||||
|         try: | ||||
|             ctx._stream_opened: bool = True | ||||
|             ctx._stream = stream | ||||
| 
 | ||||
|             # XXX: do we need this? | ||||
|             # ensure we aren't cancelled before yielding the stream | ||||
|             # await trio.lowlevel.checkpoint() | ||||
|             yield stream | ||||
| 
 | ||||
|             # XXX: (MEGA IMPORTANT) if this is a root opened process we | ||||
|             # wait for any immediate child in debug before popping the | ||||
|             # context from the runtime msg loop otherwise inside | ||||
|             # ``Actor._deliver_ctx_payload()`` the msg will be discarded and in | ||||
|             # the case where that msg is global debugger unlock (via | ||||
|             # a "stop" msg for a stream), this can result in a deadlock | ||||
|             # where the root is waiting on the lock to clear but the | ||||
|             # child has already cleared it and clobbered IPC. | ||||
|             # | ||||
|             # await maybe_wait_for_debugger() | ||||
| 
 | ||||
|             # XXX TODO: pretty sure this isn't needed (see | ||||
|             # note above this block) AND will result in | ||||
|             # a double `.send_stop()` call. The only reason to | ||||
|             # put it here would be to due with "order" in | ||||
|             # terms of raising any remote error (as per | ||||
|             # directly below) or bc the stream's | ||||
|             # `.__aexit__()` block might not get run | ||||
|             # (doubtful)? Either way if we did put this back | ||||
|             # in we also need a state var to avoid the double | ||||
|             # stop-msg send.. | ||||
|             # | ||||
|             # await stream.aclose() | ||||
| 
 | ||||
|         # NOTE: absorb and do not raise any | ||||
|         # EoC received from the other side such that | ||||
|         # it is not raised inside the surrounding | ||||
|         # context block's scope! | ||||
|         except trio.EndOfChannel as eoc: | ||||
|             if ( | ||||
|                 eoc | ||||
|                 and | ||||
|                 stream.closed | ||||
|             ): | ||||
|                 # sanity, can remove? | ||||
|                 assert eoc is stream._eoc | ||||
| 
 | ||||
|                 log.runtime( | ||||
|                     'Stream was terminated by EoC\n\n' | ||||
|                     # NOTE: won't show the error <Type> but | ||||
|                     # does show txt followed by IPC msg. | ||||
|                     f'{str(eoc)}\n' | ||||
|                 ) | ||||
|         finally: | ||||
|             if ctx._portal: | ||||
|                 try: | ||||
|                     ctx._portal._streams.remove(stream) | ||||
|                 except KeyError: | ||||
|                     log.warning( | ||||
|                         f'Stream was already destroyed?\n' | ||||
|                         f'actor: {ctx.chan.uid}\n' | ||||
|                         f'ctx id: {ctx.cid}' | ||||
|                     ) | ||||
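As a usage reference for the two-sided open requirement described in the docstring above, a minimal echo round-trip might look like the following sketch; the actor/fn names are illustrative and the stream is opened via the public `Context.open_stream()` wrapper rather than this helper directly:

```python
import tractor
import trio

@tractor.context
async def echo_server(ctx: tractor.Context) -> None:
    # child side: sync the ctx, then open our half of the stream
    await ctx.started()
    async with ctx.open_stream() as stream:
        async for msg in stream:
            await stream.send(msg)

async def main() -> None:
    async with tractor.open_nursery() as an:
        portal = await an.start_actor('echoer', enable_modules=[__name__])
        async with (
            portal.open_context(echo_server) as (ctx, _first),
            # parent side: open the other half; the dialog is now "open"
            ctx.open_stream() as stream,
        ):
            await stream.send('ping')
            assert await stream.receive() == 'ping'
        await an.cancel()

if __name__ == '__main__':
    trio.run(main)
```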
| 
 | ||||
| 
 | ||||
| 
 | ||||
| def stream(func: Callable) -> Callable: | ||||
|     ''' | ||||
|  | @ -838,7 +528,7 @@ def stream(func: Callable) -> Callable: | |||
|     ''' | ||||
|     # TODO: apply whatever solution ``mypy`` ends up picking for this: | ||||
|     # https://github.com/python/mypy/issues/2087#issuecomment-769266912 | ||||
|     func._tractor_stream_function: bool = True  # type: ignore | ||||
|     func._tractor_stream_function = True  # type: ignore | ||||
| 
 | ||||
|     sig = inspect.signature(func) | ||||
|     params = sig.parameters | ||||
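For contrast, the marker set here supports the legacy one-way streaming API where a plain async-gen is consumed from the parent; a sketch using the long-standing `Portal.open_stream_from()` entrypoint (names illustrative):

```python
import tractor
import trio

async def counter(limit: int):
    # a plain async-gen acts as the one-way stream producer
    for i in range(limit):
        yield i

async def main() -> None:
    async with tractor.open_nursery() as an:
        portal = await an.start_actor('streamer', enable_modules=[__name__])
        async with portal.open_stream_from(counter, limit=3) as stream:
            assert [n async for n in stream] == [0, 1, 2]
        await an.cancel()

if __name__ == '__main__':
    trio.run(main)
```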
|  |  | |||
|  | @ -21,49 +21,35 @@ | |||
| from contextlib import asynccontextmanager as acm | ||||
| from functools import partial | ||||
| import inspect | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from pprint import pformat | ||||
| from typing import TYPE_CHECKING | ||||
| import typing | ||||
| import warnings | ||||
| 
 | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| from .devx import ( | ||||
|     debug, | ||||
|     pformat as _pformat, | ||||
| ) | ||||
| from ._addr import ( | ||||
|     UnwrappedAddress, | ||||
|     mk_uuid, | ||||
| ) | ||||
| from .devx._debug import maybe_wait_for_debugger | ||||
| from ._state import current_actor, is_main_process | ||||
| from .log import get_logger, get_loglevel | ||||
| from ._runtime import Actor | ||||
| from ._portal import Portal | ||||
| from .trionics import ( | ||||
|     is_multi_cancelled, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     is_multi_cancelled, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| from ._root import ( | ||||
|     open_root_actor, | ||||
| ) | ||||
| from ._root import open_root_actor | ||||
| from . import _state | ||||
| from . import _spawn | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     import multiprocessing as mp | ||||
|     # from .ipc._server import IPCServer | ||||
|     from .ipc import IPCServer | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _default_bind_addr: tuple[str, int] = ('127.0.0.1', 0) | ||||
| 
 | ||||
| 
 | ||||
| class ActorNursery: | ||||
|     ''' | ||||
|  | @ -95,19 +81,15 @@ class ActorNursery: | |||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         # TODO: maybe def these as fields of a struct looking type? | ||||
|         actor: Actor, | ||||
|         ria_nursery: trio.Nursery, | ||||
|         da_nursery: trio.Nursery, | ||||
|         errors: dict[tuple[str, str], BaseException], | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # self.supervisor = supervisor  # TODO | ||||
|         self._actor: Actor = actor | ||||
| 
 | ||||
|         # TODO: rename to `._tn` for our conventional "task-nursery" | ||||
|         self._ria_nursery = ria_nursery | ||||
|         self._da_nursery = da_nursery | ||||
| 
 | ||||
|         self._children: dict[ | ||||
|             tuple[str, str], | ||||
|             tuple[ | ||||
|  | @ -116,11 +98,13 @@ class ActorNursery: | |||
|                 Portal | None, | ||||
|             ] | ||||
|         ] = {} | ||||
| 
 | ||||
|         # portals spawned with ``run_in_actor()`` are | ||||
|         # cancelled when their "main" result arrives | ||||
|         self._cancel_after_result_on_exit: set = set() | ||||
|         self.cancelled: bool = False | ||||
|         self._join_procs = trio.Event() | ||||
|         self._at_least_one_child_in_debug: bool = False | ||||
|         self.errors = errors | ||||
|         self._scope_error: BaseException|None = None | ||||
|         self.exited = trio.Event() | ||||
| 
 | ||||
|         # NOTE: when no explicit call is made to | ||||
|  | @ -131,93 +115,28 @@ class ActorNursery: | |||
|         # and syncing purposes to any actor opened nurseries. | ||||
|         self._implicit_runtime_started: bool = False | ||||
| 
 | ||||
|         # TODO: remove the `.run_in_actor()` API and thus this 2ndary | ||||
|         # nursery when that API get's moved outside this primitive! | ||||
|         self._ria_nursery = ria_nursery | ||||
| 
 | ||||
|         # TODO, factor this into a .hilevel api! | ||||
|         # | ||||
|         # portals spawned with ``run_in_actor()`` are | ||||
|         # cancelled when their "main" result arrives | ||||
|         self._cancel_after_result_on_exit: set = set() | ||||
| 
 | ||||
|         # trio.Nursery-like cancel (request) statuses | ||||
|         self._cancelled_caught: bool = False | ||||
|         self._cancel_called: bool = False | ||||
| 
 | ||||
|     @property | ||||
|     def cancel_called(self) -> bool: | ||||
|         ''' | ||||
|         Records whether cancellation has been requested for this | ||||
|         actor-nursery by a call to `.cancel()` either due to: | ||||
|         - an explicit call by some actor-local-task, | ||||
|         - an implicit call due to an error/cancel emitted inside | ||||
|           the `tractor.open_nursery()` block. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._cancel_called | ||||
| 
 | ||||
|     @property | ||||
|     def cancelled_caught(self) -> bool: | ||||
|         ''' | ||||
|         Set when this nursery was able to cancel all spawned subactors | ||||
|         gracefully via an (implicit) call to `.cancel()`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._cancelled_caught | ||||
| 
 | ||||
|     # TODO! remove internal/test-suite usage! | ||||
|     @property | ||||
|     def cancelled(self) -> bool: | ||||
|         warnings.warn( | ||||
|             "`ActorNursery.cancelled` is now deprecated, use " | ||||
|             " `.cancel_called` instead.", | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         return ( | ||||
|             self._cancel_called | ||||
|             # and | ||||
|             # self._cancelled_caught | ||||
|         ) | ||||
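Migration for downstream code is mechanical; a sketch of checking the new flags after a nursery block exits (the helper fn is illustrative):

```python
def describe_cancellation(an) -> str:
    # an: the `ActorNursery` from a completed `open_nursery()` block
    if an.cancel_called and not an.cancelled_caught:
        return 'cancel requested but not all subactors exited gracefully'
    elif an.cancelled_caught:
        return 'all subactors gracefully cancelled'
    return 'no cancellation was requested'
```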
| 
 | ||||
|     async def start_actor( | ||||
|         self, | ||||
|         name: str, | ||||
| 
 | ||||
|         *, | ||||
| 
 | ||||
|         bind_addrs: list[UnwrappedAddress]|None = None, | ||||
|         rpc_module_paths: list[str]|None = None, | ||||
|         enable_transports: list[str] = [_state._def_tpt_proto], | ||||
|         enable_modules: list[str]|None = None, | ||||
|         loglevel: str|None = None,  # set log level per subactor | ||||
|         debug_mode: bool|None = None, | ||||
|         bind_addrs: list[tuple[str, int]] = [_default_bind_addr], | ||||
|         rpc_module_paths: list[str] | None = None, | ||||
|         enable_modules: list[str] | None = None, | ||||
|         loglevel: str | None = None,  # set log level per subactor | ||||
|         nursery: trio.Nursery | None = None, | ||||
|         debug_mode: bool | None = None, | ||||
|         infect_asyncio: bool = False, | ||||
| 
 | ||||
|         # TODO: ideally we can rm this once we no longer have | ||||
|         # a `._ria_nursery` since the dependent APIs have been | ||||
|         # removed! | ||||
|         nursery: trio.Nursery|None = None, | ||||
|         proc_kwargs: dict[str, any] = {} | ||||
| 
 | ||||
|     ) -> Portal: | ||||
|         ''' | ||||
|         Start a (daemon) actor: a process that has no designated | ||||
|         "main task" besides the runtime. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         loglevel: str = ( | ||||
|             loglevel | ||||
|             or self._actor.loglevel | ||||
|             or get_loglevel() | ||||
|         ) | ||||
|         loglevel = loglevel or self._actor.loglevel or get_loglevel() | ||||
| 
 | ||||
|         # configure and pass runtime state | ||||
|         _rtv = _state._runtime_vars.copy() | ||||
|         _rtv['_is_root'] = False | ||||
|         _rtv['_is_infected_aio'] = infect_asyncio | ||||
| 
 | ||||
|         # allow setting debug policy per actor | ||||
|         if debug_mode is not None: | ||||
|  | @ -236,17 +155,15 @@ class ActorNursery: | |||
|             enable_modules.extend(rpc_module_paths) | ||||
| 
 | ||||
|         subactor = Actor( | ||||
|             name=name, | ||||
|             uuid=mk_uuid(), | ||||
| 
 | ||||
|             name, | ||||
|             # modules allowed to invoked funcs from | ||||
|             enable_modules=enable_modules, | ||||
|             loglevel=loglevel, | ||||
| 
 | ||||
|             # verbatim relay this actor's registrar addresses | ||||
|             registry_addrs=current_actor().registry_addrs, | ||||
|             registry_addrs=current_actor().reg_addrs, | ||||
|         ) | ||||
|         parent_addr: UnwrappedAddress = self._actor.accept_addr | ||||
|         parent_addr = self._actor.accept_addr | ||||
|         assert parent_addr | ||||
| 
 | ||||
|         # start a task to spawn a process | ||||
|  | @ -265,18 +182,9 @@ class ActorNursery: | |||
|                 parent_addr, | ||||
|                 _rtv,  # run time vars | ||||
|                 infect_asyncio=infect_asyncio, | ||||
|                 proc_kwargs=proc_kwargs | ||||
|             ) | ||||
|         ) | ||||
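A short daemon-actor usage sketch for the method above; the RPC target is illustrative but `Portal.run()` with a function reference is the documented call style:

```python
import tractor
import trio

async def ping() -> str:
    return 'pong'

async def main() -> None:
    async with tractor.open_nursery() as an:
        # a daemon actor runs only the runtime plus any fns exposed
        # via its enabled modules
        portal = await an.start_actor('service', enable_modules=[__name__])
        assert await portal.run(ping) == 'pong'
        await an.cancel()

if __name__ == '__main__':
    trio.run(main)
```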
| 
 | ||||
|     # TODO: DEPRECATE THIS: | ||||
|     # -[ ] impl instead as a hilevel wrapper on | ||||
|     #   top of a `@context` style invocation. | ||||
|     #  |_ dynamic @context decoration on child side | ||||
|     #  |_ implicit `Portal.open_context() as (ctx, first):` | ||||
|     #    and `return first` on parent side. | ||||
|     #  |_ mention how it's similar to `trio-parallel` API? | ||||
|     # -[ ] use @api_frame on the wrapper | ||||
|     async def run_in_actor( | ||||
|         self, | ||||
| 
 | ||||
|  | @ -284,12 +192,11 @@ class ActorNursery: | |||
|         *, | ||||
| 
 | ||||
|         name: str | None = None, | ||||
|         bind_addrs: UnwrappedAddress|None = None, | ||||
|         bind_addrs: tuple[str, int] = [_default_bind_addr], | ||||
|         rpc_module_paths: list[str] | None = None, | ||||
|         enable_modules: list[str] | None = None, | ||||
|         loglevel: str | None = None,  # set log level per subactor | ||||
|         infect_asyncio: bool = False, | ||||
|         proc_kwargs: dict[str, any] = {}, | ||||
| 
 | ||||
|         **kwargs,  # explicit args to ``fn`` | ||||
| 
 | ||||
|  | @ -303,14 +210,13 @@ class ActorNursery: | |||
|         the actor is terminated. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         mod_path: str = fn.__module__ | ||||
| 
 | ||||
|         if name is None: | ||||
|             # use the explicit function name if not provided | ||||
|             name = fn.__name__ | ||||
| 
 | ||||
|         portal: Portal = await self.start_actor( | ||||
|         portal = await self.start_actor( | ||||
|             name, | ||||
|             enable_modules=[mod_path] + ( | ||||
|                 enable_modules or rpc_module_paths or [] | ||||
|  | @ -320,7 +226,6 @@ class ActorNursery: | |||
|             # use the run_in_actor nursery | ||||
|             nursery=self._ria_nursery, | ||||
|             infect_asyncio=infect_asyncio, | ||||
|             proc_kwargs=proc_kwargs | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: don't allow stream funcs | ||||
|  | @ -340,39 +245,28 @@ class ActorNursery: | |||
|         ) | ||||
|         return portal | ||||
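And the run-in-actor flavor wrapped above: spawn, run exactly one fn, then collect its result through the portal; a sketch where `Portal.result()` blocks until the "main task" returns:

```python
import tractor
import trio

async def cube(x: int) -> int:
    return x ** 3

async def main() -> None:
    async with tractor.open_nursery() as an:
        portal = await an.run_in_actor(cube, x=3)
        # the subactor is cancelled by the nursery once this arrives
        assert await portal.result() == 27

if __name__ == '__main__':
    trio.run(main)
```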
| 
 | ||||
|     # @api_frame | ||||
|     async def cancel( | ||||
|         self, | ||||
|         hard_kill: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Cancel this actor-nursery by instructing each subactor's | ||||
|         runtime to cancel and wait for all underlying sub-processes | ||||
|         to terminate. | ||||
|         Cancel this nursery by instructing each subactor to cancel | ||||
|         itself and wait for all subactors to terminate. | ||||
| 
 | ||||
|         If `hard_kill` is set then kill the processes directly using | ||||
|         the spawning-backend's API/OS-machinery without any attempt | ||||
|         at (graceful) `trio`-style cancellation using our | ||||
|         `Actor.cancel()`. | ||||
|         If ``hard_kill`` is set to ``True`` then kill the processes | ||||
|         directly without any far end graceful ``trio`` cancellation. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         self._cancel_called = True | ||||
|         self.cancelled = True | ||||
| 
 | ||||
|         # TODO: impl a repr for spawn more compact | ||||
|         # then `._children`.. | ||||
|         children: dict = self._children | ||||
|         child_count: int = len(children) | ||||
|         msg: str = f'Cancelling actor nursery with {child_count} children\n' | ||||
| 
 | ||||
|         server: IPCServer = self._actor.ipc_server | ||||
| 
 | ||||
|         with trio.move_on_after(3) as cs: | ||||
|             async with ( | ||||
|                 collapse_eg(), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|             async with trio.open_nursery() as tn: | ||||
| 
 | ||||
|                 subactor: Actor | ||||
|                 proc: trio.Process | ||||
|  | @ -391,7 +285,7 @@ class ActorNursery: | |||
| 
 | ||||
|                     else: | ||||
|                         if portal is None:  # actor hasn't fully spawned yet | ||||
|                             event: trio.Event = server._peer_connected[subactor.uid] | ||||
|                             event = self._actor._peer_connected[subactor.uid] | ||||
|                             log.warning( | ||||
|                                 f"{subactor.uid} never finished spawning?" | ||||
|                             ) | ||||
|  | @ -407,7 +301,7 @@ class ActorNursery: | |||
|                             if portal is None: | ||||
|                                 # cancelled while waiting on the event | ||||
|                                 # to arrive | ||||
|                                 chan = server._peers[subactor.uid][-1] | ||||
|                                 chan = self._actor._peers[subactor.uid][-1] | ||||
|                                 if chan: | ||||
|                                     portal = Portal(chan) | ||||
|                                 else:  # there's no other choice left | ||||
|  | @ -436,8 +330,6 @@ class ActorNursery: | |||
|             ) in children.values(): | ||||
|                 log.warning(f"Hard killing process {proc}") | ||||
|                 proc.terminate() | ||||
|         else: | ||||
|             # the graceful-cancel timeout never fired meaning all | ||||
|             # subactors were cancelled cleanly; record it. | ||||
|             self._cancelled_caught = True | ||||
| 
 | ||||
|         # mark ourselves as having (tried to have) cancelled all subactors | ||||
|         self._join_procs.set() | ||||
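Explicit calls are only needed for early teardown since nursery-block exit normally handles it; a sketch of both modes:

```python
async def teardown_early(an) -> None:
    # an: an `ActorNursery` from `tractor.open_nursery()`
    #
    # graceful: instruct each subactor's runtime to cancel itself,
    # then reap the underlying processes.
    await an.cancel()
    # forced alternative: skip graceful IPC cancellation and kill
    # procs via the spawning backend's machinery:
    # await an.cancel(hard_kill=True)
```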
|  | @ -446,15 +338,11 @@ class ActorNursery: | |||
| @acm | ||||
| async def _open_and_supervise_one_cancels_all_nursery( | ||||
|     actor: Actor, | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||
| 
 | ||||
|     # normally don't need to show user by default | ||||
|     __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|     outer_err: BaseException|None = None | ||||
|     inner_err: BaseException|None = None | ||||
|     # TODO: yay or nay? | ||||
|     __tracebackhide__ = True | ||||
| 
 | ||||
|     # the collection of errors retrieved from spawned sub-actors | ||||
|     errors: dict[tuple[str, str], BaseException] = {} | ||||
|  | @ -464,26 +352,21 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|     # handling errors that are generated by the inner nursery in | ||||
|     # a supervisor strategy **before** blocking indefinitely to wait for | ||||
|     # actors spawned in "daemon mode" (aka started using | ||||
|     # `ActorNursery.start_actor()`). | ||||
|     # ``ActorNursery.start_actor()``). | ||||
| 
 | ||||
|     # errors from this daemon actor nursery bubble up to caller | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         trio.open_nursery() as da_nursery, | ||||
|     ): | ||||
|     async with trio.open_nursery() as da_nursery: | ||||
|         try: | ||||
|             # This is the inner level "run in actor" nursery. It is | ||||
|             # awaited first since actors spawned in this way (using | ||||
|             # `ActorNusery.run_in_actor()`) are expected to only | ||||
|             # ``ActorNusery.run_in_actor()``) are expected to only | ||||
|             # return a single result and then complete (i.e. be cancelled | ||||
|             # gracefully). Errors collected from these actors are | ||||
|             # immediately raised for handling by a supervisor strategy. | ||||
|             # As such if the strategy propagates any error(s) upwards | ||||
|             # the above "daemon actor" nursery will be notified. | ||||
|             async with ( | ||||
|                 collapse_eg(), | ||||
|                 trio.open_nursery() as ria_nursery, | ||||
|             ): | ||||
|             async with trio.open_nursery() as ria_nursery: | ||||
| 
 | ||||
|                 an = ActorNursery( | ||||
|                     actor, | ||||
|                     ria_nursery, | ||||
|  | @ -500,12 +383,11 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                     # the "hard join phase". | ||||
|                     log.runtime( | ||||
|                         'Waiting on subactors to complete:\n' | ||||
|                         f'>}} {len(an._children)}\n' | ||||
|                         f'{pformat(an._children)}\n' | ||||
|                     ) | ||||
|                     an._join_procs.set() | ||||
| 
 | ||||
|                 except BaseException as _inner_err: | ||||
|                     inner_err = _inner_err | ||||
|                 except BaseException as inner_err: | ||||
|                     errors[actor.uid] = inner_err | ||||
| 
 | ||||
|                     # If we error in the root but the debugger is | ||||
|  | @ -514,7 +396,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                     # will make the pdb repl unusable. | ||||
|                     # Instead try to wait for pdb to be released before | ||||
|                     # tearing down. | ||||
|                     await debug.maybe_wait_for_debugger( | ||||
|                     await maybe_wait_for_debugger( | ||||
|                         child_in_debug=an._at_least_one_child_in_debug | ||||
|                     ) | ||||
| 
 | ||||
|  | @ -549,8 +431,8 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                             ContextCancelled, | ||||
|                         }: | ||||
|                             log.cancel( | ||||
|                                 'Actor-nursery caught remote cancellation\n' | ||||
|                                 '\n' | ||||
|                                 'Actor-nursery caught remote cancellation\n\n' | ||||
| 
 | ||||
|                                 f'{inner_err.tb_str}' | ||||
|                             ) | ||||
|                         else: | ||||
|  | @ -583,14 +465,12 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|             Exception, | ||||
|             BaseExceptionGroup, | ||||
|             trio.Cancelled | ||||
|         ) as _outer_err: | ||||
|             outer_err = _outer_err | ||||
| 
 | ||||
|             an._scope_error = outer_err or inner_err | ||||
|         ) as err: | ||||
| 
 | ||||
|             # XXX: yet another guard before allowing the cancel | ||||
|             # sequence in case a (single) child is in debug. | ||||
|             await debug.maybe_wait_for_debugger( | ||||
|             await maybe_wait_for_debugger( | ||||
|                 child_in_debug=an._at_least_one_child_in_debug | ||||
|             ) | ||||
| 
 | ||||
|  | @ -601,7 +481,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|             if an._children: | ||||
|                 log.cancel( | ||||
|                     'Actor-nursery cancelling due error type:\n' | ||||
|                     f'{outer_err}\n' | ||||
|                     f'{err}\n' | ||||
|                 ) | ||||
|                 with trio.CancelScope(shield=True): | ||||
|                     await an.cancel() | ||||
|  | @ -628,29 +508,13 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                 else: | ||||
|                     raise list(errors.values())[0] | ||||
| 
 | ||||
|             # show frame on any (likely) internal error | ||||
|             if ( | ||||
|                 not an.cancelled | ||||
|                 and an._scope_error | ||||
|             ): | ||||
|                 __tracebackhide__: bool = False | ||||
| 
 | ||||
|         # da_nursery scope end - nursery checkpoint | ||||
|     # final exit | ||||
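Stripped of the actor machinery, the nested layout above is plain `trio` one-cancels-all: errors from the inner scope propagate first so the outer (daemon) scope can be cancelled as a group. In miniature:

```python
import trio

async def supervise() -> None:
    async with trio.open_nursery() as outer:          # "daemon" tasks
        outer.start_soon(trio.sleep_forever)
        try:
            async with trio.open_nursery() as inner:  # "run-in-actor" tasks
                inner.start_soon(trio.sleep, 0.1)
        except BaseException:
            # inner errors surface here *before* the outer nursery
            # blocks waiting on its daemon tasks, so the whole group
            # can be torn down as one unit..
            outer.cancel_scope.cancel()
            raise
        # normal exit path: end the daemon scope explicitly (in the
        # real code this is the job of `ActorNursery.cancel()`)
        outer.cancel_scope.cancel()

trio.run(supervise)
```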
| 
 | ||||
| 
 | ||||
| _shutdown_msg: str = ( | ||||
|     'Actor-runtime-shutdown' | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| # @api_frame | ||||
| async def open_nursery( | ||||
|     *,  # named params only! | ||||
|     hide_tb: bool = True, | ||||
|     **kwargs, | ||||
|     # ^TODO, paramspec for `open_root_actor()` | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||
|     ''' | ||||
|  | @ -668,7 +532,6 @@ async def open_nursery( | |||
|     which cancellation scopes correspond to each spawned subactor set. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     implicit_runtime: bool = False | ||||
|     actor: Actor = current_actor(err_on_no_runtime=False) | ||||
|     an: ActorNursery|None = None | ||||
|  | @ -684,10 +547,7 @@ async def open_nursery( | |||
|             # mark us for teardown on exit | ||||
|             implicit_runtime: bool = True | ||||
| 
 | ||||
|             async with open_root_actor( | ||||
|                 hide_tb=hide_tb, | ||||
|                 **kwargs, | ||||
|             ) as actor: | ||||
|             async with open_root_actor(**kwargs) as actor: | ||||
|                 assert actor is current_actor() | ||||
| 
 | ||||
|                 try: | ||||
|  | @ -722,36 +582,13 @@ async def open_nursery( | |||
|                 an.exited.set() | ||||
| 
 | ||||
|     finally: | ||||
|         # show frame on any internal runtime-scope error | ||||
|         if ( | ||||
|             an | ||||
|             and | ||||
|             not an.cancelled | ||||
|             and | ||||
|             an._scope_error | ||||
|         ): | ||||
|             __tracebackhide__: bool = False | ||||
| 
 | ||||
| 
 | ||||
|         op_nested_an_repr: str = _pformat.nest_from_op( | ||||
|             input_op=')>', | ||||
|             text=f'{an}', | ||||
|             # nest_prefix='|_', | ||||
|             nest_indent=1,  # under > | ||||
|         msg: str = ( | ||||
|             'Actor-nursery exited\n' | ||||
|             f'|_{an}\n\n' | ||||
|         ) | ||||
|         an_msg: str = ( | ||||
|             f'Actor-nursery exited\n' | ||||
|             f'{op_nested_an_repr}\n' | ||||
|         ) | ||||
|         # keep noise low during std operation. | ||||
|         log.runtime(an_msg) | ||||
| 
 | ||||
|         # shutdown runtime if it was started | ||||
|         if implicit_runtime: | ||||
|             # shutdown runtime if it was started and report noisily | ||||
|             # that we did so. | ||||
|             msg: str = ( | ||||
|                 '\n' | ||||
|                 '\n' | ||||
|                 f'{_shutdown_msg} )>\n' | ||||
|             ) | ||||
|             log.info(msg) | ||||
|             msg += '=> Shutting down actor runtime <=\n' | ||||
| 
 | ||||
|         log.info(msg) | ||||
|  |  | |||
|  | @ -19,27 +19,15 @@ Various helpers/utils for auditing your `tractor` app and/or the | |||
| core runtime. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| import os | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import pathlib | ||||
| 
 | ||||
| import tractor | ||||
| from tractor.devx.debug import ( | ||||
|     BoxedMaybeException, | ||||
| ) | ||||
| from .pytest import ( | ||||
|     tractor_test as tractor_test | ||||
| ) | ||||
| from .fault_simulation import ( | ||||
|     break_ipc as break_ipc, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, use dulwhich for this instead? | ||||
| # -> we're going to likely need it (or something similar) | ||||
| #   for supporting hot-coad reload feats eventually anyway! | ||||
| def repodir() -> pathlib.Path: | ||||
|     ''' | ||||
|     Return the abspath to the repo directory. | ||||
|  | @ -63,35 +51,6 @@ def examples_dir() -> pathlib.Path: | |||
|     return repodir() / 'examples' | ||||
| 
 | ||||
| 
 | ||||
| def mk_cmd( | ||||
|     ex_name: str, | ||||
|     exs_subpath: str = 'debugging', | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Generate a shell command suitable to pass to `pexpect.spawn()` | ||||
|     which runs the script as a python program's entrypoint. | ||||
| 
 | ||||
|     In particular ensure we disable the new tb coloring by setting | ||||
|     `$PYTHON_COLORS=0` so that `pexpect` can pattern match without | ||||
|     color-escape-codes. | ||||
| 
 | ||||
|     ''' | ||||
|     script_path: pathlib.Path = ( | ||||
|         examples_dir() | ||||
|         / exs_subpath | ||||
|         / f'{ex_name}.py' | ||||
|     ) | ||||
|     py_cmd: str = ' '.join([ | ||||
|         'python', | ||||
|         str(script_path) | ||||
|     ]) | ||||
|     # XXX, required for py 3.13+ | ||||
|     # https://docs.python.org/3/using/cmdline.html#using-on-controlling-color | ||||
|     # https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS | ||||
|     os.environ['PYTHON_COLORS'] = '0' | ||||
|     return py_cmd | ||||
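Typical use pairs this with `pexpect` in the debugger suite; roughly as follows, where the example script name is illustrative and `(Pdb+)` is `pdbp`'s REPL prompt:

```python
import pexpect
from tractor._testing import mk_cmd

# spawn an `examples/debugging/` script with tb coloring disabled so
# the prompt pattern below matches without escape codes
child = pexpect.spawn(mk_cmd('debug_mode_hang'))  # illustrative name
child.expect(r'\(Pdb\+\)')
child.sendline('c')
```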
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def expect_ctxc( | ||||
|     yay: bool, | ||||
|  | @ -104,13 +63,12 @@ async def expect_ctxc( | |||
|     ''' | ||||
|     if yay: | ||||
|         try: | ||||
|             yield (maybe_exc := BoxedMaybeException()) | ||||
|             yield | ||||
|             raise RuntimeError('Never raised ctxc?') | ||||
|         except tractor.ContextCancelled as ctxc: | ||||
|             maybe_exc.value = ctxc | ||||
|         except tractor.ContextCancelled: | ||||
|             if reraise: | ||||
|                 raise | ||||
|             else: | ||||
|                 return | ||||
|     else: | ||||
|         yield (maybe_exc := BoxedMaybeException()) | ||||
|         yield | ||||
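Usage in a test then reads as the following sketch, where `portal` is assumed to come from a surrounding cancelled-context setup:

```python
import tractor
from tractor._testing import expect_ctxc

async def assert_ctxc_raised(portal) -> None:
    async with expect_ctxc(yay=True, reraise=False) as maybe_exc:
        await portal.result()
    # with `reraise=False` the ctxc is absorbed but boxed for asserts
    assert isinstance(maybe_exc.value, tractor.ContextCancelled)
```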
|  |  | |||
|  | @ -1,70 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Random IPC addr generation for isolating | ||||
| the discovery space between test sessions. | ||||
| 
 | ||||
|     Might eventually be useful to expose as a util set from | ||||
| our `tractor.discovery` subsys? | ||||
| 
 | ||||
| ''' | ||||
| import random | ||||
| from typing import ( | ||||
|     Type, | ||||
| ) | ||||
| from tractor import ( | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def get_rando_addr( | ||||
|     tpt_proto: str, | ||||
|     *, | ||||
| 
 | ||||
|     # choose random port at import time | ||||
|     _rando_port: int = random.randint(1000, 9999) | ||||
| 
 | ||||
| ) -> tuple[str, str|int]: | ||||
|     ''' | ||||
|     Used to globally override the runtime to the | ||||
|     per-test-session-dynamic addr so that all tests never conflict | ||||
|     with any other actor tree using the default. | ||||
| 
 | ||||
|     ''' | ||||
|     addr_type: Type[_addr.Address] = _addr._address_types[tpt_proto] | ||||
|     def_reg_addr: tuple[str, int] = _addr._default_lo_addrs[tpt_proto] | ||||
| 
 | ||||
|     # this is the "unwrapped" form expected to be passed to | ||||
|     # `.open_root_actor()` by test body. | ||||
|     testrun_reg_addr: tuple[str, int|str] | ||||
|     match tpt_proto: | ||||
|         case 'tcp': | ||||
|             testrun_reg_addr = ( | ||||
|                 addr_type.def_bindspace, | ||||
|                 _rando_port, | ||||
|             ) | ||||
| 
 | ||||
|         # NOTE, file-name uniqueness (no-collisions) will be based on | ||||
|         # the runtime-directory and root (pytest-proc's) pid. | ||||
|         case 'uds': | ||||
|             testrun_reg_addr = addr_type.get_random().unwrap() | ||||
| 
 | ||||
|     # XXX, as sanity it should never be the same as the default for the | ||||
|     # host-singleton registry actor. | ||||
|     assert def_reg_addr != testrun_reg_addr | ||||
| 
 | ||||
|     return testrun_reg_addr | ||||
|  | @ -1,92 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| `pytest` utils helpers and plugins for testing `tractor`'s runtime | ||||
| and applications. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| from tractor import ( | ||||
|     MsgStream, | ||||
| ) | ||||
| 
 | ||||
| async def break_ipc( | ||||
|     stream: MsgStream, | ||||
|     method: str|None = None, | ||||
|     pre_close: bool = False, | ||||
| 
 | ||||
|     def_method: str = 'socket_close', | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     XXX: close the channel right after an error is raised, | ||||
|     purposely breaking the IPC transport to make sure the parent | ||||
|     doesn't get stuck in debug or hang on the connection join. | ||||
|     This more or less simulates an infinite msg-receive hang on | ||||
|     the other end. | ||||
| 
 | ||||
|     ''' | ||||
|     # close channel via IPC prot msging before | ||||
|     # any transport breakage | ||||
|     if pre_close: | ||||
|         await stream.aclose() | ||||
| 
 | ||||
|     method: str = method or def_method | ||||
|     print( | ||||
|         '#################################\n' | ||||
|         'Simulating CHILD-side IPC BREAK!\n' | ||||
|         f'method: {method}\n' | ||||
|         f'pre `.aclose()`: {pre_close}\n' | ||||
|         '#################################\n' | ||||
|     ) | ||||
| 
 | ||||
|     match method: | ||||
|         case 'socket_close': | ||||
|             await stream._ctx.chan.transport.stream.aclose() | ||||
| 
 | ||||
|         case 'socket_eof': | ||||
|             # NOTE: `trio` does the following underneath this | ||||
|             # call in `src/trio/_highlevel_socket.py`: | ||||
|             # `Stream.socket.shutdown(tsocket.SHUT_WR)` | ||||
|             await stream._ctx.chan.transport.stream.send_eof() | ||||
| 
 | ||||
|         # TODO: remove since now this will be invalid with our | ||||
|         # new typed msg spec? | ||||
|         # case 'msg': | ||||
|         #     await stream._ctx.chan.send(None) | ||||
| 
 | ||||
|         # TODO: the actual real-world simulated cases like | ||||
|         # transport layer hangs and/or lower layer 2-gens type | ||||
|         # scenarios.. | ||||
|         # | ||||
|         # -[ ] already have some issues for this general testing | ||||
|         # area: | ||||
|         #  - https://github.com/goodboy/tractor/issues/97 | ||||
|         #  - https://github.com/goodboy/tractor/issues/124 | ||||
|         #   - PR from @guille: | ||||
|         #     https://github.com/goodboy/tractor/pull/149 | ||||
|         # case 'hang': | ||||
|         # TODO: framework research: | ||||
|         # | ||||
|         # - https://github.com/GuoTengda1993/pynetem | ||||
|         # - https://github.com/shopify/toxiproxy | ||||
|         # - https://manpages.ubuntu.com/manpages/trusty/man1/wirefilter.1.html | ||||
| 
 | ||||
|         case _: | ||||
|             raise RuntimeError( | ||||
|                 f'IPC break method unsupported: {method}' | ||||
|             ) | ||||
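As used in the fault-injection suites, this was invoked from the child-side stream handler; a sketch with the ctx/stream setup assumed from a surrounding `@tractor.context` fn (import path per the `_testing` re-export shown earlier in this diff):

```python
from tractor._testing import break_ipc

async def child_breaks_transport(stream) -> None:
    # politely close the stream first, then rip out the socket so the
    # parent observes a hard transport failure instead of a clean EoC
    await break_ipc(
        stream,
        method='socket_close',
        pre_close=True,
    )
```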
|  | @ -26,46 +26,29 @@ from functools import ( | |||
| import inspect | ||||
| import platform | ||||
| 
 | ||||
| import pytest | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| def tractor_test(fn): | ||||
|     ''' | ||||
|     Decorator for async test fns, wrapping them as "native" | ||||
|     looking sync funcs runnable by `pytest` and auto invoked with | ||||
|     `trio.run()` (much like the `pytest-trio` plugin's approach). | ||||
|     Decorator for async test funcs to present them as "native" | ||||
|     looking sync funcs runnable by `pytest` using `trio.run()`. | ||||
| 
 | ||||
|     Further the test fn body will be invoked AFTER booting the actor | ||||
|     runtime, i.e. from inside a `tractor.open_root_actor()` block AND | ||||
|     with various runtime and tooling parameters implicitly passed as | ||||
|     requested by the test session's config; see immediately below. | ||||
|     Use: | ||||
| 
 | ||||
|     Basic deco use: | ||||
|     --------------- | ||||
|     @tractor_test | ||||
|     async def test_whatever(): | ||||
|         await ... | ||||
| 
 | ||||
|       @tractor_test | ||||
|       async def test_whatever(): | ||||
|           await ... | ||||
|     If fixtures: | ||||
| 
 | ||||
|         - ``reg_addr`` (a socket addr tuple where arbiter is listening) | ||||
|         - ``loglevel`` (logging level passed to tractor internals) | ||||
|         - ``start_method`` (subprocess spawning backend) | ||||
| 
 | ||||
|     Runtime config via special fixtures: | ||||
|     ------------------------------------ | ||||
|     If any of the following fixture are requested by the wrapped test | ||||
|     fn (via normal func-args declaration), | ||||
| 
 | ||||
|     - `reg_addr` (a socket addr tuple where arbiter is listening) | ||||
|     - `loglevel` (logging level passed to tractor internals) | ||||
|     - `start_method` (subprocess spawning backend) | ||||
| 
 | ||||
|     (TODO support) | ||||
|     - `tpt_proto` (IPC transport protocol key) | ||||
| 
 | ||||
|     they will be automatically injected to each test as normally | ||||
|     expected as well as passed to the initial | ||||
|     `tractor.open_root_actor()` funcargs. | ||||
| 
 | ||||
|     are defined in the `pytest` fixture space they will be automatically | ||||
|     injected to tests declaring these funcargs. | ||||
|     ''' | ||||
|     @wraps(fn) | ||||
|     def wrapper( | ||||
|  | @ -128,164 +111,3 @@ def tractor_test(fn): | |||
|         return trio.run(main) | ||||
| 
 | ||||
|     return wrapper | ||||
| 
 | ||||
| 
 | ||||
| def pytest_addoption( | ||||
|     parser: pytest.Parser, | ||||
| ): | ||||
|     # parser.addoption( | ||||
|     #     "--ll", | ||||
|     #     action="store", | ||||
|     #     dest='loglevel', | ||||
|     #     default='ERROR', help="logging level to set when testing" | ||||
|     # ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--spawn-backend", | ||||
|         action="store", | ||||
|         dest='spawn_backend', | ||||
|         default='trio', | ||||
|         help="Processing spawning backend to use for test run", | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--tpdb", | ||||
|         "--debug-mode", | ||||
|         action="store_true", | ||||
|         dest='tractor_debug_mode', | ||||
|         # default=False, | ||||
|         help=( | ||||
|             'Enable a flag that can be used by tests to set the ' | ||||
|             '`debug_mode: bool` for engaging the internal ' | ||||
|             'multi-proc debugger sys.' | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
|     # provide which IPC transport protocols opting-in test suites | ||||
|     # should accumulatively run against. | ||||
|     parser.addoption( | ||||
|         "--tpt-proto", | ||||
|         nargs='+',  # accumulate-multiple-args | ||||
|         action="store", | ||||
|         dest='tpt_protos', | ||||
|         default=['tcp'], | ||||
|         help="Transport protocol to use under the `tractor.ipc.Channel`", | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_configure(config): | ||||
|     backend = config.option.spawn_backend | ||||
|     tractor._spawn.try_set_start_method(backend) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def debug_mode(request) -> bool: | ||||
|     ''' | ||||
|     Flag state for whether `--tpdb` (for `tractor`-py-debugger) | ||||
|     was passed to the test run. | ||||
| 
 | ||||
|     Normally tests should pass this directly to `.open_root_actor()` | ||||
|     to allow the user to opt into suite-wide crash handling. | ||||
| 
 | ||||
|     ''' | ||||
|     debug_mode: bool = request.config.option.tractor_debug_mode | ||||
|     return debug_mode | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def spawn_backend(request) -> str: | ||||
|     return request.config.option.spawn_backend | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def tpt_protos(request) -> list[str]: | ||||
| 
 | ||||
|     # allow quoting on CLI | ||||
|     proto_keys: list[str] = [ | ||||
|         proto_key.replace('"', '').replace("'", "") | ||||
|         for proto_key in request.config.option.tpt_protos | ||||
|     ] | ||||
| 
 | ||||
|     # ?TODO, eventually support multiple protos per test-sesh? | ||||
|     if len(proto_keys) > 1: | ||||
|         pytest.fail( | ||||
|             'We only support one `--tpt-proto <key>` atm!\n' | ||||
|         ) | ||||
| 
 | ||||
|     # XXX ensure we support the protocol by name via lookup! | ||||
|     for proto_key in proto_keys: | ||||
|         addr_type = tractor._addr._address_types[proto_key] | ||||
|         assert addr_type.proto_key == proto_key | ||||
| 
 | ||||
|     yield proto_keys | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     scope='session', | ||||
|     autouse=True, | ||||
| ) | ||||
| def tpt_proto( | ||||
|     tpt_protos: list[str], | ||||
| ) -> str: | ||||
|     proto_key: str = tpt_protos[0] | ||||
| 
 | ||||
|     from tractor import _state | ||||
|     if _state._def_tpt_proto != proto_key: | ||||
|         _state._def_tpt_proto = proto_key | ||||
| 
 | ||||
|     yield proto_key | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def reg_addr( | ||||
|     tpt_proto: str, | ||||
| ) -> tuple[str, int|str]: | ||||
|     ''' | ||||
|     Deliver a test-sesh unique registry address such | ||||
|     that each run's (tests which use this fixture) will | ||||
|     have no conflicts/cross-talk when running simultaneously | ||||
|     nor will interfere with other live `tractor` apps active | ||||
|     on the same network-host (namespace). | ||||
| 
 | ||||
|     ''' | ||||
|     from tractor._testing.addr import get_rando_addr | ||||
|     return get_rando_addr( | ||||
|         tpt_proto=tpt_proto, | ||||
|     ) | ||||
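A test consuming these session fixtures forwards them to the runtime entrypoint; a sketch assuming `pytest-trio`-style async collection and that `open_root_actor()` accepts the same `registry_addrs`/`debug_mode` params used elsewhere in this branch:

```python
import pytest
import tractor

@pytest.mark.trio
async def test_with_isolated_registry(reg_addr, debug_mode):
    async with tractor.open_root_actor(
        registry_addrs=[reg_addr],
        debug_mode=debug_mode,
    ):
        # runtime is up and registered on the per-session addr
        assert tractor.current_actor()
```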
| 
 | ||||
| 
 | ||||
| def pytest_generate_tests( | ||||
|     metafunc: pytest.Metafunc, | ||||
| ): | ||||
|     spawn_backend: str = metafunc.config.option.spawn_backend | ||||
| 
 | ||||
|     if not spawn_backend: | ||||
|         # XXX some weird windows bug with `pytest`? | ||||
|         spawn_backend = 'trio' | ||||
| 
 | ||||
|     # TODO: maybe just use the literal `._spawn.SpawnMethodKey`? | ||||
|     assert spawn_backend in ( | ||||
|         'mp_spawn', | ||||
|         'mp_forkserver', | ||||
|         'trio', | ||||
|     ) | ||||
| 
 | ||||
|     # NOTE: used-to-be-used-to dynamically parametrize tests for when | ||||
|     # you just passed --spawn-backend=`mp` on the cli, but now we expect | ||||
|     # that cli input to be manually specified, BUT, maybe we'll do | ||||
|     # something like this again in the future? | ||||
|     if 'start_method' in metafunc.fixturenames: | ||||
|         metafunc.parametrize( | ||||
|             "start_method", | ||||
|             [spawn_backend], | ||||
|             scope='module', | ||||
|         ) | ||||
| 
 | ||||
|     # TODO, parametrize any `tpt_proto: str` declaring tests! | ||||
|     # proto_tpts: list[str] = metafunc.config.option.proto_tpts | ||||
|     # if 'tpt_proto' in metafunc.fixturenames: | ||||
|     #     metafunc.parametrize( | ||||
|     #         'tpt_proto', | ||||
|     #         proto_tpts,  # TODO, double check this list usage! | ||||
|     #         scope='module', | ||||
|     #     ) | ||||
|  |  | |||
|  | @ -1,35 +0,0 @@ | |||
| import os | ||||
| import random | ||||
| 
 | ||||
| 
 | ||||
| def generate_sample_messages( | ||||
|     amount: int, | ||||
|     rand_min: int = 0, | ||||
|     rand_max: int = 0, | ||||
|     silent: bool = False | ||||
| ) -> tuple[list[bytes], int]: | ||||
| 
 | ||||
|     msgs = [] | ||||
|     size = 0 | ||||
| 
 | ||||
|     if not silent: | ||||
|         print(f'\ngenerating {amount} messages...') | ||||
| 
 | ||||
|     for i in range(amount): | ||||
|         msg = f'[{i:08}]'.encode('utf-8') | ||||
| 
 | ||||
|         if rand_max > 0: | ||||
|             msg += os.urandom( | ||||
|                 random.randint(rand_min, rand_max)) | ||||
| 
 | ||||
|         size += len(msg) | ||||
| 
 | ||||
|         msgs.append(msg) | ||||
| 
 | ||||
|         if not silent and i and i % 10_000 == 0: | ||||
|             print(f'{i} generated') | ||||
| 
 | ||||
|     if not silent: | ||||
|         print(f'done, {size:,} bytes in total') | ||||
| 
 | ||||
|     return msgs, size | ||||
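Callers built randomized payload batches for IPC throughput tests with it, e.g.:

```python
# ~10k framed msgs, each padded with 0-256 random bytes
msgs, total_bytes = generate_sample_messages(
    10_000,
    rand_min=0,
    rand_max=256,
    silent=True,
)
# each msg carries at least the 10-byte `[{i:08}]` frame
assert len(msgs) == 10_000 and total_bytes >= 10_000 * 10
```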
|  | @ -20,59 +20,18 @@ Runtime "developer experience" utils and addons to aid our | |||
| and working with/on the actor runtime. | ||||
| 
 | ||||
| """ | ||||
| from .debug import ( | ||||
| from ._debug import ( | ||||
|     maybe_wait_for_debugger as maybe_wait_for_debugger, | ||||
|     acquire_debug_lock as acquire_debug_lock, | ||||
|     breakpoint as breakpoint, | ||||
|     pause as pause, | ||||
|     pause_from_sync as pause_from_sync, | ||||
|     sigint_shield as sigint_shield, | ||||
|     shield_sigint_handler as shield_sigint_handler, | ||||
|     MultiActorPdb as MultiActorPdb, | ||||
|     open_crash_handler as open_crash_handler, | ||||
|     maybe_open_crash_handler as maybe_open_crash_handler, | ||||
|     maybe_init_greenback as maybe_init_greenback, | ||||
|     post_mortem as post_mortem, | ||||
|     mk_pdb as mk_pdb, | ||||
| ) | ||||
| from ._stackscope import ( | ||||
|     enable_stack_on_sig as enable_stack_on_sig, | ||||
| ) | ||||
| from .pformat import ( | ||||
|     add_div as add_div, | ||||
|     pformat_caller_frame as pformat_caller_frame, | ||||
|     pformat_boxed_tb as pformat_boxed_tb, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, move this to a new `.devx._pdbp` mod? | ||||
| def _enable_readline_feats() -> str: | ||||
|     ''' | ||||
|     Handle `readline` when compiled with `libedit` to avoid breaking | ||||
|     tab completion in `pdbp` (and its dep `tabcompleter`) | ||||
|     particularly since `uv` cpython distros are compiled this way.. | ||||
| 
 | ||||
|     See docs for deats, | ||||
|     https://docs.python.org/3/library/readline.html#module-readline | ||||
| 
 | ||||
|     Originally discovered soln via SO answer, | ||||
|     https://stackoverflow.com/q/49287102 | ||||
| 
 | ||||
|     ''' | ||||
|     import readline | ||||
|     if ( | ||||
|         # 3.13+ attr | ||||
|         # https://docs.python.org/3/library/readline.html#readline.backend | ||||
|         (getattr(readline, 'backend', False) == 'libedit') | ||||
|         or | ||||
|         'libedit' in readline.__doc__ | ||||
|     ): | ||||
|         readline.parse_and_bind("python:bind -v") | ||||
|         readline.parse_and_bind("python:bind ^I rl_complete") | ||||
|         return 'libedit' | ||||
|     else: | ||||
|         readline.parse_and_bind("tab: complete") | ||||
|         readline.parse_and_bind("set editing-mode vi") | ||||
|         readline.parse_and_bind("set keymap vi") | ||||
|         return 'readline' | ||||
| 
 | ||||
| 
 | ||||
| _enable_readline_feats() | ||||
|  |  | |||