Compare commits
	
		
			60 Commits 
		
	
	
		
			main
			...
			sigintsavi
		
	
	| Author | SHA1 | Date | 
|---|---|---|
|  | fec2ba004c | |
|  | ba9c914221 | |
|  | 30ee3f2dcc | |
|  | 0b4fc4fc47 | |
|  | 6b8c193221 | |
|  | 05167bdc70 | |
|  | fa21083b51 | |
|  | e6ad7a117b | |
|  | 4366873582 | |
|  | 9e6a22e52e | |
|  | 0ab49cd244 | |
|  | 3fafa87ea9 | |
|  | a6f5b9396a | |
|  | 61af2dc5aa | |
|  | ba857fe85c | |
|  | cb221b9e7c | |
|  | 3bc4778950 | |
|  | 5ae21e4753 | |
|  | d4a36e57d1 | |
|  | 58956ae950 | |
|  | a864f1e729 | |
|  | a4bc5f79ad | |
|  | c132b7f624 | |
|  | b659326d5b | |
|  | d971e9bc9d | |
|  | 611120c67c | |
|  | 7f6cace40b | |
|  | fe4adbf6f0 | |
|  | 6ccfeb17d5 | |
|  | 9bed332a94 | |
|  | 13df959d90 | |
|  | d0074291a1 | |
|  | 8559ad69f3 | |
|  | e519df1bd2 | |
|  | 24fd87d969 | |
|  | 91054a8a42 | |
|  | cdc7bf6549 | |
|  | c865d01e85 | |
|  | e1caeeb8de | |
|  | 7c25aa176f | |
|  | 3b7985292f | |
|  | e8fc820b92 | |
|  | b2fdbc44d1 | |
|  | f7823a46b8 | |
|  | f76c809c39 | |
|  | 9e56881163 | |
|  | 8291ee09b3 | |
|  | 4a441f0988 | |
|  | df0108a0bb | |
|  | 8537e17251 | |
|  | 20acb50d94 | |
|  | eab895864f | |
|  | 65a9f69d6c | |
|  | 24b6cc0209 | |
|  | f488db6d8d | |
|  | c5d335c057 | |
|  | 4594fe3501 | |
|  | 5f0262fd98 | |
|  | 59e7f29eed | |
|  | e2dfd6e99d | 
|  | @ -8,70 +8,44 @@ on: | |||
|   workflow_dispatch: | ||||
| 
 | ||||
| jobs: | ||||
|   # ------ sdist ------ | ||||
|   # test that we can generate a software distribution and install it | ||||
|   # thus avoid missing file issues after packaging. | ||||
|   # | ||||
|   # -[x] produce sdist with uv | ||||
|   # ------ - ------ | ||||
| 
 | ||||
|   mypy: | ||||
|     name: 'MyPy' | ||||
|     runs-on: ubuntu-latest | ||||
| 
 | ||||
|     steps: | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v2 | ||||
| 
 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.10' | ||||
| 
 | ||||
|       - name: Install dependencies | ||||
|         run: pip install -U . --upgrade-strategy eager -r requirements-test.txt | ||||
| 
 | ||||
|       - name: Run MyPy check | ||||
|         run: mypy tractor/ --ignore-missing-imports | ||||
| 
 | ||||
|   sdist-linux: | ||||
|     name: 'sdist' | ||||
|     runs-on: ubuntu-latest | ||||
| 
 | ||||
|     steps: | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v4 | ||||
|         uses: actions/checkout@v2 | ||||
| 
 | ||||
|       - name: Install latest uv | ||||
|         uses: astral-sh/setup-uv@v6 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.10' | ||||
| 
 | ||||
|       - name: Build sdist as tar.gz | ||||
|         run: uv build --sdist --python=3.13 | ||||
|       - name: Build sdist | ||||
|         run: python setup.py sdist --formats=zip | ||||
| 
 | ||||
|       - name: Install sdist from .tar.gz | ||||
|         run: python -m pip install dist/*.tar.gz | ||||
| 
 | ||||
|   # ------ type-check ------ | ||||
|   # mypy: | ||||
|   #   name: 'MyPy' | ||||
|   #   runs-on: ubuntu-latest | ||||
| 
 | ||||
|   #   steps: | ||||
|   #     - name: Checkout | ||||
|   #       uses: actions/checkout@v4 | ||||
| 
 | ||||
|   #     - name: Install latest uv | ||||
|   #       uses: astral-sh/setup-uv@v6 | ||||
| 
 | ||||
|   #     # faster due to server caching? | ||||
|   #     # https://docs.astral.sh/uv/guides/integration/github/#setting-up-python | ||||
|   #     - name: "Set up Python" | ||||
|   #       uses: actions/setup-python@v6 | ||||
|   #       with: | ||||
|   #         python-version-file: "pyproject.toml" | ||||
| 
 | ||||
|   #     # w uv | ||||
|   #     # - name: Set up Python | ||||
|   #     #   run: uv python install | ||||
| 
 | ||||
|   #     - name: Setup uv venv | ||||
|   #       run: uv venv .venv --python=3.13 | ||||
| 
 | ||||
|   #     - name: Install | ||||
|   #       run: uv sync --dev | ||||
| 
 | ||||
|   #     # TODO, ty cmd over repo | ||||
|   #     # - name: type check with ty | ||||
|   #     #   run: ty ./tractor/ | ||||
| 
 | ||||
|   #     # - uses: actions/cache@v3 | ||||
|   #     #     name: Cache uv virtenv as default .venv | ||||
|   #     #     with: | ||||
|   #     #       path: ./.venv | ||||
|   #     #       key: venv-${{ hashFiles('uv.lock') }} | ||||
| 
 | ||||
|   #     - name: Run MyPy check | ||||
|   #       run: mypy tractor/ --ignore-missing-imports --show-traceback | ||||
|       - name: Install sdist from .zips | ||||
|         run: python -m pip install dist/*.zip | ||||
| 
 | ||||
| 
 | ||||
|   testing-linux: | ||||
|  | @ -83,86 +57,58 @@ jobs: | |||
|       fail-fast: false | ||||
|       matrix: | ||||
|         os: [ubuntu-latest] | ||||
|         python-version: ['3.13'] | ||||
|         spawn_backend: [ | ||||
|           'trio', | ||||
|           # 'mp_spawn', | ||||
|           # 'mp_forkserver', | ||||
|         ] | ||||
|         python: ['3.9', '3.10'] | ||||
|         spawn_backend: ['trio', 'mp'] | ||||
| 
 | ||||
|     steps: | ||||
| 
 | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v2 | ||||
| 
 | ||||
|       - name: 'Install uv + py-${{ matrix.python-version }}' | ||||
|         uses: astral-sh/setup-uv@v6 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: ${{ matrix.python-version }} | ||||
|           python-version: '${{ matrix.python }}' | ||||
| 
 | ||||
|       # GH way.. faster? | ||||
|       # - name: setup-python@v6 | ||||
|       #   uses: actions/setup-python@v6 | ||||
|       #   with: | ||||
|       #     python-version: '${{ matrix.python-version }}' | ||||
| 
 | ||||
|       # consider caching for speedups? | ||||
|       # https://docs.astral.sh/uv/guides/integration/github/#caching | ||||
| 
 | ||||
|       - name: Install the project w uv | ||||
|         run: uv sync --all-extras --dev | ||||
| 
 | ||||
|       # - name: Install dependencies | ||||
|       #   run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||
| 
 | ||||
|       - name: List deps tree | ||||
|         run: uv tree | ||||
|       - name: Install dependencies | ||||
|         run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||
| 
 | ||||
|       - name: Run tests | ||||
|         run: uv run pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx | ||||
|         run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rs -v | ||||
| 
 | ||||
|   # XXX legacy NOTE XXX | ||||
|   # | ||||
|   # We skip 3.10 on windows for now due to not having any collabs to | ||||
|   # debug the CI failures. Anyone wanting to hack and solve them is very | ||||
|   # welcome, but our primary user base is not using that OS. | ||||
|   # We skip 3.10 on windows for now due to | ||||
|   # https://github.com/pytest-dev/pytest/issues/8733 | ||||
|   # some kinda weird `pyreadline` issue.. | ||||
| 
 | ||||
|   # TODO: use job filtering to accomplish instead of repeated | ||||
|   # boilerplate as is above XD: | ||||
|   # - https://docs.github.com/en/actions/learn-github-actions/managing-complex-workflows | ||||
|   # - https://docs.github.com/en/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix | ||||
|   # - https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idif | ||||
|   # testing-windows: | ||||
|   #   name: '${{ matrix.os }} Python ${{ matrix.python }} - ${{ matrix.spawn_backend }}' | ||||
|   #   timeout-minutes: 12 | ||||
|   #   runs-on: ${{ matrix.os }} | ||||
|   testing-windows: | ||||
|     name: '${{ matrix.os }} Python ${{ matrix.python }} - ${{ matrix.spawn_backend }}' | ||||
|     timeout-minutes: 12 | ||||
|     runs-on: ${{ matrix.os }} | ||||
| 
 | ||||
|   #   strategy: | ||||
|   #     fail-fast: false | ||||
|   #     matrix: | ||||
|   #       os: [windows-latest] | ||||
|   #       python: ['3.10'] | ||||
|   #       spawn_backend: ['trio', 'mp'] | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         os: [windows-latest] | ||||
|         python: ['3.9', '3.10'] | ||||
|         spawn_backend: ['trio', 'mp'] | ||||
| 
 | ||||
|   #   steps: | ||||
|     steps: | ||||
| 
 | ||||
|   #     - name: Checkout | ||||
|   #       uses: actions/checkout@v2 | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v2 | ||||
| 
 | ||||
|   #     - name: Setup python | ||||
|   #       uses: actions/setup-python@v2 | ||||
|   #       with: | ||||
|   #         python-version: '${{ matrix.python }}' | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '${{ matrix.python }}' | ||||
| 
 | ||||
|   #     - name: Install dependencies | ||||
|   #       run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||
|       - name: Install dependencies | ||||
|         run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||
| 
 | ||||
|   #     # TODO: pretty sure this solves debugger deps-issues on windows, but it needs to | ||||
|   #     # be verified by someone with a native setup. | ||||
|   #     # - name: Force pyreadline3 | ||||
|   #     #   run: pip uninstall pyreadline; pip install -U pyreadline3 | ||||
| 
 | ||||
|   #     - name: List dependencies | ||||
|   #       run: pip list | ||||
| 
 | ||||
|   #     - name: Run tests | ||||
|   #       run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx | ||||
|       - name: Run tests | ||||
|         run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rs --full-trace | ||||
|  |  | |||
|  | @ -0,0 +1,7 @@ | |||
| Add ``tractor.query_actor()`` an addr looker-upper which doesn't deliver | ||||
| a ``Portal`` instance and instead just a socket address ``tuple``. | ||||
| 
 | ||||
| Sometimes it's handy to just have a simple way to figure out if | ||||
| a "service" actor is up, so add this discovery helper for that. We'll | ||||
| prolly just leave it undocumented for now until we figure out | ||||
| a longer-term/better discovery system. | ||||
							
								
								
									
										142
									
								
								NEWS.rst
								
								
								
								
							
							
						
						
									
										142
									
								
								NEWS.rst
								
								
								
								
							|  | @ -4,148 +4,6 @@ Changelog | |||
| 
 | ||||
| .. towncrier release notes start | ||||
| 
 | ||||
| tractor 0.1.0a5 (2022-08-03) | ||||
| ============================ | ||||
| 
 | ||||
| This is our final release supporting Python 3.9 since we will be moving | ||||
| internals to the new `match:` syntax from 3.10 going forward and | ||||
| further, we have officially dropped usage of the `msgpack` library and | ||||
| happily adopted `msgspec`. | ||||
| 
 | ||||
| Features | ||||
| -------- | ||||
| 
 | ||||
| - `#165 <https://github.com/goodboy/tractor/issues/165>`_: Add SIGINT | ||||
|   protection to our `pdbpp` based debugger subystem such that for | ||||
|   (single-depth) actor trees in debug mode we ignore interrupts in any | ||||
|   actor currently holding the TTY lock thus avoiding clobbering IPC | ||||
|   connections and/or task and process state when working in the REPL. | ||||
| 
 | ||||
|   As a big note currently so called "nested" actor trees (trees with | ||||
|   actors having more then one parent/ancestor) are not fully supported | ||||
|   since we don't yet have a mechanism to relay the debug mode knowledge | ||||
|   "up" the actor tree (for eg. when handling a crash in a leaf actor). | ||||
|   As such currently there is a set of tests and known scenarios which will | ||||
|   result in process cloberring by the zombie repaing machinery and these | ||||
|   have been documented in https://github.com/goodboy/tractor/issues/320. | ||||
| 
 | ||||
|   The implementation details include: | ||||
| 
 | ||||
|   - utilizing a custom SIGINT handler which we apply whenever an actor's | ||||
|     runtime enters the debug machinery, which we also make sure the | ||||
|     stdlib's `pdb` configuration doesn't override (which it does by | ||||
|     default without special instance config). | ||||
|   - litter the runtime with `maybe_wait_for_debugger()` mostly in spots | ||||
|     where the root actor should block before doing embedded nursery | ||||
|     teardown ops which both cancel potential-children-in-deubg as well | ||||
|     as eventually trigger zombie reaping machinery. | ||||
|   - hardening of the TTY locking semantics/API both in terms of IPC | ||||
|     terminations and cancellation and lock release determinism from | ||||
|     sync debugger instance methods. | ||||
|   - factoring of locking infrastructure into a new `._debug.Lock` global | ||||
|     which encapsulates all details of the ``trio`` sync primitives and | ||||
|     task/actor uid management and tracking. | ||||
| 
 | ||||
|   We also add `ctrl-c` cases throughout the test suite though these are | ||||
|   disabled for py3.9 (`pdbpp` UX differences that don't seem worth | ||||
|   compensating for, especially since this will be our last 3.9 supported | ||||
|   release) and there are a slew of marked cases that aren't expected to | ||||
|   work in CI more generally (as mentioned in the "nested" tree note | ||||
|   above) despite seemingly working  when run manually on linux. | ||||
| 
 | ||||
| - `#304 <https://github.com/goodboy/tractor/issues/304>`_: Add a new | ||||
|   ``to_asyncio.LinkedTaskChannel.subscribe()`` which gives task-oriented | ||||
|   broadcast functionality semantically equivalent to | ||||
|   ``tractor.MsgStream.subscribe()`` this makes it possible for multiple | ||||
|   ``trio``-side tasks to consume ``asyncio``-side task msgs in tandem. | ||||
| 
 | ||||
|   Further Improvements to the test suite were added in this patch set | ||||
|   including a new scenario test for a sub-actor managed "service nursery" | ||||
|   (implementing the basics of a "service manager") including use of | ||||
|   *infected asyncio* mode. Further we added a lower level | ||||
|   ``test_trioisms.py`` to start to track issues we need to work around in | ||||
|   ``trio`` itself which in this case included a bug we were trying to | ||||
|   solve related to https://github.com/python-trio/trio/issues/2258. | ||||
| 
 | ||||
| 
 | ||||
| Bug Fixes | ||||
| --------- | ||||
| 
 | ||||
| - `#318 <https://github.com/goodboy/tractor/issues/318>`_: Fix | ||||
|   a previously undetected ``trio``-``asyncio`` task lifetime linking | ||||
|   issue with the ``to_asyncio.open_channel_from()`` api where both sides | ||||
|   where not properly waiting/signalling termination and it was possible | ||||
|   for ``asyncio``-side errors to not propagate due to a race condition. | ||||
| 
 | ||||
|   The implementation fix summary is: | ||||
|   - add state to signal the end of the ``trio`` side task to be | ||||
|     read by the ``asyncio`` side and always cancel any ongoing | ||||
|     task in such cases. | ||||
|   - always wait on the ``asyncio`` task termination from the ``trio`` | ||||
|     side on error before maybe raising said error. | ||||
|   - always close the ``trio`` mem chan on exit to ensure the other | ||||
|     side can detect it and follow. | ||||
| 
 | ||||
| 
 | ||||
| Trivial/Internal Changes | ||||
| ------------------------ | ||||
| 
 | ||||
| - `#248 <https://github.com/goodboy/tractor/issues/248>`_: Adjust the | ||||
|   `tractor._spawn.soft_wait()` strategy to avoid sending an actor cancel | ||||
|   request (via `Portal.cancel_actor()`) if either the child process is | ||||
|   detected as having terminated or the IPC channel is detected to be | ||||
|   closed. | ||||
| 
 | ||||
|   This ensures (even) more deterministic inter-actor cancellation by | ||||
|   avoiding the timeout condition where possible when a whild never | ||||
|   sucessfully spawned, crashed, or became un-contactable over IPC. | ||||
| 
 | ||||
| - `#295 <https://github.com/goodboy/tractor/issues/295>`_: Add an | ||||
|   experimental ``tractor.msg.NamespacePath`` type for passing Python | ||||
|   objects by "reference" through a ``str``-subtype message and using the | ||||
|   new ``pkgutil.resolve_name()`` for reference loading. | ||||
| 
 | ||||
| - `#298 <https://github.com/goodboy/tractor/issues/298>`_: Add a new | ||||
|   `tractor.experimental` subpackage for staging new high level APIs and | ||||
|   subystems that we might eventually make built-ins. | ||||
| 
 | ||||
| - `#300 <https://github.com/goodboy/tractor/issues/300>`_: Update to and | ||||
|   pin latest ``msgpack`` (1.0.3) and ``msgspec`` (0.4.0) both of which | ||||
|   required adjustments for backwards imcompatible API tweaks. | ||||
| 
 | ||||
| - `#303 <https://github.com/goodboy/tractor/issues/303>`_: Fence off | ||||
|   ``multiprocessing`` imports until absolutely necessary in an effort to | ||||
|   avoid "resource tracker" spawning side effects that seem to have | ||||
|   varying degrees of unreliability per Python release. Port to new | ||||
|   ``msgspec.DecodeError``. | ||||
| 
 | ||||
| - `#305 <https://github.com/goodboy/tractor/issues/305>`_: Add | ||||
|   ``tractor.query_actor()`` an addr looker-upper which doesn't deliver | ||||
|   a ``Portal`` instance and instead just a socket address ``tuple``. | ||||
| 
 | ||||
|   Sometimes it's handy to just have a simple way to figure out if | ||||
|   a "service" actor is up, so add this discovery helper for that. We'll | ||||
|   prolly just leave it undocumented for now until we figure out | ||||
|   a longer-term/better discovery system. | ||||
| 
 | ||||
| - `#316 <https://github.com/goodboy/tractor/issues/316>`_: Run windows | ||||
|   CI jobs on python 3.10 after some hacks for ``pdbpp`` dependency | ||||
|   issues. | ||||
| 
 | ||||
|   Issue was to do with the now deprecated `pyreadline` project which | ||||
|   should be changed over to `pyreadline3`. | ||||
| 
 | ||||
| - `#317 <https://github.com/goodboy/tractor/issues/317>`_: Drop use of | ||||
|   the ``msgpack`` package and instead move fully to the ``msgspec`` | ||||
|   codec library. | ||||
| 
 | ||||
|   We've now used ``msgspec`` extensively in production and there's no | ||||
|   reason to not use it as default. Further this change preps us for the up | ||||
|   and coming typed messaging semantics (#196), dialog-unprotocol system | ||||
|   (#297), and caps-based messaging-protocols (#299) planned before our | ||||
|   first beta. | ||||
| 
 | ||||
| 
 | ||||
| tractor 0.1.0a4 (2021-12-18) | ||||
| ============================ | ||||
| 
 | ||||
|  |  | |||
							
								
								
									
										19
									
								
								default.nix
								
								
								
								
							
							
						
						
									
										19
									
								
								default.nix
								
								
								
								
							|  | @ -1,19 +0,0 @@ | |||
| { pkgs ? import <nixpkgs> {} }: | ||||
| let | ||||
|   nativeBuildInputs = with pkgs; [ | ||||
|     stdenv.cc.cc.lib | ||||
|     uv | ||||
|   ]; | ||||
| 
 | ||||
| in | ||||
| pkgs.mkShell { | ||||
|   inherit nativeBuildInputs; | ||||
| 
 | ||||
|   LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath nativeBuildInputs; | ||||
|   TMPDIR = "/tmp"; | ||||
| 
 | ||||
|   shellHook = '' | ||||
|     set -e | ||||
|     uv venv .venv --python=3.12 | ||||
|   ''; | ||||
| } | ||||
							
								
								
									
										195
									
								
								docs/README.rst
								
								
								
								
							
							
						
						
									
										195
									
								
								docs/README.rst
								
								
								
								
							|  | @ -1,126 +1,40 @@ | |||
| |logo| ``tractor``: distributed structurred concurrency | ||||
| |logo| ``tractor``: next-gen Python parallelism | ||||
| 
 | ||||
| ``tractor`` is a `structured concurrency`_ (SC), multi-processing_ runtime built on trio_. | ||||
| |gh_actions| | ||||
| |docs| | ||||
| 
 | ||||
| Fundamentally, ``tractor`` provides parallelism via | ||||
| ``trio``-"*actors*": independent Python **processes** (i.e. | ||||
| *non-shared-memory threads*) which can schedule ``trio`` tasks whilst | ||||
| maintaining *end-to-end SC* inside a *distributed supervision tree*. | ||||
| ``tractor`` is a `structured concurrent`_, multi-processing_ runtime | ||||
| built on trio_. | ||||
| 
 | ||||
| Cross-process (and thus cross-host) SC is accomplished through the | ||||
| combined use of our, | ||||
| 
 | ||||
| - "actor nurseries_" which provide for spawning multiple, and | ||||
|   possibly nested, Python processes each running a ``trio`` scheduled | ||||
|   runtime - a call to ``trio.run()``, | ||||
| - an "SC-transitive supervision protocol" enforced as an | ||||
|   IPC-message-spec encapsulating all RPC-dialogs. | ||||
| Fundamentally ``tractor`` gives you parallelism via ``trio``-"*actors*": | ||||
| our nurseries_ let you spawn new Python processes which each run a ``trio`` | ||||
| scheduled runtime - a call to ``trio.run()``. | ||||
| 
 | ||||
| We believe the system adheres to the `3 axioms`_ of an "`actor model`_" | ||||
| but likely **does not** look like what **you** probably *think* an "actor | ||||
| model" looks like, and that's **intentional**. | ||||
| but likely *does not* look like what *you* probably think an "actor | ||||
| model" looks like, and that's *intentional*. | ||||
| 
 | ||||
| 
 | ||||
| Where do i start!? | ||||
| ------------------ | ||||
| The first step to grok ``tractor`` is to get an intermediate | ||||
| knowledge of ``trio`` and **structured concurrency** B) | ||||
| 
 | ||||
| Some great places to start are, | ||||
| 
 | ||||
| - the seminal `blog post`_ | ||||
| - obviously the `trio docs`_ | ||||
| - wikipedia's nascent SC_ page | ||||
| - the fancy diagrams @ libdill-docs_ | ||||
| The first step to grok ``tractor`` is to get the basics of ``trio`` down. | ||||
| A great place to start is the `trio docs`_ and this `blog post`_. | ||||
| 
 | ||||
| 
 | ||||
| Features | ||||
| -------- | ||||
| - **It's just** a ``trio`` API! | ||||
| - *Infinitely nesteable* process trees running embedded ``trio`` tasks. | ||||
| - Swappable, OS-specific, process spawning via multiple backends. | ||||
| - Modular IPC stack, allowing for custom interchange formats (eg. | ||||
|   as offered from `msgspec`_), varied transport protocols (TCP, RUDP, | ||||
|   QUIC, wireguard), and OS-env specific higher-perf primitives (UDS, | ||||
|   shm-ring-buffers). | ||||
| - Optionally distributed_: all IPC and RPC APIs work over multi-host | ||||
|   transports the same as local. | ||||
| - Builtin high-level streaming API that enables your app to easily | ||||
|   leverage the benefits of a "`cheap or nasty`_" `(un)protocol`_. | ||||
| - A "native UX" around a multi-process safe debugger REPL using | ||||
|   `pdbp`_ (a fork & fix of `pdb++`_) | ||||
| - "Infected ``asyncio``" mode: support for starting an actor's | ||||
|   runtime as a `guest`_ on the ``asyncio`` loop allowing us to | ||||
|   provide stringent SC-style ``trio.Task``-supervision around any | ||||
|   ``asyncio.Task`` spawned via our ``tractor.to_asyncio`` APIs. | ||||
| - A **very naive** and still very much work-in-progress inter-actor | ||||
|   `discovery`_ sys with plans to support multiple `modern protocol`_ | ||||
|   approaches. | ||||
| - Various ``trio`` extension APIs via ``tractor.trionics`` such as, | ||||
|   - task fan-out `broadcasting`_, | ||||
|   - multi-task-single-resource-caching and fan-out-to-multi | ||||
|     ``__aenter__()`` APIs for ``@acm`` functions, | ||||
|   - (WIP) a ``TaskMngr``: one-cancels-one style nursery supervisor. | ||||
| 
 | ||||
| 
 | ||||
| Status of `main` / infra | ||||
| ------------------------ | ||||
| 
 | ||||
| - |gh_actions| | ||||
| - |docs| | ||||
| 
 | ||||
| 
 | ||||
| Install | ||||
| ------- | ||||
| ``tractor`` is still in a *alpha-near-beta-stage* for many | ||||
| of its subsystems, however we are very close to having a stable | ||||
| lowlevel runtime and API. | ||||
| 
 | ||||
| As such, it's currently recommended that you clone and install the | ||||
| repo from source:: | ||||
| 
 | ||||
|     pip install git+git://github.com/goodboy/tractor.git | ||||
| 
 | ||||
| 
 | ||||
| We use the very hip `uv`_ for project mgmt:: | ||||
| 
 | ||||
|     git clone https://github.com/goodboy/tractor.git | ||||
|     cd tractor | ||||
|     uv sync --dev | ||||
|     uv run python examples/rpc_bidir_streaming.py | ||||
| 
 | ||||
| Consider activating a virtual/project-env before starting to hack on | ||||
| the code base:: | ||||
| 
 | ||||
|     # you could use plain ol' venvs | ||||
|     # https://docs.astral.sh/uv/pip/environments/ | ||||
|     uv venv tractor_py313 --python 3.13 | ||||
| 
 | ||||
|     # but @goodboy prefers the more explicit (and shell agnostic) | ||||
|     # https://docs.astral.sh/uv/configuration/environment/#uv_project_environment | ||||
|     UV_PROJECT_ENVIRONMENT="tractor_py313 | ||||
| 
 | ||||
|     # hint hint, enter @goodboy's fave shell B) | ||||
|     uv run --dev xonsh | ||||
| 
 | ||||
| Alongside all this we ofc offer "releases" on PyPi:: | ||||
| 
 | ||||
|     pip install tractor | ||||
| 
 | ||||
| Just note that YMMV since the main git branch is often much further | ||||
| ahead then any latest release. | ||||
| 
 | ||||
| 
 | ||||
| Example codez | ||||
| ------------- | ||||
| In ``tractor``'s (very lacking) documention we prefer to point to | ||||
| example scripts in the repo over duplicating them in docs, but with | ||||
| that in mind here are some definitive snippets to try and hook you | ||||
| into digging deeper. | ||||
| - **It's just** a ``trio`` API | ||||
| - *Infinitely nesteable* process trees | ||||
| - Builtin IPC streaming APIs with task fan-out broadcasting | ||||
| - A (first ever?) "native" multi-core debugger UX for Python using `pdb++`_ | ||||
| - Support for a swappable, OS specific, process spawning layer | ||||
| - A modular transport stack, allowing for custom serialization (eg. with | ||||
|   `msgspec`_), communications protocols, and environment specific IPC | ||||
|   primitives | ||||
| - Support for spawning process-level-SC, inter-loop one-to-one-task oriented | ||||
|   ``asyncio`` actors via "infected ``asyncio``" mode | ||||
| - `structured chadcurrency`_ from the ground up | ||||
| 
 | ||||
| 
 | ||||
| Run a func in a process | ||||
| *********************** | ||||
| ----------------------- | ||||
| Use ``trio``'s style of focussing on *tasks as functions*: | ||||
| 
 | ||||
| .. code:: python | ||||
|  | @ -178,7 +92,7 @@ might want to check out `trio-parallel`_. | |||
| 
 | ||||
| 
 | ||||
| Zombie safe: self-destruct a process tree | ||||
| ***************************************** | ||||
| ----------------------------------------- | ||||
| ``tractor`` tries to protect you from zombies, no matter what. | ||||
| 
 | ||||
| .. code:: python | ||||
|  | @ -204,7 +118,7 @@ Zombie safe: self-destruct a process tree | |||
|             f"running in pid {os.getpid()}" | ||||
|         ) | ||||
| 
 | ||||
|         await trio.sleep_forever() | ||||
|        await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
|     async def main(): | ||||
|  | @ -234,8 +148,8 @@ it **is a bug**. | |||
| 
 | ||||
| 
 | ||||
| "Native" multi-process debugging | ||||
| ******************************** | ||||
| Using the magic of `pdbp`_ and our internal IPC, we've | ||||
| -------------------------------- | ||||
| Using the magic of `pdb++`_ and our internal IPC, we've | ||||
| been able to create a native feeling debugging experience for | ||||
| any (sub-)process in your ``tractor`` tree. | ||||
| 
 | ||||
|  | @ -289,7 +203,7 @@ We're hoping to add a respawn-from-repl system soon! | |||
| 
 | ||||
| 
 | ||||
| SC compatible bi-directional streaming | ||||
| ************************************** | ||||
| -------------------------------------- | ||||
| Yes, you saw it here first; we provide 2-way streams | ||||
| with reliable, transitive setup/teardown semantics. | ||||
| 
 | ||||
|  | @ -381,7 +295,7 @@ hear your thoughts on! | |||
| 
 | ||||
| 
 | ||||
| Worker poolz are easy peasy | ||||
| *************************** | ||||
| --------------------------- | ||||
| The initial ask from most new users is *"how do I make a worker | ||||
| pool thing?"*. | ||||
| 
 | ||||
|  | @ -403,10 +317,10 @@ This uses no extra threads, fancy semaphores or futures; all we need | |||
| is ``tractor``'s IPC! | ||||
| 
 | ||||
| "Infected ``asyncio``" mode | ||||
| *************************** | ||||
| --------------------------- | ||||
| Have a bunch of ``asyncio`` code you want to force to be SC at the process level? | ||||
| 
 | ||||
| Check out our experimental system for `guest`_-mode controlled | ||||
| Check out our experimental system for `guest-mode`_ controlled | ||||
| ``asyncio`` actors: | ||||
| 
 | ||||
| .. code:: python | ||||
|  | @ -512,7 +426,7 @@ We need help refining the `asyncio`-side channel API to be more | |||
| 
 | ||||
| 
 | ||||
| Higher level "cluster" APIs | ||||
| *************************** | ||||
| --------------------------- | ||||
| To be extra terse the ``tractor`` devs have started hacking some "higher | ||||
| level" APIs for managing actor trees/clusters. These interfaces should | ||||
| generally be condsidered provisional for now but we encourage you to try | ||||
|  | @ -569,6 +483,18 @@ spawn a flat cluster: | |||
| .. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py | ||||
| 
 | ||||
| 
 | ||||
| Install | ||||
| ------- | ||||
| From PyPi:: | ||||
| 
 | ||||
|     pip install tractor | ||||
| 
 | ||||
| 
 | ||||
| From git:: | ||||
| 
 | ||||
|     pip install git+git://github.com/goodboy/tractor.git | ||||
| 
 | ||||
| 
 | ||||
| Under the hood | ||||
| -------------- | ||||
| ``tractor`` is an attempt to pair trionic_ `structured concurrency`_ with | ||||
|  | @ -641,13 +567,6 @@ Help us push toward the future of distributed `Python`. | |||
| - Typed capability-based (dialog) protocols ( see `#196 | ||||
|   <https://github.com/goodboy/tractor/issues/196>`_ with draft work | ||||
|   started in `#311 <https://github.com/goodboy/tractor/pull/311>`_) | ||||
| - We **recently disabled CI-testing on windows** and need help getting | ||||
|   it running again! (see `#327 | ||||
|   <https://github.com/goodboy/tractor/pull/327>`_). **We do have windows | ||||
|   support** (and have for quite a while) but since no active hacker | ||||
|   exists in the user-base to help test on that OS, for now we're not | ||||
|   actively maintaining testing due to the added hassle and general | ||||
|   latency.. | ||||
| 
 | ||||
| 
 | ||||
| Feel like saying hi? | ||||
|  | @ -660,7 +579,6 @@ matrix seems too hip, we're also mostly all in the the `trio gitter | |||
| channel`_! | ||||
| 
 | ||||
| .. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228 | ||||
| .. _distributed: https://en.wikipedia.org/wiki/Distributed_computing | ||||
| .. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing | ||||
| .. _trio: https://github.com/python-trio/trio | ||||
| .. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements | ||||
|  | @ -672,32 +590,23 @@ channel`_! | |||
| .. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s | ||||
| .. _trio gitter channel: https://gitter.im/python-trio/general | ||||
| .. _matrix channel: https://matrix.to/#/!tractor:matrix.org | ||||
| .. _broadcasting: https://github.com/goodboy/tractor/pull/229 | ||||
| .. _modern procotol: https://en.wikipedia.org/wiki/Rendezvous_protocol | ||||
| .. _pdbp: https://github.com/mdmintz/pdbp | ||||
| .. _pdb++: https://github.com/pdbpp/pdbpp | ||||
| .. _cheap or nasty: https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern | ||||
| .. _(un)protocol: https://zguide.zeromq.org/docs/chapter7/#Unprotocols | ||||
| .. _discovery: https://zguide.zeromq.org/docs/chapter8/#Discovery | ||||
| .. _modern protocol: https://en.wikipedia.org/wiki/Rendezvous_protocol | ||||
| .. _guest mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||
| .. _messages: https://en.wikipedia.org/wiki/Message_passing | ||||
| .. _trio docs: https://trio.readthedocs.io/en/latest/ | ||||
| .. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/ | ||||
| .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||
| .. _SC: https://en.wikipedia.org/wiki/Structured_concurrency | ||||
| .. _libdill-docs: https://sustrik.github.io/libdill/structured-concurrency.html | ||||
| .. _structured chadcurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||
| .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||
| .. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony | ||||
| .. _async generators: https://www.python.org/dev/peps/pep-0525/ | ||||
| .. _trio-parallel: https://github.com/richardsheridan/trio-parallel | ||||
| .. _uv: https://docs.astral.sh/uv/ | ||||
| .. _msgspec: https://jcristharif.com/msgspec/ | ||||
| .. _guest: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||
| .. _guest-mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||
| 
 | ||||
| .. | ||||
|    NOTE, on generating badge links from the UI | ||||
|    https://docs.github.com/en/actions/how-tos/monitoring-and-troubleshooting-workflows/monitoring-workflows/adding-a-workflow-status-badge?ref=gitguardian-blog-automated-secrets-detection#using-the-ui | ||||
| .. |gh_actions| image:: https://github.com/goodboy/tractor/actions/workflows/ci.yml/badge.svg?branch=main | ||||
|     :target: https://github.com/goodboy/tractor/actions/workflows/ci.yml | ||||
| 
 | ||||
| .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fgoodboy%2Ftractor%2Fbadge&style=popout-square | ||||
|     :target: https://actions-badge.atrox.dev/goodboy/tractor/goto | ||||
| 
 | ||||
| .. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest | ||||
|     :target: https://tractor.readthedocs.io/en/latest/?badge=latest | ||||
|  |  | |||
|  | @ -396,7 +396,7 @@ tasks spawned via multiple RPC calls to an actor can modify | |||
| 
 | ||||
| 
 | ||||
|         # a per process cache | ||||
|         _actor_cache: dict[str, bool] = {} | ||||
|         _actor_cache: Dict[str, bool] = {} | ||||
| 
 | ||||
| 
 | ||||
|         def ping_endpoints(endpoints: List[str]): | ||||
|  |  | |||
|  | @ -1,262 +0,0 @@ | |||
| ''' | ||||
| Complex edge case where during real-time streaming the IPC tranport | ||||
| channels are wiped out (purposely in this example though it could have | ||||
| been an outage) and we want to ensure that despite being in debug mode | ||||
| (or not) the user can sent SIGINT once they notice the hang and the | ||||
| actor tree will eventually be cancelled without leaving any zombies. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from functools import partial | ||||
| 
 | ||||
| from tractor import ( | ||||
|     open_nursery, | ||||
|     context, | ||||
|     Context, | ||||
|     ContextCancelled, | ||||
|     MsgStream, | ||||
|     _testing, | ||||
|     trionics, | ||||
| ) | ||||
| import trio | ||||
| import pytest | ||||
| 
 | ||||
| 
 | ||||
| async def break_ipc_then_error( | ||||
|     stream: MsgStream, | ||||
|     break_ipc_with: str|None = None, | ||||
|     pre_close: bool = False, | ||||
| ): | ||||
|     await _testing.break_ipc( | ||||
|         stream=stream, | ||||
|         method=break_ipc_with, | ||||
|         pre_close=pre_close, | ||||
|     ) | ||||
|     async for msg in stream: | ||||
|         await stream.send(msg) | ||||
| 
 | ||||
|     assert 0 | ||||
| 
 | ||||
| 
 | ||||
| async def iter_ipc_stream( | ||||
|     stream: MsgStream, | ||||
|     break_ipc_with: str|None = None, | ||||
|     pre_close: bool = False, | ||||
| ): | ||||
|     async for msg in stream: | ||||
|         await stream.send(msg) | ||||
| 
 | ||||
| 
 | ||||
| @context | ||||
| async def recv_and_spawn_net_killers( | ||||
| 
 | ||||
|     ctx: Context, | ||||
|     break_ipc_after: bool|int = False, | ||||
|     pre_close: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Receive stream msgs and spawn some IPC killers mid-stream. | ||||
| 
 | ||||
|     ''' | ||||
|     broke_ipc: bool = False | ||||
|     await ctx.started() | ||||
|     async with ( | ||||
|         ctx.open_stream() as stream, | ||||
|         trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         async for i in stream: | ||||
|             print(f'child echoing {i}') | ||||
|             if not broke_ipc: | ||||
|                 await stream.send(i) | ||||
|             else: | ||||
|                 await trio.sleep(0.01) | ||||
| 
 | ||||
|             if ( | ||||
|                 break_ipc_after | ||||
|                 and | ||||
|                 i >= break_ipc_after | ||||
|             ): | ||||
|                 broke_ipc = True | ||||
|                 tn.start_soon( | ||||
|                     iter_ipc_stream, | ||||
|                     stream, | ||||
|                 ) | ||||
|                 tn.start_soon( | ||||
|                     partial( | ||||
|                         break_ipc_then_error, | ||||
|                         stream=stream, | ||||
|                         pre_close=pre_close, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def stuff_hangin_ctlc(timeout: float = 1) -> None: | ||||
| 
 | ||||
|     with trio.move_on_after(timeout) as cs: | ||||
|         yield timeout | ||||
| 
 | ||||
|     if cs.cancelled_caught: | ||||
|         # pretend to be a user seeing no streaming action | ||||
|         # thinking it's a hang, and then hitting ctl-c.. | ||||
|         print( | ||||
|             f"i'm a user on the PARENT side and thingz hangin " | ||||
|             f'after timeout={timeout} ???\n\n' | ||||
|             'MASHING CTlR-C..!?\n' | ||||
|         ) | ||||
|         raise KeyboardInterrupt | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     debug_mode: bool = False, | ||||
|     start_method: str = 'trio', | ||||
|     loglevel: str = 'cancel', | ||||
| 
 | ||||
|     # by default we break the parent IPC first (if configured to break | ||||
|     # at all), but this can be changed so the child does first (even if | ||||
|     # both are set to break). | ||||
|     break_parent_ipc_after: int|bool = False, | ||||
|     break_child_ipc_after: int|bool = False, | ||||
|     pre_close: bool = False, | ||||
|     tpt_proto: str = 'tcp', | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with ( | ||||
|         open_nursery( | ||||
|             start_method=start_method, | ||||
| 
 | ||||
|             # NOTE: even debugger is used we shouldn't get | ||||
|             # a hang since it never engages due to broken IPC | ||||
|             debug_mode=debug_mode, | ||||
|             loglevel=loglevel, | ||||
|             enable_transports=[tpt_proto], | ||||
| 
 | ||||
|         ) as an, | ||||
|     ): | ||||
|         sub_name: str = 'chitty_hijo' | ||||
|         portal = await an.start_actor( | ||||
|             sub_name, | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         async with ( | ||||
|             stuff_hangin_ctlc(timeout=2) as timeout, | ||||
|             _testing.expect_ctxc( | ||||
|                 yay=( | ||||
|                     break_parent_ipc_after | ||||
|                     or | ||||
|                     break_child_ipc_after | ||||
|                 ), | ||||
|                 # TODO: we CAN'T remove this right? | ||||
|                 # since we need the ctxc to bubble up from either | ||||
|                 # the stream API after the `None` msg is sent | ||||
|                 # (which actually implicitly cancels all remote | ||||
|                 # tasks in the hijo) or from simluated | ||||
|                 # KBI-mash-from-user | ||||
|                 # or should we expect that a KBI triggers the ctxc | ||||
|                 # and KBI in an eg? | ||||
|                 reraise=True, | ||||
|             ), | ||||
| 
 | ||||
|             portal.open_context( | ||||
|                 recv_and_spawn_net_killers, | ||||
|                 break_ipc_after=break_child_ipc_after, | ||||
|                 pre_close=pre_close, | ||||
|             ) as (ctx, sent), | ||||
|         ): | ||||
|             rx_eoc: bool = False | ||||
|             ipc_break_sent: bool = False | ||||
|             async with ctx.open_stream() as stream: | ||||
|                 for i in range(1000): | ||||
| 
 | ||||
|                     if ( | ||||
|                         break_parent_ipc_after | ||||
|                         and | ||||
|                         i > break_parent_ipc_after | ||||
|                         and | ||||
|                         not ipc_break_sent | ||||
|                     ): | ||||
|                         print( | ||||
|                             '#################################\n' | ||||
|                             'Simulating PARENT-side IPC BREAK!\n' | ||||
|                             '#################################\n' | ||||
|                         ) | ||||
| 
 | ||||
|                         # TODO: other methods? see break func above. | ||||
|                         # await stream._ctx.chan.send(None) | ||||
|                         # await stream._ctx.chan.transport.stream.send_eof() | ||||
|                         await stream._ctx.chan.transport.stream.aclose() | ||||
|                         ipc_break_sent = True | ||||
| 
 | ||||
|                     # it actually breaks right here in the | ||||
|                     # mp_spawn/forkserver backends and thus the | ||||
|                     # zombie reaper never even kicks in? | ||||
|                     try: | ||||
|                         print(f'parent sending {i}') | ||||
|                         await stream.send(i) | ||||
|                     except ContextCancelled as ctxc: | ||||
|                         print( | ||||
|                             'parent received ctxc on `stream.send()`\n' | ||||
|                             f'{ctxc}\n' | ||||
|                         ) | ||||
|                         assert 'root' in ctxc.canceller | ||||
|                         assert sub_name in ctx.canceller | ||||
| 
 | ||||
|                         # TODO: is this needed or no? | ||||
|                         raise | ||||
| 
 | ||||
|                     except trio.ClosedResourceError: | ||||
|                         # NOTE: don't send if we already broke the | ||||
|                         # connection to avoid raising a closed-error | ||||
|                         # such that we drop through to the ctl-c | ||||
|                         # mashing by user. | ||||
|                         await trio.sleep(0.01) | ||||
| 
 | ||||
|                     # timeout: int = 1 | ||||
|                     # with trio.move_on_after(timeout) as cs: | ||||
|                     async with stuff_hangin_ctlc() as timeout: | ||||
|                         print( | ||||
|                             f'PARENT `stream.receive()` with timeout={timeout}\n' | ||||
|                         ) | ||||
|                         # NOTE: in the parent side IPC failure case this | ||||
|                         # will raise an ``EndOfChannel`` after the child | ||||
|                         # is killed and sends a stop msg back to it's | ||||
|                         # caller/this-parent. | ||||
|                         try: | ||||
|                             rx = await stream.receive() | ||||
|                             print( | ||||
|                                 "I'm a happy PARENT user and echoed to me is\n" | ||||
|                                 f'{rx}\n' | ||||
|                             ) | ||||
|                         except trio.EndOfChannel: | ||||
|                             rx_eoc: bool = True | ||||
|                             print('MsgStream got EoC for PARENT') | ||||
|                             raise | ||||
| 
 | ||||
|             print( | ||||
|                 'Streaming finished and we got Eoc.\n' | ||||
|                 'Canceling `.open_context()` in root with\n' | ||||
|                 'CTlR-C..' | ||||
|             ) | ||||
|             if rx_eoc: | ||||
|                 assert stream.closed | ||||
|                 try: | ||||
|                     await stream.send(i) | ||||
|                     pytest.fail('stream not closed?') | ||||
|                 except ( | ||||
|                     trio.ClosedResourceError, | ||||
|                     trio.EndOfChannel, | ||||
|                 ) as send_err: | ||||
|                     if rx_eoc: | ||||
|                         assert send_err is stream._eoc | ||||
|                     else: | ||||
|                         assert send_err is stream._closed | ||||
| 
 | ||||
|             raise KeyboardInterrupt | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -1,136 +0,0 @@ | |||
| ''' | ||||
| Examples of using the builtin `breakpoint()` from an `asyncio.Task` | ||||
| running in a subactor spawned with `infect_asyncio=True`. | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     to_asyncio, | ||||
|     Portal, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| async def aio_sleep_forever(): | ||||
|     await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
| async def bp_then_error( | ||||
|     to_trio: trio.MemorySendChannel, | ||||
|     from_trio: asyncio.Queue, | ||||
| 
 | ||||
|     raise_after_bp: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # sync with `trio`-side (caller) task | ||||
|     to_trio.send_nowait('start') | ||||
| 
 | ||||
|     # NOTE: what happens here inside the hook needs some refinement.. | ||||
|     # => seems like it's still `.debug._set_trace()` but | ||||
|     #    we set `Lock.local_task_in_debug = 'sync'`, we probably want | ||||
|     #    some further, at least, meta-data about the task/actor in debug | ||||
|     #    in terms of making it clear it's `asyncio` mucking about. | ||||
|     breakpoint()  # asyncio-side | ||||
| 
 | ||||
|     # short checkpoint / delay | ||||
|     await asyncio.sleep(0.5)  # asyncio-side | ||||
| 
 | ||||
|     if raise_after_bp: | ||||
|         raise ValueError('asyncio side error!') | ||||
| 
 | ||||
|     # TODO: test case with this so that it gets cancelled? | ||||
|     else: | ||||
|         # XXX NOTE: this is required in order to get the SIGINT-ignored | ||||
|         # hang case documented in the module script section! | ||||
|         await aio_sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def trio_ctx( | ||||
|     ctx: tractor.Context, | ||||
|     bp_before_started: bool = False, | ||||
| ): | ||||
| 
 | ||||
|     # this will block until the ``asyncio`` task sends a "first" | ||||
|     # message, see first line in above func. | ||||
|     async with ( | ||||
|         to_asyncio.open_channel_from( | ||||
|             bp_then_error, | ||||
|             # raise_after_bp=not bp_before_started, | ||||
|         ) as (first, chan), | ||||
| 
 | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         assert first == 'start' | ||||
| 
 | ||||
|         if bp_before_started: | ||||
|             await tractor.pause()  # trio-side | ||||
| 
 | ||||
|         await ctx.started(first)  # trio-side | ||||
| 
 | ||||
|         tn.start_soon( | ||||
|             to_asyncio.run_task, | ||||
|             aio_sleep_forever, | ||||
|         ) | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     bps_all_over: bool = True, | ||||
| 
 | ||||
|     # TODO, WHICH OF THESE HAZ BUGZ? | ||||
|     cancel_from_root: bool = False, | ||||
|     err_from_root: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         maybe_enable_greenback=True, | ||||
|         # loglevel='devx', | ||||
|     ) as an: | ||||
|         ptl: Portal = await an.start_actor( | ||||
|             'aio_daemon', | ||||
|             enable_modules=[__name__], | ||||
|             infect_asyncio=True, | ||||
|             debug_mode=True, | ||||
|             # loglevel='cancel', | ||||
|         ) | ||||
| 
 | ||||
|         async with ptl.open_context( | ||||
|             trio_ctx, | ||||
|             bp_before_started=bps_all_over, | ||||
|         ) as (ctx, first): | ||||
| 
 | ||||
|             assert first == 'start' | ||||
| 
 | ||||
|             # pause in parent to ensure no cross-actor | ||||
|             # locking problems exist! | ||||
|             await tractor.pause()  # trio-root | ||||
| 
 | ||||
|             if cancel_from_root: | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|             if err_from_root: | ||||
|                 assert 0 | ||||
|             else: | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
|         # TODO: case where we cancel from trio-side while asyncio task | ||||
|         # has debugger lock? | ||||
|         # await ptl.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
| 
 | ||||
|     # works fine B) | ||||
|     trio.run(main) | ||||
| 
 | ||||
|     # will hang and ignores SIGINT !! | ||||
|     # NOTE: you'll need to send a SIGQUIT (via ctl-\) to kill it | ||||
|     # manually.. | ||||
|     # trio.run(main, True) | ||||
|  | @ -1,9 +0,0 @@ | |||
| ''' | ||||
| Reproduce a bug where enabling debug mode for a sub-actor actually causes | ||||
| a hang on teardown... | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
|  | @ -1,5 +1,5 @@ | |||
| ''' | ||||
| Fast fail test with a `Context`. | ||||
| Fast fail test with a context. | ||||
| 
 | ||||
| Ensure the partially initialized sub-actor process | ||||
| doesn't cause a hang on error/cancel of the parent | ||||
|  |  | |||
|  | @ -4,15 +4,9 @@ import trio | |||
| 
 | ||||
| async def breakpoint_forever(): | ||||
|     "Indefinitely re-enter debugger in child actor." | ||||
|     try: | ||||
|         while True: | ||||
|             yield 'yo' | ||||
|             await tractor.pause() | ||||
|     except BaseException: | ||||
|         tractor.log.get_console_log().exception( | ||||
|             'Cancelled while trying to enter pause point!' | ||||
|         ) | ||||
|         raise | ||||
|     while True: | ||||
|         yield 'yo' | ||||
|         await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def name_error(): | ||||
|  | @ -21,14 +15,11 @@ async def name_error(): | |||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     Test breakpoint in a streaming actor. | ||||
| 
 | ||||
|     ''' | ||||
|     """Test breakpoint in a streaming actor. | ||||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='cancel', | ||||
|         # loglevel='devx', | ||||
|         loglevel='error', | ||||
|     ) as n: | ||||
| 
 | ||||
|         p0 = await n.start_actor('bp_forever', enable_modules=[__name__]) | ||||
|  | @ -36,18 +27,7 @@ async def main(): | |||
| 
 | ||||
|         # retreive results | ||||
|         async with p0.open_stream_from(breakpoint_forever) as stream: | ||||
| 
 | ||||
|             # triggers the first name error | ||||
|             try: | ||||
|                 await p1.run(name_error) | ||||
|             except tractor.RemoteActorError as rae: | ||||
|                 assert rae.boxed_type is NameError | ||||
| 
 | ||||
|             async for i in stream: | ||||
| 
 | ||||
|                 # a second time try the failing subactor and this tie | ||||
|                 # let error propagate up to the parent/nursery. | ||||
|                 await p1.run(name_error) | ||||
|             await p1.run(name_error) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|  |  | |||
|  | @ -10,12 +10,7 @@ async def name_error(): | |||
| async def breakpoint_forever(): | ||||
|     "Indefinitely re-enter debugger in child actor." | ||||
|     while True: | ||||
|         await tractor.pause() | ||||
| 
 | ||||
|         # NOTE: if the test never sent 'q'/'quit' commands | ||||
|         # on the pdb repl, without this checkpoint line the | ||||
|         # repl would spin in this actor forever. | ||||
|         # await trio.sleep(0) | ||||
|         await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def spawn_until(depth=0): | ||||
|  | @ -23,20 +18,12 @@ async def spawn_until(depth=0): | |||
|     """ | ||||
|     async with tractor.open_nursery() as n: | ||||
|         if depth < 1: | ||||
| 
 | ||||
|             await n.run_in_actor(breakpoint_forever) | ||||
| 
 | ||||
|             p = await n.run_in_actor( | ||||
|             # await n.run_in_actor('breakpoint_forever', breakpoint_forever) | ||||
|             await n.run_in_actor( | ||||
|                 name_error, | ||||
|                 name='name_error' | ||||
|             ) | ||||
|             await trio.sleep(0.5) | ||||
|             # rx and propagate error from child | ||||
|             await p.result() | ||||
| 
 | ||||
|         else: | ||||
|             # recusrive call to spawn another process branching layer of | ||||
|             # the tree | ||||
|             depth -= 1 | ||||
|             await n.run_in_actor( | ||||
|                 spawn_until, | ||||
|  | @ -45,7 +32,6 @@ async def spawn_until(depth=0): | |||
|             ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: notes on the new boxed-relayed errors through proxy actors | ||||
| async def main(): | ||||
|     """The main ``tractor`` routine. | ||||
| 
 | ||||
|  | @ -67,7 +53,6 @@ async def main(): | |||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         # loglevel='cancel', | ||||
|     ) as n: | ||||
| 
 | ||||
|         # spawn both actors | ||||
|  | @ -82,16 +67,8 @@ async def main(): | |||
|             name='spawner1', | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: test this case as well where the parent don't see | ||||
|         # the sub-actor errors by default and instead expect a user | ||||
|         # ctrl-c to kill the root. | ||||
|         with trio.move_on_after(3): | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|         # gah still an issue here. | ||||
|         await portal.result() | ||||
| 
 | ||||
|         # should never get here | ||||
|         await portal1.result() | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -40,7 +40,7 @@ async def main(): | |||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='devx', | ||||
|         # loglevel='cancel', | ||||
|     ) as n: | ||||
| 
 | ||||
|         # spawn both actors | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ async def breakpoint_forever(): | |||
|     "Indefinitely re-enter debugger in child actor." | ||||
|     while True: | ||||
|         await trio.sleep(0.1) | ||||
|         await tractor.pause() | ||||
|         await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def name_error(): | ||||
|  | @ -38,7 +38,6 @@ async def main(): | |||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         # loglevel='runtime', | ||||
|     ) as n: | ||||
| 
 | ||||
|         # Spawn both actors, don't bother with collecting results | ||||
|  |  | |||
|  | @ -23,6 +23,5 @@ async def main(): | |||
|             n.start_soon(debug_actor.run, die) | ||||
|             n.start_soon(crash_boi.run, die) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  |  | |||
|  | @ -1,56 +0,0 @@ | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def name_error( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     ''' | ||||
|     Raise a `NameError`, catch it and enter `.post_mortem()`, then | ||||
|     expect the `._rpc._invoke()` crash handler to also engage. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         getattr(doggypants)  # noqa (on purpose) | ||||
|     except NameError: | ||||
|         await tractor.post_mortem() | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     Test 3 `PdbREPL` entries: | ||||
|       - one in the child due to manual `.post_mortem()`, | ||||
|       - another in the child due to runtime RPC crash handling. | ||||
|       - final one here in parent from the RAE. | ||||
| 
 | ||||
|     ''' | ||||
|     # XXX NOTE: ideally the REPL arrives at this frame in the parent | ||||
|     # ONE UP FROM the inner ctx block below! | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         # loglevel='cancel', | ||||
|     ) as an: | ||||
|         p: tractor.Portal = await an.start_actor( | ||||
|             'child', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         # XXX should raise `RemoteActorError[NameError]` | ||||
|         # AND be the active frame when REPL enters! | ||||
|         try: | ||||
|             async with p.open_context(name_error) as (ctx, first): | ||||
|                 assert first | ||||
|         except tractor.RemoteActorError as rae: | ||||
|             assert rae.boxed_type is NameError | ||||
| 
 | ||||
|             # manually handle in root's parent task | ||||
|             await tractor.post_mortem() | ||||
|             raise | ||||
|         else: | ||||
|             raise RuntimeError('IPC ctx should have remote errored!?') | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -1,58 +0,0 @@ | |||
| import os | ||||
| import sys | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| # ensure mod-path is correct! | ||||
| from tractor.devx.debug import ( | ||||
|     _sync_pause_from_builtin as _sync_pause_from_builtin, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| async def main() -> None: | ||||
| 
 | ||||
|     # intially unset, no entry. | ||||
|     orig_pybp_var: int = os.environ.get('PYTHONBREAKPOINT') | ||||
|     assert orig_pybp_var in {None, "0"} | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='devx', | ||||
|         maybe_enable_greenback=True, | ||||
|         # ^XXX REQUIRED to enable `breakpoint()` support (from sync | ||||
|         # fns) and thus required here to avoid an assertion err | ||||
|         # on the next line | ||||
|     ): | ||||
|         assert ( | ||||
|             (pybp_var := os.environ['PYTHONBREAKPOINT']) | ||||
|             == | ||||
|             'tractor.devx.debug._sync_pause_from_builtin' | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: an assert that verifies the hook has indeed been, hooked | ||||
|         # XD | ||||
|         assert ( | ||||
|             (pybp_hook := sys.breakpointhook) | ||||
|             is not tractor.devx.debug._set_trace | ||||
|         ) | ||||
| 
 | ||||
|         print( | ||||
|             f'$PYTHONOBREAKPOINT: {pybp_var!r}\n' | ||||
|             f'`sys.breakpointhook`: {pybp_hook!r}\n' | ||||
|         ) | ||||
|         breakpoint()  # first bp, tractor hook set. | ||||
| 
 | ||||
|     # XXX AFTER EXIT (of actor-runtime) verify the hook is unset.. | ||||
|     # | ||||
|     # YES, this is weird but it's how stdlib docs say to do it.. | ||||
|     # https://docs.python.org/3/library/sys.html#sys.breakpointhook | ||||
|     assert os.environ.get('PYTHONBREAKPOINT') is orig_pybp_var | ||||
|     assert sys.breakpointhook | ||||
| 
 | ||||
|     # now ensure a regular builtin pause still works | ||||
|     breakpoint()  # last bp, stdlib hook restored | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -10,7 +10,7 @@ async def main(): | |||
| 
 | ||||
|         await trio.sleep(0.1) | ||||
| 
 | ||||
|         await tractor.pause() | ||||
|         await tractor.breakpoint() | ||||
| 
 | ||||
|         await trio.sleep(0.1) | ||||
| 
 | ||||
|  |  | |||
|  | @ -2,16 +2,13 @@ import trio | |||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     registry_addrs: tuple[str, int]|None = None | ||||
| ): | ||||
| async def main(): | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         debug_mode=True, | ||||
|         # loglevel='runtime', | ||||
|     ): | ||||
|         while True: | ||||
|             await tractor.pause() | ||||
|             await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|  |  | |||
|  | @ -24,9 +24,10 @@ async def spawn_until(depth=0): | |||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     The process tree should look as approximately as follows when the | ||||
|     debugger first engages: | ||||
|     """The main ``tractor`` routine. | ||||
| 
 | ||||
|     The process tree should look as approximately as follows when the debugger | ||||
|     first engages: | ||||
| 
 | ||||
|     python examples/debugging/multi_nested_subactors_bp_forever.py | ||||
|     ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...) | ||||
|  | @ -36,11 +37,10 @@ async def main(): | |||
|     └─ python -m tractor._child --uid ('spawner0', '1d42012b ...) | ||||
|        └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...) | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='devx', | ||||
|         enable_transports=['uds'], | ||||
|         loglevel='warning' | ||||
|     ) as n: | ||||
| 
 | ||||
|         # spawn both actors | ||||
|  |  | |||
|  | @ -1,35 +0,0 @@ | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     async with tractor.open_root_actor( | ||||
|         debug_mode=True, | ||||
|         loglevel='cancel', | ||||
|     ) as _root: | ||||
| 
 | ||||
|         # manually trigger self-cancellation and wait | ||||
|         # for it to fully trigger. | ||||
|         _root.cancel_soon() | ||||
|         await _root._cancel_complete.wait() | ||||
|         print('root cancelled') | ||||
| 
 | ||||
|         # now ensure we can still use the REPL | ||||
|         try: | ||||
|             await tractor.pause() | ||||
|         except trio.Cancelled as _taskc: | ||||
|             assert (root_cs := _root._root_tn.cancel_scope).cancel_called | ||||
|             # NOTE^^ above logic but inside `open_root_actor()` and | ||||
|             # passed to the `shield=` expression is effectively what | ||||
|             # we're testing here! | ||||
|             await tractor.pause(shield=root_cs.cancel_called) | ||||
| 
 | ||||
|         # XXX, if shield logic *is wrong* inside `open_root_actor()`'s | ||||
|         # crash-handler block this should never be interacted, | ||||
|         # instead `trio.Cancelled` would be bubbled up: the original | ||||
|         # BUG. | ||||
|         assert 0 | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -1,84 +0,0 @@ | |||
| ''' | ||||
| Verify we can dump a `stackscope` tree on a hang. | ||||
| 
 | ||||
| ''' | ||||
| import os | ||||
| import signal | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| @tractor.context | ||||
| async def start_n_shield_hang( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     # actor: tractor.Actor = tractor.current_actor() | ||||
| 
 | ||||
|     # sync to parent-side task | ||||
|     await ctx.started(os.getpid()) | ||||
| 
 | ||||
|     print('Entering shield sleep..') | ||||
|     with trio.CancelScope(shield=True): | ||||
|         await trio.sleep_forever()  # in subactor | ||||
| 
 | ||||
|     # XXX NOTE ^^^ since this shields, we expect | ||||
|     # the zombie reaper (aka T800) to engage on | ||||
|     # SIGINT from the user and eventually hard-kill | ||||
|     # this subprocess! | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     from_test: bool = False, | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with ( | ||||
|         tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|             enable_stack_on_sig=True, | ||||
|             # maybe_enable_greenback=False, | ||||
|             loglevel='devx', | ||||
|             enable_transports=['uds'], | ||||
|         ) as an, | ||||
|     ): | ||||
|         ptl: tractor.Portal  = await an.start_actor( | ||||
|             'hanger', | ||||
|             enable_modules=[__name__], | ||||
|             debug_mode=True, | ||||
|         ) | ||||
|         async with ptl.open_context( | ||||
|             start_n_shield_hang, | ||||
|         ) as (ctx, cpid): | ||||
| 
 | ||||
|             _, proc, _ = an._children[ptl.chan.uid] | ||||
|             assert cpid == proc.pid | ||||
| 
 | ||||
|             print( | ||||
|                 'Yo my child hanging..?\n' | ||||
|                 # "i'm a user who wants to see a `stackscope` tree!\n" | ||||
|             ) | ||||
| 
 | ||||
|             # XXX simulate the wrapping test's "user actions" | ||||
|             # (i.e. if a human didn't run this manually but wants to | ||||
|             # know what they should do to reproduce test behaviour) | ||||
|             if from_test: | ||||
|                 print( | ||||
|                     f'Sending SIGUSR1 to {cpid!r}!\n' | ||||
|                 ) | ||||
|                 os.kill( | ||||
|                     cpid, | ||||
|                     signal.SIGUSR1, | ||||
|                 ) | ||||
| 
 | ||||
|                 # simulate user cancelling program | ||||
|                 await trio.sleep(0.5) | ||||
|                 os.kill( | ||||
|                     os.getpid(), | ||||
|                     signal.SIGINT, | ||||
|                 ) | ||||
|             else: | ||||
|                 # actually let user send the ctl-c | ||||
|                 await trio.sleep_forever()  # in root | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -1,88 +0,0 @@ | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def cancellable_pause_loop( | ||||
|     task_status: trio.TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED | ||||
| ): | ||||
|     with trio.CancelScope() as cs: | ||||
|         task_status.started(cs) | ||||
|         for _ in range(3): | ||||
|             try: | ||||
|                 # ON first entry, there is no level triggered | ||||
|                 # cancellation yet, so this cp does a parent task | ||||
|                 # ctx-switch so that this scope raises for the NEXT | ||||
|                 # checkpoint we hit. | ||||
|                 await trio.lowlevel.checkpoint() | ||||
|                 await tractor.pause() | ||||
| 
 | ||||
|                 cs.cancel() | ||||
| 
 | ||||
|                 # parent should have called `cs.cancel()` by now | ||||
|                 await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|             except trio.Cancelled: | ||||
|                 print('INSIDE SHIELDED PAUSE') | ||||
|                 await tractor.pause(shield=True) | ||||
|         else: | ||||
|             # should raise it again, bubbling up to parent | ||||
|             print('BUBBLING trio.Cancelled to parent task-nursery') | ||||
|             await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def pm_on_cancelled(): | ||||
|     async with trio.open_nursery() as tn: | ||||
|         tn.cancel_scope.cancel() | ||||
|         try: | ||||
|             await trio.sleep_forever() | ||||
|         except trio.Cancelled: | ||||
|             # should also raise `Cancelled` since | ||||
|             # we didn't pass `shield=True`. | ||||
|             try: | ||||
|                 await tractor.post_mortem(hide_tb=False) | ||||
|             except trio.Cancelled as taskc: | ||||
| 
 | ||||
|                 # should enter just fine, in fact it should | ||||
|                 # be debugging the internals of the previous | ||||
|                 # sin-shield call above Bo | ||||
|                 await tractor.post_mortem( | ||||
|                     hide_tb=False, | ||||
|                     shield=True, | ||||
|                 ) | ||||
|                 raise taskc | ||||
| 
 | ||||
|         else: | ||||
|             raise RuntimeError('Dint cancel as expected!?') | ||||
| 
 | ||||
| 
 | ||||
| async def cancelled_before_pause( | ||||
| ): | ||||
|     ''' | ||||
|     Verify that using a shielded pause works despite surrounding | ||||
|     cancellation called state in the calling task. | ||||
| 
 | ||||
|     ''' | ||||
|     async with trio.open_nursery() as tn: | ||||
|         cs: trio.CancelScope = await tn.start(cancellable_pause_loop) | ||||
|         await trio.sleep(0.1) | ||||
| 
 | ||||
|     assert cs.cancelled_caught | ||||
| 
 | ||||
|     await pm_on_cancelled() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|     ) as n: | ||||
|         portal: tractor.Portal = await n.run_in_actor( | ||||
|             cancelled_before_pause, | ||||
|         ) | ||||
|         await portal.result() | ||||
| 
 | ||||
|         # ensure the same works in the root actor! | ||||
|         await pm_on_cancelled() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -4,9 +4,9 @@ import trio | |||
| 
 | ||||
| async def gen(): | ||||
|     yield 'yo' | ||||
|     await tractor.pause() | ||||
|     await tractor.breakpoint() | ||||
|     yield 'yo' | ||||
|     await tractor.pause() | ||||
|     await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -15,7 +15,7 @@ async def just_bp( | |||
| ) -> None: | ||||
| 
 | ||||
|     await ctx.started() | ||||
|     await tractor.pause() | ||||
|     await tractor.breakpoint() | ||||
| 
 | ||||
|     # TODO: bps and errors in this call.. | ||||
|     async for val in gen(): | ||||
|  | @ -33,11 +33,8 @@ async def just_bp( | |||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         enable_transports=['uds'], | ||||
|         loglevel='devx', | ||||
|     ) as n: | ||||
|         p = await n.start_actor( | ||||
|             'bp_boi', | ||||
|  |  | |||
|  | @ -3,20 +3,17 @@ import tractor | |||
| 
 | ||||
| 
 | ||||
| async def breakpoint_forever(): | ||||
|     ''' | ||||
|     Indefinitely re-enter debugger in child actor. | ||||
| 
 | ||||
|     ''' | ||||
|     """Indefinitely re-enter debugger in child actor. | ||||
|     """ | ||||
|     while True: | ||||
|         await trio.sleep(0.1) | ||||
|         await tractor.pause() | ||||
|         await tractor.breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='cancel', | ||||
|     ) as n: | ||||
| 
 | ||||
|         portal = await n.run_in_actor( | ||||
|  |  | |||
|  | @ -3,26 +3,16 @@ import tractor | |||
| 
 | ||||
| 
 | ||||
| async def name_error(): | ||||
|     getattr(doggypants)  # noqa (on purpose) | ||||
|     getattr(doggypants) | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         # loglevel='transport', | ||||
|     ) as an: | ||||
|     ) as n: | ||||
| 
 | ||||
|         # TODO: ideally the REPL arrives at this frame in the parent, | ||||
|         # ABOVE the @api_frame of `Portal.run_in_actor()` (which | ||||
|         # should eventually not even be a portal method ... XD) | ||||
|         # await tractor.pause() | ||||
|         p: tractor.Portal = await an.run_in_actor(name_error) | ||||
| 
 | ||||
|         # with this style, should raise on this line | ||||
|         await p.result() | ||||
| 
 | ||||
|         # with this alt style should raise at `open_nusery()` | ||||
|         # return await p.result() | ||||
|         portal = await n.run_in_actor(name_error) | ||||
|         await portal.result() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|  |  | |||
|  | @ -1,169 +0,0 @@ | |||
| from functools import partial | ||||
| import time | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| # TODO: only import these when not running from test harness? | ||||
| # can we detect `pexpect` usage maybe? | ||||
| # from tractor.devx.debug import ( | ||||
| #     get_lock, | ||||
| #     get_debug_req, | ||||
| # ) | ||||
| 
 | ||||
| 
 | ||||
| def sync_pause( | ||||
|     use_builtin: bool = False, | ||||
|     error: bool = False, | ||||
|     hide_tb: bool = True, | ||||
|     pre_sleep: float|None = None, | ||||
| ): | ||||
|     if pre_sleep: | ||||
|         time.sleep(pre_sleep) | ||||
| 
 | ||||
|     if use_builtin: | ||||
|         breakpoint(hide_tb=hide_tb) | ||||
| 
 | ||||
|     else: | ||||
|         # TODO: maybe for testing some kind of cm style interface | ||||
|         # where the `._set_trace()` call doesn't happen until block | ||||
|         # exit? | ||||
|         # assert get_lock().ctx_in_debug is None | ||||
|         # assert get_debug_req().repl is None | ||||
|         tractor.pause_from_sync() | ||||
|         # assert get_debug_req().repl is None | ||||
| 
 | ||||
|     if error: | ||||
|         raise RuntimeError('yoyo sync code error') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def start_n_sync_pause( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     actor: tractor.Actor = tractor.current_actor() | ||||
| 
 | ||||
|     # sync to parent-side task | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     print(f'Entering `sync_pause()` in subactor: {actor.uid}\n') | ||||
|     sync_pause() | ||||
|     print(f'Exited `sync_pause()` in subactor: {actor.uid}\n') | ||||
| 
 | ||||
| 
 | ||||
| async def main() -> None: | ||||
|     async with ( | ||||
|         tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|             maybe_enable_greenback=True, | ||||
|             enable_stack_on_sig=True, | ||||
|             # loglevel='warning', | ||||
|             # loglevel='devx', | ||||
|         ) as an, | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         # just from root task | ||||
|         sync_pause() | ||||
| 
 | ||||
|         p: tractor.Portal  = await an.start_actor( | ||||
|             'subactor', | ||||
|             enable_modules=[__name__], | ||||
|             # infect_asyncio=True, | ||||
|             debug_mode=True, | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: 3 sub-actor usage cases: | ||||
|         # -[x] via a `.open_context()` | ||||
|         # -[ ] via a `.run_in_actor()` call | ||||
|         # -[ ] via a `.run()` | ||||
|         # -[ ] via a `.to_thread.run_sync()` in subactor | ||||
|         async with p.open_context( | ||||
|             start_n_sync_pause, | ||||
|         ) as (ctx, first): | ||||
|             assert first is None | ||||
| 
 | ||||
|             # TODO: handle bg-thread-in-root-actor special cases! | ||||
|             # | ||||
|             # there are a couple very subtle situations possible here | ||||
|             # and they are likely to become more important as cpython | ||||
|             # moves to support no-GIL. | ||||
|             # | ||||
|             # Cases: | ||||
|             # 1. root-actor bg-threads that call `.pause_from_sync()` | ||||
|             #   whilst an in-tree subactor also is using ` .pause()`. | ||||
|             # |_ since the root-actor bg thread can not | ||||
|             #   `Lock._debug_lock.acquire_nowait()` without running | ||||
|             #   a `trio.Task`, AND because the | ||||
|             #   `PdbREPL.set_continue()` is called from that | ||||
|             #   bg-thread, we can not `._debug_lock.release()` | ||||
|             #   either! | ||||
|             #  |_ this results in no actor-tree `Lock` being used | ||||
|             #    on behalf of the bg-thread and thus the subactor's | ||||
|             #    task and the thread trying to to use stdio | ||||
|             #    simultaneously which results in the classic TTY | ||||
|             #    clobbering! | ||||
|             # | ||||
|             # 2. mutiple sync-bg-threads that call | ||||
|             #   `.pause_from_sync()` where one is scheduled via | ||||
|             #   `Nursery.start_soon(to_thread.run_sync)` in a bg | ||||
|             #   task. | ||||
|             # | ||||
|             #   Due to the GIL, the threads never truly try to step | ||||
|             #   through the REPL simultaneously, BUT their `logging` | ||||
|             #   and traceback outputs are interleaved since the GIL | ||||
|             #   (seemingly) on every REPL-input from the user | ||||
|             #   switches threads.. | ||||
|             # | ||||
|             #   Soo, the context switching semantics of the GIL | ||||
|             #   result in a very confusing and messy interaction UX | ||||
|             #   since eval and (tb) print output is NOT synced to | ||||
|             #   each REPL-cycle (like we normally make it via | ||||
|             #   a `.set_continue()` callback triggering the | ||||
|             #   `Lock.release()`). Ideally we can solve this | ||||
|             #   usability issue NOW because this will of course be | ||||
|             #   that much more important when eventually there is no | ||||
|             #   GIL! | ||||
| 
 | ||||
|             # XXX should cause double REPL entry and thus TTY | ||||
|             # clobbering due to case 1. above! | ||||
|             tn.start_soon( | ||||
|                 partial( | ||||
|                     trio.to_thread.run_sync, | ||||
|                     partial( | ||||
|                         sync_pause, | ||||
|                         use_builtin=False, | ||||
|                         # pre_sleep=0.5, | ||||
|                     ), | ||||
|                     abandon_on_cancel=True, | ||||
|                     thread_name='start_soon_root_bg_thread', | ||||
|                 ) | ||||
|             ) | ||||
| 
 | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|             # XXX should cause double REPL entry and thus TTY | ||||
|             # clobbering due to case 2. above! | ||||
|             await trio.to_thread.run_sync( | ||||
|                 partial( | ||||
|                     sync_pause, | ||||
|                     # NOTE this already works fine since in the new | ||||
|                     # thread the `breakpoint()` built-in is never | ||||
|                     # overloaded, thus NO locking is used, HOWEVER | ||||
|                     # the case 2. from above still exists! | ||||
|                     use_builtin=True, | ||||
|                 ), | ||||
|                 # TODO: with this `False` we can hang!??! | ||||
|                 # abandon_on_cancel=False, | ||||
|                 abandon_on_cancel=True, | ||||
|                 thread_name='inline_root_bg_thread', | ||||
|             ) | ||||
| 
 | ||||
|         await ctx.cancel() | ||||
| 
 | ||||
|         # TODO: case where we cancel from trio-side while asyncio task | ||||
|         # has debugger lock? | ||||
|         await p.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -1,11 +1,6 @@ | |||
| import time | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     ActorNursery, | ||||
|     MsgStream, | ||||
|     Portal, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # this is the first 2 actors, streamer_1 and streamer_2 | ||||
|  | @ -17,18 +12,14 @@ async def stream_data(seed): | |||
| 
 | ||||
| # this is the third actor; the aggregator | ||||
| async def aggregate(seed): | ||||
|     ''' | ||||
|     Ensure that the two streams we receive match but only stream | ||||
|     """Ensure that the two streams we receive match but only stream | ||||
|     a single set of values to the parent. | ||||
| 
 | ||||
|     ''' | ||||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery() as an: | ||||
|         portals: list[Portal] = [] | ||||
|     """ | ||||
|     async with tractor.open_nursery() as nursery: | ||||
|         portals = [] | ||||
|         for i in range(1, 3): | ||||
| 
 | ||||
|             # fork/spawn call | ||||
|             portal = await an.start_actor( | ||||
|             # fork point | ||||
|             portal = await nursery.start_actor( | ||||
|                 name=f'streamer_{i}', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|  | @ -52,11 +43,7 @@ async def aggregate(seed): | |||
|         async with trio.open_nursery() as n: | ||||
| 
 | ||||
|             for portal in portals: | ||||
|                 n.start_soon( | ||||
|                     push_to_chan, | ||||
|                     portal, | ||||
|                     send_chan.clone(), | ||||
|                 ) | ||||
|                 n.start_soon(push_to_chan, portal, send_chan.clone()) | ||||
| 
 | ||||
|             # close this local task's reference to send side | ||||
|             await send_chan.aclose() | ||||
|  | @ -73,36 +60,26 @@ async def aggregate(seed): | |||
| 
 | ||||
|             print("FINISHED ITERATING in aggregator") | ||||
| 
 | ||||
|         await an.cancel() | ||||
|         await nursery.cancel() | ||||
|         print("WAITING on `ActorNursery` to finish") | ||||
|     print("AGGREGATOR COMPLETE!") | ||||
| 
 | ||||
| 
 | ||||
| async def main() -> list[int]: | ||||
|     ''' | ||||
|     This is the "root" actor's main task's entrypoint. | ||||
| 
 | ||||
|     By default (and if not otherwise specified) that root process | ||||
|     also acts as a "registry actor" / "registrar" on the localhost | ||||
|     for the purposes of multi-actor "service discovery". | ||||
| 
 | ||||
|     ''' | ||||
|     # yes, a nursery which spawns `trio`-"actors" B) | ||||
|     an: ActorNursery | ||||
| # this is the main actor and *arbiter* | ||||
| async def main(): | ||||
|     # a nursery which spawns "actors" | ||||
|     async with tractor.open_nursery( | ||||
|         loglevel='cancel', | ||||
|         # debug_mode=True, | ||||
|     ) as an: | ||||
|         arbiter_addr=('127.0.0.1', 1616) | ||||
|     ) as nursery: | ||||
| 
 | ||||
|         seed = int(1e3) | ||||
|         pre_start = time.time() | ||||
| 
 | ||||
|         portal: Portal = await an.start_actor( | ||||
|         portal = await nursery.start_actor( | ||||
|             name='aggregator', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         stream: MsgStream | ||||
|         async with portal.open_stream_from( | ||||
|             aggregate, | ||||
|             seed=seed, | ||||
|  | @ -111,12 +88,11 @@ async def main() -> list[int]: | |||
|             start = time.time() | ||||
|             # the portal call returns exactly what you'd expect | ||||
|             # as if the remote "aggregate" function was called locally | ||||
|             result_stream: list[int] = [] | ||||
|             result_stream = [] | ||||
|             async for value in stream: | ||||
|                 result_stream.append(value) | ||||
| 
 | ||||
|         cancelled: bool = await portal.cancel_actor() | ||||
|         assert cancelled | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
|         print(f"STREAM TIME = {time.time() - start}") | ||||
|         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") | ||||
|  |  | |||
|  | @ -8,17 +8,15 @@ This uses no extra threads, fancy semaphores or futures; all we need | |||
| is ``tractor``'s channels. | ||||
| 
 | ||||
| """ | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| from typing import Callable | ||||
| from contextlib import asynccontextmanager | ||||
| from typing import List, Callable | ||||
| import itertools | ||||
| import math | ||||
| import time | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| from async_generator import aclosing | ||||
| 
 | ||||
| 
 | ||||
| PRIMES = [ | ||||
|  | @ -46,7 +44,7 @@ async def is_prime(n): | |||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| @asynccontextmanager | ||||
| async def worker_pool(workers=4): | ||||
|     """Though it's a trivial special case for ``tractor``, the well | ||||
|     known "worker pool" seems to be the defacto "but, I want this | ||||
|  | @ -73,8 +71,8 @@ async def worker_pool(workers=4): | |||
| 
 | ||||
|         async def _map( | ||||
|             worker_func: Callable[[int], bool], | ||||
|             sequence: list[int] | ||||
|         ) -> list[bool]: | ||||
|             sequence: List[int] | ||||
|         ) -> List[bool]: | ||||
| 
 | ||||
|             # define an async (local) task to collect results from workers | ||||
|             async def send_result(func, value, portal): | ||||
|  |  | |||
|  | @ -3,18 +3,20 @@ import trio | |||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def sleepy_jane() -> None: | ||||
|     uid: tuple = tractor.current_actor().uid | ||||
| async def sleepy_jane(): | ||||
|     uid = tractor.current_actor().uid | ||||
|     print(f'Yo i am actor {uid}') | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     Spawn a flat actor cluster, with one process per detected core. | ||||
|     Spawn a flat actor cluster, with one process per | ||||
|     detected core. | ||||
| 
 | ||||
|     ''' | ||||
|     portal_map: dict[str, tractor.Portal] | ||||
|     results: dict[str, str] | ||||
| 
 | ||||
|     # look at this hip new syntax! | ||||
|     async with ( | ||||
|  | @ -23,15 +25,11 @@ async def main(): | |||
|             modules=[__name__] | ||||
|         ) as portal_map, | ||||
| 
 | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         trio.open_nursery() as n, | ||||
|     ): | ||||
| 
 | ||||
|         for (name, portal) in portal_map.items(): | ||||
|             tn.start_soon( | ||||
|                 portal.run, | ||||
|                 sleepy_jane, | ||||
|             ) | ||||
|             n.start_soon(portal.run, sleepy_jane) | ||||
| 
 | ||||
|         await trio.sleep(0.5) | ||||
| 
 | ||||
|  | @ -43,4 +41,4 @@ if __name__ == '__main__': | |||
|     try: | ||||
|         trio.run(main) | ||||
|     except KeyboardInterrupt: | ||||
|         print('trio cancelled by KBI') | ||||
|         pass | ||||
|  |  | |||
|  | @ -13,7 +13,7 @@ async def simple_rpc( | |||
| 
 | ||||
|     ''' | ||||
|     # signal to parent that we're up much like | ||||
|     # ``trio.TaskStatus.started()`` | ||||
|     # ``trio_typing.TaskStatus.started()`` | ||||
|     await ctx.started(data + 1) | ||||
| 
 | ||||
|     async with ctx.open_stream() as stream: | ||||
|  |  | |||
|  | @ -9,7 +9,7 @@ async def main(service_name): | |||
|     async with tractor.open_nursery() as an: | ||||
|         await an.start_actor(service_name) | ||||
| 
 | ||||
|         async with tractor.get_registry() as portal: | ||||
|         async with tractor.get_arbiter('127.0.0.1', 1616) as portal: | ||||
|             print(f"Arbiter is listening on {portal.channel}") | ||||
| 
 | ||||
|         async with tractor.wait_for_actor(service_name) as sockaddr: | ||||
|  |  | |||
|  | @ -1,85 +0,0 @@ | |||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from functools import partial | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| log = tractor.log.get_logger( | ||||
|     name=__name__ | ||||
| ) | ||||
| 
 | ||||
| _lock: trio.Lock|None = None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def acquire_singleton_lock( | ||||
| ) -> None: | ||||
|     global _lock | ||||
|     if _lock is None: | ||||
|         log.info('Allocating LOCK') | ||||
|         _lock = trio.Lock() | ||||
| 
 | ||||
|     log.info('TRYING TO LOCK ACQUIRE') | ||||
|     async with _lock: | ||||
|         log.info('ACQUIRED') | ||||
|         yield _lock | ||||
| 
 | ||||
|     log.info('RELEASED') | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| async def hold_lock_forever( | ||||
|     task_status=trio.TASK_STATUS_IGNORED | ||||
| ): | ||||
|     async with ( | ||||
|         tractor.trionics.maybe_raise_from_masking_exc(), | ||||
|         acquire_singleton_lock() as lock, | ||||
|     ): | ||||
|         task_status.started(lock) | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     ignore_special_cases: bool, | ||||
|     loglevel: str = 'info', | ||||
|     debug_mode: bool = True, | ||||
| ): | ||||
|     async with ( | ||||
|         trio.open_nursery() as tn, | ||||
| 
 | ||||
|         # tractor.trionics.maybe_raise_from_masking_exc() | ||||
|         # ^^^ XXX NOTE, interestingly putting the unmasker | ||||
|         # here does not exhibit the same behaviour ?? | ||||
|     ): | ||||
|         if not ignore_special_cases: | ||||
|             from tractor.trionics import _taskc | ||||
|             _taskc._mask_cases.clear() | ||||
| 
 | ||||
|         _lock = await tn.start( | ||||
|             hold_lock_forever, | ||||
|         ) | ||||
|         with trio.move_on_after(0.2): | ||||
|             await tn.start( | ||||
|                 hold_lock_forever, | ||||
|             ) | ||||
| 
 | ||||
|         tn.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| # XXX, manual test as script | ||||
| if __name__ == '__main__': | ||||
|     tractor.log.get_console_log(level='info') | ||||
|     for case in [True, False]: | ||||
|         log.info( | ||||
|             f'\n' | ||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' | ||||
|             f'ignore_special_cases: {case!r}\n' | ||||
|         ) | ||||
|         trio.run(partial( | ||||
|             main, | ||||
|             ignore_special_cases=case, | ||||
|             loglevel='info', | ||||
|         )) | ||||
|  | @ -1,195 +0,0 @@ | |||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
|     # TODO, any diff in async case(s)?? | ||||
|     # asynccontextmanager as acm, | ||||
| ) | ||||
| from functools import partial | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| log = tractor.log.get_logger( | ||||
|     name=__name__ | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def teardown_on_exc( | ||||
|     raise_from_handler: bool = False, | ||||
| ): | ||||
|     ''' | ||||
|     You could also have a teardown handler which catches any exc and | ||||
|     does some required teardown. In this case the problem is | ||||
|     compounded UNLESS you ensure the handler's scope is OUTSIDE the | ||||
|     `ux.aclose()`.. that is in the caller's enclosing scope. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         yield | ||||
|     except BaseException as _berr: | ||||
|         berr = _berr | ||||
|         log.exception( | ||||
|             f'Handling termination teardown in child due to,\n' | ||||
|             f'{berr!r}\n' | ||||
|         ) | ||||
|         if raise_from_handler: | ||||
|             # XXX teardown ops XXX | ||||
|             # on termination these steps say need to be run to | ||||
|             # ensure wider system consistency (like the state of | ||||
|             # remote connections/services). | ||||
|             # | ||||
|             # HOWEVER, any bug in this teardown code is also | ||||
|             # masked by the `tx.aclose()`! | ||||
|             # this is also true if `_tn.cancel_scope` is | ||||
|             # `.cancel_called` by the parent in a graceful | ||||
|             # request case.. | ||||
| 
 | ||||
|             # simulate a bug in teardown handler. | ||||
|             raise RuntimeError( | ||||
|                 'woopsie teardown bug!' | ||||
|             ) | ||||
| 
 | ||||
|         raise  # no teardown bug. | ||||
| 
 | ||||
| 
 | ||||
async def finite_stream_to_rent(
    tx: trio.abc.SendChannel,
    child_errors_mid_stream: bool,
    raise_unmasked: bool,

    task_status: trio.TaskStatus[
        trio.CancelScope,
    ] = trio.TASK_STATUS_IGNORED,
):
    '''
    Child task: stream a finite run of ints to the parent,
    optionally raising mid-stream or from the teardown handler.

    '''
    # Only request a (sub)nursery in the mid-stream-error case.
    # XXX, oddly this ONLY matters when
    # `child_errors_mid_stream=False`!? THAT IS, if no tn is opened
    # in that case then the test will not fail; it raises the RTE
    # correctly? -> so it seems this extra scope somehow affects the
    # form of the eventual parent EG?
    nursery_input = True if child_errors_mid_stream else None

    async with (
        # XXX without this unmasker the mid-streaming RTE is never
        # reported since it is masked by the `tx.aclose()` call
        # which in turn raises `Cancelled`!
        #
        # NOTE, this is WITHOUT doing any exception handling inside
        # the child task!
        #
        # TODO, uncomment next LoC to see the suppressed beg[RTE]!
        tractor.trionics.maybe_raise_from_masking_exc(
            raise_unmasked=raise_unmasked,
        ),

        tx as tx,  # .aclose() is the guilty masker chkpt!

        tractor.trionics.maybe_open_nursery(
            nursery=nursery_input,
        ) as _tn,
    ):
        # pass our scope back to parent for supervision control;
        # `_tn` is only a real nursery when one was actually opened.
        if _tn is True:
            cs: trio.CancelScope|None = None
        else:
            cs = _tn.cancel_scope
        task_status.started(cs)

        with teardown_on_exc(
            raise_from_handler=not child_errors_mid_stream,
        ):
            for i in range(100):
                log.debug(
                    f'Child tx {i!r}\n'
                )
                # oh wait but WOOPS there's a bug
                # mid-stream!?
                if child_errors_mid_stream and i == 66:
                    raise RuntimeError(
                        'woopsie, a mid-streaming bug!?'
                    )

                await tx.send(i)
| 
 | ||||
| 
 | ||||
async def main(
    # TODO! toggle this for the 2 cases!
    # 1. child errors mid-stream while parent is also requesting
    #   (graceful) cancel of that child streamer.
    #
    # 2. child contains a teardown handler which contains a
    #   bug and raises.
    #
    child_errors_mid_stream: bool,

    raise_unmasked: bool = False,
    loglevel: str = 'info',
):
    '''
    Parent side: spawn the child streamer, consume its msgs, then
    fire a simulated out-of-band cancel just before the child's bug.

    '''
    tractor.log.get_console_log(level=loglevel)

    # the `.aclose()` being checkpoints on these
    # is the source of the problem..
    tx, rx = trio.open_memory_channel(1)

    async with (
        tractor.trionics.collapse_eg(),
        trio.open_nursery() as tn,
        rx as rx,
    ):
        _child_cs = await tn.start(
            partial(
                finite_stream_to_rent,
                child_errors_mid_stream=child_errors_mid_stream,
                raise_unmasked=raise_unmasked,
                tx=tx,
            )
        )
        async for msg in rx:
            log.debug(
                f'Rent rx {msg!r}\n'
            )

            # simulate some external cancellation request
            # **JUST BEFORE** the child errors (at 66).
            if msg != 65:
                continue

            log.cancel(
                f'Cancelling parent on,\n'
                f'msg={msg}\n'
                f'\n'
                f'Simulates OOB cancel request!\n'
            )
            tn.cancel_scope.cancel()
| 
 | ||||
| 
 | ||||
# XXX, manual test as script
if __name__ == '__main__':
    tractor.log.get_console_log(level='info')
    for case in (True, False):
        log.info(
            f'\n'
            f'------ RUNNING SCRIPT TRIAL ------\n'
            f'child_errors_midstream: {case!r}\n'
        )
        trial = partial(
            main,
            child_errors_mid_stream=case,
            # raise_unmasked=True,
            loglevel='info',
        )
        try:
            trio.run(trial)
        except Exception as _exc:
            # stash for inspection from the REPL below
            exc = _exc
            log.exception(
                'Should have raised an RTE or Cancelled?\n'
            )
            breakpoint()
|  | @ -0,0 +1,8 @@ | |||
| Adjust the `tractor._spawn.soft_wait()` strategy to avoid sending an | ||||
| actor cancel request (via `Portal.cancel_actor()`) if either the child | ||||
| process is detected as having terminated or the IPC channel is detected | ||||
| to be closed. | ||||
| 
 | ||||
| This ensures (even) more deterministic inter-actor cancellation by | ||||
| avoiding the timeout condition where possible when a child never | ||||
| successfully spawned, crashed, or became un-contactable over IPC. | ||||
|  | @ -0,0 +1,3 @@ | |||
| Add an experimental ``tractor.msg.NamespacePath`` type for passing Python | ||||
| objects by "reference" through a ``str``-subtype message and using the | ||||
| new ``pkgutil.resolve_name()`` for reference loading. | ||||
|  | @ -0,0 +1,2 @@ | |||
| Add a new `tractor.experimental` subpackage for staging new high level | ||||
| APIs and subsystems that we might eventually make built-ins. | ||||
|  | @ -0,0 +1,3 @@ | |||
| Update to and pin latest ``msgpack`` (1.0.3) and ``msgspec`` (0.4.0) | ||||
| both of which required adjustments for backwards incompatible API | ||||
| tweaks. | ||||
|  | @ -0,0 +1,4 @@ | |||
| Fence off ``multiprocessing`` imports until absolutely necessary in an | ||||
| effort to avoid "resource tracker" spawning side effects that seem to | ||||
| have varying degrees of unreliability per Python release. Port to new | ||||
| ``msgspec.DecodeError``. | ||||
|  | @ -0,0 +1,12 @@ | |||
| Add a new ``to_asyncio.LinkedTaskChannel.subscribe()`` which gives | ||||
| task-oriented broadcast functionality semantically equivalent to | ||||
| ``tractor.MsgStream.subscribe()`` this makes it possible for multiple | ||||
| ``trio``-side tasks to consume ``asyncio``-side task msgs in tandem. | ||||
| 
 | ||||
| Further Improvements to the test suite were added in this patch set | ||||
| including a new scenario test for a sub-actor managed "service nursery" | ||||
| (implementing the basics of a "service manager") including use of | ||||
| *infected asyncio* mode. Further we added a lower level | ||||
| ``test_trioisms.py`` to start to track issues we need to work around in | ||||
| ``trio`` itself which in this case included a bug we were trying to | ||||
| solve related to https://github.com/python-trio/trio/issues/2258. | ||||
|  | @ -0,0 +1,5 @@ | |||
| Run windows CI jobs on python 3.10 after some | ||||
| hacks for ``pdbpp`` dependency issues. | ||||
| 
 | ||||
| Issue was to do with the now deprecated `pyreadline` project which | ||||
| should be changed over to `pyreadline3`. | ||||
|  | @ -0,0 +1,8 @@ | |||
| Drop use of the ``msgpack`` package and instead move fully to the | ||||
| ``msgspec`` codec library. | ||||
| 
 | ||||
| We've now used ``msgspec`` extensively in production and there's no | ||||
| reason to not use it as default. Further this change preps us for the up | ||||
| and coming typed messaging semantics (#196), dialog-unprotocol system | ||||
| (#297), and caps-based messaging-protocols (#299) planned before our | ||||
| first beta. | ||||
|  | @ -0,0 +1,13 @@ | |||
| Fix a previously undetected ``trio``-``asyncio`` task lifetime linking | ||||
| issue with the ``to_asyncio.open_channel_from()`` api where both sides | ||||
| were not properly waiting/signalling termination and it was possible | ||||
| for ``asyncio``-side errors to not propagate due to a race condition. | ||||
| 
 | ||||
| The implementation fix summary is: | ||||
| - add state to signal the end of the ``trio`` side task to be | ||||
|   read by the ``asyncio`` side and always cancel any ongoing | ||||
|   task in such cases. | ||||
| - always wait on the ``asyncio`` task termination from the ``trio`` | ||||
|   side on error before maybe raising said error. | ||||
| - always close the ``trio`` mem chan on exit to ensure the other | ||||
|   side can detect it and follow. | ||||
|  | @ -1,16 +0,0 @@ | |||
| Strictly support Python 3.10+, start runtime machinery reorg | ||||
| 
 | ||||
| Since we want to push forward using the new `match:` syntax for our | ||||
| internal RPC-msg loops, we officially drop 3.9 support for the next | ||||
| release which should coincide well with the first release of 3.11. | ||||
| 
 | ||||
| This patch set also officially removes the ``tractor.run()`` API (which | ||||
| has been deprecated for some time) as well as starts an initial re-org | ||||
| of the internal runtime core by: | ||||
| - renaming ``tractor._actor`` -> ``._runtime`` | ||||
| - moving the ``._runtime.ActorActor._process_messages()`` and | ||||
|   ``._async_main()`` to be module level singleton-task-functions since | ||||
|   they are only started once for each connection and actor spawn | ||||
|   respectively; this internal API thus looks more similar to (at the | ||||
|   time of writing) the ``trio``-internals in ``trio._core._run``. | ||||
| - officially remove ``tractor.run()``, now deprecated for some time. | ||||
|  | @ -1,4 +0,0 @@ | |||
| Only set `._debug.Lock.local_pdb_complete` if has been created. | ||||
| 
 | ||||
| This can be triggered by a very rare race condition (and thus we have no | ||||
| working test yet) but it is known to exist in (a) consumer project(s). | ||||
|  | @ -1,25 +0,0 @@ | |||
| Add support for ``trio >= 0.22`` and support for the new Python 3.11 | ||||
| ``[Base]ExceptionGroup`` from `pep 654`_ via the backported | ||||
| `exceptiongroup`_ package and some final fixes to the debug mode | ||||
| subsystem. | ||||
| 
 | ||||
| This port ended up driving some (hopefully) final fixes to our debugger | ||||
| subsystem including the solution to all lingering stdstreams locking | ||||
| race-conditions and deadlock scenarios. This includes extending the | ||||
| debugger tests suite as well as cancellation and ``asyncio`` mode cases. | ||||
| Some of the notable details: | ||||
| 
 | ||||
| - always reverting to the ``trio`` SIGINT handler when leaving debug | ||||
|   mode. | ||||
| - bypassing child attempts to acquire the debug lock when detected | ||||
|   to be amidst actor-runtime-cancellation. | ||||
| - allowing the root actor to cancel local but IPC-stale subactor | ||||
|   requests-tasks for the debug lock when in a "no IPC peers" state. | ||||
| 
 | ||||
| Further we refined our ``ActorNursery`` semantics to be more similar to | ||||
| ``trio`` in the sense that parent task errors are always packed into the | ||||
| actor-nursery emitted exception group and adjusted all tests and | ||||
| examples accordingly. | ||||
| 
 | ||||
| .. _pep 654: https://peps.python.org/pep-0654/#handling-exception-groups | ||||
| .. _exceptiongroup: https://github.com/python-trio/exceptiongroup | ||||
|  | @ -1,5 +0,0 @@ | |||
| Establish an explicit "backend spawning" method table; use it from CI | ||||
| 
 | ||||
| More clearly lays out the current set of (3) backends: ``['trio', | ||||
| 'mp_spawn', 'mp_forkserver']`` and adjusts the ``._spawn.py`` internals | ||||
| as well as the test suite to accommodate. | ||||
|  | @ -1,4 +0,0 @@ | |||
| Add ``key: Callable[..., Hashable]`` support to ``.trionics.maybe_open_context()`` | ||||
| 
 | ||||
| Gives users finer grained control over cache hit behaviour using | ||||
| a callable which receives the input ``kwargs: dict``. | ||||
|  | @ -1,41 +0,0 @@ | |||
| Add support for debug-lock blocking using a ``._debug.Lock._blocked: | ||||
| set[tuple]`` and add ids when no-more IPC connections with the | ||||
| root actor are detected. | ||||
| 
 | ||||
| This is an enhancement which (mostly) solves a lingering debugger | ||||
| locking race case we needed to handle: | ||||
| 
 | ||||
| - child crashes acquires TTY lock in root and attaches to ``pdb`` | ||||
| - child IPC goes down such that all channels to the root are broken | ||||
|   / non-functional. | ||||
| - root is stuck thinking the child is still in debug even though it | ||||
|   can't be contacted and the child actor machinery hasn't been | ||||
|   cancelled by its parent. | ||||
| - root get's stuck in deadlock with child since it won't send a cancel | ||||
|   request until the child is finished debugging (to avoid clobbering | ||||
|   a child that is actually using the debugger), but the child can't | ||||
|   unlock the debugger bc IPC is down and it can't contact the root. | ||||
| 
 | ||||
| To avoid this scenario add debug lock blocking list via | ||||
| `._debug.Lock._blocked: set[tuple]` which holds actor uids for any actor | ||||
| that is detected by the root as having no transport channel connections | ||||
| (of which at least one should exist if this sub-actor at some point | ||||
| acquired the debug lock). The root consequently checks this list for any | ||||
| actor that tries to (re)acquire the lock and blocks with | ||||
| a ``ContextCancelled``. Further, when a debug condition is tested in | ||||
| ``._runtime._invoke``, the context's ``._enter_debugger_on_cancel`` is | ||||
| set to `False` if the actor was put on the block list then all | ||||
| post-mortem / crash handling will be bypassed for that task. | ||||
| 
 | ||||
| In theory this approach to block list management may cause problems | ||||
| where some nested child actor acquires and releases the lock multiple | ||||
| times and it gets stuck on the block list after the first use? If this | ||||
| turns out to be an issue we can try changing the strat so blocks are | ||||
| only added when the root has zero IPC peers left? | ||||
| 
 | ||||
| Further, this adds a root-locking-task side cancel scope, | ||||
| ``Lock._root_local_task_cs_in_debug``, which can be ``.cancel()``-ed by the root | ||||
| runtime when a stale lock is detected during the IPC channel testing. | ||||
| However, right now we're NOT using this since it seems to cause test | ||||
| failures likely due to causing pre-mature cancellation and maybe needs | ||||
| a bit more experimenting? | ||||
|  | @ -1,19 +0,0 @@ | |||
| Rework our ``.trionics.BroadcastReceiver`` internals to avoid method | ||||
| recursion and approach a design and interface closer to ``trio``'s | ||||
| ``MemoryReceiveChannel``. | ||||
| 
 | ||||
| The details of the internal changes include: | ||||
| 
 | ||||
| - implementing a ``BroadcastReceiver.receive_nowait()`` and using it | ||||
|   within the async ``.receive()`` thus avoiding recursion from | ||||
|   ``.receive()``. | ||||
| - failing over to an internal ``._receive_from_underlying()`` when the | ||||
|   ``_nowait()`` call raises ``trio.WouldBlock`` | ||||
| - adding ``BroadcastState.statistics()`` for debugging and testing both | ||||
|   internals and by users. | ||||
| - add an internal ``BroadcastReceiver._raise_on_lag: bool`` which can be | ||||
|   set to avoid ``Lagged`` raising for possible use cases where a user | ||||
|   wants to choose between a [cheap or nasty | ||||
|   pattern](https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern) | ||||
|   for the particular stream (we use this in ``piker``'s dark clearing | ||||
|   engine to avoid fast feeds breaking during HFT periods). | ||||
|  | @ -1,11 +0,0 @@ | |||
| Always ``list``-cast the ``mngrs`` input to | ||||
| ``.trionics.gather_contexts()`` and ensure its size otherwise raise | ||||
| a ``ValueError``. | ||||
| 
 | ||||
| Turns out that trying to pass an inline-style generator comprehension | ||||
| doesn't seem to work inside the ``async with`` expression? Further, in | ||||
| such a case we can get a hang waiting on the all-entered event | ||||
| completion when the internal mngrs iteration is a noop. Instead we | ||||
| always greedily check a size and error on empty input; the lazy | ||||
| iteration of a generator input is not beneficial anyway since we're | ||||
| entering all manager instances in concurrent tasks. | ||||
|  | @ -1,15 +0,0 @@ | |||
| Fixes to ensure IPC (channel) breakage doesn't result in hung actor | ||||
| trees; the zombie reaping and general supervision machinery will always | ||||
| clean up and terminate. | ||||
| 
 | ||||
| This includes not only the (mostly minor) fixes to solve these cases but | ||||
| also a new extensive test suite in `test_advanced_faults.py` with an | ||||
| accompanying highly configurable example module-script in | ||||
| `examples/advanced_faults/ipc_failure_during_stream.py`. Tests ensure we | ||||
| never get hang or zombies despite operating in debug mode and attempt to | ||||
| simulate all possible IPC transport failure cases for a local-host actor | ||||
| tree. | ||||
| 
 | ||||
| Further we simplify `Context.open_stream.__aexit__()` to just call | ||||
| `MsgStream.aclose()` directly more or less avoiding a pure duplicate | ||||
| code path. | ||||
|  | @ -1,10 +0,0 @@ | |||
| Always redraw the `pdbpp` prompt on `SIGINT` during REPL use. | ||||
| 
 | ||||
| There were recent changes to do with Python 3.10 that required us to pin | ||||
| to a specific commit in `pdbpp` which have recently been fixed minus | ||||
| this last issue with `SIGINT` shielding: not clobbering or not | ||||
| showing the `(Pdb++)` prompt on ctlr-c by the user. This repairs all | ||||
| that by firstly removing the standard KBI intercepting of the std lib's | ||||
| `pdb.Pdb._cmdloop()` as well as ensuring that only the actor with REPL | ||||
| control ever reports `SIGINT` handler log msgs and prompt redraws. With | ||||
| this we move back to using pypi `pdbpp` release. | ||||
|  | @ -1,7 +0,0 @@ | |||
| Drop `trio.Process.aclose()` usage, copy into our spawning code. | ||||
| 
 | ||||
| The details are laid out in https://github.com/goodboy/tractor/issues/330. | ||||
| `trio` changed its process running quite some time ago, this just copies | ||||
| out the small bit we needed (from the old `.aclose()`) for hard kills | ||||
| where a soft runtime cancel request fails and our "zombie killer" | ||||
| implementation kicks in. | ||||
|  | @ -1,15 +0,0 @@ | |||
| Switch to using the fork & fix of `pdb++`, `pdbp`: | ||||
| https://github.com/mdmintz/pdbp | ||||
| 
 | ||||
| Allows us to sidestep a variety of issues that aren't being maintained | ||||
| in the upstream project thanks to the hard work of @mdmintz! | ||||
| 
 | ||||
| We also include some default settings adjustments as per recent | ||||
| development on the fork: | ||||
| 
 | ||||
| - sticky mode is still turned on by default but now activates when | ||||
|   using the `ll` repl command. | ||||
| - turn off line truncation by default to avoid inter-line gaps when | ||||
|   resizing the terminal during use. | ||||
| - when using the backtrace cmd either by `w` or `bt`, the config | ||||
|   automatically switches to non-sticky mode. | ||||
|  | @ -1,18 +0,0 @@ | |||
| First generate a built distribution: | ||||
| 
 | ||||
| ``` | ||||
| python -m pip install --upgrade build | ||||
| python -m build --sdist --outdir dist/alpha5/ | ||||
| ``` | ||||
| 
 | ||||
| Then try a test ``pypi`` upload: | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload --repository testpypi dist/alpha5/* | ||||
| ``` | ||||
| 
 | ||||
| Then push to `pypi` for realz. | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload dist/alpha5/* | ||||
| ``` | ||||
							
								
								
									
										164
									
								
								pyproject.toml
								
								
								
								
							
							
						
						
									
										164
									
								
								pyproject.toml
								
								
								
								
							|  | @ -1,164 +0,0 @@ | |||
| [build-system] | ||||
| requires = ["hatchling"] | ||||
| build-backend = "hatchling.build" | ||||
| 
 | ||||
| # ------ build-system ------ | ||||
| 
 | ||||
| [project] | ||||
| name = "tractor" | ||||
| version = "0.1.0a6dev0" | ||||
| description = 'structured concurrent `trio`-"actors"' | ||||
| authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }] | ||||
| requires-python = ">= 3.11" | ||||
| readme = "docs/README.rst" | ||||
| license = "AGPL-3.0-or-later" | ||||
| keywords = [ | ||||
|   "trio", | ||||
|   "async", | ||||
|   "concurrency", | ||||
|   "structured concurrency", | ||||
|   "actor model", | ||||
|   "distributed", | ||||
|   "multiprocessing", | ||||
| ] | ||||
| classifiers = [ | ||||
|   "Development Status :: 3 - Alpha", | ||||
|   "Operating System :: POSIX :: Linux", | ||||
|   "Framework :: Trio", | ||||
|   "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||
|   "Programming Language :: Python :: Implementation :: CPython", | ||||
|   "Programming Language :: Python :: 3 :: Only", | ||||
|   "Programming Language :: Python :: 3.11", | ||||
|   "Topic :: System :: Distributed Computing", | ||||
| ] | ||||
| dependencies = [ | ||||
|   # trio runtime and friends | ||||
|   # (poetry) proper range specs, | ||||
|   # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||
|   # TODO, for 3.13 we must go go `0.27` which means we have to | ||||
|   # disable strict egs or port to handling them internally! | ||||
|   "trio>0.27", | ||||
|   "tricycle>=0.4.1,<0.5", | ||||
|   "wrapt>=1.16.0,<2", | ||||
|   "colorlog>=6.8.2,<7", | ||||
|   # built-in multi-actor `pdb` REPL | ||||
|   "pdbp>=1.6,<2", # windows only (from `pdbp`) | ||||
|   # typed IPC msging | ||||
|   "msgspec>=0.19.0", | ||||
|   "cffi>=1.17.1", | ||||
|   "bidict>=0.23.1", | ||||
| ] | ||||
| 
 | ||||
| # ------ project ------ | ||||
| 
 | ||||
| [dependency-groups] | ||||
| dev = [ | ||||
|   # test suite | ||||
|   # TODO: maybe some of these layout choices? | ||||
|   # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||
|   "pytest>=8.3.5", | ||||
|   "pexpect>=4.9.0,<5", | ||||
|   # `tractor.devx` tooling | ||||
|   "greenback>=1.2.1,<2", | ||||
|   "stackscope>=0.2.2,<0.3", | ||||
|   # ^ requires this? | ||||
|   "typing-extensions>=4.14.1", | ||||
| 
 | ||||
|   "pyperclip>=1.9.0", | ||||
|   "prompt-toolkit>=3.0.50", | ||||
|   "xonsh>=0.19.2", | ||||
|   "psutil>=7.0.0", | ||||
| ] | ||||
| # TODO, add these with sane versions; were originally in | ||||
| # `requirements-docs.txt`.. | ||||
| # docs = [ | ||||
| #   "sphinx>=" | ||||
| #   "sphinx_book_theme>=" | ||||
| # ] | ||||
| 
 | ||||
| # ------ dependency-groups ------ | ||||
| 
 | ||||
| # ------ dependency-groups ------ | ||||
| 
 | ||||
| [tool.uv.sources] | ||||
| # XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)` | ||||
| # for the `pp` alias.. | ||||
| # pdbp = { path = "../pdbp", editable = true } | ||||
| 
 | ||||
| # ------ tool.uv.sources ------ | ||||
| # TODO, distributed (multi-host) extensions | ||||
| # linux kernel networking | ||||
| # 'pyroute2 | ||||
| 
 | ||||
| # ------ tool.uv.sources ------ | ||||
| 
 | ||||
| [tool.uv] | ||||
| # XXX NOTE, prefer the sys python bc apparently the distis from | ||||
| # `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s | ||||
| # likely due to linking against `libedit` over `readline`.. | ||||
| # |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions | ||||
| # |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux | ||||
| # | ||||
| # https://docs.astral.sh/uv/reference/settings/#python-preference | ||||
| python-preference = 'system' | ||||
| 
 | ||||
| # ------ tool.uv ------ | ||||
| 
 | ||||
| [tool.hatch.build.targets.sdist] | ||||
| include = ["tractor"] | ||||
| 
 | ||||
| [tool.hatch.build.targets.wheel] | ||||
| include = ["tractor"] | ||||
| 
 | ||||
| # ------ tool.hatch ------ | ||||
| 
 | ||||
| [tool.towncrier] | ||||
| package = "tractor" | ||||
| filename = "NEWS.rst" | ||||
| directory = "nooz/" | ||||
| version = "0.1.0a6" | ||||
| title_format = "tractor {version} ({project_date})" | ||||
| template = "nooz/_template.rst" | ||||
| all_bullets = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   directory = "feature" | ||||
|   name = "Features" | ||||
|   showcontent = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   directory = "bugfix" | ||||
|   name = "Bug Fixes" | ||||
|   showcontent = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   directory = "doc" | ||||
|   name = "Improved Documentation" | ||||
|   showcontent = true | ||||
| 
 | ||||
| [[tool.towncrier.type]] | ||||
|   directory = "trivial" | ||||
|   name = "Trivial/Internal Changes" | ||||
|   showcontent = true | ||||
| 
 | ||||
| # ------ tool.towncrier ------ | ||||
| 
 | ||||
| [tool.pytest.ini_options] | ||||
| minversion = '6.0' | ||||
| testpaths = [ | ||||
|   'tests' | ||||
| ] | ||||
| addopts = [ | ||||
|   # TODO: figure out why this isn't working.. | ||||
|   '--rootdir=./tests', | ||||
| 
 | ||||
|   '--import-mode=importlib', | ||||
|   # don't show frickin captured logs AGAIN in the report.. | ||||
|   '--show-capture=no', | ||||
| ] | ||||
| log_cli = false | ||||
| # TODO: maybe some of these layout choices? | ||||
| # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||
| # pythonpath = "src" | ||||
| 
 | ||||
| # ------ tool.pytest ------ | ||||
|  | @ -1,8 +0,0 @@ | |||
| # vim: ft=ini | ||||
| # pytest.ini for tractor | ||||
| 
 | ||||
| [pytest] | ||||
| # don't show frickin captured logs AGAIN in the report.. | ||||
| addopts = --show-capture='no' | ||||
| log_cli = false | ||||
| ; minversion = 6.0 | ||||
|  | @ -0,0 +1,2 @@ | |||
| sphinx | ||||
| sphinx_book_theme | ||||
|  | @ -0,0 +1,8 @@ | |||
| pytest | ||||
| pytest-trio | ||||
| pytest-timeout | ||||
| pdbpp | ||||
| mypy<0.920 | ||||
| trio_typing<0.7.0 | ||||
| pexpect | ||||
| towncrier | ||||
							
								
								
									
										82
									
								
								ruff.toml
								
								
								
								
							
							
						
						
									
										82
									
								
								ruff.toml
								
								
								
								
							|  | @ -1,82 +0,0 @@ | |||
| # from default `ruff.toml` @ | ||||
| # https://docs.astral.sh/ruff/configuration/ | ||||
| 
 | ||||
| # Exclude a variety of commonly ignored directories. | ||||
| exclude = [ | ||||
|     ".bzr", | ||||
|     ".direnv", | ||||
|     ".eggs", | ||||
|     ".git", | ||||
|     ".git-rewrite", | ||||
|     ".hg", | ||||
|     ".ipynb_checkpoints", | ||||
|     ".mypy_cache", | ||||
|     ".nox", | ||||
|     ".pants.d", | ||||
|     ".pyenv", | ||||
|     ".pytest_cache", | ||||
|     ".pytype", | ||||
|     ".ruff_cache", | ||||
|     ".svn", | ||||
|     ".tox", | ||||
|     ".venv", | ||||
|     ".vscode", | ||||
|     "__pypackages__", | ||||
|     "_build", | ||||
|     "buck-out", | ||||
|     "build", | ||||
|     "dist", | ||||
|     "node_modules", | ||||
|     "site-packages", | ||||
|     "venv", | ||||
| ] | ||||
| 
 | ||||
| # Same as Black. | ||||
| line-length = 88 | ||||
| indent-width = 4 | ||||
| 
 | ||||
| # Assume Python 3.11 | ||||
| target-version = "py311" | ||||
| 
 | ||||
| [lint] | ||||
| # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`)  codes by default. | ||||
| # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or | ||||
| # McCabe complexity (`C901`) by default. | ||||
| select = ["E4", "E7", "E9", "F"] | ||||
| ignore = [ | ||||
|   'E402',  # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/ | ||||
| ] | ||||
| 
 | ||||
| # Allow fix for all enabled rules (when `--fix`) is provided. | ||||
| fixable = ["ALL"] | ||||
| unfixable = [] | ||||
| 
 | ||||
| # Allow unused variables when underscore-prefixed. | ||||
| # dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" | ||||
| 
 | ||||
| [format] | ||||
| # Use single quotes in `ruff format`. | ||||
| quote-style = "single" | ||||
| 
 | ||||
| # Like Black, indent with spaces, rather than tabs. | ||||
| indent-style = "space" | ||||
| 
 | ||||
| # Like Black, respect magic trailing commas. | ||||
| skip-magic-trailing-comma = false | ||||
| 
 | ||||
| # Like Black, automatically detect the appropriate line ending. | ||||
| line-ending = "auto" | ||||
| 
 | ||||
| # Enable auto-formatting of code examples in docstrings. Markdown, | ||||
| # reStructuredText code/literal blocks and doctests are all supported. | ||||
| # | ||||
| # This is currently disabled by default, but it is planned for this | ||||
| # to be opt-out in the future. | ||||
| docstring-code-format = false | ||||
| 
 | ||||
| # Set the line length limit used when formatting code snippets in | ||||
| # docstrings. | ||||
| # | ||||
| # This only has an effect when the `docstring-code-format` setting is | ||||
| # enabled. | ||||
| docstring-code-line-length = "dynamic" | ||||
|  | @ -0,0 +1,103 @@ | |||
| #!/usr/bin/env python | ||||
| # | ||||
| # tractor: structured concurrent "actors". | ||||
| # | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| from setuptools import setup | ||||
| 
 | ||||
# Read the long description from the repo's README for pypi display.
with open('docs/README.rst', encoding='utf-8') as f:
    readme = f.read()


setup(
    name="tractor",
    version='0.1.0a5.dev',  # alpha zone
    description='structured concurrent "actors"',
    long_description=readme,
    license='AGPLv3',
    author='Tyler Goodlet',
    maintainer='Tyler Goodlet',
    maintainer_email='jgbt@protonmail.com',
    url='https://github.com/goodboy/tractor',
    platforms=['linux', 'windows'],
    packages=[
        'tractor',
        'tractor.experimental',
        'tractor.trionics',
        'tractor.testing',
    ],
    install_requires=[

        # trio related
        'trio >= 0.20',
        'async_generator',
        'trio_typing',

        # tooling
        'tricycle',
        'colorlog',
        'wrapt',

        # pip ref docs on these specs:
        # https://pip.pypa.io/en/stable/reference/requirement-specifiers/#examples
        # and pep:
        # https://peps.python.org/pep-0440/#version-specifiers
        'pdbpp <= 0.10.1; python_version < "3.10"',

        # windows deps workaround for ``pdbpp``
        # https://github.com/pdbpp/pdbpp/issues/498
        # https://github.com/pdbpp/fancycompleter/issues/37
        'pyreadline3 ; platform_system == "Windows"',

        # 3.10 has an outstanding unreleased issue and `pdbpp` itself
        #   pins to patched forks of its own dependencies as well..and
        #   we need a specific patch on master atm.
        'pdbpp @ git+https://github.com/pdbpp/pdbpp@76c4be5#egg=pdbpp ; python_version > "3.9"',  # noqa: E501

        # serialization
        # NOTE: per PEP 440/508 the version must NOT be quoted inside
        # the requirement string (`'msgspec >= "0.4.0"'` is invalid).
        'msgspec >= 0.4.0',

    ],
    tests_require=['pytest'],
    python_requires=">=3.9",
    keywords=[
        'trio',
        'async',
        'concurrency',
        'structured concurrency',
        'actor model',
        'distributed',
        'multiprocessing'
    ],
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Operating System :: POSIX :: Linux",
        "Operating System :: Microsoft :: Windows",
        "Framework :: Trio",
        "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Intended Audience :: Science/Research",
        "Intended Audience :: Developers",
        "Topic :: System :: Distributed Computing",
    ],
)
|  | @ -1,26 +1,23 @@ | |||
| """ | ||||
| Top level of the testing suites! | ||||
| 
 | ||||
| ``tractor`` testing!! | ||||
| """ | ||||
| from __future__ import annotations | ||||
| import sys | ||||
| import subprocess | ||||
| import os | ||||
| import random | ||||
| import signal | ||||
| import platform | ||||
| import time | ||||
| 
 | ||||
| import pytest | ||||
| from tractor._testing import ( | ||||
|     examples_dir as examples_dir, | ||||
|     tractor_test as tractor_test, | ||||
|     expect_ctxc as expect_ctxc, | ||||
| ) | ||||
| import tractor | ||||
| 
 | ||||
| pytest_plugins: list[str] = [ | ||||
|     'pytester', | ||||
|     'tractor._testing.pytest', | ||||
| ] | ||||
| # export for tests | ||||
| from tractor.testing import tractor_test  # noqa | ||||
| 
 | ||||
| 
 | ||||
| pytest_plugins = ['pytester'] | ||||
| _arb_addr = '127.0.0.1', random.randint(1000, 9999) | ||||
| 
 | ||||
| 
 | ||||
| # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | ||||
|  | @ -33,11 +30,7 @@ else: | |||
|     _KILL_SIGNAL = signal.SIGKILL | ||||
|     _INT_SIGNAL = signal.SIGINT | ||||
|     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value | ||||
|     _PROC_SPAWN_WAIT = ( | ||||
|         0.6 | ||||
|         if sys.version_info < (3, 7) | ||||
|         else 0.4 | ||||
|     ) | ||||
|     _PROC_SPAWN_WAIT = 0.6 if sys.version_info < (3, 7) else 0.4 | ||||
| 
 | ||||
| 
 | ||||
| no_windows = pytest.mark.skipif( | ||||
|  | @ -46,23 +39,40 @@ no_windows = pytest.mark.skipif( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_addoption( | ||||
|     parser: pytest.Parser, | ||||
| ): | ||||
|     # ?TODO? should this be exposed from our `._testing.pytest` | ||||
|     # plugin or should we make it more explicit with `--tl` for | ||||
|     # tractor logging like we do in other client projects? | ||||
| def repodir(): | ||||
|     """Return the abspath to the repo directory. | ||||
|     """ | ||||
|     dirname = os.path.dirname | ||||
|     dirpath = os.path.abspath( | ||||
|         dirname(dirname(os.path.realpath(__file__))) | ||||
|         ) | ||||
|     return dirpath | ||||
| 
 | ||||
| 
 | ||||
| def pytest_addoption(parser): | ||||
|     parser.addoption( | ||||
|         "--ll", | ||||
|         action="store", | ||||
|         dest='loglevel', | ||||
|         "--ll", action="store", dest='loglevel', | ||||
|         default='ERROR', help="logging level to set when testing" | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--spawn-backend", action="store", dest='spawn_backend', | ||||
|         default='trio', | ||||
|         help="Processing spawning backend to use for test run", | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_configure(config): | ||||
|     backend = config.option.spawn_backend | ||||
| 
 | ||||
|     if backend == 'mp': | ||||
|         tractor._spawn.try_set_start_method('spawn') | ||||
|     elif backend == 'trio': | ||||
|         tractor._spawn.try_set_start_method(backend) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session', autouse=True) | ||||
| def loglevel(request): | ||||
|     import tractor | ||||
|     orig = tractor.log._default_loglevel | ||||
|     level = tractor.log._default_loglevel = request.config.option.loglevel | ||||
|     tractor.log.get_console_log(level) | ||||
|  | @ -70,148 +80,87 @@ def loglevel(request): | |||
|     tractor.log._default_loglevel = orig | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def spawn_backend(request): | ||||
|     return request.config.option.spawn_backend | ||||
| 
 | ||||
| 
 | ||||
| _ci_env: bool = os.environ.get('CI', False) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def ci_env() -> bool: | ||||
|     ''' | ||||
|     Detect CI environment. | ||||
| 
 | ||||
|     ''' | ||||
|     """Detect CI envoirment. | ||||
|     """ | ||||
|     return _ci_env | ||||
| 
 | ||||
| 
 | ||||
| def sig_prog( | ||||
|     proc: subprocess.Popen, | ||||
|     sig: int, | ||||
|     canc_timeout: float = 0.1, | ||||
| ) -> int: | ||||
| @pytest.fixture(scope='session') | ||||
| def arb_addr(): | ||||
|     return _arb_addr | ||||
| 
 | ||||
| 
 | ||||
| def pytest_generate_tests(metafunc): | ||||
|     spawn_backend = metafunc.config.option.spawn_backend | ||||
|     if not spawn_backend: | ||||
|         # XXX some weird windows bug with `pytest`? | ||||
|         spawn_backend = 'mp' | ||||
|     assert spawn_backend in ('mp', 'trio') | ||||
| 
 | ||||
|     if 'start_method' in metafunc.fixturenames: | ||||
|         if spawn_backend == 'mp': | ||||
|             from multiprocessing import get_all_start_methods | ||||
|             methods = get_all_start_methods() | ||||
|             if 'fork' in methods: | ||||
|                 # fork not available on windows, so check before | ||||
|                 # removing XXX: the fork method is in general | ||||
|                 # incompatible with trio's global scheduler state | ||||
|                 methods.remove('fork') | ||||
|         elif spawn_backend == 'trio': | ||||
|             methods = ['trio'] | ||||
| 
 | ||||
|         metafunc.parametrize("start_method", methods, scope='module') | ||||
| 
 | ||||
| 
 | ||||
| def sig_prog(proc, sig): | ||||
|     "Kill the actor-process with ``sig``." | ||||
|     proc.send_signal(sig) | ||||
|     time.sleep(canc_timeout) | ||||
|     time.sleep(0.1) | ||||
|     if not proc.poll(): | ||||
|         # TODO: why sometimes does SIGINT not work on teardown? | ||||
|         # seems to happen only when trace logging enabled? | ||||
|         proc.send_signal(_KILL_SIGNAL) | ||||
|     ret: int = proc.wait() | ||||
|     ret = proc.wait() | ||||
|     assert ret | ||||
| 
 | ||||
| 
 | ||||
| # TODO: factor into @cm and move to `._testing`? | ||||
| @pytest.fixture | ||||
| def daemon( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     reg_addr: tuple[str, int], | ||||
|     tpt_proto: str, | ||||
| 
 | ||||
| ) -> subprocess.Popen: | ||||
|     ''' | ||||
|     Run a daemon root actor as a separate actor-process tree and | ||||
|     "remote registrar" for discovery-protocol related tests. | ||||
| 
 | ||||
|     ''' | ||||
| def daemon(loglevel, testdir, arb_addr): | ||||
|     """Run a daemon actor as a "remote arbiter". | ||||
|     """ | ||||
|     if loglevel in ('trace', 'debug'): | ||||
|         # XXX: too much logging will lock up the subproc (smh) | ||||
|         loglevel: str = 'info' | ||||
|         # too much logging will lock up the subproc (smh) | ||||
|         loglevel = 'info' | ||||
| 
 | ||||
|     code: str = ( | ||||
|         "import tractor; " | ||||
|         "tractor.run_daemon([], " | ||||
|         "registry_addrs={reg_addrs}, " | ||||
|         "debug_mode={debug_mode}, " | ||||
|         "loglevel={ll})" | ||||
|     ).format( | ||||
|         reg_addrs=str([reg_addr]), | ||||
|         ll="'{}'".format(loglevel) if loglevel else None, | ||||
|         debug_mode=debug_mode, | ||||
|     ) | ||||
|     cmd: list[str] = [ | ||||
|         sys.executable, | ||||
|         '-c', code, | ||||
|     cmdargs = [ | ||||
|         sys.executable, '-c', | ||||
|         "import tractor; tractor.run_daemon([], arbiter_addr={}, loglevel={})" | ||||
|         .format( | ||||
|             arb_addr, | ||||
|             "'{}'".format(loglevel) if loglevel else None) | ||||
|     ] | ||||
|     # breakpoint() | ||||
|     kwargs = {} | ||||
|     kwargs = dict() | ||||
|     if platform.system() == 'Windows': | ||||
|         # without this, tests hang on windows forever | ||||
|         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP | ||||
| 
 | ||||
|     proc: subprocess.Popen = testdir.popen( | ||||
|         cmd, | ||||
|     proc = testdir.popen( | ||||
|         cmdargs, | ||||
|         stdout=subprocess.PIPE, | ||||
|         stderr=subprocess.PIPE, | ||||
|         **kwargs, | ||||
|     ) | ||||
| 
 | ||||
|     # UDS sockets are **really** fast to bind()/listen()/connect() | ||||
|     # so it's often required that we delay a bit more starting | ||||
|     # the first actor-tree.. | ||||
|     if tpt_proto == 'uds': | ||||
|         global _PROC_SPAWN_WAIT | ||||
|         _PROC_SPAWN_WAIT = 0.6 | ||||
| 
 | ||||
|     time.sleep(_PROC_SPAWN_WAIT) | ||||
| 
 | ||||
|     assert not proc.returncode | ||||
|     time.sleep(_PROC_SPAWN_WAIT) | ||||
|     yield proc | ||||
|     sig_prog(proc, _INT_SIGNAL) | ||||
| 
 | ||||
|     # XXX! yeah.. just be reaaal careful with this bc sometimes it | ||||
|     # can lock up on the `_io.BufferedReader` and hang.. | ||||
|     stderr: str = proc.stderr.read().decode() | ||||
|     if stderr: | ||||
|         print( | ||||
|             f'Daemon actor tree produced STDERR:\n' | ||||
|             f'{proc.args}\n' | ||||
|             f'\n' | ||||
|             f'{stderr}\n' | ||||
|         ) | ||||
|     if proc.returncode != -2: | ||||
|         raise RuntimeError( | ||||
|             'Daemon actor tree failed !?\n' | ||||
|             f'{proc.args}\n' | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| # @pytest.fixture(autouse=True) | ||||
| # def shared_last_failed(pytestconfig): | ||||
| #     val = pytestconfig.cache.get("example/value", None) | ||||
| #     breakpoint() | ||||
| #     if val is None: | ||||
| #         pytestconfig.cache.set("example/value", val) | ||||
| #     return val | ||||
| 
 | ||||
| 
 | ||||
| # TODO: a way to let test scripts (like from `examples/`) | ||||
| # guarantee they won't `registry_addrs` collide! | ||||
| # -[ ] maybe use some kinda standard `def main()` arg-spec that | ||||
| #     we can introspect from a fixture that is called from the test | ||||
| #     body? | ||||
| # -[ ] test and figure out typing for below prototype! Bp | ||||
| # | ||||
| # @pytest.fixture | ||||
| # def set_script_runtime_args( | ||||
| #     reg_addr: tuple, | ||||
| # ) -> Callable[[...], None]: | ||||
| 
 | ||||
| #     def import_n_partial_in_args_n_triorun( | ||||
| #         script: Path,  # under examples? | ||||
| #         **runtime_args, | ||||
| #     ) -> Callable[[], Any]:  # a `partial`-ed equiv of `trio.run()` | ||||
| 
 | ||||
| #         # NOTE, below is taken from | ||||
| #         # `.test_advanced_faults.test_ipc_channel_break_during_stream` | ||||
| #         mod: ModuleType = import_path( | ||||
| #             examples_dir() / 'advanced_faults' | ||||
| #             / 'ipc_failure_during_stream.py', | ||||
| #             root=examples_dir(), | ||||
| #             consider_namespace_packages=False, | ||||
| #         ) | ||||
| #         return partial( | ||||
| #             trio.run, | ||||
| #             partial( | ||||
| #                 mod.main, | ||||
| #                 **runtime_args, | ||||
| #             ) | ||||
| #         ) | ||||
| #     return import_n_partial_in_args_n_triorun | ||||
|  |  | |||
|  | @ -1,253 +0,0 @@ | |||
| ''' | ||||
| `tractor.devx.*` tooling sub-pkg test space. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import time | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
| ) | ||||
| from pexpect.spawnbase import SpawnBase | ||||
| 
 | ||||
| from tractor._testing import ( | ||||
|     mk_cmd, | ||||
| ) | ||||
| from tractor.devx.debug import ( | ||||
|     _pause_msg as _pause_msg, | ||||
|     _crash_msg as _crash_msg, | ||||
|     _repl_fail_msg as _repl_fail_msg, | ||||
|     _ctlc_ignore_header as _ctlc_ignore_header, | ||||
| ) | ||||
| from ..conftest import ( | ||||
|     _ci_env, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from pexpect import pty_spawn | ||||
| 
 | ||||
| 
 | ||||
| # a fn that sub-instantiates a `pexpect.spawn()` | ||||
| # and returns it. | ||||
| type PexpectSpawner = Callable[[str], pty_spawn.spawn] | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
| ) -> PexpectSpawner: | ||||
|     ''' | ||||
|     Use the `pexpect` module shipped via `testdir.spawn()` to | ||||
|     run an `./examples/..` script by name. | ||||
| 
 | ||||
|     ''' | ||||
|     if start_method != 'trio': | ||||
|         pytest.skip( | ||||
|             '`pexpect` based tests only supported on `trio` backend' | ||||
|         ) | ||||
| 
 | ||||
|     def unset_colors(): | ||||
|         ''' | ||||
|         Python 3.13 introduced colored tracebacks that break patt | ||||
|         matching, | ||||
| 
 | ||||
|         https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS | ||||
|         https://docs.python.org/3/using/cmdline.html#using-on-controlling-color | ||||
| 
 | ||||
|         ''' | ||||
|         import os | ||||
|         os.environ['PYTHON_COLORS'] = '0' | ||||
| 
 | ||||
|     def _spawn( | ||||
|         cmd: str, | ||||
|         **mkcmd_kwargs, | ||||
|     ) -> pty_spawn.spawn: | ||||
|         unset_colors() | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd( | ||||
|                 cmd, | ||||
|                 **mkcmd_kwargs, | ||||
|             ), | ||||
|             expect_timeout=3, | ||||
|             # preexec_fn=unset_colors, | ||||
|             # ^TODO? get `pytest` core to expose underlying | ||||
|             # `pexpect.spawn()` stuff? | ||||
|         ) | ||||
| 
 | ||||
|     # such that test-dep can pass input script name. | ||||
|     return _spawn  # the `PexpectSpawner`, type alias. | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     params=[False, True], | ||||
|     ids='ctl-c={}'.format, | ||||
| ) | ||||
| def ctlc( | ||||
|     request, | ||||
|     ci_env: bool, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     use_ctlc = request.param | ||||
| 
 | ||||
|     node = request.node | ||||
|     markers = node.own_markers | ||||
|     for mark in markers: | ||||
|         if mark.name == 'has_nested_actors': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' | ||||
|                 f'The test can sometimes run fine locally but until' | ||||
|                 ' we solve' 'this issue this CI test will be xfail:\n' | ||||
|                 'https://github.com/goodboy/tractor/issues/320' | ||||
|             ) | ||||
| 
 | ||||
|         if mark.name == 'ctlcs_bish': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n' | ||||
|                 f'The test and/or underlying example script can *sometimes* run fine ' | ||||
|                 f'locally but more then likely until the cpython peeps get their sh#$ together, ' | ||||
|                 f'this test will definitely not behave like `trio` under SIGINT..\n' | ||||
|             ) | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # the the added color-char encoding.. | ||||
|         from tractor.devx.debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
| 
 | ||||
| 
 | ||||
| def expect( | ||||
|     child, | ||||
| 
 | ||||
|     # normally a `pdb` prompt by default | ||||
|     patt: str, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Expect wrapper that prints last seen console | ||||
|     data before failing. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         child.expect( | ||||
|             patt, | ||||
|             **kwargs, | ||||
|         ) | ||||
|     except TIMEOUT: | ||||
|         before = str(child.before.decode()) | ||||
|         print(before) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| PROMPT = r"\(Pdb\+\)" | ||||
| 
 | ||||
| 
 | ||||
| def in_prompt_msg( | ||||
|     child: SpawnBase, | ||||
|     parts: list[str], | ||||
| 
 | ||||
|     pause_on_false: bool = False, | ||||
|     err_on_false: bool = False, | ||||
|     print_prompt_on_false: bool = True, | ||||
| 
 | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Predicate check if (the prompt's) std-streams output has all | ||||
|     `str`-parts in it. | ||||
| 
 | ||||
|     Can be used in test asserts for bulk matching expected | ||||
|     log/REPL output for a given `pdb` interact point. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     before: str = str(child.before.decode()) | ||||
|     for part in parts: | ||||
|         if part not in before: | ||||
|             if pause_on_false: | ||||
|                 import pdbp | ||||
|                 pdbp.set_trace() | ||||
| 
 | ||||
|             if print_prompt_on_false: | ||||
|                 print(before) | ||||
| 
 | ||||
|             if err_on_false: | ||||
|                 raise ValueError( | ||||
|                     f'Could not find pattern in `before` output?\n' | ||||
|                     f'part: {part!r}\n' | ||||
|                 ) | ||||
|             return False | ||||
| 
 | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| # TODO: todo support terminal color-chars stripping so we can match | ||||
| # against call stack frame output from the the 'll' command the like! | ||||
| # -[ ] SO answer for stipping ANSI codes: https://stackoverflow.com/a/14693789 | ||||
| def assert_before( | ||||
|     child: SpawnBase, | ||||
|     patts: list[str], | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     assert in_prompt_msg( | ||||
|         child=child, | ||||
|         parts=patts, | ||||
| 
 | ||||
|         # since this is an "assert" helper ;) | ||||
|         err_on_false=True, | ||||
|         **kwargs | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def do_ctlc( | ||||
|     child, | ||||
|     count: int = 3, | ||||
|     delay: float = 0.1, | ||||
|     patt: str|None = None, | ||||
| 
 | ||||
|     # expect repl UX to reprint the prompt after every | ||||
|     # ctrl-c send. | ||||
|     # XXX: no idea but, in CI this never seems to work even on 3.10 so | ||||
|     # needs some further investigation potentially... | ||||
|     expect_prompt: bool = not _ci_env, | ||||
| 
 | ||||
| ) -> str|None: | ||||
| 
 | ||||
|     before: str|None = None | ||||
| 
 | ||||
|     # make sure ctl-c sends don't do anything but repeat output | ||||
|     for _ in range(count): | ||||
|         time.sleep(delay) | ||||
|         child.sendcontrol('c') | ||||
| 
 | ||||
|         # TODO: figure out why this makes CI fail.. | ||||
|         # if you run this test manually it works just fine.. | ||||
|         if expect_prompt: | ||||
|             time.sleep(delay) | ||||
|             child.expect(PROMPT) | ||||
|             before = str(child.before.decode()) | ||||
|             time.sleep(delay) | ||||
| 
 | ||||
|             if patt: | ||||
|                 # should see the last line on console | ||||
|                 assert patt in before | ||||
| 
 | ||||
|     # return the console content up to the final prompt | ||||
|     return before | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -1,381 +0,0 @@ | |||
| ''' | ||||
| That "foreign loop/thread" debug REPL support better ALSO WORK! | ||||
| 
 | ||||
| Same as `test_native_pause.py`. | ||||
| All these tests can be understood (somewhat) by running the | ||||
| equivalent `examples/debugging/` scripts manually. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| # from functools import partial | ||||
| # import itertools | ||||
| import time | ||||
| # from typing import ( | ||||
| #     Iterator, | ||||
| # ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     # _ci_env, | ||||
|     do_ctlc, | ||||
|     PROMPT, | ||||
|     # expect, | ||||
|     in_prompt_msg, | ||||
|     assert_before, | ||||
|     _pause_msg, | ||||
|     _crash_msg, | ||||
|     _ctlc_ignore_header, | ||||
|     # _repl_fail_msg, | ||||
| ) | ||||
| 
 | ||||
| @cm | ||||
| def maybe_expect_timeout( | ||||
|     ctlc: bool = False, | ||||
| ) -> None: | ||||
|     try: | ||||
|         yield | ||||
|     except TIMEOUT: | ||||
|         # breakpoint() | ||||
|         if ctlc: | ||||
|             pytest.xfail( | ||||
|                 'Some kinda redic threading SIGINT bug i think?\n' | ||||
|                 'See the notes in `examples/debugging/sync_bp.py`..\n' | ||||
|             ) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_pause_from_sync( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from sync functions AND from | ||||
|     any thread spawned with `trio.to_thread.run_sync()`. | ||||
| 
 | ||||
|     `examples/debugging/sync_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('sync_bp') | ||||
| 
 | ||||
|     # first `sync_pause()` after nurseries open | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # pre-prompt line | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|         # ^NOTE^ subactor not spawned yet; don't need extra delay. | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # first `await tractor.pause()` inside `p.open_context()` body | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # XXX shouldn't see gb loaded message with PDB loglevel! | ||||
|     # assert not in_prompt_msg( | ||||
|     #     child, | ||||
|     #     ['`greenback` portal opened!'], | ||||
|     # ) | ||||
|     # should be same root task | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     # XXX, fwiw without a brief sleep here the SIGINT might actually | ||||
|     # trigger "subactor" cancellation by its parent  before the | ||||
|     # shield-handler is engaged. | ||||
|     # | ||||
|     # => similar to the `delay` input to `do_ctlc()` below, setting | ||||
|     # this too low can cause the test to fail since the `subactor` | ||||
|     # suffers a race where the root/parent sends an actor-cancel | ||||
|     # prior to the context task hitting its pause point (and thus | ||||
|     # engaging the `sigint_shield()` handler in time); this value | ||||
|     # seems be good enuf? | ||||
|     time.sleep(0.6) | ||||
| 
 | ||||
|     # one of the bg thread or subactor should have | ||||
|     # `Lock.acquire()`-ed | ||||
|     # (NOT both, which will result in REPL clobbering!) | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
|         'subactor': [ | ||||
|             "'start_n_sync_pause'", | ||||
|             "('subactor'", | ||||
|         ], | ||||
|         'inline_root_bg_thread': [ | ||||
|             "<Thread(inline_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|         'start_soon_root_bg_thread': [ | ||||
|             "<Thread(start_soon_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
|     conts: int = 0  # for debugging below matching logic on failure | ||||
|     while attach_patts: | ||||
|         child.sendline('c') | ||||
|         conts += 1 | ||||
|         child.expect(PROMPT) | ||||
|         before = str(child.before.decode()) | ||||
|         for key in attach_patts: | ||||
|             if key in before: | ||||
|                 attach_key: str = key | ||||
|                 expected_patts: str = attach_patts.pop(key) | ||||
|                 assert_before( | ||||
|                     child, | ||||
|                     [_pause_msg] | ||||
|                     + | ||||
|                     expected_patts | ||||
|                 ) | ||||
|                 break | ||||
|         else: | ||||
|             pytest.fail( | ||||
|                 f'No keys found?\n\n' | ||||
|                 f'{attach_patts.keys()}\n\n' | ||||
|                 f'{before}\n' | ||||
|             ) | ||||
| 
 | ||||
|         # ensure no other task/threads engaged a REPL | ||||
|         # at the same time as the one that was detected above. | ||||
|         for key, other_patts in attach_patts.copy().items(): | ||||
|             assert not in_prompt_msg( | ||||
|                 child, | ||||
|                 other_patts, | ||||
|             ) | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc( | ||||
|                 child, | ||||
|                 patt=attach_key, | ||||
|                 # NOTE same as comment above | ||||
|                 delay=0.4, | ||||
|             ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # XXX TODO, weird threading bug it seems despite the | ||||
|     # `abandon_on_cancel: bool` setting to | ||||
|     # `trio.to_thread.run_sync()`.. | ||||
|     with maybe_expect_timeout( | ||||
|         ctlc=ctlc, | ||||
|     ): | ||||
|         child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def expect_any_of( | ||||
|     attach_patts: dict[str, list[str]], | ||||
|     child,   # what type? | ||||
|     ctlc: bool = False, | ||||
|     prompt: str = _ctlc_ignore_header, | ||||
|     ctlc_delay: float = .4, | ||||
| 
 | ||||
| ) -> list[str]: | ||||
|     ''' | ||||
|     Receive any of a `list[str]` of patterns provided in | ||||
|     `attach_patts`. | ||||
| 
 | ||||
|     Used to test racing prompts from multiple actors and/or | ||||
|     tasks using a common root process' `pdbp` REPL. | ||||
| 
 | ||||
|     ''' | ||||
|     assert attach_patts | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     for attach_key in attach_patts: | ||||
|         if attach_key in before: | ||||
|             expected_patts: str = attach_patts.pop(attach_key) | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 expected_patts | ||||
|             ) | ||||
|             break  # from for | ||||
|     else: | ||||
|         pytest.fail( | ||||
|             f'No keys found?\n\n' | ||||
|             f'{attach_patts.keys()}\n\n' | ||||
|             f'{before}\n' | ||||
|         ) | ||||
| 
 | ||||
|     # ensure no other task/threads engaged a REPL | ||||
|     # at the same time as the one that was detected above. | ||||
|     for key, other_patts in attach_patts.copy().items(): | ||||
|         assert not in_prompt_msg( | ||||
|             child, | ||||
|             other_patts, | ||||
|         ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             patt=prompt, | ||||
|             # NOTE same as comment above | ||||
|             delay=ctlc_delay, | ||||
|         ) | ||||
| 
 | ||||
|     return expected_patts | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_sync_pause_from_aio_task( | ||||
|     spawn, | ||||
| 
 | ||||
|     ctlc: bool | ||||
|     # ^TODO, fix for `asyncio`!! | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from an `asyncio.Task` spawned using | ||||
|     APIs in `.to_asyncio`. | ||||
| 
 | ||||
|     `examples/debugging/asycio_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('asyncio_bp') | ||||
| 
 | ||||
|     # RACE on whether trio/asyncio task bps first | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # first pause in guest-mode (aka "infecting") | ||||
|         # `trio.Task`. | ||||
|         'trio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task 'trio_ctx'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # `breakpoint()` from `asyncio.Task`. | ||||
|         'asyncio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task pending name='Task-2' coro=<greenback_shim()", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
|     } | ||||
| 
 | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     # NOW in race order, | ||||
|     # - the asyncio-task will error | ||||
|     # - the root-actor parent task will pause | ||||
|     # | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # error raised in `asyncio.Task` | ||||
|         "raise ValueError('asyncio side error!')": [ | ||||
|             _crash_msg, | ||||
|             "<Task 'trio_ctx'", | ||||
|             "@ ('aio_daemon'", | ||||
|             "ValueError: asyncio side error!", | ||||
| 
 | ||||
|             # XXX, we no longer show this frame by default! | ||||
|             # 'return await chan.receive()',  # `.to_asyncio` impl internals in tb | ||||
|         ], | ||||
| 
 | ||||
|         # parent-side propagation via actor-nursery/portal | ||||
|         # "tractor._exceptions.RemoteActorError: remote task raised a 'ValueError'": [ | ||||
|         "remote task raised a 'ValueError'": [ | ||||
|             _crash_msg, | ||||
|             "src_uid=('aio_daemon'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # a final pause in root-actor | ||||
|         "<Task '__main__.main'": [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     assert not attach_patts | ||||
| 
 | ||||
|     # final boxed error propagates to root | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "remote task raised a 'ValueError'", | ||||
|             "ValueError: asyncio side error!", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     # with maybe_expect_timeout(): | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_sync_pause_from_non_greenbacked_aio_task(): | ||||
|     ''' | ||||
|     Where the `breakpoint()` caller task is NOT spawned by | ||||
|     `tractor.to_asyncio` and thus never activates | ||||
|     a `greenback.ensure_portal()` beforehand, presumably bc the task | ||||
|     was started by some lib/dep as in often seen in the field. | ||||
| 
 | ||||
|     Ensure sync pausing works when the pause is in, | ||||
| 
 | ||||
|     - the root actor running in infected-mode? | ||||
|       |_ since we don't need any IPC to acquire the debug lock? | ||||
|       |_ is there some way to handle this like the non-main-thread case? | ||||
| 
 | ||||
|     All other cases need to error out appropriately right? | ||||
| 
 | ||||
|     - for any subactor we can't avoid needing the repl lock.. | ||||
|       |_ is there a way to hook into `asyncio.ensure_future(obj)`? | ||||
| 
 | ||||
|     ''' | ||||
|     pass | ||||
|  | @ -1,306 +0,0 @@ | |||
| ''' | ||||
| That "native" runtime-hackin toolset better be dang useful! | ||||
| 
 | ||||
| Verify the funtion of a variety of "developer-experience" tools we | ||||
| offer from the `.devx` sub-pkg: | ||||
| 
 | ||||
| - use of the lovely `stackscope` for dumping actor `trio`-task trees | ||||
|   during operation and hangs. | ||||
| 
 | ||||
| TODO: | ||||
| - demonstration of `CallerInfo` call stack frame filtering such that | ||||
|   for logging and REPL purposes a user sees exactly the layers needed | ||||
|   when debugging a problem inside the stack vs. in their app. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| import os | ||||
| import signal | ||||
| import time | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     expect, | ||||
|     assert_before, | ||||
|     in_prompt_msg, | ||||
|     PROMPT, | ||||
|     _pause_msg, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     # TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..conftest import PexpectSpawner | ||||
| 
 | ||||
| 
 | ||||
| def test_shield_pause( | ||||
|     spawn: PexpectSpawner, | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `tractor.pause()/.post_mortem()` API works inside an | ||||
|     already cancelled `trio.CancelScope` and that you can step to the | ||||
|     next checkpoint wherein the cancelled will get raised. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn( | ||||
|         'shield_hang_in_sub' | ||||
|     ) | ||||
|     expect( | ||||
|         child, | ||||
|         'Yo my child hanging..?', | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             'Entering shield sleep..', | ||||
|             'Enabling trace-trees on `SIGUSR1` since `stackscope` is installed @', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     script_pid: int = child.pid | ||||
|     print( | ||||
|         f'Sending SIGUSR1 to {script_pid}\n' | ||||
|         f'(kill -s SIGUSR1 {script_pid})\n' | ||||
|     ) | ||||
|     os.kill( | ||||
|         script_pid, | ||||
|         signal.SIGUSR1, | ||||
|     ) | ||||
|     time.sleep(0.2) | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "end-of-\('root'", | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # 'Srying to dump `stackscope` tree..', | ||||
|             # 'Dumping `stackscope` tree for actor', | ||||
|             "('root'",  # uid line | ||||
| 
 | ||||
|             # TODO!? this used to show? | ||||
|             # -[ ] mk reproducable for @oremanj? | ||||
|             # | ||||
|             # parent block point (non-shielded) | ||||
|             # 'await trio.sleep_forever()  # in root', | ||||
|         ] | ||||
|     ) | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "end-of-\('hanger'", | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # relay to the sub should be reported | ||||
|             'Relaying `SIGUSR1`[10] to sub-actor', | ||||
| 
 | ||||
|             "('hanger'",  # uid line | ||||
| 
 | ||||
|             # TODO!? SEE ABOVE | ||||
|             # hanger LOC where it's shield-halted | ||||
|             # 'await trio.sleep_forever()  # in subactor', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # simulate the user sending a ctl-c to the hanging program. | ||||
|     # this should result in the terminator kicking in since | ||||
|     # the sub is shield blocking and can't respond to SIGINT. | ||||
|     os.kill( | ||||
|         child.pid, | ||||
|         signal.SIGINT, | ||||
|     ) | ||||
|     from tractor._supervise import _shutdown_msg | ||||
|     expect( | ||||
|         child, | ||||
|         # 'Shutting down actor runtime', | ||||
|         _shutdown_msg, | ||||
|         timeout=6, | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             'raise KeyboardInterrupt', | ||||
|             # 'Shutting down actor runtime', | ||||
|             '#T-800 deployed to collect zombie B0', | ||||
|             "'--uid', \"('hanger',", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def test_breakpoint_hook_restored( | ||||
|     spawn: PexpectSpawner, | ||||
| ): | ||||
|     ''' | ||||
|     Ensures our actor runtime sets a custom `breakpoint()` hook | ||||
|     on open then restores the stdlib's default on close. | ||||
| 
 | ||||
|     The hook state validation is done via `assert`s inside the | ||||
|     invoked script with only `breakpoint()` (not `tractor.pause()`) | ||||
|     calls used. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('restore_builtin_breakpoint') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     try: | ||||
|         assert_before( | ||||
|             child, | ||||
|             [ | ||||
|                 _pause_msg, | ||||
|                 "<Task '__main__.main'", | ||||
|                 "('root'", | ||||
|                 "first bp, tractor hook set", | ||||
|             ] | ||||
|         ) | ||||
|     # XXX if the above raises `AssertionError`, without sending | ||||
|     # the final 'continue' cmd to the REPL-active sub-process, | ||||
|     # we'll hang waiting for that pexpect instance to terminate.. | ||||
|     finally: | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "last bp, stdlib hook restored", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # since the stdlib hook was already restored there should be NO | ||||
|     # `tractor` `log.pdb()` content from console! | ||||
|     assert not in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ], | ||||
|     ) | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| _to_raise = Exception('Triggering a crash') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'to_raise', | ||||
|     [ | ||||
|         None, | ||||
|         _to_raise, | ||||
|         RuntimeError('Never crash handle this!'), | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_on_exit', | ||||
|     [ | ||||
|         True, | ||||
|         [type(_to_raise)], | ||||
|         False, | ||||
|     ] | ||||
| ) | ||||
| def test_crash_handler_cms( | ||||
|     debug_mode: bool, | ||||
|     to_raise: Exception, | ||||
|     raise_on_exit: bool|list[Exception], | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `.devx.open_crash_handler()` API(s) by also | ||||
|     (conveniently enough) tesing its `repl_fixture: ContextManager` | ||||
|     param support which for this suite allows use to avoid use of | ||||
|     a `pexpect`-style-test since we use the fixture to avoid actually | ||||
|     entering `PdbpREPL.iteract()` :smirk: | ||||
| 
 | ||||
|     ''' | ||||
|     import tractor | ||||
|     # import trio | ||||
| 
 | ||||
|     # state flags | ||||
|     repl_acquired: bool = False | ||||
|     repl_released: bool = False | ||||
| 
 | ||||
|     @cm | ||||
|     def block_repl_ux( | ||||
|         repl: tractor.devx.debug.PdbREPL, | ||||
|         maybe_bxerr: ( | ||||
|             tractor.devx._debug.BoxedMaybeException | ||||
|             |None | ||||
|         ) = None, | ||||
|         enter_repl: bool = True, | ||||
| 
 | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Set pre/post-REPL state vars and bypass actual conole | ||||
|         interaction. | ||||
| 
 | ||||
|         ''' | ||||
|         nonlocal repl_acquired, repl_released | ||||
| 
 | ||||
|         # task: trio.Task = trio.lowlevel.current_task() | ||||
|         # print(f'pre-REPL active_task={task.name}') | ||||
| 
 | ||||
|         print('pre-REPL') | ||||
|         repl_acquired = True | ||||
|         yield False  # never actually .interact() | ||||
|         print('post-REPL') | ||||
|         repl_released = True | ||||
| 
 | ||||
|     try: | ||||
|         # TODO, with runtime's `debug_mode` setting | ||||
|         # -[ ] need to open runtime tho obvi.. | ||||
|         # | ||||
|         # with tractor.devx.maybe_open_crash_handler( | ||||
|         #     pdb=True, | ||||
| 
 | ||||
|         with tractor.devx.open_crash_handler( | ||||
|             raise_on_exit=raise_on_exit, | ||||
|             repl_fixture=block_repl_ux | ||||
|         ) as bxerr: | ||||
|             if to_raise is not None: | ||||
|                 raise to_raise | ||||
| 
 | ||||
|     except Exception as _exc: | ||||
|         exc = _exc | ||||
|         if ( | ||||
|             raise_on_exit is True | ||||
|             or | ||||
|             type(to_raise) in raise_on_exit | ||||
|         ): | ||||
|             assert ( | ||||
|                 exc | ||||
|                 is | ||||
|                 to_raise | ||||
|                 is | ||||
|                 bxerr.value | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|             raise | ||||
|     else: | ||||
|         assert ( | ||||
|             to_raise is None | ||||
|             or | ||||
|             not raise_on_exit | ||||
|             or | ||||
|             type(to_raise) not in raise_on_exit | ||||
|         ) | ||||
|         assert bxerr.value is to_raise | ||||
| 
 | ||||
|     assert bxerr.raise_on_exit == raise_on_exit | ||||
| 
 | ||||
|     if to_raise is not None: | ||||
|         assert repl_acquired | ||||
|         assert repl_released | ||||
|  | @ -1,4 +0,0 @@ | |||
| ''' | ||||
| `tractor.ipc` subsystem(s)/unit testing suites. | ||||
| 
 | ||||
| ''' | ||||
|  | @ -1,114 +0,0 @@ | |||
| ''' | ||||
| Unit-ish tests for specific IPC transport protocol backends. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from pathlib import Path | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     _state, | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def bindspace_dir_str() -> str: | ||||
| 
 | ||||
|     rt_dir: Path = tractor._state.get_rt_dir() | ||||
|     bs_dir: Path = rt_dir / 'doggy' | ||||
|     bs_dir_str: str = str(bs_dir) | ||||
|     assert not bs_dir.is_dir() | ||||
| 
 | ||||
|     yield bs_dir_str | ||||
| 
 | ||||
|     # delete it on suite teardown. | ||||
|     # ?TODO? should we support this internally | ||||
|     # or is leaking it ok? | ||||
|     if bs_dir.is_dir(): | ||||
|         bs_dir.rmdir() | ||||
| 
 | ||||
| 
 | ||||
| def test_uds_bindspace_created_implicitly( | ||||
|     debug_mode: bool, | ||||
|     bindspace_dir_str: str, | ||||
| ): | ||||
|     registry_addr: tuple = ( | ||||
|         f'{bindspace_dir_str}', | ||||
|         'registry@doggy.sock', | ||||
|     ) | ||||
|     bs_dir_str: str = registry_addr[0] | ||||
| 
 | ||||
|     # XXX, ensure bindspace-dir DNE beforehand! | ||||
|     assert not Path(bs_dir_str).is_dir() | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=['uds'], | ||||
|             registry_addrs=[registry_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as _an: | ||||
| 
 | ||||
|             # XXX MUST be created implicitly by | ||||
|             # `.ipc._uds.start_listener()`! | ||||
|             assert Path(bs_dir_str).is_dir() | ||||
| 
 | ||||
|             root: Actor = tractor.current_actor() | ||||
|             assert root.is_registrar | ||||
| 
 | ||||
|             assert registry_addr in root.reg_addrs | ||||
|             assert ( | ||||
|                 registry_addr | ||||
|                 in | ||||
|                 _state._runtime_vars['_registry_addrs'] | ||||
|             ) | ||||
|             assert ( | ||||
|                 _addr.wrap_address(registry_addr) | ||||
|                 in | ||||
|                 root.registry_addrs | ||||
|             ) | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_uds_double_listen_raises_connerr( | ||||
|     debug_mode: bool, | ||||
|     bindspace_dir_str: str, | ||||
| ): | ||||
|     registry_addr: tuple = ( | ||||
|         f'{bindspace_dir_str}', | ||||
|         'registry@doggy.sock', | ||||
|     ) | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=['uds'], | ||||
|             registry_addrs=[registry_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as _an: | ||||
| 
 | ||||
|             # runtime up | ||||
|             root: Actor = tractor.current_actor() | ||||
| 
 | ||||
|             from tractor.ipc._uds import ( | ||||
|                 start_listener, | ||||
|                 UDSAddress, | ||||
|             ) | ||||
|             ya_bound_addr: UDSAddress = root.registry_addrs[0] | ||||
|             try: | ||||
|                 await start_listener( | ||||
|                     addr=ya_bound_addr, | ||||
|                 ) | ||||
|             except ConnectionError as connerr: | ||||
|                 assert type(src_exc := connerr.__context__) is OSError | ||||
|                 assert 'Address already in use' in src_exc.args | ||||
|                 # complete, exit test. | ||||
| 
 | ||||
|             else: | ||||
|                 pytest.fail('It dint raise a connerr !?') | ||||
| 
 | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,95 +0,0 @@ | |||
| ''' | ||||
| Verify the `enable_transports` param drives various | ||||
| per-root/sub-actor IPC endpoint/server settings. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     Portal, | ||||
|     ipc, | ||||
|     msg, | ||||
|     _state, | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| @tractor.context | ||||
| async def chk_tpts( | ||||
|     ctx: tractor.Context, | ||||
|     tpt_proto_key: str, | ||||
| ): | ||||
|     rtvars = _state._runtime_vars | ||||
|     assert ( | ||||
|         tpt_proto_key | ||||
|         in | ||||
|         rtvars['_enable_tpts'] | ||||
|     ) | ||||
|     actor: Actor = tractor.current_actor() | ||||
|     spec: msg.types.SpawnSpec = actor._spawn_spec | ||||
|     assert spec._runtime_vars == rtvars | ||||
| 
 | ||||
|     # ensure individual IPC ep-addr types | ||||
|     serv: ipc._server.Server = actor.ipc_server | ||||
|     addr: ipc._types.Address | ||||
|     for addr in serv.addrs: | ||||
|         assert addr.proto_key == tpt_proto_key | ||||
| 
 | ||||
|     # Actor delegate-props enforcement | ||||
|     assert ( | ||||
|         actor.accept_addrs | ||||
|         == | ||||
|         serv.accept_addrs | ||||
|     ) | ||||
| 
 | ||||
|     await ctx.started(serv.accept_addrs) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, parametrize over mis-matched-proto-typed `registry_addrs` | ||||
| # since i seems to work in `piker` but not exactly sure if both tcp | ||||
| # & uds are being deployed then? | ||||
| # | ||||
| @pytest.mark.parametrize( | ||||
|     'tpt_proto_key', | ||||
|     ['tcp', 'uds'], | ||||
|     ids=lambda item: f'ipc_tpt={item!r}' | ||||
| ) | ||||
| def test_root_passes_tpt_to_sub( | ||||
|     tpt_proto_key: str, | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=[tpt_proto_key], | ||||
|             registry_addrs=[reg_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
| 
 | ||||
|             assert ( | ||||
|                 tpt_proto_key | ||||
|                 in | ||||
|                 _state._runtime_vars['_enable_tpts'] | ||||
|             ) | ||||
| 
 | ||||
|             ptl: Portal = await an.start_actor( | ||||
|                 name='sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ptl.open_context( | ||||
|                 chk_tpts, | ||||
|                 tpt_proto_key=tpt_proto_key, | ||||
|             ) as (ctx, accept_addrs): | ||||
| 
 | ||||
|                 uw_addr: tuple | ||||
|                 for uw_addr in accept_addrs: | ||||
|                     addr = _addr.wrap_address(uw_addr) | ||||
|                     assert addr.is_valid | ||||
| 
 | ||||
|             # shudown sub-actor(s) | ||||
|             await an.cancel() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,72 +0,0 @@ | |||
| ''' | ||||
| High-level `.ipc._server` unit tests. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| from tractor import ( | ||||
|     devx, | ||||
|     ipc, | ||||
|     log, | ||||
| ) | ||||
| from tractor._testing.addr import ( | ||||
|     get_rando_addr, | ||||
| ) | ||||
| # TODO, use/check-roundtripping with some of these wrapper types? | ||||
| # | ||||
| # from .._addr import Address | ||||
| # from ._chan import Channel | ||||
| # from ._transport import MsgTransport | ||||
| # from ._uds import UDSAddress | ||||
| # from ._tcp import TCPAddress | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     '_tpt_proto', | ||||
|     ['uds', 'tcp'] | ||||
| ) | ||||
| def test_basic_ipc_server( | ||||
|     _tpt_proto: str, | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
| ): | ||||
| 
 | ||||
|     # so we see the socket-listener reporting on console | ||||
|     log.get_console_log("INFO") | ||||
| 
 | ||||
|     rando_addr: tuple = get_rando_addr( | ||||
|         tpt_proto=_tpt_proto, | ||||
|     ) | ||||
|     async def main(): | ||||
|         async with ipc._server.open_ipc_server() as server: | ||||
| 
 | ||||
|             assert ( | ||||
|                 server._parent_tn | ||||
|                 and | ||||
|                 server._parent_tn is server._stream_handler_tn | ||||
|             ) | ||||
|             assert server._no_more_peers.is_set() | ||||
| 
 | ||||
|             eps: list[ipc._server.Endpoint] = await server.listen_on( | ||||
|                 accept_addrs=[rando_addr], | ||||
|                 stream_handler_nursery=None, | ||||
|             ) | ||||
|             assert ( | ||||
|                 len(eps) == 1 | ||||
|                 and | ||||
|                 (ep := eps[0])._listener | ||||
|                 and | ||||
|                 not ep.peer_tpts | ||||
|             ) | ||||
| 
 | ||||
|             server._parent_tn.cancel_scope.cancel() | ||||
| 
 | ||||
|         # !TODO! actually make a bg-task connection from a client | ||||
|         # using `ipc._chan._connect_chan()` | ||||
| 
 | ||||
|     with devx.maybe_open_crash_handler( | ||||
|         pdb=debug_mode, | ||||
|     ): | ||||
|         trio.run(main) | ||||
|  | @ -1,309 +0,0 @@ | |||
| ''' | ||||
| Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la | ||||
| cancelacion?.. | ||||
| 
 | ||||
| ''' | ||||
| from functools import partial | ||||
| from types import ModuleType | ||||
| 
 | ||||
| import pytest | ||||
| from _pytest.pathlib import import_path | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     TransportClosed, | ||||
| ) | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
|     break_ipc, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'pre_aclose_msgstream', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'no_msgstream_aclose', | ||||
|         'pre_aclose_msgstream', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'ipc_break', | ||||
|     [ | ||||
|         # no breaks | ||||
|         { | ||||
|             'break_parent_ipc_after': False, | ||||
|             'break_child_ipc_after': False, | ||||
|         }, | ||||
| 
 | ||||
|         # only parent breaks | ||||
|         { | ||||
|             'break_parent_ipc_after': 500, | ||||
|             'break_child_ipc_after': False, | ||||
|         }, | ||||
| 
 | ||||
|         # only child breaks | ||||
|         { | ||||
|             'break_parent_ipc_after': False, | ||||
|             'break_child_ipc_after': 500, | ||||
|         }, | ||||
| 
 | ||||
|         # both: break parent first | ||||
|         { | ||||
|             'break_parent_ipc_after': 500, | ||||
|             'break_child_ipc_after': 800, | ||||
|         }, | ||||
|         # both: break child first | ||||
|         { | ||||
|             'break_parent_ipc_after': 800, | ||||
|             'break_child_ipc_after': 500, | ||||
|         }, | ||||
| 
 | ||||
|     ], | ||||
|     ids=[ | ||||
|         'no_break', | ||||
|         'break_parent', | ||||
|         'break_child', | ||||
|         'break_both_parent_first', | ||||
|         'break_both_child_first', | ||||
|     ], | ||||
| ) | ||||
| def test_ipc_channel_break_during_stream( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     spawn_backend: str, | ||||
|     ipc_break: dict|None, | ||||
|     pre_aclose_msgstream: bool, | ||||
|     tpt_proto: str, | ||||
| ): | ||||
|     ''' | ||||
|     Ensure we can have an IPC channel break its connection during | ||||
|     streaming and it's still possible for the (simulated) user to kill | ||||
|     the actor tree using SIGINT. | ||||
| 
 | ||||
|     We also verify the type of connection error expected in the parent | ||||
|     depending on which side if the IPC breaks first. | ||||
| 
 | ||||
|     ''' | ||||
|     if spawn_backend != 'trio': | ||||
|         if debug_mode: | ||||
|             pytest.skip('`debug_mode` only supported on `trio` spawner') | ||||
| 
 | ||||
|         # non-`trio` spawners should never hit the hang condition that | ||||
|         # requires the user to do ctl-c to cancel the actor tree. | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = TransportClosed | ||||
| 
 | ||||
|     mod: ModuleType = import_path( | ||||
|         examples_dir() / 'advanced_faults' | ||||
|         / 'ipc_failure_during_stream.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
| 
 | ||||
|     # by def we expect KBI from user after a simulated "hang | ||||
|     # period" wherein the user eventually hits ctl-c to kill the | ||||
|     # root-actor tree. | ||||
|     expect_final_exc: BaseException = KeyboardInterrupt | ||||
|     expect_final_cause: BaseException|None = None | ||||
| 
 | ||||
|     if ( | ||||
|         # only expect EoC if trans is broken on the child side, | ||||
|         ipc_break['break_child_ipc_after'] is not False | ||||
|         # AND we tell the child to call `MsgStream.aclose()`. | ||||
|         and pre_aclose_msgstream | ||||
|     ): | ||||
|         # expect_final_exc = trio.EndOfChannel | ||||
|         # ^XXX NOPE! XXX^ since now `.open_stream()` absorbs this | ||||
|         # gracefully! | ||||
|         expect_final_exc = KeyboardInterrupt | ||||
| 
 | ||||
|     # NOTE when ONLY the child breaks or it breaks BEFORE the | ||||
|     # parent we expect the parent to get a closed resource error | ||||
|     # on the next `MsgStream.receive()` and then fail out and | ||||
|     # cancel the child from there. | ||||
|     # | ||||
|     # ONLY CHILD breaks | ||||
|     if ( | ||||
|         ipc_break['break_child_ipc_after'] | ||||
|         and | ||||
|         ipc_break['break_parent_ipc_after'] is False | ||||
|     ): | ||||
|         # NOTE: we DO NOT expect this any more since | ||||
|         # the child side's channel will be broken silently | ||||
|         # and nothing on the parent side will indicate this! | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
| 
 | ||||
|         # NOTE: child will send a 'stop' msg before it breaks | ||||
|         # the transport channel BUT, that will be absorbed by the | ||||
|         # `ctx.open_stream()` block and thus the `.open_context()` | ||||
|         # should hang, after which the test script simulates | ||||
|         # a user sending ctl-c by raising a KBI. | ||||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
|             if tpt_proto == 'uds': | ||||
|                 expect_final_exc = TransportClosed | ||||
|                 expect_final_cause = trio.BrokenResourceError | ||||
| 
 | ||||
|             # XXX OLD XXX | ||||
|             # if child calls `MsgStream.aclose()` then expect EoC. | ||||
|             # ^ XXX not any more ^ since eoc is always absorbed | ||||
|             # gracefully and NOT bubbled to the `.open_context()` | ||||
|             # block! | ||||
|             # expect_final_exc = trio.EndOfChannel | ||||
| 
 | ||||
|     # BOTH but, CHILD breaks FIRST | ||||
|     elif ( | ||||
|         ipc_break['break_child_ipc_after'] is not False | ||||
|         and ( | ||||
|             ipc_break['break_parent_ipc_after'] | ||||
|             > ipc_break['break_child_ipc_after'] | ||||
|         ) | ||||
|     ): | ||||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
| 
 | ||||
|             if tpt_proto == 'uds': | ||||
|                 expect_final_exc = TransportClosed | ||||
|                 expect_final_cause = trio.BrokenResourceError | ||||
| 
 | ||||
|     # NOTE when the parent IPC side dies (even if the child does as well | ||||
|     # but the child fails BEFORE the parent) we always expect the | ||||
|     # IPC layer to raise a closed-resource, NEVER do we expect | ||||
|     # a stop msg since the parent-side ctx apis will error out | ||||
|     # IMMEDIATELY before the child ever sends any 'stop' msg. | ||||
|     # | ||||
|     # ONLY PARENT breaks | ||||
|     elif ( | ||||
|         ipc_break['break_parent_ipc_after'] | ||||
|         and | ||||
|         ipc_break['break_child_ipc_after'] is False | ||||
|     ): | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
|         expect_final_cause = trio.ClosedResourceError | ||||
| 
 | ||||
|     # BOTH but, PARENT breaks FIRST | ||||
|     elif ( | ||||
|         ipc_break['break_parent_ipc_after'] is not False | ||||
|         and ( | ||||
|             ipc_break['break_child_ipc_after'] | ||||
|             > | ||||
|             ipc_break['break_parent_ipc_after'] | ||||
|         ) | ||||
|     ): | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
|         expect_final_cause = trio.ClosedResourceError | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=( | ||||
|             expect_final_exc, | ||||
|             ExceptionGroup, | ||||
|         ), | ||||
|     ) as excinfo: | ||||
|         try: | ||||
|             trio.run( | ||||
|                 partial( | ||||
|                     mod.main, | ||||
|                     debug_mode=debug_mode, | ||||
|                     start_method=spawn_backend, | ||||
|                     loglevel=loglevel, | ||||
|                     pre_close=pre_aclose_msgstream, | ||||
|                     tpt_proto=tpt_proto, | ||||
|                     **ipc_break, | ||||
|                 ) | ||||
|             ) | ||||
|         except KeyboardInterrupt as _kbi: | ||||
|             kbi = _kbi | ||||
|             if expect_final_exc is not KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Rxed unexpected KBI !?\n' | ||||
|                     f'{repr(kbi)}' | ||||
|                 ) | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|         except tractor.TransportClosed as _tc: | ||||
|             tc = _tc | ||||
|             if expect_final_exc is KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Unexpected transport failure !?\n' | ||||
|                     f'{repr(tc)}' | ||||
|                 ) | ||||
|             cause: Exception = tc.__cause__ | ||||
|             assert ( | ||||
|                 # type(cause) is trio.ClosedResourceError | ||||
|                 type(cause) is expect_final_cause | ||||
| 
 | ||||
|                 # TODO, should we expect a certain exc-message (per | ||||
|                 # tpt) as well?? | ||||
|                 # and | ||||
|                 # cause.args[0] == 'another task closed this fd' | ||||
|             ) | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|     # get raw instance from pytest wrapper | ||||
|     value = excinfo.value | ||||
|     if isinstance(value, ExceptionGroup): | ||||
|         excs = value.exceptions | ||||
|         assert len(excs) == 1 | ||||
|         final_exc = excs[0] | ||||
|         assert isinstance(final_exc, expect_final_exc) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def break_ipc_after_started( | ||||
|     ctx: tractor.Context, | ||||
| ) -> None: | ||||
|     await ctx.started() | ||||
|     async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|         # TODO: make a test which verifies the error | ||||
|         # for this, i.e. raises a `MsgTypeError` | ||||
|         # await ctx.chan.send(None) | ||||
| 
 | ||||
|         await break_ipc( | ||||
|             stream=stream, | ||||
|             pre_close=True, | ||||
|         ) | ||||
|         print('child broke IPC and terminating') | ||||
| 
 | ||||
| 
 | ||||
| def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | ||||
|     ''' | ||||
|     Verify that is a subactor's IPC goes down just after bringing up | ||||
|     a stream the parent can trigger a SIGINT and the child will be | ||||
|     reaped out-of-IPC by the localhost process supervision machinery: | ||||
|     aka "zombie lord". | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 portal = await an.start_actor( | ||||
|                     'ipc_breaker', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
| 
 | ||||
|                 with trio.move_on_after(1): | ||||
|                     async with ( | ||||
|                         portal.open_context( | ||||
|                             break_ipc_after_started | ||||
|                         ) as (ctx, sent), | ||||
|                     ): | ||||
|                         async with ctx.open_stream(): | ||||
|                             await trio.sleep(0.5) | ||||
| 
 | ||||
|                         print('parent waiting on context') | ||||
| 
 | ||||
|                 print( | ||||
|                     'parent exited context\n' | ||||
|                     'parent raising KBI..\n' | ||||
|                 ) | ||||
|                 raise KeyboardInterrupt | ||||
| 
 | ||||
|     with pytest.raises(KeyboardInterrupt): | ||||
|         trio.run(main) | ||||
|  | @ -5,8 +5,8 @@ Advanced streaming patterns using bidirectional streams and contexts. | |||
| from collections import Counter | ||||
| import itertools | ||||
| import platform | ||||
| from typing import Set, Dict, List | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
|  | @ -15,7 +15,7 @@ def is_win(): | |||
|     return platform.system() == 'Windows' | ||||
| 
 | ||||
| 
 | ||||
| _registry: dict[str, set[tractor.MsgStream]] = { | ||||
| _registry: Dict[str, Set[tractor.ReceiveMsgStream]] = { | ||||
|     'even': set(), | ||||
|     'odd': set(), | ||||
| } | ||||
|  | @ -77,7 +77,7 @@ async def subscribe( | |||
| 
 | ||||
| async def consumer( | ||||
| 
 | ||||
|     subs: list[str], | ||||
|     subs: List[str], | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -144,16 +144,8 @@ def test_dynamic_pub_sub(): | |||
| 
 | ||||
|     try: | ||||
|         trio.run(main) | ||||
|     except ( | ||||
|         trio.TooSlowError, | ||||
|         ExceptionGroup, | ||||
|     ) as err: | ||||
|         if isinstance(err, ExceptionGroup): | ||||
|             for suberr in err.exceptions: | ||||
|                 if isinstance(suberr, trio.TooSlowError): | ||||
|                     break | ||||
|             else: | ||||
|                 pytest.fail('Never got a `TooSlowError` ?') | ||||
|     except trio.TooSlowError: | ||||
|         pass | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -307,76 +299,44 @@ async def inf_streamer( | |||
| 
 | ||||
|     async with ( | ||||
|         ctx.open_stream() as stream, | ||||
| 
 | ||||
|         # XXX TODO, INTERESTING CASE!! | ||||
|         # - if we don't collapse the eg then the embedded | ||||
|         # `trio.EndOfChannel` doesn't propagate directly to the above | ||||
|         # .open_stream() parent, resulting in it also raising instead | ||||
|         # of gracefully absorbing as normal.. so how to handle? | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         trio.open_nursery() as n, | ||||
|     ): | ||||
|         async def close_stream_on_sentinel(): | ||||
|         async def bail_on_sentinel(): | ||||
|             async for msg in stream: | ||||
|                 if msg == 'done': | ||||
|                     print( | ||||
|                         'streamer RXed "done" sentinel msg!\n' | ||||
|                         'CLOSING `MsgStream`!' | ||||
|                     ) | ||||
|                     await stream.aclose() | ||||
|                 else: | ||||
|                     print(f'streamer received {msg}') | ||||
|             else: | ||||
|                 print('streamer exited recv loop') | ||||
| 
 | ||||
|         # start termination detector | ||||
|         tn.start_soon(close_stream_on_sentinel) | ||||
|         n.start_soon(bail_on_sentinel) | ||||
| 
 | ||||
|         cap: int = 10000  # so that we don't spin forever when bug.. | ||||
|         for val in range(cap): | ||||
|         for val in itertools.count(): | ||||
|             try: | ||||
|                 print(f'streamer sending {val}') | ||||
|                 await stream.send(val) | ||||
|                 if val > cap: | ||||
|                     raise RuntimeError( | ||||
|                         'Streamer never cancelled by setinel?' | ||||
|                     ) | ||||
|                 await trio.sleep(0.001) | ||||
| 
 | ||||
|             # close out the stream gracefully | ||||
|             except trio.ClosedResourceError: | ||||
|                 print('transport closed on streamer side!') | ||||
|                 assert stream.closed | ||||
|                 # close out the stream gracefully | ||||
|                 break | ||||
|         else: | ||||
|             raise RuntimeError( | ||||
|                 'Streamer not cancelled before finished sending?' | ||||
|             ) | ||||
| 
 | ||||
|     print('streamer exited .open_streamer() block') | ||||
|     print('terminating streamer') | ||||
| 
 | ||||
| 
 | ||||
| def test_local_task_fanout_from_stream( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_local_task_fanout_from_stream(): | ||||
|     ''' | ||||
|     Single stream with multiple local consumer tasks using the | ||||
|     ``MsgStream.subscribe()` api. | ||||
| 
 | ||||
|     Ensure all tasks receive all values after stream completes | ||||
|     sending. | ||||
|     Ensure all tasks receive all values after stream completes sending. | ||||
| 
 | ||||
|     ''' | ||||
|     consumers: int = 22 | ||||
|     consumers = 22 | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         counts = Counter() | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as tn: | ||||
|             p: tractor.Portal = await tn.start_actor( | ||||
|         async with tractor.open_nursery() as tn: | ||||
|             p = await tn.start_actor( | ||||
|                 'inf_streamer', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|  | @ -384,6 +344,7 @@ def test_local_task_fanout_from_stream( | |||
|                 p.open_context(inf_streamer) as (ctx, _), | ||||
|                 ctx.open_stream() as stream, | ||||
|             ): | ||||
| 
 | ||||
|                 async def pull_and_count(name: str): | ||||
|                     # name = trio.lowlevel.current_task().name | ||||
|                     async with stream.subscribe() as recver: | ||||
|  | @ -392,7 +353,7 @@ def test_local_task_fanout_from_stream( | |||
|                             tractor.trionics.BroadcastReceiver | ||||
|                         ) | ||||
|                         async for val in recver: | ||||
|                             print(f'bx {name} rx: {val}') | ||||
|                             # print(f'{name}: {val}') | ||||
|                             counts[name] += 1 | ||||
| 
 | ||||
|                         print(f'{name} bcaster ended') | ||||
|  | @ -402,14 +363,10 @@ def test_local_task_fanout_from_stream( | |||
|                 with trio.fail_after(3): | ||||
|                     async with trio.open_nursery() as nurse: | ||||
|                         for i in range(consumers): | ||||
|                             nurse.start_soon( | ||||
|                                 pull_and_count, | ||||
|                                 i, | ||||
|                             ) | ||||
|                             nurse.start_soon(pull_and_count, i) | ||||
| 
 | ||||
|                         # delay to let bcast consumers pull msgs | ||||
|                         await trio.sleep(0.5) | ||||
|                         print('terminating nursery of bcast rxer consumers!') | ||||
|                         print('\nterminating') | ||||
|                         await stream.send('done') | ||||
| 
 | ||||
|             print('closed stream connection') | ||||
|  |  | |||
|  | @ -11,10 +11,8 @@ from itertools import repeat | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from .conftest import no_windows | ||||
| 
 | ||||
| from conftest import tractor_test, no_windows | ||||
| 
 | ||||
| 
 | ||||
| def is_win(): | ||||
|  | @ -45,82 +43,45 @@ async def do_nuthin(): | |||
|     ], | ||||
|     ids=['no_args', 'unexpected_args'], | ||||
| ) | ||||
| def test_remote_error(reg_addr, args_err): | ||||
|     ''' | ||||
|     Verify an error raised in a subactor that is propagated | ||||
| def test_remote_error(arb_addr, args_err): | ||||
|     """Verify an error raised in a subactor that is propagated | ||||
|     to the parent nursery, contains the underlying boxed builtin | ||||
|     error type info and causes cancellation and reraising all the | ||||
|     way up the stack. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     args, errtype = args_err | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             # on a remote type error caused by bad input args | ||||
|             # this should raise directly which means we **don't** get | ||||
|             # an exception group outside the nursery since the error | ||||
|             # here and the far end task error are one in the same? | ||||
|             portal = await nursery.run_in_actor( | ||||
|                 assert_err, | ||||
|                 name='errorer', | ||||
|                 **args | ||||
|                 assert_err, name='errorer', **args | ||||
|             ) | ||||
| 
 | ||||
|             # get result(s) from main task | ||||
|             try: | ||||
|                 # this means the root actor will also raise a local | ||||
|                 # parent task error and thus an eg will propagate out | ||||
|                 # of this actor nursery. | ||||
|                 await portal.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.boxed_type == errtype | ||||
|                 assert err.type == errtype | ||||
|                 print("Look Maa that actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|     # ensure boxed errors | ||||
|     if args: | ||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
|     with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type == errtype | ||||
| 
 | ||||
|     else: | ||||
|         # the root task will also error on the `Portal.result()` | ||||
|         # call so we expect an error from there AND the child. | ||||
|         # |_ tho seems like on new `trio` this doesn't always | ||||
|         #    happen? | ||||
|         with pytest.raises(( | ||||
|             BaseExceptionGroup, | ||||
|             tractor.RemoteActorError, | ||||
|         )) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         # ensure boxed errors are `errtype` | ||||
|         err: BaseException = excinfo.value | ||||
|         if isinstance(err, BaseExceptionGroup): | ||||
|             suberrs: list[BaseException] = err.exceptions | ||||
|         else: | ||||
|             suberrs: list[BaseException] = [err] | ||||
| 
 | ||||
|         for exc in suberrs: | ||||
|             assert exc.boxed_type == errtype | ||||
|     # ensure boxed error is correct | ||||
|     assert excinfo.value.type == errtype | ||||
| 
 | ||||
| 
 | ||||
| def test_multierror( | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     ''' | ||||
|     Verify we raise a ``BaseExceptionGroup`` out of a nursery where | ||||
| def test_multierror(arb_addr): | ||||
|     """Verify we raise a ``trio.MultiError`` out of a nursery where | ||||
|     more then one actor errors. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             await nursery.run_in_actor(assert_err, name='errorer1') | ||||
|  | @ -130,14 +91,14 @@ def test_multierror( | |||
|             try: | ||||
|                 await portal2.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.boxed_type is AssertionError | ||||
|                 assert err.type == AssertionError | ||||
|                 print("Look Maa that first actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|         # here we should get a ``BaseExceptionGroup`` containing exceptions | ||||
|         # here we should get a `trio.MultiError` containing exceptions | ||||
|         # from both subactors | ||||
| 
 | ||||
|     with pytest.raises(BaseExceptionGroup): | ||||
|     with pytest.raises(trio.MultiError): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -145,14 +106,14 @@ def test_multierror( | |||
| @pytest.mark.parametrize( | ||||
|     'num_subactors', range(25, 26), | ||||
| ) | ||||
| def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | ||||
|     """Verify we raise a ``BaseExceptionGroup`` out of a nursery where | ||||
| def test_multierror_fast_nursery(arb_addr, start_method, num_subactors, delay): | ||||
|     """Verify we raise a ``trio.MultiError`` out of a nursery where | ||||
|     more then one actor errors and also with a delay before failure | ||||
|     to test failure during an ongoing spawning. | ||||
|     """ | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             for i in range(num_subactors): | ||||
|  | @ -162,11 +123,10 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | |||
|                     delay=delay | ||||
|                 ) | ||||
| 
 | ||||
|     # with pytest.raises(trio.MultiError) as exc_info: | ||||
|     with pytest.raises(BaseExceptionGroup) as exc_info: | ||||
|     with pytest.raises(trio.MultiError) as exc_info: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     assert exc_info.type == ExceptionGroup | ||||
|     assert exc_info.type == tractor.MultiError | ||||
|     err = exc_info.value | ||||
|     exceptions = err.exceptions | ||||
| 
 | ||||
|  | @ -182,7 +142,7 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | |||
| 
 | ||||
|     for exc in exceptions: | ||||
|         assert isinstance(exc, tractor.RemoteActorError) | ||||
|         assert exc.boxed_type is AssertionError | ||||
|         assert exc.type == AssertionError | ||||
| 
 | ||||
| 
 | ||||
| async def do_nothing(): | ||||
|  | @ -190,20 +150,15 @@ async def do_nothing(): | |||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize('mechanism', ['nursery_cancel', KeyboardInterrupt]) | ||||
| def test_cancel_single_subactor(reg_addr, mechanism): | ||||
|     ''' | ||||
|     Ensure a ``ActorNursery.start_actor()`` spawned subactor | ||||
| def test_cancel_single_subactor(arb_addr, mechanism): | ||||
|     """Ensure a ``ActorNursery.start_actor()`` spawned subactor | ||||
|     cancels when the nursery is cancelled. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     async def spawn_actor(): | ||||
|         ''' | ||||
|         Spawn an actor that blocks indefinitely then cancel via | ||||
|         either `ActorNursery.cancel()` or an exception raise. | ||||
| 
 | ||||
|         ''' | ||||
|         """Spawn an actor that blocks indefinitely. | ||||
|         """ | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             portal = await nursery.start_actor( | ||||
|  | @ -236,10 +191,7 @@ async def stream_forever(): | |||
| async def test_cancel_infinite_streamer(start_method): | ||||
| 
 | ||||
|     # stream for at most 1 seconds | ||||
|     with ( | ||||
|         trio.fail_after(4), | ||||
|         trio.move_on_after(1) as cancel_scope | ||||
|     ): | ||||
|     with trio.move_on_after(1) as cancel_scope: | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|                 'donny', | ||||
|  | @ -262,8 +214,8 @@ async def test_cancel_infinite_streamer(start_method): | |||
|     [ | ||||
|         # daemon actors sit idle while single task actors error out | ||||
|         (1, tractor.RemoteActorError, AssertionError, (assert_err, {}), None), | ||||
|         (2, BaseExceptionGroup, AssertionError, (assert_err, {}), None), | ||||
|         (3, BaseExceptionGroup, AssertionError, (assert_err, {}), None), | ||||
|         (2, tractor.MultiError, AssertionError, (assert_err, {}), None), | ||||
|         (3, tractor.MultiError, AssertionError, (assert_err, {}), None), | ||||
| 
 | ||||
|         # 1 daemon actor errors out while single task actors sleep forever | ||||
|         (3, tractor.RemoteActorError, AssertionError, (sleep_forever, {}), | ||||
|  | @ -274,7 +226,7 @@ async def test_cancel_infinite_streamer(start_method): | |||
|          (do_nuthin, {}), (assert_err, {'delay': 1}, True)), | ||||
|         # daemon complete quickly delay while single task | ||||
|         # actors error after brief delay | ||||
|         (3, BaseExceptionGroup, AssertionError, | ||||
|         (3, tractor.MultiError, AssertionError, | ||||
|          (assert_err, {'delay': 1}), (do_nuthin, {}, False)), | ||||
|     ], | ||||
|     ids=[ | ||||
|  | @ -287,32 +239,20 @@ async def test_cancel_infinite_streamer(start_method): | |||
|     ], | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_some_cancels_all( | ||||
|     num_actors_and_errs: tuple, | ||||
|     start_method: str, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Verify a subset of failed subactors causes all others in | ||||
| async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel): | ||||
|     """Verify a subset of failed subactors causes all others in | ||||
|     the nursery to be cancelled just like the strategy in trio. | ||||
| 
 | ||||
|     This is the first and only supervisory strategy at the moment. | ||||
| 
 | ||||
|     ''' | ||||
|     ( | ||||
|         num_actors, | ||||
|         first_err, | ||||
|         err_type, | ||||
|         ria_func, | ||||
|         da_func, | ||||
|     ) = num_actors_and_errs | ||||
|     """ | ||||
|     num_actors, first_err, err_type, ria_func, da_func = num_actors_and_errs | ||||
|     try: | ||||
|         async with tractor.open_nursery() as an: | ||||
|         async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|             # spawn the same number of deamon actors which should be cancelled | ||||
|             dactor_portals = [] | ||||
|             for i in range(num_actors): | ||||
|                 dactor_portals.append(await an.start_actor( | ||||
|                 dactor_portals.append(await n.start_actor( | ||||
|                     f'deamon_{i}', | ||||
|                     enable_modules=[__name__], | ||||
|                 )) | ||||
|  | @ -322,7 +262,7 @@ async def test_some_cancels_all( | |||
|             for i in range(num_actors): | ||||
|                 # start actor(s) that will fail immediately | ||||
|                 riactor_portals.append( | ||||
|                     await an.run_in_actor( | ||||
|                     await n.run_in_actor( | ||||
|                         func, | ||||
|                         name=f'actor_{i}', | ||||
|                         **kwargs | ||||
|  | @ -338,7 +278,7 @@ async def test_some_cancels_all( | |||
|                         await portal.run(func, **kwargs) | ||||
| 
 | ||||
|                     except tractor.RemoteActorError as err: | ||||
|                         assert err.boxed_type == err_type | ||||
|                         assert err.type == err_type | ||||
|                         # we only expect this first error to propogate | ||||
|                         # (all other daemons are cancelled before they | ||||
|                         # can be scheduled) | ||||
|  | @ -352,20 +292,19 @@ async def test_some_cancels_all( | |||
| 
 | ||||
|         # should error here with a ``RemoteActorError`` or ``MultiError`` | ||||
| 
 | ||||
|     except first_err as _err: | ||||
|         err = _err | ||||
|         if isinstance(err, BaseExceptionGroup): | ||||
|     except first_err as err: | ||||
|         if isinstance(err, tractor.MultiError): | ||||
|             assert len(err.exceptions) == num_actors | ||||
|             for exc in err.exceptions: | ||||
|                 if isinstance(exc, tractor.RemoteActorError): | ||||
|                     assert exc.boxed_type == err_type | ||||
|                     assert exc.type == err_type | ||||
|                 else: | ||||
|                     assert isinstance(exc, trio.Cancelled) | ||||
|         elif isinstance(err, tractor.RemoteActorError): | ||||
|             assert err.boxed_type == err_type | ||||
|             assert err.type == err_type | ||||
| 
 | ||||
|         assert an.cancelled is True | ||||
|         assert not an._children | ||||
|         assert n.cancelled is True | ||||
|         assert not n._children | ||||
|     else: | ||||
|         pytest.fail("Should have gotten a remote assertion error?") | ||||
| 
 | ||||
|  | @ -398,7 +337,7 @@ async def spawn_and_error(breadth, depth) -> None: | |||
| @tractor_test | ||||
| async def test_nested_multierrors(loglevel, start_method): | ||||
|     ''' | ||||
|     Test that failed actor sets are wrapped in `BaseExceptionGroup`s. This | ||||
|     Test that failed actor sets are wrapped in `trio.MultiError`s. This | ||||
|     test goes only 2 nurseries deep but we should eventually have tests | ||||
|     for arbitrary n-depth actor trees. | ||||
| 
 | ||||
|  | @ -426,7 +365,7 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                         breadth=subactor_breadth, | ||||
|                         depth=depth, | ||||
|                     ) | ||||
|         except BaseExceptionGroup as err: | ||||
|         except trio.MultiError as err: | ||||
|             assert len(err.exceptions) == subactor_breadth | ||||
|             for subexc in err.exceptions: | ||||
| 
 | ||||
|  | @ -441,21 +380,21 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                     elif isinstance(subexc, tractor.RemoteActorError): | ||||
|                         # on windows it seems we can't exactly be sure wtf | ||||
|                         # will happen.. | ||||
|                         assert subexc.boxed_type in ( | ||||
|                         assert subexc.type in ( | ||||
|                             tractor.RemoteActorError, | ||||
|                             trio.Cancelled, | ||||
|                             BaseExceptionGroup, | ||||
|                             trio.MultiError | ||||
|                         ) | ||||
| 
 | ||||
|                     elif isinstance(subexc, BaseExceptionGroup): | ||||
|                     elif isinstance(subexc, trio.MultiError): | ||||
|                         for subsub in subexc.exceptions: | ||||
| 
 | ||||
|                             if subsub in (tractor.RemoteActorError,): | ||||
|                                 subsub = subsub.boxed_type | ||||
|                                 subsub = subsub.type | ||||
| 
 | ||||
|                             assert type(subsub) in ( | ||||
|                                 trio.Cancelled, | ||||
|                                 BaseExceptionGroup, | ||||
|                                 trio.MultiError, | ||||
|                             ) | ||||
|                 else: | ||||
|                     assert isinstance(subexc, tractor.RemoteActorError) | ||||
|  | @ -466,16 +405,16 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                     # we get back the (sent) cancel signal instead | ||||
|                     if is_win(): | ||||
|                         if isinstance(subexc, tractor.RemoteActorError): | ||||
|                             assert subexc.boxed_type in ( | ||||
|                                 BaseExceptionGroup, | ||||
|                             assert subexc.type in ( | ||||
|                                 trio.MultiError, | ||||
|                                 tractor.RemoteActorError | ||||
|                             ) | ||||
|                         else: | ||||
|                             assert isinstance(subexc, BaseExceptionGroup) | ||||
|                             assert isinstance(subexc, trio.MultiError) | ||||
|                     else: | ||||
|                         assert subexc.boxed_type is ExceptionGroup | ||||
|                         assert subexc.type is trio.MultiError | ||||
|                 else: | ||||
|                     assert subexc.boxed_type in ( | ||||
|                     assert subexc.type in ( | ||||
|                         tractor.RemoteActorError, | ||||
|                         trio.Cancelled | ||||
|                     ) | ||||
|  | @ -496,7 +435,7 @@ def test_cancel_via_SIGINT( | |||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery() as tn: | ||||
|                 await tn.start_actor('sucka') | ||||
|                 if 'mp' in spawn_backend: | ||||
|                 if spawn_backend == 'mp': | ||||
|                     time.sleep(0.1) | ||||
|                 os.kill(pid, signal.SIGINT) | ||||
|                 await trio.sleep_forever() | ||||
|  | @ -520,9 +459,7 @@ def test_cancel_via_SIGINT_other_task( | |||
|     if is_win():  # smh | ||||
|         timeout += 1 | ||||
| 
 | ||||
|     async def spawn_and_sleep_forever( | ||||
|         task_status=trio.TASK_STATUS_IGNORED | ||||
|     ): | ||||
|     async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED): | ||||
|         async with tractor.open_nursery() as tn: | ||||
|             for i in range(3): | ||||
|                 await tn.run_in_actor( | ||||
|  | @ -535,16 +472,9 @@ def test_cancel_via_SIGINT_other_task( | |||
|     async def main(): | ||||
|         # should never timeout since SIGINT should cancel the current program | ||||
|         with trio.fail_after(timeout): | ||||
|             async with ( | ||||
| 
 | ||||
|                 # XXX ?TODO? why no work!? | ||||
|                 # tractor.trionics.collapse_eg(), | ||||
|                 trio.open_nursery( | ||||
|                     strict_exception_groups=False, | ||||
|                 ) as tn, | ||||
|             ): | ||||
|                 await tn.start(spawn_and_sleep_forever) | ||||
|                 if 'mp' in spawn_backend: | ||||
|             async with trio.open_nursery() as n: | ||||
|                 await n.start(spawn_and_sleep_forever) | ||||
|                 if spawn_backend == 'mp': | ||||
|                     time.sleep(0.1) | ||||
|                 os.kill(pid, signal.SIGINT) | ||||
| 
 | ||||
|  | @ -554,123 +484,38 @@ def test_cancel_via_SIGINT_other_task( | |||
| 
 | ||||
| async def spin_for(period=3): | ||||
|     "Sync sleep." | ||||
|     print(f'sync sleeping in sub-sub for {period}\n') | ||||
|     time.sleep(period) | ||||
| 
 | ||||
| 
 | ||||
| async def spawn_sub_with_sync_blocking_task(): | ||||
|     async with tractor.open_nursery() as an: | ||||
|         print('starting sync blocking subactor..\n') | ||||
|         await an.run_in_actor( | ||||
| async def spawn(): | ||||
|     async with tractor.open_nursery() as tn: | ||||
|         await tn.run_in_actor( | ||||
|             spin_for, | ||||
|             name='sleeper', | ||||
|         ) | ||||
|         print('exiting first subactor layer..\n') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'man_cancel_outer', | ||||
|     [ | ||||
|         False,  # passes if delay != 2 | ||||
| 
 | ||||
|         # always causes an unexpected eg-w-embedded-assert-err? | ||||
|         pytest.param(True, | ||||
|              marks=pytest.mark.xfail( | ||||
|                  reason=( | ||||
|                     'always causes an unexpected eg-w-embedded-assert-err?' | ||||
|                 ) | ||||
|             ), | ||||
|         ), | ||||
|     ], | ||||
| ) | ||||
| @no_windows | ||||
| def test_cancel_while_childs_child_in_sync_sleep( | ||||
|     loglevel: str, | ||||
|     start_method: str, | ||||
|     spawn_backend: str, | ||||
|     debug_mode: bool, | ||||
|     reg_addr: tuple, | ||||
|     man_cancel_outer: bool, | ||||
|     loglevel, | ||||
|     start_method, | ||||
|     spawn_backend, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a child cancelled while executing sync code is torn | ||||
|     """Verify that a child cancelled while executing sync code is torn | ||||
|     down even when that cancellation is triggered by the parent | ||||
|     2 nurseries "up". | ||||
| 
 | ||||
|     Though the grandchild should stay blocking its actor runtime, its | ||||
|     parent should issue a "zombie reaper" to hard kill it after | ||||
|     sufficient timeout. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     if start_method == 'forkserver': | ||||
|         pytest.skip("Forksever sux hard at resuming from sync sleep...") | ||||
| 
 | ||||
|     async def main(): | ||||
|         # | ||||
|         # XXX BIG TODO NOTE XXX | ||||
|         # | ||||
|         # it seems there's a strange race that can happen | ||||
|         # where where the fail-after will trigger outer scope | ||||
|         # .cancel() which then causes the inner scope to raise, | ||||
|         # | ||||
|         # BaseExceptionGroup('Exceptions from Trio nursery', [ | ||||
|         #   BaseExceptionGroup('Exceptions from Trio nursery', | ||||
|         #   [ | ||||
|         #       Cancelled(), | ||||
|         #       Cancelled(), | ||||
|         #   ] | ||||
|         #   ), | ||||
|         #   AssertionError('assert 0') | ||||
|         # ]) | ||||
|         # | ||||
|         # WHY THIS DOESN'T MAKE SENSE: | ||||
|         # --------------------------- | ||||
|         # - it should raise too-slow-error when too slow.. | ||||
|         #  * verified that using simple-cs and manually cancelling | ||||
|         #    you get same outcome -> indicates that the fail-after | ||||
|         #    can have its TooSlowError overriden! | ||||
|         #  |_ to check this it's easy, simplly decrease the timeout | ||||
|         #     as per the var below. | ||||
|         # | ||||
|         # - when using the manual simple-cs the outcome is different | ||||
|         #   DESPITE the `assert 0` which means regardless of the | ||||
|         #   inner scope effectively failing in the same way, the | ||||
|         #   bubbling up **is NOT the same**. | ||||
|         # | ||||
|         # delays trigger diff outcomes.. | ||||
|         # --------------------------- | ||||
|         # as seen by uncommenting various lines below there is from | ||||
|         # my POV an unexpected outcome due to the delay=2 case. | ||||
|         # | ||||
|         # delay = 1  # no AssertionError in eg, TooSlowError raised. | ||||
|         # delay = 2  # is AssertionError in eg AND no TooSlowError !? | ||||
|         delay = 4  # is AssertionError in eg AND no _cs cancellation. | ||||
| 
 | ||||
|         with trio.fail_after(delay) as _cs: | ||||
|         # with trio.CancelScope() as cs: | ||||
|         # ^XXX^ can be used instead to see same outcome. | ||||
| 
 | ||||
|             async with ( | ||||
|                 # tractor.trionics.collapse_eg(),  # doesn't help | ||||
|                 tractor.open_nursery( | ||||
|                     hide_tb=False, | ||||
|                     debug_mode=debug_mode, | ||||
|                     registry_addrs=[reg_addr], | ||||
|                 ) as an, | ||||
|             ): | ||||
|                 await an.run_in_actor( | ||||
|                     spawn_sub_with_sync_blocking_task, | ||||
|                     name='sync_blocking_sub', | ||||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery() as tn: | ||||
|                 await tn.run_in_actor( | ||||
|                     spawn, | ||||
|                     name='spawn', | ||||
|                 ) | ||||
|                 await trio.sleep(1) | ||||
| 
 | ||||
|                 if man_cancel_outer: | ||||
|                     print('Cancelling manually in root') | ||||
|                     _cs.cancel() | ||||
| 
 | ||||
|                 # trigger exc-srced taskc down | ||||
|                 # the actor tree. | ||||
|                 print('RAISING IN ROOT') | ||||
|                 assert 0 | ||||
| 
 | ||||
|     with pytest.raises(AssertionError): | ||||
|  | @ -720,12 +565,6 @@ def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon( | |||
|                     nurse.start_soon(delayed_kbi) | ||||
| 
 | ||||
|                     await p.run(do_nuthin) | ||||
| 
 | ||||
|         # need to explicitly re-raise the lone kbi..now | ||||
|         except* KeyboardInterrupt as kbi_eg: | ||||
|             assert (len(excs := kbi_eg.exceptions) == 1) | ||||
|             raise excs[0] | ||||
| 
 | ||||
|         finally: | ||||
|             duration = time.time() - start | ||||
|             if duration > timeout: | ||||
|  |  | |||
|  | @ -6,15 +6,14 @@ sub-sub-actor daemons. | |||
| ''' | ||||
| from typing import Optional | ||||
| import asyncio | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| from tractor import RemoteActorError | ||||
| from async_generator import aclosing | ||||
| 
 | ||||
| 
 | ||||
| async def aio_streamer( | ||||
|  | @ -95,8 +94,8 @@ async def trio_main( | |||
| 
 | ||||
|     # stash a "service nursery" as "actor local" (aka a Python global) | ||||
|     global _nursery | ||||
|     tn = _nursery | ||||
|     assert tn | ||||
|     n = _nursery | ||||
|     assert n | ||||
| 
 | ||||
|     async def consume_stream(): | ||||
|         async with wrapper_mngr() as stream: | ||||
|  | @ -104,10 +103,10 @@ async def trio_main( | |||
|                 print(msg) | ||||
| 
 | ||||
|     # run 2 tasks to ensure broadcaster chan use | ||||
|     tn.start_soon(consume_stream) | ||||
|     tn.start_soon(consume_stream) | ||||
|     n.start_soon(consume_stream) | ||||
|     n.start_soon(consume_stream) | ||||
| 
 | ||||
|     tn.start_soon(trio_sleep_and_err) | ||||
|     n.start_soon(trio_sleep_and_err) | ||||
| 
 | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
|  | @ -117,11 +116,8 @@ async def open_actor_local_nursery( | |||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     global _nursery | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn | ||||
|     ): | ||||
|         _nursery = tn | ||||
|     async with trio.open_nursery() as n: | ||||
|         _nursery = n | ||||
|         await ctx.started() | ||||
|         await trio.sleep(10) | ||||
|         # await trio.sleep(1) | ||||
|  | @ -135,7 +131,7 @@ async def open_actor_local_nursery( | |||
|         # never yields back.. aka a scenario where the | ||||
|         # ``tractor.context`` task IS NOT in the service n's cancel | ||||
|         # scope. | ||||
|         tn.cancel_scope.cancel() | ||||
|         n.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -145,7 +141,7 @@ async def open_actor_local_nursery( | |||
| ) | ||||
| def test_actor_managed_trio_nursery_task_error_cancels_aio( | ||||
|     asyncio_mode: bool, | ||||
|     reg_addr: tuple, | ||||
|     arb_addr | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a ``trio`` nursery created managed in a child actor | ||||
|  | @ -160,7 +156,7 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | |||
|         async with tractor.open_nursery() as n: | ||||
|             p = await n.start_actor( | ||||
|                 'nursery_mngr', | ||||
|                 infect_asyncio=asyncio_mode,  # TODO, is this enabling debug mode? | ||||
|                 infect_asyncio=asyncio_mode, | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ( | ||||
|  | @ -174,4 +170,4 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | |||
| 
 | ||||
|     # verify boxed error | ||||
|     err = excinfo.value | ||||
|     assert err.boxed_type is NameError | ||||
|     assert isinstance(err.type(), NameError) | ||||
|  |  | |||
|  | @ -1,79 +1,36 @@ | |||
| import itertools | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import open_actor_cluster | ||||
| from tractor.trionics import gather_contexts | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| MESSAGE = 'tractoring at full speed' | ||||
| 
 | ||||
| 
 | ||||
| def test_empty_mngrs_input_raises() -> None: | ||||
| 
 | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|             async with ( | ||||
|                 open_actor_cluster( | ||||
|                     modules=[__name__], | ||||
| 
 | ||||
|                     # NOTE: ensure we can passthrough runtime opts | ||||
|                     loglevel='cancel', | ||||
|                     debug_mode=False, | ||||
| 
 | ||||
|                 ) as portals, | ||||
| 
 | ||||
|                 gather_contexts(mngrs=()), | ||||
|             ): | ||||
|                 # should fail before this? | ||||
|                 assert portals | ||||
| 
 | ||||
|                 # test should fail if we mk it here! | ||||
|                 assert 0, 'Should have raised val-err !?' | ||||
| 
 | ||||
|     with pytest.raises(ValueError): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def worker( | ||||
|     ctx: tractor.Context, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
| async def worker(ctx: tractor.Context) -> None: | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     async with ctx.open_stream( | ||||
|         allow_overruns=True, | ||||
|     ) as stream: | ||||
| 
 | ||||
|         # TODO: this with the below assert causes a hang bug? | ||||
|         # with trio.move_on_after(1): | ||||
| 
 | ||||
|     async with ctx.open_stream(backpressure=True) as stream: | ||||
|         async for msg in stream: | ||||
|             # do something with msg | ||||
|             print(msg) | ||||
|             assert msg == MESSAGE | ||||
| 
 | ||||
|         # TODO: does this ever cause a hang | ||||
|         # assert 0 | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_streaming_to_actor_cluster() -> None: | ||||
| 
 | ||||
|     async with ( | ||||
|         open_actor_cluster(modules=[__name__]) as portals, | ||||
| 
 | ||||
|         gather_contexts( | ||||
|             mngrs=[p.open_context(worker) for p in portals.values()], | ||||
|         ) as contexts, | ||||
| 
 | ||||
|         gather_contexts( | ||||
|             mngrs=[ctx[0].open_stream() for ctx in contexts], | ||||
|         ) as streams, | ||||
| 
 | ||||
|     ): | ||||
|         with trio.move_on_after(1): | ||||
|             for stream in itertools.cycle(streams): | ||||
|  |  | |||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -0,0 +1,858 @@ | |||
| """ | ||||
| That "native" debug mode better work! | ||||
| 
 | ||||
| All these tests can be understood (somewhat) by running the equivalent | ||||
| `examples/debugging/` scripts manually. | ||||
| 
 | ||||
| TODO: | ||||
|     - none of these tests have been run successfully on windows yet but | ||||
|       there's been manual testing that verified it works. | ||||
|     - wonder if any of it'll work on OS X? | ||||
| 
 | ||||
| """ | ||||
| from os import path | ||||
| from typing import Optional | ||||
| import platform | ||||
| import sys | ||||
| import time | ||||
| 
 | ||||
| import pytest | ||||
| import pexpect | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| from conftest import repodir | ||||
| 
 | ||||
| # TODO: The next great debugger audit could be done by you! | ||||
| # - recurrent entry to breakpoint() from single actor *after* and an | ||||
| #   error in another task? | ||||
| # - root error before child errors | ||||
| # - root error after child errors | ||||
| # - root error before child breakpoint | ||||
| # - root error after child breakpoint | ||||
| # - recurrent root errors | ||||
| 
 | ||||
| 
 | ||||
| if platform.system() == 'Windows': | ||||
|     pytest.skip( | ||||
|         'Debugger tests have no windows support (yet)', | ||||
|         allow_module_level=True, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def examples_dir(): | ||||
|     """Return the abspath to the examples directory. | ||||
|     """ | ||||
|     return path.join(repodir(), 'examples', 'debugging/') | ||||
| 
 | ||||
| 
 | ||||
| def mk_cmd(ex_name: str) -> str: | ||||
|     """Generate a command suitable to pass to ``pexpect.spawn()``. | ||||
|     """ | ||||
|     return ' '.join( | ||||
|         ['python', | ||||
|          path.join(examples_dir(), f'{ex_name}.py')] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method, | ||||
|     testdir, | ||||
|     arb_addr, | ||||
| ) -> 'pexpect.spawn': | ||||
| 
 | ||||
|     if start_method != 'trio': | ||||
|         pytest.skip( | ||||
|             "Debugger tests are only supported on the trio backend" | ||||
|         ) | ||||
| 
 | ||||
|     def _spawn(cmd): | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd(cmd), | ||||
|             expect_timeout=3, | ||||
|         ) | ||||
| 
 | ||||
|     return _spawn | ||||
| 
 | ||||
| 
 | ||||
| def assert_before( | ||||
|     child, | ||||
|     patts: list[str], | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     for patt in patts: | ||||
|         try: | ||||
|             assert patt in before | ||||
|         except AssertionError: | ||||
|             print(before) | ||||
|             raise | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     params=[False, True], | ||||
|     ids='ctl-c={}'.format, | ||||
| ) | ||||
| def ctlc(request) -> bool: | ||||
| 
 | ||||
|     use_ctlc = request.param | ||||
| 
 | ||||
|     if ( | ||||
|         sys.version_info <= (3, 10) | ||||
|         and use_ctlc | ||||
|     ): | ||||
|         # on 3.9 it seems the REPL UX | ||||
|         # is highly unreliable and frankly annoying | ||||
|         # to test for. It does work from manual testing | ||||
|         # but i just don't think it's wroth it to try | ||||
|         # and get this working especially since we want to | ||||
|         # be 3.10+ mega-asap. | ||||
|         pytest.skip('Py3.9 and `pdbpp` son no bueno..') | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # the the added color-char encoding.. | ||||
|         from tractor._debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'user_in_out', | ||||
|     [ | ||||
|         ('c', 'AssertionError'), | ||||
|         ('q', 'AssertionError'), | ||||
|     ], | ||||
|     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||
| ) | ||||
| def test_root_actor_error(spawn, user_in_out): | ||||
|     """Demonstrate crash handler entering pdbpp from basic error in root actor. | ||||
|     """ | ||||
|     user_input, expect_err_str = user_in_out | ||||
| 
 | ||||
|     child = spawn('root_actor_error') | ||||
| 
 | ||||
|     # scan for the pdbpp prompt | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     # make sure expected logging and error arrives | ||||
|     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||
|     assert 'AssertionError' in before | ||||
| 
 | ||||
|     # send user command | ||||
|     child.sendline(user_input) | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
|     assert expect_err_str in str(child.before) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'user_in_out', | ||||
|     [ | ||||
|         ('c', None), | ||||
|         ('q', 'bdb.BdbQuit'), | ||||
|     ], | ||||
|     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||
| ) | ||||
| def test_root_actor_bp(spawn, user_in_out): | ||||
|     """Demonstrate breakpoint from in root actor. | ||||
|     """ | ||||
|     user_input, expect_err_str = user_in_out | ||||
|     child = spawn('root_actor_breakpoint') | ||||
| 
 | ||||
|     # scan for the pdbpp prompt | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     assert 'Error' not in str(child.before) | ||||
| 
 | ||||
|     # send user command | ||||
|     child.sendline(user_input) | ||||
|     child.expect('\r\n') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     if expect_err_str is None: | ||||
|         assert 'Error' not in str(child.before) | ||||
|     else: | ||||
|         assert expect_err_str in str(child.before) | ||||
| 
 | ||||
| 
 | ||||
| def do_ctlc( | ||||
|     child, | ||||
|     count: int = 3, | ||||
|     delay: float = 0.1, | ||||
|     patt: Optional[str] = None, | ||||
| 
 | ||||
|     # XXX: literally no idea why this is an issue in CI but likely will | ||||
|     # flush out (hopefully) with proper 3.10 release of `pdbpp`... | ||||
|     expect_prompt: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # make sure ctl-c sends don't do anything but repeat output | ||||
|     for _ in range(count): | ||||
|         time.sleep(delay) | ||||
|         child.sendcontrol('c') | ||||
| 
 | ||||
|         # TODO: figure out why this makes CI fail.. | ||||
|         # if you run this test manually it works just fine.. | ||||
|         from conftest import _ci_env | ||||
|         if expect_prompt and not _ci_env: | ||||
|             before = str(child.before.decode()) | ||||
|             time.sleep(delay) | ||||
|             child.expect(r"\(Pdb\+\+\)") | ||||
|             time.sleep(delay) | ||||
| 
 | ||||
|             if patt: | ||||
|                 # should see the last line on console | ||||
|                 assert patt in before | ||||
| 
 | ||||
| 
 | ||||
| def test_root_actor_bp_forever( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     "Re-enter a breakpoint from the root actor-task." | ||||
|     child = spawn('root_actor_breakpoint_forever') | ||||
| 
 | ||||
|     # do some "next" commands to demonstrate recurrent breakpoint | ||||
|     # entries | ||||
|     for _ in range(10): | ||||
| 
 | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
| 
 | ||||
|         child.sendline('next') | ||||
| 
 | ||||
|     # do one continue which should trigger a | ||||
|     # new task to lock the tty | ||||
|     child.sendline('continue') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # seems that if we hit ctrl-c too fast the | ||||
|     # sigint guard machinery might not kick in.. | ||||
|     time.sleep(0.001) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # XXX: this previously caused a bug! | ||||
|     child.sendline('n') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     child.sendline('n') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # quit out of the loop | ||||
|     child.sendline('q') | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'do_next', | ||||
|     (True, False), | ||||
|     ids='do_next={}'.format, | ||||
| ) | ||||
| def test_subactor_error( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
|     do_next: bool, | ||||
| ): | ||||
|     "Single subactor raising an error" | ||||
| 
 | ||||
|     child = spawn('subactor_error') | ||||
| 
 | ||||
|     # scan for the pdbpp prompt | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching to pdb in crashed actor: ('name_error'" in before | ||||
| 
 | ||||
|     if do_next: | ||||
|         child.sendline('n') | ||||
| 
 | ||||
|     else: | ||||
|         # make sure ctl-c sends don't do anything but repeat output | ||||
|         if ctlc: | ||||
|             do_ctlc( | ||||
|                 child, | ||||
|             ) | ||||
| 
 | ||||
|         # send user command and (in this case it's the same for 'continue' | ||||
|         # vs. 'quit') the debugger should enter a second time in the nursery | ||||
|         # creating actor | ||||
|         child.sendline('continue') | ||||
| 
 | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     # root actor gets debugger engaged | ||||
|     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||
|     # error is a remote error propagated from the subactor | ||||
|     assert "RemoteActorError: ('name_error'" in before | ||||
| 
 | ||||
|     # another round | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect('\r\n') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_subactor_breakpoint( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     "Single subactor with an infinite breakpoint loop" | ||||
| 
 | ||||
|     child = spawn('subactor_breakpoint') | ||||
| 
 | ||||
|     # scan for the pdbpp prompt | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
| 
 | ||||
|     # do some "next" commands to demonstrate recurrent breakpoint | ||||
|     # entries | ||||
|     for _ in range(10): | ||||
|         child.sendline('next') | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
| 
 | ||||
|     # now run some "continues" to show re-entries | ||||
|     for _ in range(5): | ||||
|         child.sendline('continue') | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
|         before = str(child.before.decode()) | ||||
|         assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
| 
 | ||||
|     # finally quit the loop | ||||
|     child.sendline('q') | ||||
| 
 | ||||
|     # child process should exit but parent will capture pdb.BdbQuit | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||
|     assert 'bdb.BdbQuit' in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # quit the parent | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||
|     assert 'bdb.BdbQuit' in before | ||||
| 
 | ||||
| 
 | ||||
| def test_multi_subactors( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Multiple subactors, both erroring and | ||||
|     breakpointing as well as a nested subactor erroring. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn(r'multi_subactors') | ||||
| 
 | ||||
|     # scan for the pdbpp prompt | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # do some "next" commands to demonstrate recurrent breakpoint | ||||
|     # entries | ||||
|     for _ in range(10): | ||||
|         child.sendline('next') | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
| 
 | ||||
|     # continue to next error | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # first name_error failure | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching to pdb in crashed actor: ('name_error'" in before | ||||
|     assert "NameError" in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # continue again | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # 2nd name_error failure | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # XXX: lol honestly no idea why CI is cuck but | ||||
|     # seems like this likely falls into our unhandled nested | ||||
|     # case and isn't working in that env due to raciness.. | ||||
|     from conftest import _ci_env | ||||
|     if not ctlc and _ci_env: | ||||
|         name = 'name_error' if ctlc else 'name_error_1' | ||||
|         assert_before(child, [ | ||||
|             f"Attaching to pdb in crashed actor: ('{name}'", | ||||
|             "NameError", | ||||
|         ]) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # breakpoint loop should re-engage | ||||
|     child.sendline('c') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # wait for spawn error to show up | ||||
|     spawn_err = "Attaching to pdb in crashed actor: ('spawn_error'" | ||||
|     start = time.time() | ||||
|     while ( | ||||
|         spawn_err not in before | ||||
|         and (time.time() - start) < 3 | ||||
|     ): | ||||
|         child.sendline('c') | ||||
|         time.sleep(0.1) | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
|         before = str(child.before.decode()) | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
| 
 | ||||
|     # 2nd depth nursery should trigger | ||||
|     if not ctlc: | ||||
|         assert_before(child, [ | ||||
|             spawn_err, | ||||
|             "RemoteActorError: ('name_error_1'", | ||||
|         ]) | ||||
| 
 | ||||
|     # now run some "continues" to show re-entries | ||||
|     for _ in range(5): | ||||
|         child.sendline('c') | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # quit the loop and expect parent to attach | ||||
|     child.sendline('q') | ||||
| 
 | ||||
|     try: | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
|     except TIMEOUT: | ||||
|         if _ci_env and not ctlc: | ||||
|             raise | ||||
| 
 | ||||
|         # in ci seems like this can sometimes just result | ||||
|         # in full tree death? | ||||
|         print('tree died?') | ||||
| 
 | ||||
|     else: | ||||
|         before = str(child.before.decode()) | ||||
|         assert_before(child, [ | ||||
|             # debugger attaches to root | ||||
|             "Attaching to pdb in crashed actor: ('root'", | ||||
| 
 | ||||
|             # expect a multierror with exceptions for each sub-actor | ||||
|             "RemoteActorError: ('breakpoint_forever'", | ||||
|             "RemoteActorError: ('name_error'", | ||||
|             "RemoteActorError: ('spawn_error'", | ||||
|             "RemoteActorError: ('name_error_1'", | ||||
|             'bdb.BdbQuit', | ||||
|         ]) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     try: | ||||
|         child.expect(pexpect.EOF) | ||||
|     except TIMEOUT: | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # repeat of previous multierror for final output | ||||
|     assert_before(child, [ | ||||
|         "RemoteActorError: ('breakpoint_forever'", | ||||
|         "RemoteActorError: ('name_error'", | ||||
|         "RemoteActorError: ('spawn_error'", | ||||
|         "RemoteActorError: ('name_error_1'", | ||||
|         'bdb.BdbQuit', | ||||
|     ]) | ||||
| 
 | ||||
| 
 | ||||
| def test_multi_daemon_subactors( | ||||
|     spawn, | ||||
|     loglevel: str, | ||||
|     ctlc: bool | ||||
| ): | ||||
|     ''' | ||||
|     Multiple daemon subactors, both erroring and breakpointing within a | ||||
|     stream. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('multi_daemon_subactors') | ||||
| 
 | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # there is a race for which subactor will acquire | ||||
|     # the root's tty lock first | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     bp_forever_msg = "Attaching pdb to actor: ('bp_forever'" | ||||
|     name_error_msg = "NameError" | ||||
| 
 | ||||
|     if bp_forever_msg in before: | ||||
|         next_msg = name_error_msg | ||||
| 
 | ||||
|     elif name_error_msg in before: | ||||
|         next_msg = bp_forever_msg | ||||
| 
 | ||||
|     else: | ||||
|         raise ValueError("Neither log msg was found !?") | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # NOTE: previously since we did not have clobber prevention | ||||
|     # in the root actor this final resume could result in the debugger | ||||
|     # tearing down since both child actors would be cancelled and it was | ||||
|     # unlikely that `bp_forever` would re-acquire the tty lock again. | ||||
|     # Now, we should have a final resumption in the root plus a possible | ||||
|     # second entry by `bp_forever`. | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     assert next_msg in before | ||||
| 
 | ||||
|     # XXX: hooray the root clobbering the child here was fixed! | ||||
|     # IMO, this demonstrates the true power of SC system design. | ||||
| 
 | ||||
|     # now the root actor won't clobber the bp_forever child | ||||
|     # during it's first access to the debug lock, but will instead | ||||
|     # wait for the lock to release, by the edge triggered | ||||
|     # ``_debug.Lock.no_remote_has_tty`` event before sending cancel messages | ||||
|     # (via portals) to its underlings B) | ||||
| 
 | ||||
|     # at some point here there should have been some warning msg from | ||||
|     # the root announcing it avoided a clobber of the child's lock, but | ||||
|     # it seems unreliable in testing here to gnab it: | ||||
|     # assert "in use by child ('bp_forever'," in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # wait for final error in root | ||||
|     while True: | ||||
| 
 | ||||
|         child.sendline('c') | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
|         before = str(child.before.decode()) | ||||
|         try: | ||||
| 
 | ||||
|             # root error should be packed as remote error | ||||
|             assert "_exceptions.RemoteActorError: ('name_error'" in before | ||||
|             break | ||||
| 
 | ||||
|         except AssertionError: | ||||
|             assert bp_forever_msg in before | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
| 
 | ||||
|     try: | ||||
|         child.sendline('c') | ||||
|         child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     except TIMEOUT: | ||||
|         # Failed to exit using continue..? | ||||
|         child.sendline('q') | ||||
|         child.expect(pexpect.EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_multi_subactors_root_errors( | ||||
|     spawn, | ||||
|     ctlc: bool | ||||
| ): | ||||
|     ''' | ||||
|     Multiple subactors, both erroring and breakpointing as well as | ||||
|     a nested subactor erroring. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('multi_subactor_root_errors') | ||||
| 
 | ||||
|     # scan for the pdbpp prompt | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # at most one subactor should attach before the root is cancelled | ||||
|     before = str(child.before.decode()) | ||||
|     assert "NameError: name 'doggypants' is not defined" in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # continue again to catch 2nd name error from | ||||
|     # actor 'name_error_1' (which is 2nd depth). | ||||
|     child.sendline('c') | ||||
|     try: | ||||
|         child.expect(r"\(Pdb\+\+\)") | ||||
|     except TIMEOUT: | ||||
|         child.sendline('') | ||||
| 
 | ||||
|     # XXX: lol honestly no idea why CI is cuck but | ||||
|     # seems like this likely falls into our unhandled nested | ||||
|     # case and isn't working in that env due to raciness.. | ||||
|     from conftest import _ci_env | ||||
|     if not ctlc and _ci_env: | ||||
|         name = 'name_error' if ctlc else 'name_error_1' | ||||
|         assert_before(child, [ | ||||
|             f"Attaching to pdb in crashed actor: ('{name}'", | ||||
|             "NameError", | ||||
|         ]) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
|     assert_before(child, [ | ||||
|         "Attaching to pdb in crashed actor: ('spawn_error'", | ||||
|         # boxed error from previous step | ||||
|         "RemoteActorError: ('name_error_1'", | ||||
|         "NameError", | ||||
|     ]) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
|     assert_before(child, [ | ||||
|         "Attaching to pdb in crashed actor: ('root'", | ||||
|         # boxed error from previous step | ||||
|         "RemoteActorError: ('name_error'", | ||||
|         "NameError", | ||||
|     ]) | ||||
| 
 | ||||
|     # warnings assert we probably don't need | ||||
|     # assert "Cancelling nursery in ('spawn_error'," in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # continue again | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     # error from root actor and root task that created top level nursery | ||||
|     assert "AssertionError" in before | ||||
| 
 | ||||
| 
 | ||||
| def test_multi_nested_subactors_error_through_nurseries( | ||||
|     spawn, | ||||
| 
 | ||||
|     # TODO: address debugger issue for nested tree: | ||||
|     # <issuelink> | ||||
|     # ctlc: bool, | ||||
| ): | ||||
|     """Verify deeply nested actors that error trigger debugger entries | ||||
|     at each actor nurserly (level) all the way up the tree. | ||||
| 
 | ||||
|     """ | ||||
|     # NOTE: previously, inside this script was a bug where if the | ||||
|     # parent errors before a 2-levels-lower actor has released the lock, | ||||
|     # the parent tries to cancel it but it's stuck in the debugger? | ||||
|     # A test (below) has now been added to explicitly verify this is | ||||
|     # fixed. | ||||
| 
 | ||||
|     child = spawn('multi_nested_subactors_error_up_through_nurseries') | ||||
| 
 | ||||
|     timed_out_early: bool = False | ||||
| 
 | ||||
|     for i in range(12): | ||||
|         try: | ||||
|             child.expect(r"\(Pdb\+\+\)") | ||||
|             child.sendline('c') | ||||
|             time.sleep(0.1) | ||||
| 
 | ||||
|         except EOF: | ||||
| 
 | ||||
|             # race conditions on how fast the continue is sent? | ||||
|             print(f"Failed early on {i}?") | ||||
|             timed_out_early = True | ||||
|             break | ||||
|     else: | ||||
|         child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     if not timed_out_early: | ||||
|         before = str(child.before.decode()) | ||||
|         assert "NameError" in before | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.timeout(15) | ||||
| def test_root_nursery_cancels_before_child_releases_tty_lock( | ||||
|     spawn, | ||||
|     start_method, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     """Test that when the root sends a cancel message before a nested | ||||
|     child has unblocked (which can happen when it has the tty lock and | ||||
|     is engaged in pdb) it is indeed cancelled after exiting the debugger. | ||||
|     """ | ||||
|     timed_out_early = False | ||||
| 
 | ||||
|     child = spawn('root_cancelled_but_child_is_in_tty_lock') | ||||
| 
 | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "NameError: name 'doggypants' is not defined" in before | ||||
|     assert "tractor._exceptions.RemoteActorError: ('name_error'" not in before | ||||
|     time.sleep(0.5) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     for i in range(4): | ||||
|         time.sleep(0.5) | ||||
|         try: | ||||
|             child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|         except ( | ||||
|             EOF, | ||||
|             TIMEOUT, | ||||
|         ): | ||||
|             # races all over.. | ||||
| 
 | ||||
|             print(f"Failed early on {i}?") | ||||
|             before = str(child.before.decode()) | ||||
| 
 | ||||
|             timed_out_early = True | ||||
| 
 | ||||
|             # race conditions on how fast the continue is sent? | ||||
|             break | ||||
| 
 | ||||
|         before = str(child.before.decode()) | ||||
|         assert "NameError: name 'doggypants' is not defined" in before | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
| 
 | ||||
|         child.sendline('c') | ||||
|         time.sleep(0.1) | ||||
| 
 | ||||
|     for i in range(3): | ||||
|         try: | ||||
|             child.expect(pexpect.EOF) | ||||
|             break | ||||
|         except TIMEOUT: | ||||
|             child.sendline('c') | ||||
|             time.sleep(0.1) | ||||
|             print('child was able to grab tty lock again?') | ||||
|     else: | ||||
|         child.sendline('q') | ||||
|         child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     if not timed_out_early: | ||||
| 
 | ||||
|         before = str(child.before.decode()) | ||||
|         assert "tractor._exceptions.RemoteActorError: ('spawner0'" in before | ||||
|         assert "tractor._exceptions.RemoteActorError: ('name_error'" in before | ||||
|         assert "NameError: name 'doggypants' is not defined" in before | ||||
| 
 | ||||
| 
 | ||||
| def test_root_cancels_child_context_during_startup( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     '''Verify a fast fail in the root doesn't lock up the child reaping | ||||
|     and all while using the new context api. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('fast_error_in_root_after_spawn') | ||||
| 
 | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "AssertionError" in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_different_debug_mode_per_actor( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     child = spawn('per_actor_debug') | ||||
|     child.expect(r"\(Pdb\+\+\)") | ||||
| 
 | ||||
|     # only one actor should enter the debugger | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching to pdb in crashed actor: ('debugged_boi'" in before | ||||
|     assert "RuntimeError" in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     # NOTE: this debugged actor error currently WON'T show up since the | ||||
|     # root will actually cancel and terminate the nursery before the error | ||||
|     # msg reported back from the debug mode actor is processed. | ||||
|     # assert "tractor._exceptions.RemoteActorError: ('debugged_boi'" in before | ||||
| 
 | ||||
|     assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before | ||||
| 
 | ||||
|     # the crash boi should not have made a debugger request but | ||||
|     # instead crashed completely | ||||
|     assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before | ||||
|     assert "RuntimeError" in before | ||||
|  | @ -7,29 +7,27 @@ import platform | |||
| from functools import partial | ||||
| import itertools | ||||
| 
 | ||||
| import psutil | ||||
| import pytest | ||||
| import subprocess | ||||
| import tractor | ||||
| from tractor.trionics import collapse_eg | ||||
| from tractor._testing import tractor_test | ||||
| import trio | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_reg_then_unreg(reg_addr): | ||||
| async def test_reg_then_unreg(arb_addr): | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|     assert len(actor._registry) == 1  # only self is registered | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         registry_addrs=[reg_addr], | ||||
|         arbiter_addr=arb_addr, | ||||
|     ) as n: | ||||
| 
 | ||||
|         portal = await n.start_actor('actor', enable_modules=[__name__]) | ||||
|         uid = portal.channel.uid | ||||
| 
 | ||||
|         async with tractor.get_registry(reg_addr) as aportal: | ||||
|         async with tractor.get_arbiter(*arb_addr) as aportal: | ||||
|             # this local actor should be the arbiter | ||||
|             assert actor is aportal.actor | ||||
| 
 | ||||
|  | @ -55,27 +53,15 @@ async def hi(): | |||
|     return the_line.format(tractor.current_actor().name) | ||||
| 
 | ||||
| 
 | ||||
| async def say_hello( | ||||
|     other_actor: str, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| async def say_hello(other_actor): | ||||
|     await trio.sleep(1)  # wait for other actor to spawn | ||||
|     async with tractor.find_actor( | ||||
|         other_actor, | ||||
|         registry_addrs=[reg_addr], | ||||
|     ) as portal: | ||||
|     async with tractor.find_actor(other_actor) as portal: | ||||
|         assert portal is not None | ||||
|         return await portal.run(__name__, 'hi') | ||||
| 
 | ||||
| 
 | ||||
| async def say_hello_use_wait( | ||||
|     other_actor: str, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     async with tractor.wait_for_actor( | ||||
|         other_actor, | ||||
|         registry_addr=reg_addr, | ||||
|     ) as portal: | ||||
| async def say_hello_use_wait(other_actor): | ||||
|     async with tractor.wait_for_actor(other_actor) as portal: | ||||
|         assert portal is not None | ||||
|         result = await portal.run(__name__, 'hi') | ||||
|         return result | ||||
|  | @ -83,29 +69,21 @@ async def say_hello_use_wait( | |||
| 
 | ||||
| @tractor_test | ||||
| @pytest.mark.parametrize('func', [say_hello, say_hello_use_wait]) | ||||
| async def test_trynamic_trio( | ||||
|     func, | ||||
|     start_method, | ||||
|     reg_addr, | ||||
| ): | ||||
|     ''' | ||||
|     Root actor acting as the "director" and running one-shot-task-actors | ||||
|     for the directed subs. | ||||
| 
 | ||||
|     ''' | ||||
| async def test_trynamic_trio(func, start_method, arb_addr): | ||||
|     """Main tractor entry point, the "master" process (for now | ||||
|     acts as the "director"). | ||||
|     """ | ||||
|     async with tractor.open_nursery() as n: | ||||
|         print("Alright... Action!") | ||||
| 
 | ||||
|         donny = await n.run_in_actor( | ||||
|             func, | ||||
|             other_actor='gretchen', | ||||
|             reg_addr=reg_addr, | ||||
|             name='donny', | ||||
|         ) | ||||
|         gretchen = await n.run_in_actor( | ||||
|             func, | ||||
|             other_actor='donny', | ||||
|             reg_addr=reg_addr, | ||||
|             name='gretchen', | ||||
|         ) | ||||
|         print(await gretchen.result()) | ||||
|  | @ -153,27 +131,17 @@ async def unpack_reg(actor_or_portal): | |||
| 
 | ||||
| 
 | ||||
| async def spawn_and_check_registry( | ||||
|     reg_addr: tuple, | ||||
|     arb_addr: tuple, | ||||
|     use_signal: bool, | ||||
|     debug_mode: bool = False, | ||||
|     remote_arbiter: bool = False, | ||||
|     with_streaming: bool = False, | ||||
|     maybe_daemon: tuple[ | ||||
|         subprocess.Popen, | ||||
|         psutil.Process, | ||||
|     ]|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     if maybe_daemon: | ||||
|         popen, proc = maybe_daemon | ||||
|         # breakpoint() | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|         debug_mode=debug_mode, | ||||
|         arbiter_addr=arb_addr, | ||||
|     ): | ||||
|         async with tractor.get_registry(reg_addr) as portal: | ||||
|         async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|             # runtime needs to be up to call this | ||||
|             actor = tractor.current_actor() | ||||
| 
 | ||||
|  | @ -189,30 +157,28 @@ async def spawn_and_check_registry( | |||
|                 extra = 2  # local root actor + remote arbiter | ||||
| 
 | ||||
|             # ensure current actor is registered | ||||
|             registry: dict = await get_reg() | ||||
|             registry = await get_reg() | ||||
|             assert actor.uid in registry | ||||
| 
 | ||||
|             try: | ||||
|                 async with tractor.open_nursery() as an: | ||||
|                     async with ( | ||||
|                         collapse_eg(), | ||||
|                         trio.open_nursery() as trion, | ||||
|                     ): | ||||
|                 async with tractor.open_nursery() as n: | ||||
|                     async with trio.open_nursery() as trion: | ||||
| 
 | ||||
|                         portals = {} | ||||
|                         for i in range(3): | ||||
|                             name = f'a{i}' | ||||
|                             if with_streaming: | ||||
|                                 portals[name] = await an.start_actor( | ||||
|                                 portals[name] = await n.start_actor( | ||||
|                                     name=name, enable_modules=[__name__]) | ||||
| 
 | ||||
|                             else:  # no streaming | ||||
|                                 portals[name] = await an.run_in_actor( | ||||
|                                 portals[name] = await n.run_in_actor( | ||||
|                                     trio.sleep_forever, name=name) | ||||
| 
 | ||||
|                         # wait on last actor to come up | ||||
|                         async with tractor.wait_for_actor(name): | ||||
|                             registry = await get_reg() | ||||
|                             for uid in an._children: | ||||
|                             for uid in n._children: | ||||
|                                 assert uid in registry | ||||
| 
 | ||||
|                         assert len(portals) + extra == len(registry) | ||||
|  | @ -245,24 +211,20 @@ async def spawn_and_check_registry( | |||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | ||||
| def test_subactors_unregister_on_cancel( | ||||
|     debug_mode: bool, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
|     with_streaming, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that cancelling a nursery results in all subactors | ||||
|     """Verify that cancelling a nursery results in all subactors | ||||
|     deregistering themselves with the arbiter. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     with pytest.raises(KeyboardInterrupt): | ||||
|         trio.run( | ||||
|             partial( | ||||
|                 spawn_and_check_registry, | ||||
|                 reg_addr, | ||||
|                 arb_addr, | ||||
|                 use_signal, | ||||
|                 debug_mode=debug_mode, | ||||
|                 remote_arbiter=False, | ||||
|                 with_streaming=with_streaming, | ||||
|             ), | ||||
|  | @ -272,11 +234,10 @@ def test_subactors_unregister_on_cancel( | |||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | ||||
| def test_subactors_unregister_on_cancel_remote_daemon( | ||||
|     daemon: subprocess.Popen, | ||||
|     debug_mode: bool, | ||||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
|     with_streaming, | ||||
| ): | ||||
|     """Verify that cancelling a nursery results in all subactors | ||||
|  | @ -287,15 +248,10 @@ def test_subactors_unregister_on_cancel_remote_daemon( | |||
|         trio.run( | ||||
|             partial( | ||||
|                 spawn_and_check_registry, | ||||
|                 reg_addr, | ||||
|                 arb_addr, | ||||
|                 use_signal, | ||||
|                 debug_mode=debug_mode, | ||||
|                 remote_arbiter=True, | ||||
|                 with_streaming=with_streaming, | ||||
|                 maybe_daemon=( | ||||
|                     daemon, | ||||
|                     psutil.Process(daemon.pid) | ||||
|                 ), | ||||
|             ), | ||||
|         ) | ||||
| 
 | ||||
|  | @ -306,7 +262,7 @@ async def streamer(agen): | |||
| 
 | ||||
| 
 | ||||
| async def close_chans_before_nursery( | ||||
|     reg_addr: tuple, | ||||
|     arb_addr: tuple, | ||||
|     use_signal: bool, | ||||
|     remote_arbiter: bool = False, | ||||
| ) -> None: | ||||
|  | @ -319,9 +275,9 @@ async def close_chans_before_nursery( | |||
|         entries_at_end = 1 | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|         arbiter_addr=arb_addr, | ||||
|     ): | ||||
|         async with tractor.get_registry(reg_addr) as aportal: | ||||
|         async with tractor.get_arbiter(*arb_addr) as aportal: | ||||
|             try: | ||||
|                 get_reg = partial(unpack_reg, aportal) | ||||
| 
 | ||||
|  | @ -339,12 +295,9 @@ async def close_chans_before_nursery( | |||
|                         async with portal2.open_stream_from( | ||||
|                             stream_forever | ||||
|                         ) as agen2: | ||||
|                             async with ( | ||||
|                                 collapse_eg(), | ||||
|                                 trio.open_nursery() as tn, | ||||
|                             ): | ||||
|                                 tn.start_soon(streamer, agen1) | ||||
|                                 tn.start_soon(cancel, use_signal, .5) | ||||
|                             async with trio.open_nursery() as n: | ||||
|                                 n.start_soon(streamer, agen1) | ||||
|                                 n.start_soon(cancel, use_signal, .5) | ||||
|                                 try: | ||||
|                                     await streamer(agen2) | ||||
|                                 finally: | ||||
|  | @ -376,7 +329,7 @@ async def close_chans_before_nursery( | |||
| def test_close_channel_explicit( | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
| ): | ||||
|     """Verify that closing a stream explicitly and killing the actor's | ||||
|     "root nursery" **before** the containing nursery tears down also | ||||
|  | @ -386,7 +339,7 @@ def test_close_channel_explicit( | |||
|         trio.run( | ||||
|             partial( | ||||
|                 close_chans_before_nursery, | ||||
|                 reg_addr, | ||||
|                 arb_addr, | ||||
|                 use_signal, | ||||
|                 remote_arbiter=False, | ||||
|             ), | ||||
|  | @ -395,10 +348,10 @@ def test_close_channel_explicit( | |||
| 
 | ||||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| def test_close_channel_explicit_remote_arbiter( | ||||
|     daemon: subprocess.Popen, | ||||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
| ): | ||||
|     """Verify that closing a stream explicitly and killing the actor's | ||||
|     "root nursery" **before** the containing nursery tears down also | ||||
|  | @ -408,7 +361,7 @@ def test_close_channel_explicit_remote_arbiter( | |||
|         trio.run( | ||||
|             partial( | ||||
|                 close_chans_before_nursery, | ||||
|                 reg_addr, | ||||
|                 arb_addr, | ||||
|                 use_signal, | ||||
|                 remote_arbiter=True, | ||||
|             ), | ||||
|  |  | |||
|  | @ -11,17 +11,18 @@ import platform | |||
| import shutil | ||||
| 
 | ||||
| import pytest | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
| ) | ||||
| 
 | ||||
| from conftest import repodir | ||||
| 
 | ||||
| 
 | ||||
| def examples_dir(): | ||||
|     """Return the abspath to the examples directory. | ||||
|     """ | ||||
|     return os.path.join(repodir(), 'examples') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def run_example_in_subproc( | ||||
|     loglevel: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| def run_example_in_subproc(loglevel, testdir, arb_addr): | ||||
| 
 | ||||
|     @contextmanager | ||||
|     def run(script_code): | ||||
|  | @ -31,8 +32,8 @@ def run_example_in_subproc( | |||
|             # on windows we need to create a special __main__.py which will | ||||
|             # be executed with ``python -m <modulename>`` on windows.. | ||||
|             shutil.copyfile( | ||||
|                 examples_dir() / '__main__.py', | ||||
|                 str(testdir / '__main__.py'), | ||||
|                 os.path.join(examples_dir(), '__main__.py'), | ||||
|                 os.path.join(str(testdir), '__main__.py') | ||||
|             ) | ||||
| 
 | ||||
|             # drop the ``if __name__ == '__main__'`` guard onwards from | ||||
|  | @ -66,9 +67,6 @@ def run_example_in_subproc( | |||
|         # due to backpressure!!! | ||||
|         proc = testdir.popen( | ||||
|             cmdargs, | ||||
|             stdin=subprocess.PIPE, | ||||
|             stdout=subprocess.PIPE, | ||||
|             stderr=subprocess.PIPE, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         assert not proc.returncode | ||||
|  | @ -84,37 +82,26 @@ def run_example_in_subproc( | |||
| 
 | ||||
|     # walk yields: (dirpath, dirnames, filenames) | ||||
|     [ | ||||
|         (p[0], f) | ||||
|         for p in os.walk(examples_dir()) | ||||
|         for f in p[2] | ||||
|         (p[0], f) for p in os.walk(examples_dir()) for f in p[2] | ||||
| 
 | ||||
|         if ( | ||||
|             '__' not in f | ||||
|             and f[0] != '_' | ||||
|             and 'debugging' not in p[0] | ||||
|             and 'integration' not in p[0] | ||||
|             and 'advanced_faults' not in p[0] | ||||
|             and 'multihost' not in p[0] | ||||
|             and 'trio' not in p[0] | ||||
|         ) | ||||
|         if '__' not in f | ||||
|         and f[0] != '_' | ||||
|         and 'debugging' not in p[0] | ||||
|         and 'integration' not in p[0] | ||||
|     ], | ||||
| 
 | ||||
|     ids=lambda t: t[1], | ||||
| ) | ||||
| def test_example( | ||||
|     run_example_in_subproc, | ||||
|     example_script, | ||||
| ): | ||||
|     ''' | ||||
|     Load and run scripts from this repo's ``examples/`` dir as a user | ||||
| def test_example(run_example_in_subproc, example_script): | ||||
|     """Load and run scripts from this repo's ``examples/`` dir as a user | ||||
|     would copy and pasing them into their editor. | ||||
| 
 | ||||
|     On windows a little more "finessing" is done to make | ||||
|     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the | ||||
|     test directory and invoke the script as a module with ``python -m | ||||
|     test_example``. | ||||
| 
 | ||||
|     ''' | ||||
|     ex_file: str = os.path.join(*example_script) | ||||
|     """ | ||||
|     ex_file = os.path.join(*example_script) | ||||
| 
 | ||||
|     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): | ||||
|         pytest.skip("2-way streaming example requires py3.9 async with syntax") | ||||
|  | @ -123,14 +110,10 @@ def test_example( | |||
|         code = ex.read() | ||||
| 
 | ||||
|         with run_example_in_subproc(code) as proc: | ||||
|             err = None | ||||
|             try: | ||||
|                 if not proc.poll(): | ||||
|                     _, err = proc.communicate(timeout=15) | ||||
| 
 | ||||
|             except subprocess.TimeoutExpired as e: | ||||
|                 proc.kill() | ||||
|                 err = e.stderr | ||||
|             proc.wait() | ||||
|             err, _ = proc.stderr.read(), proc.stdout.read() | ||||
|             # print(f'STDERR: {err}') | ||||
|             # print(f'STDOUT: {out}') | ||||
| 
 | ||||
|             # if we get some gnarly output let's aggregate and raise | ||||
|             if err: | ||||
|  | @ -144,8 +127,7 @@ def test_example( | |||
|                     # shouldn't eventually once we figure out what's | ||||
|                     # a better way to be explicit about aio side | ||||
|                     # cancels? | ||||
|                     and | ||||
|                     'asyncio.exceptions.CancelledError' not in last_error | ||||
|                     and 'asyncio.exceptions.CancelledError' not in last_error | ||||
|                 ): | ||||
|                     raise Exception(errmsg) | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,946 +0,0 @@ | |||
| ''' | ||||
| Low-level functional audits for our | ||||
| "capability based messaging"-spec feats. | ||||
| 
 | ||||
| B~) | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
|     # nullcontext, | ||||
| ) | ||||
| import importlib | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     # structs, | ||||
|     # msgpack, | ||||
|     Raw, | ||||
|     # Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     # _state, | ||||
|     MsgTypeError, | ||||
|     Context, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
|     _ctxvar_MsgCodec, | ||||
|     _exts, | ||||
| 
 | ||||
|     NamespacePath, | ||||
|     MsgCodec, | ||||
|     MsgDec, | ||||
|     mk_codec, | ||||
|     mk_dec, | ||||
|     apply_codec, | ||||
|     current_codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     log, | ||||
|     Started, | ||||
|     # _payload_msgs, | ||||
|     # PayloadMsg, | ||||
|     # mk_msg_spec, | ||||
| ) | ||||
| from tractor.msg._ops import ( | ||||
|     limit_plds, | ||||
| ) | ||||
| 
 | ||||
| def enc_nsp(obj: Any) -> Any: | ||||
|     actor: Actor = tractor.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     ) | ||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||
|     print(f'{uid} ENC HOOK') | ||||
| 
 | ||||
|     match obj: | ||||
|         # case NamespacePath()|str(): | ||||
|         case NamespacePath(): | ||||
|             encoded: str = str(obj) | ||||
|             print( | ||||
|                 f'----- ENCODING `NamespacePath` as `str` ------\n' | ||||
|                 f'|_obj:{type(obj)!r} = {obj!r}\n' | ||||
|                 f'|_encoded: str = {encoded!r}\n' | ||||
|             ) | ||||
|             # if type(obj) != NamespacePath: | ||||
|             #     breakpoint() | ||||
|             return encoded | ||||
|         case _: | ||||
|             logmsg: str = ( | ||||
|                 f'{uid}\n' | ||||
|                 'FAILED ENCODE\n' | ||||
|                 f'obj-> `{obj}: {type(obj)}`\n' | ||||
|             ) | ||||
|             raise NotImplementedError(logmsg) | ||||
| 
 | ||||
| 
 | ||||
| def dec_nsp( | ||||
|     obj_type: Type, | ||||
|     obj: Any, | ||||
| 
 | ||||
| ) -> Any: | ||||
|     # breakpoint() | ||||
|     actor: Actor = tractor.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     ) | ||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||
|     print( | ||||
|         f'{uid}\n' | ||||
|         'CUSTOM DECODE\n' | ||||
|         f'type-arg-> {obj_type}\n' | ||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n' | ||||
|     ) | ||||
|     nsp = None | ||||
|     # XXX, never happens right? | ||||
|     if obj_type is Raw: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     if ( | ||||
|         obj_type is NamespacePath | ||||
|         and isinstance(obj, str) | ||||
|         and ':' in obj | ||||
|     ): | ||||
|         nsp = NamespacePath(obj) | ||||
|         # TODO: we could built a generic handler using | ||||
|         # JUST matching the obj_type part? | ||||
|         # nsp = obj_type(obj) | ||||
| 
 | ||||
|     if nsp: | ||||
|         print(f'Returning NSP instance: {nsp}') | ||||
|         return nsp | ||||
| 
 | ||||
|     logmsg: str = ( | ||||
|         f'{uid}\n' | ||||
|         'FAILED DECODE\n' | ||||
|         f'type-> {obj_type}\n' | ||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n\n' | ||||
|         f'current codec:\n' | ||||
|         f'{current_codec()}\n' | ||||
|     ) | ||||
|     # TODO: figure out the ignore subsys for this! | ||||
|     # -[ ] option whether to defense-relay backc the msg | ||||
|     #   inside an `Invalid`/`Ignore` | ||||
|     # -[ ] how to make this handling pluggable such that a | ||||
|     #   `Channel`/`MsgTransport` can intercept and process | ||||
|     #   back msgs either via exception handling or some other | ||||
|     #   signal? | ||||
|     log.warning(logmsg) | ||||
|     # NOTE: this delivers the invalid | ||||
|     # value up to `msgspec`'s decoding | ||||
|     # machinery for error raising. | ||||
|     return obj | ||||
|     # raise NotImplementedError(logmsg) | ||||
| 
 | ||||
| 
 | ||||
| def ex_func(*args): | ||||
|     ''' | ||||
|     A mod level func we can ref and load via our `NamespacePath` | ||||
|     python-object pointer `str` subtype. | ||||
| 
 | ||||
|     ''' | ||||
|     print(f'ex_func({args})') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'add_codec_hooks', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=['use_codec_hooks', 'no_codec_hooks'], | ||||
| ) | ||||
| def test_custom_extension_types( | ||||
|     debug_mode: bool, | ||||
|     add_codec_hooks: bool | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a `MsgCodec` (used for encoding all outbound IPC msgs | ||||
|     and decoding all inbound `PayloadMsg`s) and a paired `MsgDec` | ||||
|     (used for decoding the `PayloadMsg.pld: Raw` received within a given | ||||
|     task's ipc `Context` scope) can both send and receive "extension types" | ||||
|     as supported via custom converter hooks passed to `msgspec`. | ||||
| 
 | ||||
|     ''' | ||||
|     nsp_pld_dec: MsgDec = mk_dec( | ||||
|         spec=None,  # ONLY support the ext type | ||||
|         dec_hook=dec_nsp if add_codec_hooks else None, | ||||
|         ext_types=[NamespacePath], | ||||
|     ) | ||||
|     nsp_codec: MsgCodec = mk_codec( | ||||
|         # ipc_pld_spec=Raw,  # default! | ||||
| 
 | ||||
|         # NOTE XXX: the encode hook MUST be used no matter what since | ||||
|         # our `NamespacePath` is not any of a `Any` native type nor | ||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||
|         # how to encode it unless we provide the custom hook. | ||||
|         # | ||||
|         # AGAIN that is, regardless of whether we spec an | ||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||
|         # how to enc `NamespacePath` (nsp), so we add a custom | ||||
|         # hook to do that ALWAYS. | ||||
|         enc_hook=enc_nsp if add_codec_hooks else None, | ||||
| 
 | ||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||
|         # `Decoder`? so it won't work in tandem with the | ||||
|         # `ipc_pld_spec` passed above? | ||||
|         ext_types=[NamespacePath], | ||||
| 
 | ||||
|         # TODO? is it useful to have the `.pld` decoded *prior* to | ||||
|         # the `PldRx`?? like perf or mem related? | ||||
|         # ext_dec=nsp_pld_dec, | ||||
|     ) | ||||
|     if add_codec_hooks: | ||||
|         assert nsp_codec.dec.dec_hook is None | ||||
| 
 | ||||
|         # TODO? if we pass `ext_dec` above? | ||||
|         # assert nsp_codec.dec.dec_hook is dec_nsp | ||||
| 
 | ||||
|         assert nsp_codec.enc.enc_hook is enc_nsp | ||||
| 
 | ||||
|     nsp = NamespacePath.from_ref(ex_func) | ||||
| 
 | ||||
|     try: | ||||
|         nsp_bytes: bytes = nsp_codec.encode(nsp) | ||||
|         nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes) | ||||
|         nsp_rt_sin_msg.load_ref() is ex_func | ||||
|     except TypeError: | ||||
|         if not add_codec_hooks: | ||||
|             pass | ||||
| 
 | ||||
|     try: | ||||
|         msg_bytes: bytes = nsp_codec.encode( | ||||
|             Started( | ||||
|                 cid='cid', | ||||
|                 pld=nsp, | ||||
|             ) | ||||
|         ) | ||||
|         # since the ext-type obj should also be set as the msg.pld | ||||
|         assert nsp_bytes in msg_bytes | ||||
|         started_rt: Started = nsp_codec.decode(msg_bytes) | ||||
|         pld: Raw = started_rt.pld | ||||
|         assert isinstance(pld, Raw) | ||||
|         nsp_rt: NamespacePath = nsp_pld_dec.decode(pld) | ||||
|         assert isinstance(nsp_rt, NamespacePath) | ||||
|         # in obj comparison terms they should be the same | ||||
|         assert nsp_rt == nsp | ||||
|         # ensure we've decoded to ext type! | ||||
|         assert nsp_rt.load_ref() is ex_func | ||||
| 
 | ||||
|     except TypeError: | ||||
|         if not add_codec_hooks: | ||||
|             pass | ||||
| 
 | ||||
@tractor.context
async def sleep_forever_in_sub(
    ctx: Context,
) -> None:
    '''
    Minimal child-side ctx endpoint which simply blocks forever
    (until cancelled); used by tests that only need an open ctx.

    NOTE(review): this endpoint never calls `ctx.started()`;
    presumably the parent-side `open_context()` tolerates that for
    these tests - confirm against `Portal.open_context()` semantics.

    '''
    await trio.sleep_forever()
| 
 | ||||
| 
 | ||||
def mk_custom_codec(
    add_hooks: bool,

) -> MsgCodec:
    '''
    Create a `MsgCodec` with a custom `msgpack` encode-hook set
    (when `add_hooks` is set) such that non-native ext types like
    `NamespacePath` can be serialized over the wire.

    Params:
    - add_hooks: when `True`, install `enc_nsp` as the codec's
      `enc_hook`; otherwise no custom encode hook is set.

    Returns the newly minted `MsgCodec`.

    NOTE: the previous return annotation claimed
    `tuple[MsgCodec, MsgDec]` but only the codec was (and is) ever
    returned; the annotation now matches actual behavior.

    '''
    # XXX NOTE XXX: despite defining `NamespacePath` as a type
    # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair
    # to cast to/from that type on the wire. See the docs:
    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types

    nsp_codec: MsgCodec = mk_codec(
        # ipc_pld_spec=Raw,  # default!

        # NOTE XXX: the encode hook MUST be used no matter what since
        # our `NamespacePath` is not any of a `Any` native type nor
        # a `msgspec.Struct` subtype - so `msgspec` has no way to know
        # how to encode it unless we provide the custom hook.
        #
        # AGAIN that is, regardless of whether we spec an
        # `Any`-decoded-pld the enc has no knowledge (by default)
        # how to enc `NamespacePath` (nsp), so we add a custom
        # hook to do that ALWAYS.
        enc_hook=enc_nsp if add_hooks else None,

        # XXX NOTE: pretty sure this is mutex with the `type=` to
        # `Decoder`? so it won't work in tandem with the
        # `ipc_pld_spec` passed above?
        ext_types=[NamespacePath],
    )
    # dec_hook=dec_nsp if add_hooks else None,
    return nsp_codec
| 
 | ||||
| 
 | ||||
@pytest.mark.parametrize(
    'limit_plds_args',
    [
        (
            {'dec_hook': None, 'ext_types': None},
            None,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': None},
            TypeError,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': [NamespacePath]},
            None,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]},
            None,
        ),
    ],
    ids=[
        'no_hook_no_ext_types',
        'only_hook',
        'hook_and_ext_types',
        'hook_and_ext_types_w_null',
    ]
)
def test_pld_limiting_usage(
    limit_plds_args: tuple[dict, Exception|None],
):
    '''
    Verify `dec_hook()` and `ext_types` need to either both be
    provided or we raise an explanatory type-error.

    '''
    kwargs, maybe_err = limit_plds_args

    async def main():
        async with tractor.open_nursery() as an:  # just to open runtime

            # XXX SHOULD NEVER WORK outside an ipc ctx scope!
            try:
                with limit_plds(**kwargs):
                    pass
            except RuntimeError:
                pass

            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )
            async with (
                p.open_context(
                    sleep_forever_in_sub
                ) as (ctx, first),
            ):
                # NOTE: `except None` is a runtime `TypeError`, so
                # only arm the handler when an error type is
                # actually expected for this param set.
                caught: Exception|None = None
                try:
                    with limit_plds(**kwargs):
                        pass
                except (maybe_err or ()) as exc:
                    caught = exc

                if maybe_err is not None:
                    assert type(caught) is maybe_err

            # explicitly tear down the daemon sub-actor so the
            # nursery exit doesn't hang on it.
            await p.cancel_actor()

    # XXX BUGFIX: previously `main()` was defined but never run,
    # making this test a silent no-op!
    trio.run(main)
| 
 | ||||
| 
 | ||||
def chk_codec_applied(
    expect_codec: MsgCodec|None,
    enter_value: MsgCodec|None = None,

) -> None:
    '''
    Buncha sanity checks ensuring that the IPC channel's
    context-vars are set to the expected codec and that the
    ctx-var wrapper APIs match the same.

    Params:
    - expect_codec: codec expected to be currently applied; when
      `None`, only assert that no codec was entered at all.
    - enter_value: optional codec yielded from `apply_codec()` to
      additionally compare against the expected one.

    '''
    # TODO: play with tricycle again, bc this is supposed to work
    # the way we want?
    #
    # TreeVar
    # task: trio.Task = trio.lowlevel.current_task()
    # curr_codec = _ctxvar_MsgCodec.get_in(task)

    # ContextVar
    # task_ctx: Context = task.context
    # assert _ctxvar_MsgCodec in task_ctx
    # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec]

    # no-codec case: nothing should have been entered/applied.
    if expect_codec is None:
        assert enter_value is None
        return

    # NOTE: currently we use this!
    # RunVar
    curr_codec: MsgCodec = current_codec()
    last_read_codec = _ctxvar_MsgCodec.get()
    # assert curr_codec is last_read_codec

    # NOTE: chained identity comparison - checks pairwise that the
    # expected codec is what `current_codec()` reports AND what was
    # last read from the ctx-var, and that it is not a default codec.
    #
    # NOTE(review): the final pairwise term actually compares
    # `_def_msgspec_codec is not _def_tractor_codec` (two distinct
    # defaults, trivially true); presumably the intent was to compare
    # `last_read_codec` against BOTH defaults - confirm.
    assert (
        (same_codec := expect_codec) is
        # returned from `mk_codec()`

        # yielded value from `apply_codec()`

        # read from current task's `contextvars.Context`
        curr_codec is
        last_read_codec

        # the default `msgspec` settings
        is not _codec._def_msgspec_codec
        is not _codec._def_tractor_codec
    )

    if enter_value:
        assert enter_value is same_codec
| 
 | ||||
| 
 | ||||
@tractor.context
async def send_back_values(
    ctx: Context,
    rent_pld_spec_type_strs: list[str],
    add_hooks: bool,

) -> None:
    '''
    Set up a custom codec to load instances of `NamespacePath`
    and ensure we can round trip a func ref with our parent.

    Params:
    - rent_pld_spec_type_strs: the parent's pld-spec serialized as
      a list of type-name strings (decoded below via
      `_exts.dec_type_union()`).
    - add_hooks: whether to install the custom `NamespacePath`
      enc/dec hooks on this (child) side.

    '''
    uid: tuple = tractor.current_actor().uid

    # init state in sub-actor should be default
    chk_codec_applied(
        expect_codec=_codec._def_tractor_codec,
    )

    # load pld spec from input str
    rent_pld_spec = _exts.dec_type_union(
        rent_pld_spec_type_strs,
        mods=[
            importlib.import_module(__name__),
        ],
    )
    rent_pld_spec_types: set[Type] = _codec.unpack_spec_types(
        rent_pld_spec,
    )

    # ONLY add ext-hooks if the rent specified a non-std type!
    # NOTE: deliberately shadows the `add_hooks` input param.
    add_hooks: bool = (
        NamespacePath in rent_pld_spec_types
        and
        add_hooks
    )

    # same as on parent side config.
    nsp_codec: MsgCodec|None = None
    if add_hooks:
        nsp_codec = mk_codec(
            enc_hook=enc_nsp,
            ext_types=[NamespacePath],
        )

    with (
        maybe_apply_codec(nsp_codec) as codec,
        limit_plds(
            rent_pld_spec,
            dec_hook=dec_nsp if add_hooks else None,
            ext_types=[NamespacePath]  if add_hooks else None,
        ) as pld_dec,
    ):
        # ?XXX? SHOULD WE NOT be swapping the global codec since it
        # breaks `Context.started()` roundtripping checks??
        chk_codec_applied(
            expect_codec=nsp_codec,
            enter_value=codec,
        )

        # ?TODO, mismatch case(s)?
        #
        # ensure pld spec matches on both sides
        ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
        assert pld_dec is ctx_pld_dec
        child_pld_spec: Type = pld_dec.spec
        child_pld_spec_types: set[Type] = _codec.unpack_spec_types(
            child_pld_spec,
        )
        # child spec must cover (at least) everything the rent specced
        assert (
            child_pld_spec_types.issuperset(
                rent_pld_spec_types
            )
        )

        # ?TODO, try loop for each of the types in pld-superset?
        #
        # for send_value in [
        #     nsp,
        #     str(nsp),
        #     None,
        # ]:
        nsp = NamespacePath.from_ref(ex_func)
        try:
            print(
                f'{uid}: attempting to `.started({nsp})`\n'
                f'\n'
                f'rent_pld_spec: {rent_pld_spec}\n'
                f'child_pld_spec: {child_pld_spec}\n'
                f'codec: {codec}\n'
            )
            # await tractor.pause()
            await ctx.started(nsp)

        except tractor.MsgTypeError as _mte:
            mte = _mte

            # false -ve case: hooks were installed so this
            # `.started()` should have succeeded.
            if add_hooks:
                raise RuntimeError(
                    f'EXPECTED to `.started()` value given spec ??\n\n'
                    f'child_pld_spec -> {child_pld_spec}\n'
                    f'value = {nsp}: {type(nsp)}\n'
                )

            # true -ve case
            raise mte

        # TODO: maybe we should add our own wrapper error so as to
        # be interchange-lib agnostic?
        # -[ ] the error type is wtv is raised from the hook so we
        #   could also require a type-class of errors for
        #   indicating whether the hook-failure can be handled by
        #   a nasty-dialog-unprot sub-sys?
        except TypeError as typerr:
            # false -ve
            if add_hooks:
                raise RuntimeError('Should have been able to send `nsp`??')

            # true -ve
            print('Failed to send `nsp` due to no ext hooks set!')
            raise typerr

        # now try sending a set of valid and invalid plds to ensure
        # the pld spec is respected.
        sent: list[Any] = []
        async with ctx.open_stream() as ipc:
            print(
                f'{uid}: streaming all pld types to rent..'
            )

            # for send_value, expect_send in iter_send_val_items:
            for send_value in [
                nsp,
                str(nsp),
                None,
            ]:
                send_type: Type = type(send_value)
                print(
                    f'{uid}: SENDING NEXT pld\n'
                    f'send_type: {send_type}\n'
                    f'send_value: {send_value}\n'
                )
                try:
                    await ipc.send(send_value)
                    sent.append(send_value)

                except ValidationError as valerr:
                    print(f'{uid} FAILED TO SEND {send_value}!')

                    # false -ve
                    if add_hooks:
                        raise RuntimeError(
                            f'EXPECTED to roundtrip value given spec:\n'
                            f'rent_pld_spec -> {rent_pld_spec}\n'
                            f'child_pld_spec -> {child_pld_spec}\n'
                            f'value = {send_value}: {send_type}\n'
                        )

                    # true -ve
                    raise valerr
                    # continue

            else:
                # NOTE: `for/else` - only reached when the loop
                # completed without `break`/raise.
                print(
                    f'{uid}: finished sending all values\n'
                    'Should be exiting stream block!\n'
                )

        print(f'{uid}: exited streaming block!')
| 
 | ||||
| 
 | ||||
| 
 | ||||
@cm
def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None:
    '''
    Conditionally apply `codec` for the duration of the `with`
    block, yielding the applied codec; when `codec` is `None`
    do nothing and yield `None`.

    '''
    if codec is not None:
        with apply_codec(codec) as applied:
            yield applied
    else:
        yield None
| 
 | ||||
| 
 | ||||
@pytest.mark.parametrize(
    'pld_spec',
    [
        Any,
        NamespacePath,
        NamespacePath|None,  # the "maybe" spec Bo
    ],
    ids=[
        'any_type',
        'only_nsp_ext',
        'maybe_nsp_ext',
    ]
)
@pytest.mark.parametrize(
    'add_hooks',
    [
        True,
        False,
    ],
    ids=[
        'use_codec_hooks',
        'no_codec_hooks',
    ],
)
def test_ext_types_over_ipc(
    debug_mode: bool,
    pld_spec: Union[Type],
    add_hooks: bool,
):
    '''
    Ensure we can support extension types converted using
    `enc/dec_hook()`s passed to the `.msg.limit_plds()` API
    and that sane errors happen when we try do the same without
    the codec hooks.

    '''
    pld_types: set[Type] = _codec.unpack_spec_types(pld_spec)

    async def main():

        # sanity check the default pld-spec beforehand
        chk_codec_applied(
            expect_codec=_codec._def_tractor_codec,
        )

        # extension type we want to send as msg payload
        nsp = NamespacePath.from_ref(ex_func)

        # ^NOTE, 2 cases:
        # - codec hooks not added -> decode nsp as `str`
        # - codec with hooks -> decode nsp as `NamespacePath`
        nsp_codec: MsgCodec|None = None
        if (
            NamespacePath in pld_types
            and
            add_hooks
        ):
            nsp_codec = mk_codec(
                enc_hook=enc_nsp,
                ext_types=[NamespacePath],
            )

        async with tractor.open_nursery(
            debug_mode=debug_mode,
        ) as an:
            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )
            with (
                maybe_apply_codec(nsp_codec) as codec,
            ):
                chk_codec_applied(
                    expect_codec=nsp_codec,
                    enter_value=codec,
                )
                # serialize the pld-spec for wire transport to the sub
                rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec)

                # XXX should raise an mte (`MsgTypeError`)
                # when `add_hooks == False` bc the input
                # `expect_ipc_send` kwarg has a nsp which can't be
                # serialized!
                #
                # TODO:can we ensure this happens from the
                # `Return`-side (aka the sub) as well?
                try:
                    ctx: tractor.Context
                    ipc: tractor.MsgStream
                    async with (

                        # XXX should raise an mte (`MsgTypeError`)
                        # when `add_hooks == False`..
                        p.open_context(
                            send_back_values,
                            # expect_debug=debug_mode,
                            rent_pld_spec_type_strs=rent_pld_spec_type_strs,
                            add_hooks=add_hooks,
                            # expect_ipc_send=expect_ipc_send,
                        ) as (ctx, first),

                        ctx.open_stream() as ipc,
                    ):
                        with (
                            limit_plds(
                                pld_spec,
                                dec_hook=dec_nsp if add_hooks else None,
                                ext_types=[NamespacePath]  if add_hooks else None,
                            ) as pld_dec,
                        ):
                            ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
                            assert pld_dec is ctx_pld_dec

                            # if (
                            #     not add_hooks
                            #     and
                            #     NamespacePath in
                            # ):
                            #     pytest.fail('ctx should fail to open without custom enc_hook!?')

                            # round-trip the ext type through the stream
                            await ipc.send(nsp)
                            nsp_rt = await ipc.receive()

                            assert nsp_rt == nsp
                            assert nsp_rt.load_ref() is ex_func

                # this test passes bc we can go no further!
                except MsgTypeError as mte:
                    # if not add_hooks:
                    #     # teardown nursery
                    #     await p.cancel_actor()
                        # return

                    raise mte

            await p.cancel_actor()

    # expected-success case: spec covers nsp AND hooks are set
    if (
        NamespacePath in pld_types
        and
        add_hooks
    ):
        trio.run(main)

    else:
        # expected-failure case: the sub-side `.started(nsp)` should
        # box a `TypeError` in a `RemoteActorError`.
        with pytest.raises(
            expected_exception=tractor.RemoteActorError,
        ) as excinfo:
            trio.run(main)

        exc = excinfo.value
        # bc `.started(nsp: NamespacePath)` will raise
        assert exc.boxed_type is TypeError
| 
 | ||||
| 
 | ||||
| # def chk_pld_type( | ||||
| #     payload_spec: Type[Struct]|Any, | ||||
| #     pld: Any, | ||||
| 
 | ||||
| #     expect_roundtrip: bool|None = None, | ||||
| 
 | ||||
| # ) -> bool: | ||||
| 
 | ||||
| #     pld_val_type: Type = type(pld) | ||||
| 
 | ||||
| #     # TODO: verify that the overridden subtypes | ||||
| #     # DO NOT have modified type-annots from original! | ||||
| #     # 'Start',  .pld: FuncSpec | ||||
| #     # 'StartAck',  .pld: IpcCtxSpec | ||||
| #     # 'Stop',  .pld: UNSEt | ||||
| #     # 'Error',  .pld: ErrorData | ||||
| 
 | ||||
| #     codec: MsgCodec = mk_codec( | ||||
| #         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified | ||||
| #         # type union. | ||||
| #         ipc_pld_spec=payload_spec, | ||||
| #     ) | ||||
| 
 | ||||
| #     # make a one-off dec to compare with our `MsgCodec` instance | ||||
| #     # which does the below `mk_msg_spec()` call internally | ||||
| #     ipc_msg_spec: Union[Type[Struct]] | ||||
| #     msg_types: list[PayloadMsg[payload_spec]] | ||||
| #     ( | ||||
| #         ipc_msg_spec, | ||||
| #         msg_types, | ||||
| #     ) = mk_msg_spec( | ||||
| #         payload_type_union=payload_spec, | ||||
| #     ) | ||||
| #     _enc = msgpack.Encoder() | ||||
| #     _dec = msgpack.Decoder( | ||||
| #         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` | ||||
| #     ) | ||||
| 
 | ||||
| #     assert ( | ||||
| #         payload_spec | ||||
| #         == | ||||
| #         codec.pld_spec | ||||
| #     ) | ||||
| 
 | ||||
| #     # assert codec.dec == dec | ||||
| #     # | ||||
| #     # ^-XXX-^ not sure why these aren't "equal" but when cast | ||||
| #     # to `str` they seem to match ?? .. kk | ||||
| 
 | ||||
| #     assert ( | ||||
| #         str(ipc_msg_spec) | ||||
| #         == | ||||
| #         str(codec.msg_spec) | ||||
| #         == | ||||
| #         str(_dec.type) | ||||
| #         == | ||||
| #         str(codec.dec.type) | ||||
| #     ) | ||||
| 
 | ||||
| #     # verify the boxed-type for all variable payload-type msgs. | ||||
| #     if not msg_types: | ||||
| #         breakpoint() | ||||
| 
 | ||||
| #     roundtrip: bool|None = None | ||||
| #     pld_spec_msg_names: list[str] = [ | ||||
| #         td.__name__ for td in _payload_msgs | ||||
| #     ] | ||||
| #     for typedef in msg_types: | ||||
| 
 | ||||
| #         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names | ||||
| #         if skip_runtime_msg: | ||||
| #             continue | ||||
| 
 | ||||
| #         pld_field = structs.fields(typedef)[1] | ||||
| #         assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere? | ||||
| 
 | ||||
| #         kwargs: dict[str, Any] = { | ||||
| #             'cid': '666', | ||||
| #             'pld': pld, | ||||
| #         } | ||||
| #         enc_msg: PayloadMsg = typedef(**kwargs) | ||||
| 
 | ||||
| #         _wire_bytes: bytes = _enc.encode(enc_msg) | ||||
| #         wire_bytes: bytes = codec.enc.encode(enc_msg) | ||||
| #         assert _wire_bytes == wire_bytes | ||||
| 
 | ||||
| #         ve: ValidationError|None = None | ||||
| #         try: | ||||
| #             dec_msg = codec.dec.decode(wire_bytes) | ||||
| #             _dec_msg = _dec.decode(wire_bytes) | ||||
| 
 | ||||
| #             # decoded msg and thus payload should be exactly same! | ||||
| #             assert (roundtrip := ( | ||||
| #                 _dec_msg | ||||
| #                 == | ||||
| #                 dec_msg | ||||
| #                 == | ||||
| #                 enc_msg | ||||
| #             )) | ||||
| 
 | ||||
| #             if ( | ||||
| #                 expect_roundtrip is not None | ||||
| #                 and expect_roundtrip != roundtrip | ||||
| #             ): | ||||
| #                 breakpoint() | ||||
| 
 | ||||
| #             assert ( | ||||
| #                 pld | ||||
| #                 == | ||||
| #                 dec_msg.pld | ||||
| #                 == | ||||
| #                 enc_msg.pld | ||||
| #             ) | ||||
| #             # assert (roundtrip := (_dec_msg == enc_msg)) | ||||
| 
 | ||||
| #         except ValidationError as _ve: | ||||
| #             ve = _ve | ||||
| #             roundtrip: bool = False | ||||
| #             if pld_val_type is payload_spec: | ||||
| #                 raise ValueError( | ||||
| #                    'Got `ValidationError` despite type-var match!?\n' | ||||
| #                     f'pld_val_type: {pld_val_type}\n' | ||||
| #                     f'payload_type: {payload_spec}\n' | ||||
| #                 ) from ve | ||||
| 
 | ||||
| #             else: | ||||
| #                 # ow we good cuz the pld spec mismatched. | ||||
| #                 print( | ||||
| #                     'Got expected `ValidationError` since,\n' | ||||
| #                     f'{pld_val_type} is not {payload_spec}\n' | ||||
| #                 ) | ||||
| #         else: | ||||
| #             if ( | ||||
| #                 payload_spec is not Any | ||||
| #                 and | ||||
| #                 pld_val_type is not payload_spec | ||||
| #             ): | ||||
| #                 raise ValueError( | ||||
| #                    'DID NOT `ValidationError` despite expected type match!?\n' | ||||
| #                     f'pld_val_type: {pld_val_type}\n' | ||||
| #                     f'payload_type: {payload_spec}\n' | ||||
| #                 ) | ||||
| 
 | ||||
| #     # full code decode should always be attempted! | ||||
| #     if roundtrip is None: | ||||
| #         breakpoint() | ||||
| 
 | ||||
| #     return roundtrip | ||||
| 
 | ||||
| 
 | ||||
| # ?TODO? maybe remove since covered in the newer `test_pldrx_limiting` | ||||
| # via end-2-end testing of all this? | ||||
| # -[ ] IOW do we really NEED this lowlevel unit testing? | ||||
| # | ||||
| # def test_limit_msgspec( | ||||
| #     debug_mode: bool, | ||||
| # ): | ||||
| #     ''' | ||||
| #     Internals unit testing to verify that type-limiting an IPC ctx's | ||||
| #     msg spec with `Pldrx.limit_plds()` results in various | ||||
| #     encapsulated `msgspec` object settings and state. | ||||
| 
 | ||||
| #     ''' | ||||
| #     async def main(): | ||||
| #         async with tractor.open_root_actor( | ||||
| #             debug_mode=debug_mode, | ||||
| #         ): | ||||
| #             # ensure we can round-trip a boxing `PayloadMsg` | ||||
| #             assert chk_pld_type( | ||||
| #                 payload_spec=Any, | ||||
| #                 pld=None, | ||||
| #                 expect_roundtrip=True, | ||||
| #             ) | ||||
| 
 | ||||
| #             # verify that a mis-typed payload value won't decode | ||||
| #             assert not chk_pld_type( | ||||
| #                 payload_spec=int, | ||||
| #                 pld='doggy', | ||||
| #             ) | ||||
| 
 | ||||
| #             # parametrize the boxed `.pld` type as a custom-struct | ||||
| #             # and ensure that parametrization propagates | ||||
| #             # to all payload-msg-spec-able subtypes! | ||||
| #             class CustomPayload(Struct): | ||||
| #                 name: str | ||||
| #                 value: Any | ||||
| 
 | ||||
| #             assert not chk_pld_type( | ||||
| #                 payload_spec=CustomPayload, | ||||
| #                 pld='doggy', | ||||
| #             ) | ||||
| 
 | ||||
| #             assert chk_pld_type( | ||||
| #                 payload_spec=CustomPayload, | ||||
| #                 pld=CustomPayload(name='doggy', value='urmom') | ||||
| #             ) | ||||
| 
 | ||||
| #             # yah, we can `.pause_from_sync()` now! | ||||
| #             # breakpoint() | ||||
| 
 | ||||
| #     trio.run(main) | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -7,24 +7,31 @@ import pytest | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from tractor._testing import tractor_test | ||||
| from conftest import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.trio | ||||
| async def test_no_runtime(): | ||||
| async def test_no_arbitter(): | ||||
|     """An arbitter must be established before any nurseries | ||||
|     can be created. | ||||
| 
 | ||||
|     (In other words ``tractor.open_root_actor()`` must be engaged at | ||||
|     some point?) | ||||
|     """ | ||||
|     with pytest.raises(RuntimeError) : | ||||
|         async with tractor.find_actor('doggy'): | ||||
|     with pytest.raises(RuntimeError): | ||||
|         with tractor.open_nursery(): | ||||
|             pass | ||||
| 
 | ||||
| 
 | ||||
| def test_no_main(): | ||||
|     """An async function **must** be passed to ``tractor.run()``. | ||||
|     """ | ||||
|     with pytest.raises(TypeError): | ||||
|         tractor.run(None) | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_self_is_registered(reg_addr): | ||||
| async def test_self_is_registered(arb_addr): | ||||
|     "Verify waiting on the arbiter to register itself using the standard api." | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|  | @ -34,20 +41,20 @@ async def test_self_is_registered(reg_addr): | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_self_is_registered_localportal(reg_addr): | ||||
| async def test_self_is_registered_localportal(arb_addr): | ||||
|     "Verify waiting on the arbiter to register itself using a local portal." | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|     async with tractor.get_registry(reg_addr) as portal: | ||||
|     async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|         assert isinstance(portal, tractor._portal.LocalPortal) | ||||
| 
 | ||||
|         with trio.fail_after(0.2): | ||||
|             sockaddr = await portal.run_from_ns( | ||||
|                     'self', 'wait_for_actor', name='root') | ||||
|             assert sockaddr[0] == reg_addr | ||||
|             assert sockaddr[0] == arb_addr | ||||
| 
 | ||||
| 
 | ||||
| def test_local_actor_async_func(reg_addr): | ||||
| def test_local_actor_async_func(arb_addr): | ||||
|     """Verify a simple async function in-process. | ||||
|     """ | ||||
|     nums = [] | ||||
|  | @ -55,7 +62,7 @@ def test_local_actor_async_func(reg_addr): | |||
|     async def print_loop(): | ||||
| 
 | ||||
|         async with tractor.open_root_actor( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|         ): | ||||
|             # arbiter is started in-proc if dne | ||||
|             assert tractor.current_actor().is_arbiter | ||||
|  |  | |||
|  | @ -7,10 +7,8 @@ import time | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor._testing import ( | ||||
| from conftest import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from .conftest import ( | ||||
|     sig_prog, | ||||
|     _INT_SIGNAL, | ||||
|     _INT_RETURN_CODE, | ||||
|  | @ -30,9 +28,9 @@ def test_abort_on_sigint(daemon): | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||
| async def test_cancel_remote_arbiter(daemon, arb_addr): | ||||
|     assert not tractor.current_actor().is_arbiter | ||||
|     async with tractor.get_registry(reg_addr) as portal: | ||||
|     async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
|     time.sleep(0.1) | ||||
|  | @ -41,16 +39,16 @@ async def test_cancel_remote_arbiter(daemon, reg_addr): | |||
| 
 | ||||
|     # no arbiter socket should exist | ||||
|     with pytest.raises(OSError): | ||||
|         async with tractor.get_registry(reg_addr) as portal: | ||||
|         async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|             pass | ||||
| 
 | ||||
| 
 | ||||
| def test_register_duplicate_name(daemon, reg_addr): | ||||
| def test_register_duplicate_name(daemon, arb_addr): | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|         ) as n: | ||||
| 
 | ||||
|             assert not tractor.current_actor().is_arbiter | ||||
|  |  | |||
|  | @ -1,239 +0,0 @@ | |||
| ''' | ||||
| Define the details of inter-actor "out-of-band" (OoB) cancel | ||||
| semantics, that is how cancellation works when a cancel request comes | ||||
| from the different concurrency (primitive's) "layer" than where the | ||||
| eventual `trio.Task` actually raises a signal. | ||||
| 
 | ||||
| ''' | ||||
| from functools import partial | ||||
| # from contextlib import asynccontextmanager as acm | ||||
| # import itertools | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import (  # typing | ||||
|     ActorNursery, | ||||
|     Portal, | ||||
|     Context, | ||||
|     # ContextCancelled, | ||||
|     # RemoteActorError, | ||||
| ) | ||||
| # from tractor._testing import ( | ||||
| #     tractor_test, | ||||
| #     expect_ctxc, | ||||
| # ) | ||||
| 
 | ||||
| # XXX TODO cases: | ||||
| # - [ ] peer cancelled itself - so other peers should | ||||
| #   get errors reflecting that the peer was itself the .canceller? | ||||
| 
 | ||||
| # def test_self_cancel(): | ||||
| #     ''' | ||||
| #     2 cases: | ||||
| #     - calls `Actor.cancel()` locally in some task | ||||
| #     - calls LocalPortal.cancel_actor()` ? | ||||
| # | ||||
| # things to ensure! | ||||
| # -[ ] the ctxc raised in a child should ideally show the tb of the | ||||
| #     underlying `Cancelled` checkpoint, i.e. | ||||
| #     `raise scope_error from ctxc`? | ||||
| # | ||||
| # -[ ] a self-cancelled context, if not allowed to block on | ||||
| #     `ctx.result()` at some point will hang since the `ctx._scope` | ||||
| #     is never `.cancel_called`; cases for this include, | ||||
| #     - an `open_ctx()` which never starts before being OoB actor | ||||
| #       cancelled. | ||||
| #       |_ parent task will be blocked in `.open_context()` for the | ||||
| #         `Started` msg, and when the OoB ctxc arrives `ctx._scope` | ||||
| #         will never have been signalled.. | ||||
| 
 | ||||
| #     ''' | ||||
| #     ... | ||||
| 
 | ||||
| # TODO, sanity test against the case in `/examples/trio/lockacquire_not_unmasked.py` | ||||
| # but with the `Lock.acquire()` from a `@context` to ensure the | ||||
| # implicit ignore-case-non-unmasking. | ||||
| # | ||||
| # @tractor.context | ||||
| # async def acquire_actor_global_lock( | ||||
| #     ctx: tractor.Context, | ||||
| #     ignore_special_cases: bool, | ||||
| # ): | ||||
| 
 | ||||
| #     async with maybe_unmask_excs( | ||||
| #         ignore_special_cases=ignore_special_cases, | ||||
| #     ): | ||||
| #         await ctx.started('locked') | ||||
| 
 | ||||
| #     # block til cancelled | ||||
| #     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_forever( | ||||
|     ctx: tractor.Context, | ||||
|     # ignore_special_cases: bool, | ||||
|     do_started: bool, | ||||
| ): | ||||
| 
 | ||||
|     # async with maybe_unmask_excs( | ||||
|     #     ignore_special_cases=ignore_special_cases, | ||||
|     # ): | ||||
|     #     await ctx.started('locked') | ||||
|     if do_started: | ||||
|         await ctx.started() | ||||
| 
 | ||||
|     # block til cancelled | ||||
|     print('sleepin on child-side..') | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'cancel_ctx', | ||||
|     [True, False], | ||||
| ) | ||||
| def test_cancel_ctx_with_parent_side_entered_in_bg_task( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     cancel_ctx: bool, | ||||
| ): | ||||
|     ''' | ||||
|     The most "basic" out-of-band-task self-cancellation case where | ||||
|     `Portal.open_context()` is entered in a bg task and the | ||||
|     parent-task (of the containing nursery) calls `Context.cancel()` | ||||
|     without the child knowing; the `Context._scope` should be | ||||
|     `.cancel_called` when the IPC ctx's child-side relays | ||||
|     a `ContextCancelled` with a `.canceller` set to the parent | ||||
|     actor('s task). | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after( | ||||
|             2 if not debug_mode else 999, | ||||
|         ): | ||||
|             an: ActorNursery | ||||
|             async with ( | ||||
|                 tractor.open_nursery( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel='devx', | ||||
|                     enable_stack_on_sig=True, | ||||
|                 ) as an, | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 ptl: Portal = await an.start_actor( | ||||
|                     'sub', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
| 
 | ||||
|                 async def _open_ctx_async( | ||||
|                     do_started: bool = True, | ||||
|                     task_status=trio.TASK_STATUS_IGNORED, | ||||
|                 ): | ||||
|                     # do we expect to never enter the | ||||
|                     # `.open_context()` below. | ||||
|                     if not do_started: | ||||
|                         task_status.started() | ||||
| 
 | ||||
|                     async with ptl.open_context( | ||||
|                         sleep_forever, | ||||
|                         do_started=do_started, | ||||
|                     ) as (ctx, first): | ||||
|                         task_status.started(ctx) | ||||
|                         await trio.sleep_forever() | ||||
| 
 | ||||
|                 # XXX, this is the key OoB part! | ||||
|                 # | ||||
|                 # - start the `.open_context()` in a bg task which | ||||
|                 #   blocks inside the embedded scope-body, | ||||
|                 # | ||||
|                 # -  when we call `Context.cancel()` it **is | ||||
|                 #   not** from the same task which eventually runs | ||||
|                 #   `.__aexit__()`, | ||||
|                 # | ||||
|                 # - since the bg "opener" task will be in | ||||
|                 #   a `trio.sleep_forever()`, it must be interrupted | ||||
|                 #   by the `ContextCancelled` delivered from the | ||||
|                 #   child-side; `Context._scope: CancelScope` MUST | ||||
|                 #   be `.cancel_called`! | ||||
|                 # | ||||
|                 print('ASYNC opening IPC context in subtask..') | ||||
|                 maybe_ctx: Context|None = await tn.start(partial( | ||||
|                     _open_ctx_async, | ||||
|                 )) | ||||
| 
 | ||||
|                 if ( | ||||
|                     maybe_ctx | ||||
|                     and | ||||
|                     cancel_ctx | ||||
|                 ): | ||||
|                     print('cancelling first IPC ctx!') | ||||
|                     await maybe_ctx.cancel() | ||||
| 
 | ||||
|                 # XXX, note that despite `maybe_context.cancel()` | ||||
|                 # being called above, it's the parent (bg) task | ||||
|                 # which was originally never interrupted in | ||||
|                 # the `ctx._scope` body due to missing case logic in | ||||
|                 # `ctx._maybe_cancel_and_set_remote_error()`. | ||||
|                 # | ||||
|                 # It didn't matter that the subactor process was | ||||
|                 # already terminated and reaped, nothing was | ||||
|                 # cancelling the ctx-parent task's scope! | ||||
|                 # | ||||
|                 print('cancelling subactor!') | ||||
|                 await ptl.cancel_actor() | ||||
| 
 | ||||
|                 if maybe_ctx: | ||||
|                     try: | ||||
|                         await maybe_ctx.wait_for_result() | ||||
|                     except tractor.ContextCancelled as ctxc: | ||||
|                         assert not cancel_ctx | ||||
|                         assert ( | ||||
|                             ctxc.canceller | ||||
|                             == | ||||
|                             tractor.current_actor().aid.uid | ||||
|                         ) | ||||
|                         # don't re-raise since it'll trigger | ||||
|                         # an EG from the above tn. | ||||
| 
 | ||||
|     if cancel_ctx: | ||||
|         # graceful self-cancel | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     else: | ||||
|         # ctx parent task should see OoB ctxc due to | ||||
|         # `ptl.cancel_actor()`. | ||||
|         with pytest.raises(tractor.ContextCancelled) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert 'root' in excinfo.value.canceller[0] | ||||
| 
 | ||||
| 
 | ||||
| # def test_parent_actor_cancels_subactor_with_gt1_ctxs_open_to_it( | ||||
| #     debug_mode: bool, | ||||
| #     loglevel: str, | ||||
| # ): | ||||
| #     ''' | ||||
| #     Demos OoB cancellation from the perspective of a ctx opened with | ||||
| #     a child subactor where the parent cancels the child at the "actor | ||||
| #     layer" using `Portal.cancel_actor()` and thus the | ||||
| #     `ContextCancelled.canceller` received by the ctx's parent-side | ||||
| #     task will appear to be a "self cancellation" even though that | ||||
| #     specific task itself was not cancelled and thus | ||||
| #     `Context.cancel_called ==False`. | ||||
| #     ''' | ||||
|                 # TODO, do we have an existing implied ctx | ||||
|                 # cancel test like this? | ||||
|                 # with trio.move_on_after(0.5):# as cs: | ||||
|                 #     await _open_ctx_async( | ||||
|                 #         do_started=False, | ||||
|                 #     ) | ||||
| 
 | ||||
| 
 | ||||
|                 # in-line ctx scope should definitely raise | ||||
|                 # a ctxc with `.canceller = 'root'` | ||||
|                 # async with ptl.open_context( | ||||
|                 #     sleep_forever, | ||||
|                 #     do_started=True, | ||||
|                 # ) as pair: | ||||
| 
 | ||||
|  | @ -1,364 +0,0 @@ | |||
| ''' | ||||
| Audit sub-sys APIs from `.msg._ops` | ||||
| mostly for ensuring correct `contextvars` | ||||
| related settings around IPC contexts. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     Struct, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Context, | ||||
|     MsgTypeError, | ||||
|     current_ipc_ctx, | ||||
|     Portal, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _ops as msgops, | ||||
|     Return, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     log, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class PldMsg( | ||||
|     Struct, | ||||
| 
 | ||||
|     # TODO: with multiple structs in-spec we need to tag them! | ||||
|     # -[ ] offer a built-in `PldMsg` type to inherit from which takes | ||||
|     #      case of these details? | ||||
|     # | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag=True, | ||||
|     # tag_field='msg_type', | ||||
| ): | ||||
|     field: str | ||||
| 
 | ||||
| 
 | ||||
| maybe_msg_spec = PldMsg|None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_expect_raises( | ||||
|     raises: BaseException|None = None, | ||||
|     ensure_in_message: list[str]|None = None, | ||||
|     post_mortem: bool = False, | ||||
|     timeout: int = 3, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Async wrapper for ensuring errors propagate from the inner scope. | ||||
| 
 | ||||
|     ''' | ||||
|     if tractor._state.debug_mode(): | ||||
|         timeout += 999 | ||||
| 
 | ||||
|     with trio.fail_after(timeout): | ||||
|         try: | ||||
|             yield | ||||
|         except BaseException as _inner_err: | ||||
|             inner_err = _inner_err | ||||
|             # wasn't-expected to error.. | ||||
|             if raises is None: | ||||
|                 raise | ||||
| 
 | ||||
|             else: | ||||
|                 assert type(inner_err) is raises | ||||
| 
 | ||||
|                 # maybe check for error txt content | ||||
|                 if ensure_in_message: | ||||
|                     part: str | ||||
|                     err_repr: str = repr(inner_err) | ||||
|                     for part in ensure_in_message: | ||||
|                         for i, arg in enumerate(inner_err.args): | ||||
|                             if part in err_repr: | ||||
|                                 break | ||||
|                         # if part never matches an arg, then we're | ||||
|                         # missing a match. | ||||
|                         else: | ||||
|                             raise ValueError( | ||||
|                                 'Failed to find error message content?\n\n' | ||||
|                                 f'expected: {ensure_in_message!r}\n' | ||||
|                                 f'part: {part!r}\n\n' | ||||
|                                 f'{inner_err.args}' | ||||
|                         ) | ||||
| 
 | ||||
|                 if post_mortem: | ||||
|                     await tractor.post_mortem() | ||||
| 
 | ||||
|         else: | ||||
|             if raises: | ||||
|                 raise RuntimeError( | ||||
|                     f'Expected a {raises.__name__!r} to be raised?' | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context( | ||||
|     pld_spec=maybe_msg_spec, | ||||
| ) | ||||
| async def child( | ||||
|     ctx: Context, | ||||
|     started_value: int|PldMsg|None, | ||||
|     return_value: str|None, | ||||
|     validate_pld_spec: bool, | ||||
|     raise_on_started_mte: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Call ``Context.started()`` more than once (an error). | ||||
| 
 | ||||
|     ''' | ||||
|     expect_started_mte: bool = started_value == 10 | ||||
| 
 | ||||
|     # sanity check that child RPC context is the current one | ||||
|     curr_ctx: Context = current_ipc_ctx() | ||||
|     assert ctx is curr_ctx | ||||
| 
 | ||||
|     rx: msgops.PldRx = ctx._pld_rx | ||||
|     curr_pldec: _codec.MsgDec = rx.pld_dec | ||||
| 
 | ||||
|     ctx_meta: dict = getattr( | ||||
|         child, | ||||
|         '_tractor_context_meta', | ||||
|         None, | ||||
|     ) | ||||
|     if ctx_meta: | ||||
|         assert ( | ||||
|             ctx_meta['pld_spec'] | ||||
|             is curr_pldec.spec | ||||
|             is curr_pldec.pld_spec | ||||
|         ) | ||||
| 
 | ||||
|     # 2 cases: handle send-side and recv-only validation | ||||
|     # - when `raise_on_started_mte == True`, send validate | ||||
|     # - else, parent-recv-side only validation | ||||
|     mte: MsgTypeError|None = None | ||||
|     try: | ||||
|         await ctx.started( | ||||
|             value=started_value, | ||||
|             validate_pld_spec=validate_pld_spec, | ||||
|         ) | ||||
| 
 | ||||
|     except MsgTypeError as _mte: | ||||
|         mte = _mte | ||||
|         log.exception('started()` raised an MTE!\n') | ||||
|         if not expect_started_mte: | ||||
|             raise RuntimeError( | ||||
|                 'Child-ctx-task SHOULD NOT HAVE raised an MTE for\n\n' | ||||
|                 f'{started_value!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|         boxed_div: str = '------ - ------' | ||||
|         assert boxed_div not in mte._message | ||||
|         assert boxed_div not in mte.tb_str | ||||
|         assert boxed_div not in repr(mte) | ||||
|         assert boxed_div not in str(mte) | ||||
|         mte_repr: str = repr(mte) | ||||
|         for line in mte.message.splitlines(): | ||||
|             assert line in mte_repr | ||||
| 
 | ||||
|         # since this is a *local error* there should be no | ||||
|         # boxed traceback content! | ||||
|         assert not mte.tb_str | ||||
| 
 | ||||
|         # propagate to parent? | ||||
|         if raise_on_started_mte: | ||||
|             raise | ||||
| 
 | ||||
|     # no-send-side-error fallthrough | ||||
|     if ( | ||||
|         validate_pld_spec | ||||
|         and | ||||
|         expect_started_mte | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             'Child-ctx-task SHOULD HAVE raised an MTE for\n\n' | ||||
|             f'{started_value!r}\n' | ||||
|         ) | ||||
| 
 | ||||
|     assert ( | ||||
|         not expect_started_mte | ||||
|         or | ||||
|         not validate_pld_spec | ||||
|     ) | ||||
| 
 | ||||
|     # if wait_for_parent_to_cancel: | ||||
|     #     ... | ||||
|     # | ||||
|     # ^-TODO-^ logic for diff validation policies on each side: | ||||
|     # | ||||
|     # -[ ] ensure that if we don't validate on the send | ||||
|     #   side, that we are eventually error-cancelled by our | ||||
|     #   parent due to the bad `Started` payload! | ||||
|     # -[ ] the boxed error should be srced from the parent's | ||||
|     #   runtime NOT ours! | ||||
|     # -[ ] we should still error on bad `return_value`s | ||||
|     #   despite the parent not yet error-cancelling us? | ||||
|     #   |_ how do we want the parent side to look in that | ||||
|     #     case? | ||||
|     #     -[ ] maybe the equiv of "during handling of the | ||||
|     #       above error another occurred" for the case where | ||||
|     #       the parent sends a MTE to this child and while | ||||
|     #       waiting for the child to terminate it gets back | ||||
|     #       the MTE for this case? | ||||
|     # | ||||
| 
 | ||||
|     # XXX should always fail on recv side since we can't | ||||
|     # really do much else beside terminate and relay the | ||||
|     # msg-type-error from this RPC task ;) | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'return_value', | ||||
|     [ | ||||
|         'yo', | ||||
|         None, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'return[invalid-"yo"]', | ||||
|         'return[valid-None]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'started_value', | ||||
|     [ | ||||
|         10, | ||||
|         PldMsg(field='yo'), | ||||
|     ], | ||||
|     ids=[ | ||||
|         'Started[invalid-10]', | ||||
|         'Started[valid-PldMsg]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'pld_check_started_value', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'check-started-pld', | ||||
|         'no-started-pld-validate', | ||||
|     ], | ||||
| ) | ||||
| def test_basic_payload_spec( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     return_value: str|None, | ||||
|     started_value: int|PldMsg, | ||||
|     pld_check_started_value: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Validate the most basic `PldRx` msg-type-spec semantics around | ||||
|     a IPC `Context` endpoint start, started-sync, and final return | ||||
|     value depending on set payload types and the currently applied | ||||
|     pld-spec. | ||||
| 
 | ||||
|     ''' | ||||
|     invalid_return: bool = return_value == 'yo' | ||||
|     invalid_started: bool = started_value == 10 | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             loglevel=loglevel, | ||||
|         ) as an: | ||||
|             p: Portal = await an.start_actor( | ||||
|                 'child', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             # since not opened yet. | ||||
|             assert current_ipc_ctx() is None | ||||
| 
 | ||||
|             if invalid_started: | ||||
|                 msg_type_str: str = 'Started' | ||||
|                 bad_value: int = 10 | ||||
|             elif invalid_return: | ||||
|                 msg_type_str: str = 'Return' | ||||
|                 bad_value: str = 'yo' | ||||
|             else: | ||||
|                 # XXX but should never be used below then.. | ||||
|                 msg_type_str: str = '' | ||||
|                 bad_value: str = '' | ||||
| 
 | ||||
|             maybe_mte: MsgTypeError|None = None | ||||
|             should_raise: Exception|None = ( | ||||
|                 MsgTypeError if ( | ||||
|                     invalid_return | ||||
|                     or | ||||
|                     invalid_started | ||||
|                 ) else None | ||||
|             ) | ||||
|             async with ( | ||||
|                 maybe_expect_raises( | ||||
|                     raises=should_raise, | ||||
|                     ensure_in_message=[ | ||||
|                         f"invalid `{msg_type_str}` msg payload", | ||||
|                         f'{bad_value}', | ||||
|                         f'has type {type(bad_value)!r}', | ||||
|                         'not match type-spec', | ||||
|                         f'`{msg_type_str}.pld: PldMsg|NoneType`', | ||||
|                     ], | ||||
|                     # only for debug | ||||
|                     # post_mortem=True, | ||||
|                 ), | ||||
|                 p.open_context( | ||||
|                     child, | ||||
|                     return_value=return_value, | ||||
|                     started_value=started_value, | ||||
|                     validate_pld_spec=pld_check_started_value, | ||||
|                 ) as (ctx, first), | ||||
|             ): | ||||
|                 # now opened with 'child' sub | ||||
|                 assert current_ipc_ctx() is ctx | ||||
| 
 | ||||
|                 assert type(first) is PldMsg | ||||
|                 assert first.field == 'yo' | ||||
| 
 | ||||
|                 try: | ||||
|                     res: None|PldMsg = await ctx.result(hide_tb=False) | ||||
|                     assert res is None | ||||
|                 except MsgTypeError as mte: | ||||
|                     maybe_mte = mte | ||||
|                     if not invalid_return: | ||||
|                         raise | ||||
| 
 | ||||
|                     # expected this invalid `Return.pld` so audit | ||||
|                     # the error state + meta-data | ||||
|                     assert mte.expected_msg_type is Return | ||||
|                     assert mte.cid == ctx.cid | ||||
|                     mte_repr: str = repr(mte) | ||||
|                     for line in mte.message.splitlines(): | ||||
|                         assert line in mte_repr | ||||
| 
 | ||||
|                     assert mte.tb_str | ||||
|                     # await tractor.pause(shield=True) | ||||
| 
 | ||||
|                     # verify expected remote mte deats | ||||
|                     assert ctx._local_error is None | ||||
|                     assert ( | ||||
|                         mte is | ||||
|                         ctx._remote_error is | ||||
|                         ctx.maybe_error is | ||||
|                         ctx.outcome | ||||
|                     ) | ||||
| 
 | ||||
|             if should_raise is None: | ||||
|                 assert maybe_mte is None | ||||
| 
 | ||||
|             await p.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -4,8 +4,8 @@ from itertools import cycle | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.testing import tractor_test | ||||
| from tractor.experimental import msgpub | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| def test_type_checks(): | ||||
|  | @ -159,7 +159,7 @@ async def test_required_args(callwith_expecterror): | |||
| ) | ||||
| def test_multi_actor_subs_arbiter_pub( | ||||
|     loglevel, | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
|     pub_actor, | ||||
| ): | ||||
|     """Try out the neato @pub decorator system. | ||||
|  | @ -169,7 +169,7 @@ def test_multi_actor_subs_arbiter_pub( | |||
|     async def main(): | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|             enable_modules=[__name__], | ||||
|         ) as n: | ||||
| 
 | ||||
|  | @ -254,12 +254,12 @@ def test_multi_actor_subs_arbiter_pub( | |||
| 
 | ||||
| def test_single_subactor_pub_multitask_subs( | ||||
|     loglevel, | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
| ): | ||||
|     async def main(): | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|             enable_modules=[__name__], | ||||
|         ) as n: | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,237 +0,0 @@ | |||
| ''' | ||||
| Special case testing for issues not (dis)covered in the primary | ||||
| `Context` related functional/scenario suites. | ||||
| 
 | ||||
| **NOTE: this mod is a WIP** space for handling | ||||
| odd/rare/undiscovered/not-yet-revealed faults which either | ||||
| loudly (ideal case) break our supervision protocol | ||||
| or (worst case) result in distributed sys hangs. | ||||
| 
 | ||||
| Suites here further try to clarify (if [partially] ill-defined) and | ||||
| verify our edge case semantics for inter-actor-relayed-exceptions | ||||
| including, | ||||
| 
 | ||||
| - lowlevel: what remote obj-data is interchanged for IPC and what | ||||
|   native-obj form is expected from unpacking in the new | ||||
|   mem-domain. | ||||
| 
 | ||||
| - which kinds of `RemoteActorError` (and its derivs) are expected by which | ||||
|   (types of) peers (parent, child, sibling, etc) with what | ||||
|   particular meta-data set such as, | ||||
| 
 | ||||
|   - `.src_uid`: the original (maybe) peer who raised. | ||||
|   - `.relay_uid`: the next-hop-peer who sent it. | ||||
|   - `.relay_path`: the sequence of peer actor hops. | ||||
|   - `.is_inception`: a predicate that denotes multi-hop remote errors. | ||||
| 
 | ||||
| - when should `ExceptionGroup`s be relayed from a particular | ||||
|   remote endpoint, they should never be caused by implicit `._rpc` | ||||
|   nursery machinery! | ||||
| 
 | ||||
| - various special `trio` edge cases around its cancellation semantics | ||||
|   and how we (currently) leverage `trio.Cancelled` as a signal for | ||||
|   whether a `Context` task should raise `ContextCancelled` (ctx). | ||||
| 
 | ||||
| ''' | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import (  # typing | ||||
|     ActorNursery, | ||||
|     Portal, | ||||
|     Context, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_n_chkpt_in_finally( | ||||
|     ctx: Context, | ||||
|     sleep_n_raise: bool, | ||||
| 
 | ||||
|     chld_raise_delay: float, | ||||
|     chld_finally_delay: float, | ||||
| 
 | ||||
|     rent_cancels: bool, | ||||
|     rent_ctxc_delay: float, | ||||
| 
 | ||||
|     expect_exc: str|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Sync, open a tn, then wait for cancel, run a chkpt inside | ||||
|     the user's `finally:` teardown. | ||||
| 
 | ||||
|     This covers a footgun case that `trio` core doesn't seem to care about | ||||
| wherein an exc can be masked by a `trio.Cancelled` raised inside a tn embedded | ||||
|     `finally:`. | ||||
| 
 | ||||
|     Also see `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` | ||||
|     for the down and gritty details. | ||||
| 
 | ||||
|     Since a `@context` endpoint fn can also contain code like this, | ||||
| **and** bc we currently have no easy way other than | ||||
|     `trio.Cancelled` to signal cancellation on each side of an IPC `Context`, | ||||
|     the footgun issue can compound itself as demonstrated in this suite.. | ||||
| 
 | ||||
|     Here are some edge cases codified with our WIP "sclang" syntax | ||||
|     (note the parent(rent)/child(chld) naming here is just | ||||
| pragmatism, generally most of these cases can occur | ||||
| regardless of the distributed-task's supervision hierarchy), | ||||
| 
 | ||||
|     - rent c)=> chld.raises-then-taskc-in-finally | ||||
|      |_ chld's body raises an `exc: BaseException`. | ||||
|       _ in its `finally:` block it runs a chkpoint | ||||
|         which raises a taskc (`trio.Cancelled`) which | ||||
|         masks `exc`, instead raising taskc up to the first tn. | ||||
|       _ the embedded/chld tn captures the masking taskc and then | ||||
|         raises it up to the ._rpc-ep-tn instead of `exc`. | ||||
|       _ the rent thinks the child ctxc-ed instead of errored.. | ||||
| 
 | ||||
|     ''' | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     if expect_exc: | ||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( | ||||
|             type_name=expect_exc, | ||||
|         ) | ||||
| 
 | ||||
|     berr: BaseException|None = None | ||||
|     try: | ||||
|         if not sleep_n_raise: | ||||
|             await trio.sleep_forever() | ||||
|         elif sleep_n_raise: | ||||
| 
 | ||||
|             # XXX this sleep is less then the sleep the parent | ||||
|             # does before calling `ctx.cancel()` | ||||
|             await trio.sleep(chld_raise_delay) | ||||
| 
 | ||||
|             # XXX this will be masked by a taskc raised in | ||||
|             # the `finally:` if this fn doesn't terminate | ||||
|             # before any ctxc-req arrives AND a checkpoint is hit | ||||
|             # in that `finally:`. | ||||
|             raise RuntimeError('my app krurshed..') | ||||
| 
 | ||||
|     except BaseException as _berr: | ||||
|         berr = _berr | ||||
| 
 | ||||
|         # TODO: it'd sure be nice to be able to inject our own | ||||
|     # `ContextCancelled` here instead of `trio.Cancelled` | ||||
|         # so that our runtime can expect it and this "user code" | ||||
|         # would be able to tell the diff between a generic trio | ||||
|         # cancel and a tractor runtime-IPC cancel. | ||||
|         if expect_exc: | ||||
|             if not isinstance( | ||||
|                 berr, | ||||
|                 expect_exc, | ||||
|             ): | ||||
|                 raise ValueError( | ||||
|                     f'Unexpected exc type ??\n' | ||||
|                     f'{berr!r}\n' | ||||
|                     f'\n' | ||||
|                     f'Expected a {expect_exc!r}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         raise berr | ||||
| 
 | ||||
|     # simulate what user code might try even though | ||||
|     # it's a known boo-boo.. | ||||
|     finally: | ||||
|         # maybe wait for rent ctxc to arrive | ||||
|         with trio.CancelScope(shield=True): | ||||
|             await trio.sleep(chld_finally_delay) | ||||
| 
 | ||||
|         # !!XXX this will raise `trio.Cancelled` which | ||||
|         # will mask the RTE from above!!! | ||||
|         # | ||||
|         # YES, it's the same case as our extant | ||||
|         # `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` | ||||
|         try: | ||||
|             await trio.lowlevel.checkpoint() | ||||
|         except trio.Cancelled as taskc: | ||||
|             if (scope_err := taskc.__context__): | ||||
|                 print( | ||||
|                     f'XXX MASKED REMOTE ERROR XXX\n' | ||||
|                     f'ENDPOINT exception -> {scope_err!r}\n' | ||||
|                     f'will be masked by -> {taskc!r}\n' | ||||
|                 ) | ||||
|                 # await tractor.pause(shield=True) | ||||
| 
 | ||||
|             raise taskc | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'chld_callspec', | ||||
|     [ | ||||
|         dict( | ||||
|             sleep_n_raise=None, | ||||
|             chld_raise_delay=0.1, | ||||
|             chld_finally_delay=0.1, | ||||
|             expect_exc='Cancelled', | ||||
|             rent_cancels=True, | ||||
|             rent_ctxc_delay=0.1, | ||||
|         ), | ||||
|         dict( | ||||
|             sleep_n_raise='RuntimeError', | ||||
|             chld_raise_delay=0.1, | ||||
|             chld_finally_delay=1, | ||||
|             expect_exc='RuntimeError', | ||||
|             rent_cancels=False, | ||||
|             rent_ctxc_delay=0.1, | ||||
|         ), | ||||
|     ], | ||||
|     ids=lambda item: f'chld_callspec={item!r}' | ||||
| ) | ||||
| def test_unmasked_remote_exc( | ||||
|     debug_mode: bool, | ||||
|     chld_callspec: dict, | ||||
|     tpt_proto: str, | ||||
| ): | ||||
|     expect_exc_str: str|None = chld_callspec['sleep_n_raise'] | ||||
|     rent_ctxc_delay: float|None = chld_callspec['rent_ctxc_delay'] | ||||
|     async def main(): | ||||
|         an: ActorNursery | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             enable_transports=[tpt_proto], | ||||
|         ) as an: | ||||
|             ptl: Portal = await an.start_actor( | ||||
|                 'cancellee', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             ctx: Context | ||||
|             async with ( | ||||
|                 ptl.open_context( | ||||
|                     sleep_n_chkpt_in_finally, | ||||
|                     **chld_callspec, | ||||
|                 ) as (ctx, sent), | ||||
|             ): | ||||
|                 assert not sent | ||||
|                 await trio.sleep(rent_ctxc_delay) | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|                 # recv error or result from chld | ||||
|                 ctxc: ContextCancelled = await ctx.wait_for_result() | ||||
|                 assert ( | ||||
|                     ctxc is ctx.outcome | ||||
|                     and | ||||
|                     isinstance(ctxc, ContextCancelled) | ||||
|                 ) | ||||
| 
 | ||||
|             # always graceful terminate the sub in non-error cases | ||||
|             await an.cancel() | ||||
| 
 | ||||
|     if expect_exc_str: | ||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( | ||||
|             type_name=expect_exc_str, | ||||
|         ) | ||||
|         with pytest.raises( | ||||
|             expected_exception=tractor.RemoteActorError, | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         rae = excinfo.value | ||||
|         assert expect_exc == rae.boxed_type | ||||
| 
 | ||||
|     else: | ||||
|         trio.run(main) | ||||
|  | @ -1,6 +1,5 @@ | |||
| ''' | ||||
| Suites for our `.trionics.maybe_open_context()` multi-task | ||||
| shared-cached `@acm` API. | ||||
| Async context manager cache api testing: ``trionics.maybe_open_context():`` | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
|  | @ -10,15 +9,6 @@ from typing import Awaitable | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.trionics import ( | ||||
|     maybe_open_context, | ||||
| ) | ||||
| from tractor.log import ( | ||||
|     get_console_log, | ||||
|     get_logger, | ||||
| ) | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| _resource: int = 0 | ||||
|  | @ -44,6 +34,7 @@ def test_resource_only_entered_once(key_on): | |||
|     global _resource | ||||
|     _resource = 0 | ||||
| 
 | ||||
|     kwargs = {} | ||||
|     key = None | ||||
|     if key_on == 'key_value': | ||||
|         key = 'some_common_key' | ||||
|  | @ -62,7 +53,7 @@ def test_resource_only_entered_once(key_on): | |||
|                 # different task names per task will be used | ||||
|                 kwargs = {'task_name': name} | ||||
| 
 | ||||
|             async with maybe_open_context( | ||||
|             async with tractor.trionics.maybe_open_context( | ||||
|                 maybe_increment_counter, | ||||
|                 kwargs=kwargs, | ||||
|                 key=key, | ||||
|  | @ -82,13 +73,11 @@ def test_resource_only_entered_once(key_on): | |||
|         with trio.move_on_after(0.5): | ||||
|             async with ( | ||||
|                 tractor.open_root_actor(), | ||||
|                 trio.open_nursery() as tn, | ||||
|                 trio.open_nursery() as n, | ||||
|             ): | ||||
| 
 | ||||
|                 for i in range(10): | ||||
|                     tn.start_soon( | ||||
|                         enter_cached_mngr, | ||||
|                         f'task_{i}', | ||||
|                     ) | ||||
|                     n.start_soon(enter_cached_mngr, f'task_{i}') | ||||
|                     await trio.sleep(0.001) | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -110,55 +99,27 @@ async def streamer( | |||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_stream() -> Awaitable[ | ||||
|     tuple[ | ||||
|         tractor.ActorNursery, | ||||
|         tractor.MsgStream, | ||||
|     ] | ||||
| ]: | ||||
|     try: | ||||
|         async with tractor.open_nursery() as an: | ||||
|             portal = await an.start_actor( | ||||
|                 'streamer', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             try: | ||||
|                 async with ( | ||||
|                     portal.open_context(streamer) as (ctx, first), | ||||
|                     ctx.open_stream() as stream, | ||||
|                 ): | ||||
|                     print('Entered open_stream() caller') | ||||
|                     yield an, stream | ||||
|                     print('Exited open_stream() caller') | ||||
| async def open_stream() -> Awaitable[tractor.MsgStream]: | ||||
| 
 | ||||
|             finally: | ||||
|                 print( | ||||
|                     'Cancelling streamer with,\n' | ||||
|                     '=> `Portal.cancel_actor()`' | ||||
|                 ) | ||||
|                 await portal.cancel_actor() | ||||
|                 print('Cancelled streamer') | ||||
|     async with tractor.open_nursery() as tn: | ||||
|         portal = await tn.start_actor('streamer', enable_modules=[__name__]) | ||||
|         async with ( | ||||
|             portal.open_context(streamer) as (ctx, first), | ||||
|             ctx.open_stream() as stream, | ||||
|         ): | ||||
|             yield stream | ||||
| 
 | ||||
|     except Exception as err: | ||||
|         print( | ||||
|             f'`open_stream()` errored?\n' | ||||
|             f'{err!r}\n' | ||||
|         ) | ||||
|         await tractor.pause(shield=True) | ||||
|         raise err | ||||
|         await portal.cancel_actor() | ||||
|     print('CANCELLED STREAMER') | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_stream(taskname: str): | ||||
|     async with maybe_open_context( | ||||
|     async with tractor.trionics.maybe_open_context( | ||||
|         # NOTE: all secondary tasks should cache hit on the same key | ||||
|         acm_func=open_stream, | ||||
|     ) as ( | ||||
|         cache_hit, | ||||
|         (an, stream) | ||||
|     ): | ||||
|         # when the actor + portal + ctx + stream has already been | ||||
|         # allocated we want to just bcast to this task. | ||||
|     ) as (cache_hit, stream): | ||||
| 
 | ||||
|         if cache_hit: | ||||
|             print(f'{taskname} loaded from cache') | ||||
| 
 | ||||
|  | @ -166,77 +127,27 @@ async def maybe_open_stream(taskname: str): | |||
|             # if this feed is already allocated by the first | ||||
|             # task that entereed | ||||
|             async with stream.subscribe() as bstream: | ||||
|                 yield an, bstream | ||||
|                 print( | ||||
|                     f'cached task exited\n' | ||||
|                     f')>\n' | ||||
|                     f' |_{taskname}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             # we should always unreg the "cloned" bcrc for this | ||||
|             # consumer-task | ||||
|             assert id(bstream) not in bstream._state.subs | ||||
| 
 | ||||
|                 yield bstream | ||||
|         else: | ||||
|             # yield the actual stream | ||||
|             try: | ||||
|                 yield an, stream | ||||
|             finally: | ||||
|                 print( | ||||
|                     f'NON-cached task exited\n' | ||||
|                     f')>\n' | ||||
|                     f' |_{taskname}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         first_bstream = stream._broadcaster | ||||
|         bcrx_state = first_bstream._state | ||||
|         subs: dict[int, int] = bcrx_state.subs | ||||
|         if len(subs) == 1: | ||||
|             assert id(first_bstream) in subs | ||||
|             # ^^TODO! the bcrx should always de-allocate all subs, | ||||
|             # including the implicit first one allocated on entry | ||||
|             # by the first subscribing peer task, no? | ||||
|             # | ||||
|             # -[ ] adjust `MsgStream.subscribe()` to do this mgmt! | ||||
|             #  |_ allows reverting `MsgStream.receive()` to the | ||||
|             #    non-bcaster method. | ||||
|             #  |_ we can decide whether to reset `._broadcaster`? | ||||
|             # | ||||
|             # await tractor.pause(shield=True) | ||||
|             yield stream | ||||
| 
 | ||||
| 
 | ||||
| def test_open_local_sub_to_stream( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_open_local_sub_to_stream(): | ||||
|     ''' | ||||
|     Verify a single inter-actor stream can can be fanned-out shared to | ||||
|     N local tasks using `trionics.maybe_open_context()`. | ||||
|     N local tasks using ``trionics.maybe_open_context():``. | ||||
| 
 | ||||
|     ''' | ||||
|     timeout: float = 3.6 | ||||
|     if platform.system() == "Windows": | ||||
|         timeout: float = 10 | ||||
| 
 | ||||
|     if debug_mode: | ||||
|         timeout = 999 | ||||
|         print(f'IN debug_mode, setting large timeout={timeout!r}..') | ||||
|     timeout = 3 if platform.system() != "Windows" else 10 | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         full = list(range(1000)) | ||||
|         an: tractor.ActorNursery|None = None | ||||
|         num_tasks: int = 10 | ||||
| 
 | ||||
|         async def get_sub_and_pull(taskname: str): | ||||
| 
 | ||||
|             nonlocal an | ||||
| 
 | ||||
|             stream: tractor.MsgStream | ||||
|             async with ( | ||||
|                 maybe_open_stream(taskname) as ( | ||||
|                     an, | ||||
|                     stream, | ||||
|                 ), | ||||
|                 maybe_open_stream(taskname) as stream, | ||||
|             ): | ||||
|                 if '0' in taskname: | ||||
|                     assert isinstance(stream, tractor.MsgStream) | ||||
|  | @ -248,159 +159,24 @@ def test_open_local_sub_to_stream( | |||
| 
 | ||||
|                 first = await stream.receive() | ||||
|                 print(f'{taskname} started with value {first}') | ||||
|                 seq: list[int] = [] | ||||
|                 seq = [] | ||||
|                 async for msg in stream: | ||||
|                     seq.append(msg) | ||||
| 
 | ||||
|                 assert set(seq).issubset(set(full)) | ||||
| 
 | ||||
|             # end of @acm block | ||||
|             print(f'{taskname} finished') | ||||
| 
 | ||||
|         root: tractor.Actor | ||||
|         with trio.fail_after(timeout) as cs: | ||||
|         with trio.fail_after(timeout): | ||||
|             # TODO: turns out this isn't multi-task entrant XD | ||||
|             # We probably need an indepotent entry semantic? | ||||
|             async with tractor.open_root_actor( | ||||
|                 debug_mode=debug_mode, | ||||
|                 # maybe_enable_greenback=True, | ||||
|                 # | ||||
|                 # ^TODO? doesn't seem to mk breakpoint() usage work | ||||
|                 # bc each bg task needs to open a portal?? | ||||
|                 # - [ ] we should consider making this part of | ||||
|                 #      our taskman defaults? | ||||
|                 #   |_see https://github.com/goodboy/tractor/pull/363 | ||||
|                 # | ||||
|             ) as root: | ||||
|                 assert root.is_registrar | ||||
| 
 | ||||
|             async with tractor.open_root_actor(): | ||||
|                 async with ( | ||||
|                     trio.open_nursery() as tn, | ||||
|                     trio.open_nursery() as nurse, | ||||
|                 ): | ||||
|                     for i in range(num_tasks): | ||||
|                         tn.start_soon( | ||||
|                             get_sub_and_pull, | ||||
|                             f'task_{i}', | ||||
|                         ) | ||||
|                     for i in range(10): | ||||
|                         nurse.start_soon(get_sub_and_pull, f'task_{i}') | ||||
|                         await trio.sleep(0.001) | ||||
| 
 | ||||
|                 print('all consumer tasks finished!') | ||||
| 
 | ||||
|                 # ?XXX, ensure actor-nursery is shutdown or we might | ||||
|                 # hang here due to a minor task deadlock/race-condition? | ||||
|                 # | ||||
|                 # - seems that all we need is a checkpoint to ensure | ||||
|                 #   the last suspended task, which is inside | ||||
|                 #   `.maybe_open_context()`, can do the | ||||
|                 #   `Portal.cancel_actor()` call? | ||||
|                 # | ||||
|                 # - if that bg task isn't resumed, then this blocks | ||||
|                 #   timeout might hit before that? | ||||
|                 # | ||||
|                 if root.ipc_server.has_peers(): | ||||
|                     await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|                     # alt approach, cancel the entire `an` | ||||
|                     # await tractor.pause() | ||||
|                     # await an.cancel() | ||||
| 
 | ||||
|             # end of runtime scope | ||||
|             print('root actor terminated.') | ||||
| 
 | ||||
|         if cs.cancelled_caught: | ||||
|             pytest.fail( | ||||
|                 'Should NOT time out in `open_root_actor()` ?' | ||||
|             ) | ||||
| 
 | ||||
|         print('exiting main.') | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def cancel_outer_cs( | ||||
|     cs: trio.CancelScope|None = None, | ||||
|     delay: float = 0, | ||||
| ): | ||||
|     # on first task delay this enough to block | ||||
|     # the 2nd task but then cancel it mid sleep | ||||
|     # so that the tn.start() inside the key-err handler block | ||||
|     # is cancelled and would previously corrupt the | ||||
|     # mutext state. | ||||
|     log.info(f'task entering sleep({delay})') | ||||
|     await trio.sleep(delay) | ||||
|     if cs: | ||||
|         log.info('task calling cs.cancel()') | ||||
|         cs.cancel() | ||||
|     trio.lowlevel.checkpoint() | ||||
|     yield | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| def test_lock_not_corrupted_on_fast_cancel( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that if the caching-task (the first to enter | ||||
|     `maybe_open_context()`) is cancelled mid-cache-miss, the embedded | ||||
|     mutex can never be left in a corrupted state. | ||||
| 
 | ||||
|     That is, the lock is always eventually released ensuring a peer | ||||
|     (cache-hitting) task will never, | ||||
| 
 | ||||
|     - be left to inf-block/hang on the `lock.acquire()`. | ||||
|     - try to release the lock when still owned by the caching-task | ||||
|       due to it having erronously exited without calling | ||||
|       `lock.release()`. | ||||
| 
 | ||||
| 
 | ||||
|     ''' | ||||
|     delay: float = 1. | ||||
| 
 | ||||
|     async def use_moc( | ||||
|         cs: trio.CancelScope|None, | ||||
|         delay: float, | ||||
|     ): | ||||
|         log.info('task entering moc') | ||||
|         async with maybe_open_context( | ||||
|             cancel_outer_cs, | ||||
|             kwargs={ | ||||
|                 'cs': cs, | ||||
|                 'delay': delay, | ||||
|             }, | ||||
|         ) as (cache_hit, _null): | ||||
|             if cache_hit: | ||||
|                 log.info('2nd task entered') | ||||
|             else: | ||||
|                 log.info('1st task entered') | ||||
| 
 | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|     async def main(): | ||||
|         with trio.fail_after(delay + 2): | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 get_console_log('info') | ||||
|                 log.info('yo starting') | ||||
|                 cs = tn.cancel_scope | ||||
|                 tn.start_soon( | ||||
|                     use_moc, | ||||
|                     cs, | ||||
|                     delay, | ||||
|                     name='child', | ||||
|                 ) | ||||
|                 with trio.CancelScope() as rent_cs: | ||||
|                     await use_moc( | ||||
|                         cs=rent_cs, | ||||
|                         delay=delay, | ||||
|                     ) | ||||
| 
 | ||||
|                 print('all consumer tasks finished') | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  |  | |||
|  | @ -1,211 +0,0 @@ | |||
| import time | ||||
| 
 | ||||
| import trio | ||||
| import pytest | ||||
| 
 | ||||
| import tractor | ||||
| from tractor.ipc._ringbuf import ( | ||||
|     open_ringbuf, | ||||
|     RBToken, | ||||
|     RingBuffSender, | ||||
|     RingBuffReceiver | ||||
| ) | ||||
| from tractor._testing.samples import ( | ||||
|     generate_sample_messages, | ||||
| ) | ||||
| 
 | ||||
| # in case you don't want to melt your cores, uncomment dis! | ||||
| pytestmark = pytest.mark.skip | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_read_shm( | ||||
|     ctx: tractor.Context, | ||||
|     msg_amount: int, | ||||
|     token: RBToken, | ||||
|     total_bytes: int, | ||||
| ) -> None: | ||||
|     recvd_bytes = 0 | ||||
|     await ctx.started() | ||||
|     start_ts = time.time() | ||||
|     async with RingBuffReceiver(token) as receiver: | ||||
|         while recvd_bytes < total_bytes: | ||||
|             msg = await receiver.receive_some() | ||||
|             recvd_bytes += len(msg) | ||||
| 
 | ||||
|         # make sure we dont hold any memoryviews | ||||
|         # before the ctx manager aclose() | ||||
|         msg = None | ||||
| 
 | ||||
|     end_ts = time.time() | ||||
|     elapsed = end_ts - start_ts | ||||
|     elapsed_ms = int(elapsed * 1000) | ||||
| 
 | ||||
|     print(f'\n\telapsed ms: {elapsed_ms}') | ||||
|     print(f'\tmsg/sec: {int(msg_amount / elapsed):,}') | ||||
|     print(f'\tbytes/sec: {int(recvd_bytes / elapsed):,}') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_write_shm( | ||||
|     ctx: tractor.Context, | ||||
|     msg_amount: int, | ||||
|     rand_min: int, | ||||
|     rand_max: int, | ||||
|     token: RBToken, | ||||
| ) -> None: | ||||
|     msgs, total_bytes = generate_sample_messages( | ||||
|         msg_amount, | ||||
|         rand_min=rand_min, | ||||
|         rand_max=rand_max, | ||||
|     ) | ||||
|     await ctx.started(total_bytes) | ||||
|     async with RingBuffSender(token) as sender: | ||||
|         for msg in msgs: | ||||
|             await sender.send_all(msg) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'msg_amount,rand_min,rand_max,buf_size', | ||||
|     [ | ||||
|         # simple case, fixed payloads, large buffer | ||||
|         (100_000, 0, 0, 10 * 1024), | ||||
| 
 | ||||
|         # guaranteed wrap around on every write | ||||
|         (100, 10 * 1024, 20 * 1024, 10 * 1024), | ||||
| 
 | ||||
|         # large payload size, but large buffer | ||||
|         (10_000, 256 * 1024, 512 * 1024, 10 * 1024 * 1024) | ||||
|     ], | ||||
|     ids=[ | ||||
|         'fixed_payloads_large_buffer', | ||||
|         'wrap_around_every_write', | ||||
|         'large_payloads_large_buffer', | ||||
|     ] | ||||
| ) | ||||
| def test_ringbuf( | ||||
|     msg_amount: int, | ||||
|     rand_min: int, | ||||
|     rand_max: int, | ||||
|     buf_size: int | ||||
| ): | ||||
|     async def main(): | ||||
|         with open_ringbuf( | ||||
|             'test_ringbuf', | ||||
|             buf_size=buf_size | ||||
|         ) as token: | ||||
|             proc_kwargs = { | ||||
|                 'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|             } | ||||
| 
 | ||||
|             common_kwargs = { | ||||
|                 'msg_amount': msg_amount, | ||||
|                 'token': token, | ||||
|             } | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 send_p = await an.start_actor( | ||||
|                     'ring_sender', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs=proc_kwargs | ||||
|                 ) | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_receiver', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs=proc_kwargs | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     send_p.open_context( | ||||
|                         child_write_shm, | ||||
|                         rand_min=rand_min, | ||||
|                         rand_max=rand_max, | ||||
|                         **common_kwargs | ||||
|                     ) as (sctx, total_bytes), | ||||
|                     recv_p.open_context( | ||||
|                         child_read_shm, | ||||
|                         **common_kwargs, | ||||
|                         total_bytes=total_bytes, | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await recv_p.result() | ||||
| 
 | ||||
|                 await send_p.cancel_actor() | ||||
|                 await recv_p.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_blocked_receiver( | ||||
|     ctx: tractor.Context, | ||||
|     token: RBToken | ||||
| ): | ||||
|     async with RingBuffReceiver(token) as receiver: | ||||
|         await ctx.started() | ||||
|         await receiver.receive_some() | ||||
| 
 | ||||
| 
 | ||||
| def test_ring_reader_cancel(): | ||||
|     async def main(): | ||||
|         with open_ringbuf('test_ring_cancel_reader') as token: | ||||
|             async with ( | ||||
|                 tractor.open_nursery() as an, | ||||
|                 RingBuffSender(token) as _sender, | ||||
|             ): | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_blocked_receiver', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs={ | ||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|                     } | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     recv_p.open_context( | ||||
|                         child_blocked_receiver, | ||||
|                         token=token | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await trio.sleep(1) | ||||
|                     await an.cancel() | ||||
| 
 | ||||
| 
 | ||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_blocked_sender( | ||||
|     ctx: tractor.Context, | ||||
|     token: RBToken | ||||
| ): | ||||
|     async with RingBuffSender(token) as sender: | ||||
|         await ctx.started() | ||||
|         await sender.send_all(b'this will wrap') | ||||
| 
 | ||||
| 
 | ||||
| def test_ring_sender_cancel(): | ||||
|     async def main(): | ||||
|         with open_ringbuf( | ||||
|             'test_ring_cancel_sender', | ||||
|             buf_size=1 | ||||
|         ) as token: | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_blocked_sender', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs={ | ||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|                     } | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     recv_p.open_context( | ||||
|                         child_blocked_sender, | ||||
|                         token=token | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await trio.sleep(1) | ||||
|                     await an.cancel() | ||||
| 
 | ||||
| 
 | ||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): | ||||
|         trio.run(main) | ||||
|  | @ -1,240 +0,0 @@ | |||
| ''' | ||||
| Special attention cases for using "infect `asyncio`" mode from a root | ||||
| actor; i.e. not using a std `trio.run()` bootstrap. | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| from functools import partial | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     to_asyncio, | ||||
| ) | ||||
| from tests.test_infected_asyncio import ( | ||||
|     aio_echo_server, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_error_mid_stream', | ||||
|     [ | ||||
|         False, | ||||
|         Exception, | ||||
|         KeyboardInterrupt, | ||||
|     ], | ||||
|     ids='raise_error={}'.format, | ||||
| ) | ||||
| def test_infected_root_actor( | ||||
|     raise_error_mid_stream: bool|Exception, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify you can run the `tractor` runtime with `Actor.is_infected_aio() == True` | ||||
|     in the root actor. | ||||
| 
 | ||||
|     ''' | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2 if not debug_mode else 999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|                 to_asyncio.open_channel_from( | ||||
|                     aio_echo_server, | ||||
|                 ) as (first, chan), | ||||
|             ): | ||||
|                 assert first == 'start' | ||||
| 
 | ||||
|                 for i in range(1000): | ||||
|                     await chan.send(i) | ||||
|                     out = await chan.receive() | ||||
|                     assert out == i | ||||
|                     print(f'asyncio echoing {i}') | ||||
| 
 | ||||
|                     if ( | ||||
|                         raise_error_mid_stream | ||||
|                         and | ||||
|                         i == 500 | ||||
|                     ): | ||||
|                         raise raise_error_mid_stream | ||||
| 
 | ||||
|                     if out is None: | ||||
|                         try: | ||||
|                             out = await chan.receive() | ||||
|                         except trio.EndOfChannel: | ||||
|                             break | ||||
|                         else: | ||||
|                             raise RuntimeError( | ||||
|                                 'aio channel never stopped?' | ||||
|                             ) | ||||
| 
 | ||||
|     if raise_error_mid_stream: | ||||
|         with pytest.raises(raise_error_mid_stream): | ||||
|             tractor.to_asyncio.run_as_asyncio_guest( | ||||
|                 trio_main=_trio_main, | ||||
|             ) | ||||
|     else: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| async def sync_and_err( | ||||
|     # just signature placeholders for compat with | ||||
|     # ``to_asyncio.open_channel_from()`` | ||||
|     to_trio: trio.MemorySendChannel, | ||||
|     from_trio: asyncio.Queue, | ||||
|     ev: asyncio.Event, | ||||
| 
 | ||||
| ): | ||||
|     if to_trio: | ||||
|         to_trio.send_nowait('start') | ||||
| 
 | ||||
|     await ev.wait() | ||||
|     raise RuntimeError('asyncio-side') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'aio_err_trigger', | ||||
|     [ | ||||
|         'before_start_point', | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ], | ||||
|     ids='aio_err_triggered={}'.format | ||||
| ) | ||||
| def test_trio_prestarted_task_bubbles( | ||||
|     aio_err_trigger: str, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def pre_started_err( | ||||
|         raise_err: bool = False, | ||||
|         pre_sleep: float|None = None, | ||||
|         aio_trigger: asyncio.Event|None = None, | ||||
|         task_status=trio.TASK_STATUS_IGNORED, | ||||
|     ): | ||||
|         ''' | ||||
|         Maybe pre-started error then sleep. | ||||
| 
 | ||||
|         ''' | ||||
|         if pre_sleep is not None: | ||||
|             print(f'Sleeping from trio for {pre_sleep!r}s !') | ||||
|             await trio.sleep(pre_sleep) | ||||
| 
 | ||||
|         # signal aio-task to raise JUST AFTER this task | ||||
|         # starts but has not yet `.started()` | ||||
|         if aio_trigger: | ||||
|             print('Signalling aio-task to raise from `trio`!!') | ||||
|             aio_trigger.set() | ||||
| 
 | ||||
|         if raise_err: | ||||
|             print('Raising from trio!') | ||||
|             raise TypeError('trio-side') | ||||
| 
 | ||||
|         task_status.started() | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2 if not debug_mode else 999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             aio_ev = asyncio.Event() | ||||
| 
 | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=False, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|             ): | ||||
|                 # TODO, tests for this with 3.13 egs? | ||||
|                 # from tractor.devx import open_crash_handler | ||||
|                 # with open_crash_handler(): | ||||
|                 async with ( | ||||
|                     # where we'll start a sub-task that errors BEFORE | ||||
|                     # calling `.started()` such that the error should | ||||
|                     # bubble before the guest run terminates! | ||||
|                     trio.open_nursery() as tn, | ||||
| 
 | ||||
|                     # THEN start an infect task which should error just | ||||
|                     # after the trio-side's task does. | ||||
|                     to_asyncio.open_channel_from( | ||||
|                         partial( | ||||
|                             sync_and_err, | ||||
|                             ev=aio_ev, | ||||
|                         ) | ||||
|                     ) as (first, chan), | ||||
|                 ): | ||||
| 
 | ||||
|                     for i in range(5): | ||||
|                         pre_sleep: float|None = None | ||||
|                         last_iter: bool = (i == 4) | ||||
| 
 | ||||
|                         # TODO, missing cases? | ||||
|                         # -[ ] error as well on | ||||
|                         #    'after_start_point' case as well for | ||||
|                         #    another case? | ||||
|                         raise_err: bool = False | ||||
| 
 | ||||
|                         if last_iter: | ||||
|                             raise_err: bool = True | ||||
| 
 | ||||
|                             # trigger aio task to error on next loop | ||||
|                             # tick/checkpoint | ||||
|                             if aio_err_trigger == 'before_start_point': | ||||
|                                 aio_ev.set() | ||||
| 
 | ||||
|                             pre_sleep: float = 0 | ||||
| 
 | ||||
|                         await tn.start( | ||||
|                             pre_started_err, | ||||
|                             raise_err, | ||||
|                             pre_sleep, | ||||
|                             (aio_ev if ( | ||||
|                                     aio_err_trigger == 'after_trio_task_starts' | ||||
|                                     and | ||||
|                                     last_iter | ||||
|                                 ) else None | ||||
|                             ), | ||||
|                         ) | ||||
| 
 | ||||
|                         if ( | ||||
|                             aio_err_trigger == 'after_start_point' | ||||
|                             and | ||||
|                             last_iter | ||||
|                         ): | ||||
|                             aio_ev.set() | ||||
| 
 | ||||
|     # ensure the trio-task's error bubbled despite the aio-side | ||||
|     # having (maybe) errored first. | ||||
|     if aio_err_trigger in ( | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ): | ||||
|         patt: str = 'trio-side' | ||||
|         expect_exc = TypeError | ||||
| 
 | ||||
|     # when aio errors BEFORE (last) trio task is scheduled, we should | ||||
|     # never see anythinb but the aio-side. | ||||
|     else: | ||||
|         patt: str = 'asyncio-side' | ||||
|         expect_exc = RuntimeError | ||||
| 
 | ||||
|     with pytest.raises(expect_exc) as excinfo: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
|     caught_exc = excinfo.value | ||||
|     assert patt in caught_exc.args | ||||
|  | @ -1,108 +0,0 @@ | |||
| ''' | ||||
| Runtime boot/init sanity. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor._exceptions import RuntimeFailure | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_new_root_in_sub( | ||||
|     ctx: tractor.Context, | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with tractor.open_root_actor(): | ||||
|         pass | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'open_root_in', | ||||
|     ['root', 'sub'], | ||||
|     ids='open_2nd_root_in={}'.format, | ||||
| ) | ||||
| def test_only_one_root_actor( | ||||
|     open_root_in: str, | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     Verify we specially fail whenever more then one root actor | ||||
|     is attempted to be opened within an already opened tree. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
| 
 | ||||
|             if open_root_in == 'root': | ||||
|                 async with tractor.open_root_actor( | ||||
|                     registry_addrs=[reg_addr], | ||||
|                 ): | ||||
|                     pass | ||||
| 
 | ||||
|             ptl: tractor.Portal = await an.start_actor( | ||||
|                 name='bad_rooty_boi', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             async with ptl.open_context( | ||||
|                 open_new_root_in_sub, | ||||
|             ) as (ctx, first): | ||||
|                 pass | ||||
| 
 | ||||
|     if open_root_in == 'root': | ||||
|         with pytest.raises( | ||||
|             RuntimeFailure | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|     else: | ||||
|         with pytest.raises( | ||||
|             tractor.RemoteActorError, | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type is RuntimeFailure | ||||
| 
 | ||||
| 
 | ||||
| def test_implicit_root_via_first_nursery( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     The first `ActorNursery` open should implicitly call | ||||
|     `_root.open_root_actor()`. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
|             assert an._implicit_runtime_started | ||||
|             assert tractor.current_actor().aid.name == 'root' | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_runtime_vars_unset( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     Ensure any `._state._runtime_vars` are restored to default values | ||||
|     after the root actor-runtime exits! | ||||
| 
 | ||||
|     ''' | ||||
|     assert not tractor._state._runtime_vars['_debug_mode'] | ||||
|     async def main(): | ||||
|         assert not tractor._state._runtime_vars['_debug_mode'] | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|         ): | ||||
|             assert tractor._state._runtime_vars['_debug_mode'] | ||||
| 
 | ||||
|         # after runtime closure, should be reverted! | ||||
|         assert not tractor._state._runtime_vars['_debug_mode'] | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,8 +1,6 @@ | |||
| ''' | ||||
| RPC (or maybe better labelled as "RTS: remote task scheduling"?) | ||||
| related API and error checks. | ||||
| 
 | ||||
| ''' | ||||
| """ | ||||
| RPC related | ||||
| """ | ||||
| import itertools | ||||
| 
 | ||||
| import pytest | ||||
|  | @ -15,19 +13,9 @@ async def sleep_back_actor( | |||
|     func_name, | ||||
|     func_defined, | ||||
|     exposed_mods, | ||||
|     *, | ||||
|     reg_addr: tuple, | ||||
| ): | ||||
|     if actor_name: | ||||
|         async with tractor.find_actor( | ||||
|             actor_name, | ||||
|             # NOTE: must be set manually since | ||||
|             # the subactor doesn't have the reg_addr | ||||
|             # fixture code run in it! | ||||
|             # TODO: maybe we should just set this once in the | ||||
|             # _state mod and derive to all children? | ||||
|             registry_addrs=[reg_addr], | ||||
|         ) as portal: | ||||
|         async with tractor.find_actor(actor_name) as portal: | ||||
|             try: | ||||
|                 await portal.run(__name__, func_name) | ||||
|             except tractor.RemoteActorError as err: | ||||
|  | @ -36,7 +24,7 @@ async def sleep_back_actor( | |||
|                 if not exposed_mods: | ||||
|                     expect = tractor.ModuleNotExposed | ||||
| 
 | ||||
|                 assert err.boxed_type is expect | ||||
|                 assert err.type is expect | ||||
|                 raise | ||||
|     else: | ||||
|         await trio.sleep(float('inf')) | ||||
|  | @ -54,25 +42,14 @@ async def short_sleep(): | |||
|         (['tmp_mod'], 'import doggy', ModuleNotFoundError), | ||||
|         (['tmp_mod'], '4doggy', SyntaxError), | ||||
|     ], | ||||
|     ids=[ | ||||
|         'no_mods', | ||||
|         'this_mod', | ||||
|         'this_mod_bad_func', | ||||
|         'fail_to_import', | ||||
|         'fail_on_syntax', | ||||
|     ], | ||||
|     ids=['no_mods', 'this_mod', 'this_mod_bad_func', 'fail_to_import', | ||||
|          'fail_on_syntax'], | ||||
| ) | ||||
| def test_rpc_errors( | ||||
|     reg_addr, | ||||
|     to_call, | ||||
|     testdir, | ||||
| ): | ||||
|     ''' | ||||
|     Test errors when making various RPC requests to an actor | ||||
| def test_rpc_errors(arb_addr, to_call, testdir): | ||||
|     """Test errors when making various RPC requests to an actor | ||||
|     that either doesn't have the requested module exposed or doesn't define | ||||
|     the named function. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     exposed_mods, funcname, inside_err = to_call | ||||
|     subactor_exposed_mods = [] | ||||
|     func_defined = globals().get(funcname, False) | ||||
|  | @ -100,13 +77,8 @@ def test_rpc_errors( | |||
| 
 | ||||
|         # spawn a subactor which calls us back | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             arbiter_addr=arb_addr, | ||||
|             enable_modules=exposed_mods.copy(), | ||||
| 
 | ||||
|             # NOTE: will halt test in REPL if uncommented, so only | ||||
|             # do that if actually debugging subactor but keep it | ||||
|             # disabled for the test. | ||||
|             # debug_mode=True, | ||||
|         ) as n: | ||||
| 
 | ||||
|             actor = tractor.current_actor() | ||||
|  | @ -123,7 +95,6 @@ def test_rpc_errors( | |||
|                 exposed_mods=exposed_mods, | ||||
|                 func_defined=True if func_defined else False, | ||||
|                 enable_modules=subactor_exposed_mods, | ||||
|                 reg_addr=reg_addr, | ||||
|             ) | ||||
| 
 | ||||
|     def run(): | ||||
|  | @ -134,20 +105,18 @@ def test_rpc_errors( | |||
|         run() | ||||
|     else: | ||||
|         # underlying errors aren't propagated upwards (yet) | ||||
|         with pytest.raises( | ||||
|             expected_exception=(remote_err, ExceptionGroup), | ||||
|         ) as err: | ||||
|         with pytest.raises(remote_err) as err: | ||||
|             run() | ||||
| 
 | ||||
|         # get raw instance from pytest wrapper | ||||
|         value = err.value | ||||
| 
 | ||||
|         # might get multiple `trio.Cancelled`s as well inside an inception | ||||
|         if isinstance(value, ExceptionGroup): | ||||
|         if isinstance(value, trio.MultiError): | ||||
|             value = next(itertools.dropwhile( | ||||
|                 lambda exc: not isinstance(exc, tractor.RemoteActorError), | ||||
|                 value.exceptions | ||||
|             )) | ||||
| 
 | ||||
|         if getattr(value, 'type', None): | ||||
|             assert value.boxed_type is inside_err | ||||
|             assert value.type is inside_err | ||||
|  |  | |||
|  | @ -1,74 +0,0 @@ | |||
| """ | ||||
| Verifying internal runtime state and undocumented extras. | ||||
| 
 | ||||
| """ | ||||
| import os | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| _file_path: str = '' | ||||
| 
 | ||||
| 
 | ||||
| def unlink_file(): | ||||
|     print('Removing tmp file!') | ||||
|     os.remove(_file_path) | ||||
| 
 | ||||
| 
 | ||||
| async def crash_and_clean_tmpdir( | ||||
|     tmp_file_path: str, | ||||
|     error: bool = True, | ||||
| ): | ||||
|     global _file_path | ||||
|     _file_path = tmp_file_path | ||||
| 
 | ||||
|     actor = tractor.current_actor() | ||||
|     actor.lifetime_stack.callback(unlink_file) | ||||
| 
 | ||||
|     assert os.path.isfile(tmp_file_path) | ||||
|     await trio.sleep(0.1) | ||||
|     if error: | ||||
|         assert 0 | ||||
|     else: | ||||
|         actor.cancel_soon() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'error_in_child', | ||||
|     [True, False], | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_lifetime_stack_wipes_tmpfile( | ||||
|     tmp_path, | ||||
|     error_in_child: bool, | ||||
| ): | ||||
|     child_tmp_file = tmp_path / "child.txt" | ||||
|     child_tmp_file.touch() | ||||
|     assert child_tmp_file.exists() | ||||
|     path = str(child_tmp_file) | ||||
| 
 | ||||
|     try: | ||||
|         with trio.move_on_after(0.5): | ||||
|             async with tractor.open_nursery() as n: | ||||
|                     await (  # inlined portal | ||||
|                         await n.run_in_actor( | ||||
|                             crash_and_clean_tmpdir, | ||||
|                             tmp_file_path=path, | ||||
|                             error=error_in_child, | ||||
|                         ) | ||||
|                     ).result() | ||||
| 
 | ||||
|     except ( | ||||
|         tractor.RemoteActorError, | ||||
|         # tractor.BaseExceptionGroup, | ||||
|         BaseExceptionGroup, | ||||
|     ): | ||||
|         pass | ||||
| 
 | ||||
|     # tmp file should have been wiped by | ||||
|     # teardown stack. | ||||
|     assert not child_tmp_file.exists() | ||||
|  | @ -1,167 +0,0 @@ | |||
| """ | ||||
| Shared mem primitives and APIs. | ||||
| 
 | ||||
| """ | ||||
| import uuid | ||||
| 
 | ||||
| # import numpy | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.ipc._shm import ( | ||||
|     open_shm_list, | ||||
|     attach_shm_list, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_attach_shml_alot( | ||||
|     ctx: tractor.Context, | ||||
|     shm_key: str, | ||||
| ) -> None: | ||||
| 
 | ||||
|     await ctx.started(shm_key) | ||||
| 
 | ||||
|     # now try to attach a boatload of times in a loop.. | ||||
|     for _ in range(1000): | ||||
|         shml = attach_shm_list( | ||||
|             key=shm_key, | ||||
|             readonly=False, | ||||
|         ) | ||||
|         assert shml.shm.name == shm_key | ||||
|         await trio.sleep(0.001) | ||||
| 
 | ||||
| 
 | ||||
| def test_child_attaches_alot(): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
| 
 | ||||
|             # allocate writeable list in parent | ||||
|             key = f'shml_{uuid.uuid4()}' | ||||
|             shml = open_shm_list( | ||||
|                 key=key, | ||||
|             ) | ||||
| 
 | ||||
|             portal = await an.start_actor( | ||||
|                 'shm_attacher', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             async with ( | ||||
|                 portal.open_context( | ||||
|                     child_attach_shml_alot, | ||||
|                     shm_key=shml.key, | ||||
|                 ) as (ctx, start_val), | ||||
|             ): | ||||
|                 assert start_val == key | ||||
|                 await ctx.result() | ||||
| 
 | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_read_shm_list( | ||||
|     ctx: tractor.Context, | ||||
|     shm_key: str, | ||||
|     use_str: bool, | ||||
|     frame_size: int, | ||||
| ) -> None: | ||||
| 
 | ||||
|     # attach in child | ||||
|     shml = attach_shm_list( | ||||
|         key=shm_key, | ||||
|         # dtype=str if use_str else float, | ||||
|     ) | ||||
|     await ctx.started(shml.key) | ||||
| 
 | ||||
|     async with ctx.open_stream() as stream: | ||||
|         async for i in stream: | ||||
|             print(f'(child): reading shm list index: {i}') | ||||
| 
 | ||||
|             if use_str: | ||||
|                 expect = str(float(i)) | ||||
|             else: | ||||
|                 expect = float(i) | ||||
| 
 | ||||
|             if frame_size == 1: | ||||
|                 val = shml[i] | ||||
|                 assert expect == val | ||||
|                 print(f'(child): reading value: {val}') | ||||
|             else: | ||||
|                 frame = shml[i - frame_size:i] | ||||
|                 print(f'(child): reading frame: {frame}') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'use_str', | ||||
|     [False, True], | ||||
|     ids=lambda i: f'use_str_values={i}', | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'frame_size', | ||||
|     [1, 2**6, 2**10], | ||||
|     ids=lambda i: f'frame_size={i}', | ||||
| ) | ||||
| def test_parent_writer_child_reader( | ||||
|     use_str: bool, | ||||
|     frame_size: int, | ||||
| ): | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             # debug_mode=True, | ||||
|         ) as an: | ||||
| 
 | ||||
|             portal = await an.start_actor( | ||||
|                 'shm_reader', | ||||
|                 enable_modules=[__name__], | ||||
|                 debug_mode=True, | ||||
|             ) | ||||
| 
 | ||||
|             # allocate writeable list in parent | ||||
|             key = 'shm_list' | ||||
|             seq_size = int(2 * 2 ** 10) | ||||
|             shml = open_shm_list( | ||||
|                 key=key, | ||||
|                 size=seq_size, | ||||
|                 dtype=str if use_str else float, | ||||
|                 readonly=False, | ||||
|             ) | ||||
| 
 | ||||
|             async with ( | ||||
|                 portal.open_context( | ||||
|                     child_read_shm_list, | ||||
|                     shm_key=key, | ||||
|                     use_str=use_str, | ||||
|                     frame_size=frame_size, | ||||
|                 ) as (ctx, sent), | ||||
| 
 | ||||
|                 ctx.open_stream() as stream, | ||||
|             ): | ||||
| 
 | ||||
|                 assert sent == key | ||||
| 
 | ||||
|                 for i in range(seq_size): | ||||
| 
 | ||||
|                     val = float(i) | ||||
|                     if use_str: | ||||
|                         val = str(val) | ||||
| 
 | ||||
|                     # print(f'(parent): writing {val}') | ||||
|                     shml[i] = val | ||||
| 
 | ||||
|                     # only on frame fills do we | ||||
|                     # signal to the child that a frame's | ||||
|                     # worth is ready. | ||||
|                     if (i % frame_size) == 0: | ||||
|                         print(f'(parent): signalling frame full on {val}') | ||||
|                         await stream.send(i) | ||||
|                 else: | ||||
|                     print(f'(parent): signalling final frame on {val}') | ||||
|                     await stream.send(i) | ||||
| 
 | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,111 +1,83 @@ | |||
| """ | ||||
| Spawning basics | ||||
| 
 | ||||
| """ | ||||
| from functools import partial | ||||
| from typing import ( | ||||
|     Any, | ||||
| ) | ||||
| from typing import Dict, Tuple, Optional | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from tractor._testing import tractor_test | ||||
| from conftest import tractor_test | ||||
| 
 | ||||
| data_to_pass_down = { | ||||
|     'doggy': 10, | ||||
|     'kitty': 4, | ||||
| } | ||||
| data_to_pass_down = {'doggy': 10, 'kitty': 4} | ||||
| 
 | ||||
| 
 | ||||
| async def spawn( | ||||
|     should_be_root: bool, | ||||
|     data: dict, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
|     debug_mode: bool = False, | ||||
|     is_arbiter: bool, | ||||
|     data: Dict, | ||||
|     arb_addr: Tuple[str, int], | ||||
| ): | ||||
|     namespaces = [__name__] | ||||
| 
 | ||||
|     await trio.sleep(0.1) | ||||
|     actor = tractor.current_actor(err_on_no_runtime=False) | ||||
| 
 | ||||
|     if should_be_root: | ||||
|         assert actor is None  # no runtime yet | ||||
|         async with ( | ||||
|             tractor.open_root_actor( | ||||
|                 arbiter_addr=reg_addr, | ||||
|             ), | ||||
|             tractor.open_nursery() as an, | ||||
|         ): | ||||
|             # now runtime exists | ||||
|             actor: tractor.Actor = tractor.current_actor() | ||||
|             assert actor.is_arbiter == should_be_root | ||||
|     async with tractor.open_root_actor( | ||||
|         arbiter_addr=arb_addr, | ||||
|     ): | ||||
| 
 | ||||
|             # spawns subproc here | ||||
|             portal: tractor.Portal = await an.run_in_actor( | ||||
|                 fn=spawn, | ||||
|         actor = tractor.current_actor() | ||||
|         assert actor.is_arbiter == is_arbiter | ||||
|         data = data_to_pass_down | ||||
| 
 | ||||
|                 # spawning args | ||||
|                 name='sub-actor', | ||||
|                 enable_modules=[__name__], | ||||
|         if actor.is_arbiter: | ||||
| 
 | ||||
|                 # passed to a subactor-recursive RPC invoke | ||||
|                 # of this same `spawn()` fn. | ||||
|                 should_be_root=False, | ||||
|                 data=data_to_pass_down, | ||||
|                 reg_addr=reg_addr, | ||||
|             ) | ||||
|             async with tractor.open_nursery( | ||||
|             ) as nursery: | ||||
| 
 | ||||
|             assert len(an._children) == 1 | ||||
|             assert ( | ||||
|                 portal.channel.uid | ||||
|                 in | ||||
|                 tractor.current_actor().ipc_server._peers | ||||
|             ) | ||||
|                 # forks here | ||||
|                 portal = await nursery.run_in_actor( | ||||
|                     spawn, | ||||
|                     is_arbiter=False, | ||||
|                     name='sub-actor', | ||||
|                     data=data, | ||||
|                     arb_addr=arb_addr, | ||||
|                     enable_modules=namespaces, | ||||
|                 ) | ||||
| 
 | ||||
|             # get result from child subactor | ||||
|             result = await portal.result() | ||||
|             assert result == 10 | ||||
|             return result | ||||
|     else: | ||||
|         assert actor.is_arbiter == should_be_root | ||||
|         return 10 | ||||
|                 assert len(nursery._children) == 1 | ||||
|                 assert portal.channel.uid in tractor.current_actor()._peers | ||||
|                 # be sure we can still get the result | ||||
|                 result = await portal.result() | ||||
|                 assert result == 10 | ||||
|                 return result | ||||
|         else: | ||||
|             return 10 | ||||
| 
 | ||||
| 
 | ||||
| def test_run_in_actor_same_func_in_child( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_local_arbiter_subactor_global_state(arb_addr): | ||||
|     result = trio.run( | ||||
|         partial( | ||||
|             spawn, | ||||
|             should_be_root=True, | ||||
|             data=data_to_pass_down, | ||||
|             reg_addr=reg_addr, | ||||
|             debug_mode=debug_mode, | ||||
|         ) | ||||
|         spawn, | ||||
|         True, | ||||
|         data_to_pass_down, | ||||
|         arb_addr, | ||||
|     ) | ||||
|     assert result == 10 | ||||
| 
 | ||||
| 
 | ||||
| async def movie_theatre_question(): | ||||
|     ''' | ||||
|     A question asked in a dark theatre, in a tangent | ||||
|     """A question asked in a dark theatre, in a tangent | ||||
|     (errr, I mean different) process. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     return 'have you ever seen a portal?' | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_movie_theatre_convo(start_method): | ||||
|     ''' | ||||
|     The main ``tractor`` routine. | ||||
|     """The main ``tractor`` routine. | ||||
|     """ | ||||
|     async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery(debug_mode=True) as an: | ||||
| 
 | ||||
|         portal = await an.start_actor( | ||||
|         portal = await n.start_actor( | ||||
|             'frank', | ||||
|             # enable the actor to run funcs from this current module | ||||
|             enable_modules=[__name__], | ||||
|  | @ -121,9 +93,7 @@ async def test_movie_theatre_convo(start_method): | |||
|         await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| async def cellar_door( | ||||
|     return_value: str|None, | ||||
| ): | ||||
| async def cellar_door(return_value: Optional[str]): | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
|  | @ -133,19 +103,17 @@ async def cellar_door( | |||
| ) | ||||
| @tractor_test | ||||
| async def test_most_beautiful_word( | ||||
|     start_method: str, | ||||
|     return_value: Any, | ||||
|     debug_mode: bool, | ||||
|     start_method, | ||||
|     return_value | ||||
| ): | ||||
|     ''' | ||||
|     The main ``tractor`` routine. | ||||
| 
 | ||||
|     ''' | ||||
|     with trio.fail_after(1): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.run_in_actor( | ||||
|         async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|             portal = await n.run_in_actor( | ||||
|                 cellar_door, | ||||
|                 return_value=return_value, | ||||
|                 name='some_linguist', | ||||
|  | @ -171,9 +139,9 @@ async def check_loglevel(level): | |||
| def test_loglevel_propagated_to_subactor( | ||||
|     start_method, | ||||
|     capfd, | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
| ): | ||||
|     if start_method == 'mp_forkserver': | ||||
|     if start_method == 'forkserver': | ||||
|         pytest.skip( | ||||
|             "a bug with `capfd` seems to make forkserver capture not work?") | ||||
| 
 | ||||
|  | @ -183,7 +151,7 @@ def test_loglevel_propagated_to_subactor( | |||
|         async with tractor.open_nursery( | ||||
|             name='arbiter', | ||||
|             start_method=start_method, | ||||
|             arbiter_addr=reg_addr, | ||||
|             arbiter_addr=arb_addr, | ||||
| 
 | ||||
|         ) as tn: | ||||
|             await tn.run_in_actor( | ||||
|  |  | |||
|  | @ -7,10 +7,9 @@ import platform | |||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.testing import tractor_test | ||||
| import pytest | ||||
| 
 | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| def test_must_define_ctx(): | ||||
| 
 | ||||
|  | @ -38,13 +37,10 @@ async def async_gen_stream(sequence): | |||
|     assert cs.cancelled_caught | ||||
| 
 | ||||
| 
 | ||||
| # TODO: deprecated either remove entirely | ||||
| # or re-impl in terms of `MsgStream` one-sides | ||||
| # wrapper, but at least remove `Portal.open_stream_from()` | ||||
| @tractor.stream | ||||
| async def context_stream( | ||||
|     ctx: tractor.Context, | ||||
|     sequence: list[int], | ||||
|     sequence | ||||
| ): | ||||
|     for i in sequence: | ||||
|         await ctx.send_yield(i) | ||||
|  | @ -58,7 +54,7 @@ async def context_stream( | |||
| 
 | ||||
| 
 | ||||
| async def stream_from_single_subactor( | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
|     start_method, | ||||
|     stream_func, | ||||
| ): | ||||
|  | @ -67,7 +63,7 @@ async def stream_from_single_subactor( | |||
|     # only one per host address, spawns an actor if None | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         registry_addrs=[reg_addr], | ||||
|         arbiter_addr=arb_addr, | ||||
|         start_method=start_method, | ||||
|     ) as nursery: | ||||
| 
 | ||||
|  | @ -118,13 +114,13 @@ async def stream_from_single_subactor( | |||
| @pytest.mark.parametrize( | ||||
|     'stream_func', [async_gen_stream, context_stream] | ||||
| ) | ||||
| def test_stream_from_single_subactor(reg_addr, start_method, stream_func): | ||||
| def test_stream_from_single_subactor(arb_addr, start_method, stream_func): | ||||
|     """Verify streaming from a spawned async generator. | ||||
|     """ | ||||
|     trio.run( | ||||
|         partial( | ||||
|             stream_from_single_subactor, | ||||
|             reg_addr, | ||||
|             arb_addr, | ||||
|             start_method, | ||||
|             stream_func=stream_func, | ||||
|         ), | ||||
|  | @ -228,51 +224,33 @@ async def a_quadruple_example(): | |||
|         return result_stream | ||||
| 
 | ||||
| 
 | ||||
| async def cancel_after(wait, reg_addr): | ||||
|     async with tractor.open_root_actor(registry_addrs=[reg_addr]): | ||||
| async def cancel_after(wait, arb_addr): | ||||
|     async with tractor.open_root_actor(arbiter_addr=arb_addr): | ||||
|         with trio.move_on_after(wait): | ||||
|             return await a_quadruple_example() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='module') | ||||
| def time_quad_ex( | ||||
|     reg_addr: tuple, | ||||
|     ci_env: bool, | ||||
|     spawn_backend: str, | ||||
| ): | ||||
| def time_quad_ex(arb_addr, ci_env, spawn_backend): | ||||
|     if spawn_backend == 'mp': | ||||
|         ''' | ||||
|         no idea but the  mp *nix runs are flaking out here often... | ||||
| 
 | ||||
|         ''' | ||||
|         """no idea but the  mp *nix runs are flaking out here often... | ||||
|         """ | ||||
|         pytest.skip("Test is too flaky on mp in CI") | ||||
| 
 | ||||
|     timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4 | ||||
|     start = time.time() | ||||
|     results = trio.run(cancel_after, timeout, reg_addr) | ||||
|     results = trio.run(cancel_after, timeout, arb_addr) | ||||
|     diff = time.time() - start | ||||
|     assert results | ||||
|     return results, diff | ||||
| 
 | ||||
| 
 | ||||
| def test_a_quadruple_example( | ||||
|     time_quad_ex: tuple, | ||||
|     ci_env: bool, | ||||
|     spawn_backend: str, | ||||
| ): | ||||
|     ''' | ||||
|     This also serves as a kind of "we'd like to be this fast test". | ||||
| def test_a_quadruple_example(time_quad_ex, ci_env, spawn_backend): | ||||
|     """This also serves as a kind of "we'd like to be this fast test".""" | ||||
| 
 | ||||
|     ''' | ||||
|     results, diff = time_quad_ex | ||||
|     assert results | ||||
|     this_fast = ( | ||||
|         6 if platform.system() in ( | ||||
|             'Windows', | ||||
|             'Darwin', | ||||
|         ) | ||||
|         else 3 | ||||
|     ) | ||||
|     this_fast = 6 if platform.system() in ('Windows', 'Darwin') else 2.666 | ||||
|     assert diff < this_fast | ||||
| 
 | ||||
| 
 | ||||
|  | @ -281,14 +259,14 @@ def test_a_quadruple_example( | |||
|     list(map(lambda i: i/10, range(3, 9))) | ||||
| ) | ||||
| def test_not_fast_enough_quad( | ||||
|     reg_addr, time_quad_ex, cancel_delay, ci_env, spawn_backend | ||||
|     arb_addr, time_quad_ex, cancel_delay, ci_env, spawn_backend | ||||
| ): | ||||
|     """Verify we can cancel midway through the quad example and all actors | ||||
|     cancel gracefully. | ||||
|     """ | ||||
|     results, diff = time_quad_ex | ||||
|     delay = max(diff - cancel_delay, 0) | ||||
|     results = trio.run(cancel_after, delay, reg_addr) | ||||
|     results = trio.run(cancel_after, delay, arb_addr) | ||||
|     system = platform.system() | ||||
|     if system in ('Windows', 'Darwin') and results is not None: | ||||
|         # In CI envoirments it seems later runs are quicker then the first | ||||
|  | @ -301,7 +279,7 @@ def test_not_fast_enough_quad( | |||
| 
 | ||||
| @tractor_test | ||||
| async def test_respawn_consumer_task( | ||||
|     reg_addr, | ||||
|     arb_addr, | ||||
|     spawn_backend, | ||||
|     loglevel, | ||||
| ): | ||||
Some files were not shown because too many files have changed in this diff Show More
		Loading…
	
		Reference in New Issue