Compare commits
	
		
			No commits in common. "main" and "ctx_cancel_semantics_and_overruns" have entirely different histories. 
		
	
	
		
			main
			...
			ctx_cancel
		
	
		|  | @ -8,70 +8,46 @@ on: | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
| 
 | 
 | ||||||
| jobs: | jobs: | ||||||
|   # ------ sdist ------ | 
 | ||||||
|  |   mypy: | ||||||
|  |     name: 'MyPy' | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  | 
 | ||||||
|  |     steps: | ||||||
|  |       - name: Checkout | ||||||
|  |         uses: actions/checkout@v2 | ||||||
|  | 
 | ||||||
|  |       - name: Setup python | ||||||
|  |         uses: actions/setup-python@v2 | ||||||
|  |         with: | ||||||
|  |           python-version: '3.10' | ||||||
|  | 
 | ||||||
|  |       - name: Install dependencies | ||||||
|  |         run: pip install -U . --upgrade-strategy eager -r requirements-test.txt | ||||||
|  | 
 | ||||||
|  |       - name: Run MyPy check | ||||||
|  |         run: mypy tractor/ --ignore-missing-imports --show-traceback | ||||||
|  | 
 | ||||||
|   # test that we can generate a software distribution and install it |   # test that we can generate a software distribution and install it | ||||||
|   # thus avoid missing file issues after packaging. |   # thus avoid missing file issues after packaging. | ||||||
|   # |  | ||||||
|   # -[x] produce sdist with uv |  | ||||||
|   # ------ - ------ |  | ||||||
|   sdist-linux: |   sdist-linux: | ||||||
|     name: 'sdist' |     name: 'sdist' | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| 
 | 
 | ||||||
|     steps: |     steps: | ||||||
|       - name: Checkout |       - name: Checkout | ||||||
|         uses: actions/checkout@v4 |         uses: actions/checkout@v2 | ||||||
| 
 | 
 | ||||||
|       - name: Install latest uv |       - name: Setup python | ||||||
|         uses: astral-sh/setup-uv@v6 |         uses: actions/setup-python@v2 | ||||||
|  |         with: | ||||||
|  |           python-version: '3.10' | ||||||
| 
 | 
 | ||||||
|       - name: Build sdist as tar.gz |       - name: Build sdist | ||||||
|         run: uv build --sdist --python=3.13 |         run: python setup.py sdist --formats=zip | ||||||
| 
 | 
 | ||||||
|       - name: Install sdist from .tar.gz |       - name: Install sdist from .zips | ||||||
|         run: python -m pip install dist/*.tar.gz |         run: python -m pip install dist/*.zip | ||||||
| 
 |  | ||||||
|   # ------ type-check ------ |  | ||||||
|   # mypy: |  | ||||||
|   #   name: 'MyPy' |  | ||||||
|   #   runs-on: ubuntu-latest |  | ||||||
| 
 |  | ||||||
|   #   steps: |  | ||||||
|   #     - name: Checkout |  | ||||||
|   #       uses: actions/checkout@v4 |  | ||||||
| 
 |  | ||||||
|   #     - name: Install latest uv |  | ||||||
|   #       uses: astral-sh/setup-uv@v6 |  | ||||||
| 
 |  | ||||||
|   #     # faster due to server caching? |  | ||||||
|   #     # https://docs.astral.sh/uv/guides/integration/github/#setting-up-python |  | ||||||
|   #     - name: "Set up Python" |  | ||||||
|   #       uses: actions/setup-python@v6 |  | ||||||
|   #       with: |  | ||||||
|   #         python-version-file: "pyproject.toml" |  | ||||||
| 
 |  | ||||||
|   #     # w uv |  | ||||||
|   #     # - name: Set up Python |  | ||||||
|   #     #   run: uv python install |  | ||||||
| 
 |  | ||||||
|   #     - name: Setup uv venv |  | ||||||
|   #       run: uv venv .venv --python=3.13 |  | ||||||
| 
 |  | ||||||
|   #     - name: Install |  | ||||||
|   #       run: uv sync --dev |  | ||||||
| 
 |  | ||||||
|   #     # TODO, ty cmd over repo |  | ||||||
|   #     # - name: type check with ty |  | ||||||
|   #     #   run: ty ./tractor/ |  | ||||||
| 
 |  | ||||||
|   #     # - uses: actions/cache@v3 |  | ||||||
|   #     #     name: Cache uv virtenv as default .venv |  | ||||||
|   #     #     with: |  | ||||||
|   #     #       path: ./.venv |  | ||||||
|   #     #       key: venv-${{ hashFiles('uv.lock') }} |  | ||||||
| 
 |  | ||||||
|   #     - name: Run MyPy check |  | ||||||
|   #       run: mypy tractor/ --ignore-missing-imports --show-traceback |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|   testing-linux: |   testing-linux: | ||||||
|  | @ -83,45 +59,32 @@ jobs: | ||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         os: [ubuntu-latest] |         os: [ubuntu-latest] | ||||||
|         python-version: ['3.13'] |         python: ['3.10'] | ||||||
|         spawn_backend: [ |         spawn_backend: [ | ||||||
|           'trio', |           'trio', | ||||||
|           # 'mp_spawn', |           'mp_spawn', | ||||||
|           # 'mp_forkserver', |           'mp_forkserver', | ||||||
|         ] |         ] | ||||||
| 
 | 
 | ||||||
|     steps: |     steps: | ||||||
| 
 | 
 | ||||||
|       - uses: actions/checkout@v4 |       - name: Checkout | ||||||
|  |         uses: actions/checkout@v2 | ||||||
| 
 | 
 | ||||||
|       - name: 'Install uv + py-${{ matrix.python-version }}' |       - name: Setup python | ||||||
|         uses: astral-sh/setup-uv@v6 |         uses: actions/setup-python@v2 | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ matrix.python-version }} |           python-version: '${{ matrix.python }}' | ||||||
| 
 | 
 | ||||||
|       # GH way.. faster? |       - name: Install dependencies | ||||||
|       # - name: setup-python@v6 |         run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||||
|       #   uses: actions/setup-python@v6 |  | ||||||
|       #   with: |  | ||||||
|       #     python-version: '${{ matrix.python-version }}' |  | ||||||
| 
 | 
 | ||||||
|       # consider caching for speedups? |       - name: List dependencies | ||||||
|       # https://docs.astral.sh/uv/guides/integration/github/#caching |         run: pip list | ||||||
| 
 |  | ||||||
|       - name: Install the project w uv |  | ||||||
|         run: uv sync --all-extras --dev |  | ||||||
| 
 |  | ||||||
|       # - name: Install dependencies |  | ||||||
|       #   run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager |  | ||||||
| 
 |  | ||||||
|       - name: List deps tree |  | ||||||
|         run: uv tree |  | ||||||
| 
 | 
 | ||||||
|       - name: Run tests |       - name: Run tests | ||||||
|         run: uv run pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx |         run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx | ||||||
| 
 | 
 | ||||||
|   # XXX legacy NOTE XXX |  | ||||||
|   # |  | ||||||
|   # We skip 3.10 on windows for now due to not having any collabs to |   # We skip 3.10 on windows for now due to not having any collabs to | ||||||
|   # debug the CI failures. Anyone wanting to hack and solve them is very |   # debug the CI failures. Anyone wanting to hack and solve them is very | ||||||
|   # welcome, but our primary user base is not using that OS. |   # welcome, but our primary user base is not using that OS. | ||||||
|  |  | ||||||
							
								
								
									
										19
									
								
								default.nix
								
								
								
								
							
							
						
						
									
										19
									
								
								default.nix
								
								
								
								
							|  | @ -1,19 +0,0 @@ | ||||||
| { pkgs ? import <nixpkgs> {} }: |  | ||||||
| let |  | ||||||
|   nativeBuildInputs = with pkgs; [ |  | ||||||
|     stdenv.cc.cc.lib |  | ||||||
|     uv |  | ||||||
|   ]; |  | ||||||
| 
 |  | ||||||
| in |  | ||||||
| pkgs.mkShell { |  | ||||||
|   inherit nativeBuildInputs; |  | ||||||
| 
 |  | ||||||
|   LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath nativeBuildInputs; |  | ||||||
|   TMPDIR = "/tmp"; |  | ||||||
| 
 |  | ||||||
|   shellHook = '' |  | ||||||
|     set -e |  | ||||||
|     uv venv .venv --python=3.12 |  | ||||||
|   ''; |  | ||||||
| } |  | ||||||
							
								
								
									
										188
									
								
								docs/README.rst
								
								
								
								
							
							
						
						
									
										188
									
								
								docs/README.rst
								
								
								
								
							|  | @ -1,126 +1,47 @@ | ||||||
| |logo| ``tractor``: distributed structurred concurrency | |logo| ``tractor``: next-gen Python parallelism | ||||||
| 
 | 
 | ||||||
| ``tractor`` is a `structured concurrency`_ (SC), multi-processing_ runtime built on trio_. | |gh_actions| | ||||||
|  | |docs| | ||||||
| 
 | 
 | ||||||
| Fundamentally, ``tractor`` provides parallelism via | ``tractor`` is a `structured concurrent`_, multi-processing_ runtime | ||||||
| ``trio``-"*actors*": independent Python **processes** (i.e. | built on trio_. | ||||||
| *non-shared-memory threads*) which can schedule ``trio`` tasks whilst | 
 | ||||||
| maintaining *end-to-end SC* inside a *distributed supervision tree*. | Fundamentally, ``tractor`` gives you parallelism via | ||||||
|  | ``trio``-"*actors*": independent Python processes (aka | ||||||
|  | non-shared-memory threads) which maintain structured | ||||||
|  | concurrency (SC) *end-to-end* inside a *supervision tree*. | ||||||
| 
 | 
 | ||||||
| Cross-process (and thus cross-host) SC is accomplished through the | Cross-process (and thus cross-host) SC is accomplished through the | ||||||
| combined use of our, | combined use of our "actor nurseries_" and an "SC-transitive IPC | ||||||
| 
 | protocol" constructed on top of multiple Pythons each running a ``trio`` | ||||||
| - "actor nurseries_" which provide for spawning multiple, and | scheduled runtime - a call to ``trio.run()``. | ||||||
|   possibly nested, Python processes each running a ``trio`` scheduled |  | ||||||
|   runtime - a call to ``trio.run()``, |  | ||||||
| - an "SC-transitive supervision protocol" enforced as an |  | ||||||
|   IPC-message-spec encapsulating all RPC-dialogs. |  | ||||||
| 
 | 
 | ||||||
| We believe the system adheres to the `3 axioms`_ of an "`actor model`_" | We believe the system adheres to the `3 axioms`_ of an "`actor model`_" | ||||||
| but likely **does not** look like what **you** probably *think* an "actor | but likely *does not* look like what *you* probably think an "actor | ||||||
| model" looks like, and that's **intentional**. | model" looks like, and that's *intentional*. | ||||||
| 
 | 
 | ||||||
| 
 | The first step to grok ``tractor`` is to get the basics of ``trio`` down. | ||||||
| Where do i start!? | A great place to start is the `trio docs`_ and this `blog post`_. | ||||||
| ------------------ |  | ||||||
| The first step to grok ``tractor`` is to get an intermediate |  | ||||||
| knowledge of ``trio`` and **structured concurrency** B) |  | ||||||
| 
 |  | ||||||
| Some great places to start are, |  | ||||||
| 
 |  | ||||||
| - the seminal `blog post`_ |  | ||||||
| - obviously the `trio docs`_ |  | ||||||
| - wikipedia's nascent SC_ page |  | ||||||
| - the fancy diagrams @ libdill-docs_ |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Features | Features | ||||||
| -------- | -------- | ||||||
| - **It's just** a ``trio`` API! | - **It's just** a ``trio`` API | ||||||
| - *Infinitely nesteable* process trees running embedded ``trio`` tasks. | - *Infinitely nesteable* process trees | ||||||
| - Swappable, OS-specific, process spawning via multiple backends. | - Builtin IPC streaming APIs with task fan-out broadcasting | ||||||
| - Modular IPC stack, allowing for custom interchange formats (eg. | - A "native" multi-core debugger REPL using `pdbp`_ (a fork & fix of | ||||||
|   as offered from `msgspec`_), varied transport protocols (TCP, RUDP, |   `pdb++`_ thanks to @mdmintz!) | ||||||
|   QUIC, wireguard), and OS-env specific higher-perf primitives (UDS, | - Support for a swappable, OS specific, process spawning layer | ||||||
|   shm-ring-buffers). | - A modular transport stack, allowing for custom serialization (eg. with | ||||||
| - Optionally distributed_: all IPC and RPC APIs work over multi-host |   `msgspec`_), communications protocols, and environment specific IPC | ||||||
|   transports the same as local. |   primitives | ||||||
| - Builtin high-level streaming API that enables your app to easily | - Support for spawning process-level-SC, inter-loop one-to-one-task oriented | ||||||
|   leverage the benefits of a "`cheap or nasty`_" `(un)protocol`_. |   ``asyncio`` actors via "infected ``asyncio``" mode | ||||||
| - A "native UX" around a multi-process safe debugger REPL using | - `structured chadcurrency`_ from the ground up | ||||||
|   `pdbp`_ (a fork & fix of `pdb++`_) |  | ||||||
| - "Infected ``asyncio``" mode: support for starting an actor's |  | ||||||
|   runtime as a `guest`_ on the ``asyncio`` loop allowing us to |  | ||||||
|   provide stringent SC-style ``trio.Task``-supervision around any |  | ||||||
|   ``asyncio.Task`` spawned via our ``tractor.to_asyncio`` APIs. |  | ||||||
| - A **very naive** and still very much work-in-progress inter-actor |  | ||||||
|   `discovery`_ sys with plans to support multiple `modern protocol`_ |  | ||||||
|   approaches. |  | ||||||
| - Various ``trio`` extension APIs via ``tractor.trionics`` such as, |  | ||||||
|   - task fan-out `broadcasting`_, |  | ||||||
|   - multi-task-single-resource-caching and fan-out-to-multi |  | ||||||
|     ``__aenter__()`` APIs for ``@acm`` functions, |  | ||||||
|   - (WIP) a ``TaskMngr``: one-cancels-one style nursery supervisor. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Status of `main` / infra |  | ||||||
| ------------------------ |  | ||||||
| 
 |  | ||||||
| - |gh_actions| |  | ||||||
| - |docs| |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Install |  | ||||||
| ------- |  | ||||||
| ``tractor`` is still in a *alpha-near-beta-stage* for many |  | ||||||
| of its subsystems, however we are very close to having a stable |  | ||||||
| lowlevel runtime and API. |  | ||||||
| 
 |  | ||||||
| As such, it's currently recommended that you clone and install the |  | ||||||
| repo from source:: |  | ||||||
| 
 |  | ||||||
|     pip install git+git://github.com/goodboy/tractor.git |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| We use the very hip `uv`_ for project mgmt:: |  | ||||||
| 
 |  | ||||||
|     git clone https://github.com/goodboy/tractor.git |  | ||||||
|     cd tractor |  | ||||||
|     uv sync --dev |  | ||||||
|     uv run python examples/rpc_bidir_streaming.py |  | ||||||
| 
 |  | ||||||
| Consider activating a virtual/project-env before starting to hack on |  | ||||||
| the code base:: |  | ||||||
| 
 |  | ||||||
|     # you could use plain ol' venvs |  | ||||||
|     # https://docs.astral.sh/uv/pip/environments/ |  | ||||||
|     uv venv tractor_py313 --python 3.13 |  | ||||||
| 
 |  | ||||||
|     # but @goodboy prefers the more explicit (and shell agnostic) |  | ||||||
|     # https://docs.astral.sh/uv/configuration/environment/#uv_project_environment |  | ||||||
|     UV_PROJECT_ENVIRONMENT="tractor_py313 |  | ||||||
| 
 |  | ||||||
|     # hint hint, enter @goodboy's fave shell B) |  | ||||||
|     uv run --dev xonsh |  | ||||||
| 
 |  | ||||||
| Alongside all this we ofc offer "releases" on PyPi:: |  | ||||||
| 
 |  | ||||||
|     pip install tractor |  | ||||||
| 
 |  | ||||||
| Just note that YMMV since the main git branch is often much further |  | ||||||
| ahead then any latest release. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Example codez |  | ||||||
| ------------- |  | ||||||
| In ``tractor``'s (very lacking) documention we prefer to point to |  | ||||||
| example scripts in the repo over duplicating them in docs, but with |  | ||||||
| that in mind here are some definitive snippets to try and hook you |  | ||||||
| into digging deeper. |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Run a func in a process | Run a func in a process | ||||||
| *********************** | ----------------------- | ||||||
| Use ``trio``'s style of focussing on *tasks as functions*: | Use ``trio``'s style of focussing on *tasks as functions*: | ||||||
| 
 | 
 | ||||||
| .. code:: python | .. code:: python | ||||||
|  | @ -178,7 +99,7 @@ might want to check out `trio-parallel`_. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Zombie safe: self-destruct a process tree | Zombie safe: self-destruct a process tree | ||||||
| ***************************************** | ----------------------------------------- | ||||||
| ``tractor`` tries to protect you from zombies, no matter what. | ``tractor`` tries to protect you from zombies, no matter what. | ||||||
| 
 | 
 | ||||||
| .. code:: python | .. code:: python | ||||||
|  | @ -234,7 +155,7 @@ it **is a bug**. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| "Native" multi-process debugging | "Native" multi-process debugging | ||||||
| ******************************** | -------------------------------- | ||||||
| Using the magic of `pdbp`_ and our internal IPC, we've | Using the magic of `pdbp`_ and our internal IPC, we've | ||||||
| been able to create a native feeling debugging experience for | been able to create a native feeling debugging experience for | ||||||
| any (sub-)process in your ``tractor`` tree. | any (sub-)process in your ``tractor`` tree. | ||||||
|  | @ -289,7 +210,7 @@ We're hoping to add a respawn-from-repl system soon! | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| SC compatible bi-directional streaming | SC compatible bi-directional streaming | ||||||
| ************************************** | -------------------------------------- | ||||||
| Yes, you saw it here first; we provide 2-way streams | Yes, you saw it here first; we provide 2-way streams | ||||||
| with reliable, transitive setup/teardown semantics. | with reliable, transitive setup/teardown semantics. | ||||||
| 
 | 
 | ||||||
|  | @ -381,7 +302,7 @@ hear your thoughts on! | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Worker poolz are easy peasy | Worker poolz are easy peasy | ||||||
| *************************** | --------------------------- | ||||||
| The initial ask from most new users is *"how do I make a worker | The initial ask from most new users is *"how do I make a worker | ||||||
| pool thing?"*. | pool thing?"*. | ||||||
| 
 | 
 | ||||||
|  | @ -403,10 +324,10 @@ This uses no extra threads, fancy semaphores or futures; all we need | ||||||
| is ``tractor``'s IPC! | is ``tractor``'s IPC! | ||||||
| 
 | 
 | ||||||
| "Infected ``asyncio``" mode | "Infected ``asyncio``" mode | ||||||
| *************************** | --------------------------- | ||||||
| Have a bunch of ``asyncio`` code you want to force to be SC at the process level? | Have a bunch of ``asyncio`` code you want to force to be SC at the process level? | ||||||
| 
 | 
 | ||||||
| Check out our experimental system for `guest`_-mode controlled | Check out our experimental system for `guest-mode`_ controlled | ||||||
| ``asyncio`` actors: | ``asyncio`` actors: | ||||||
| 
 | 
 | ||||||
| .. code:: python | .. code:: python | ||||||
|  | @ -512,7 +433,7 @@ We need help refining the `asyncio`-side channel API to be more | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Higher level "cluster" APIs | Higher level "cluster" APIs | ||||||
| *************************** | --------------------------- | ||||||
| To be extra terse the ``tractor`` devs have started hacking some "higher | To be extra terse the ``tractor`` devs have started hacking some "higher | ||||||
| level" APIs for managing actor trees/clusters. These interfaces should | level" APIs for managing actor trees/clusters. These interfaces should | ||||||
| generally be condsidered provisional for now but we encourage you to try | generally be condsidered provisional for now but we encourage you to try | ||||||
|  | @ -569,6 +490,18 @@ spawn a flat cluster: | ||||||
| .. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py | .. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | Install | ||||||
|  | ------- | ||||||
|  | From PyPi:: | ||||||
|  | 
 | ||||||
|  |     pip install tractor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | From git:: | ||||||
|  | 
 | ||||||
|  |     pip install git+git://github.com/goodboy/tractor.git | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| Under the hood | Under the hood | ||||||
| -------------- | -------------- | ||||||
| ``tractor`` is an attempt to pair trionic_ `structured concurrency`_ with | ``tractor`` is an attempt to pair trionic_ `structured concurrency`_ with | ||||||
|  | @ -660,7 +593,6 @@ matrix seems too hip, we're also mostly all in the the `trio gitter | ||||||
| channel`_! | channel`_! | ||||||
| 
 | 
 | ||||||
| .. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228 | .. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228 | ||||||
| .. _distributed: https://en.wikipedia.org/wiki/Distributed_computing |  | ||||||
| .. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing | .. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing | ||||||
| .. _trio: https://github.com/python-trio/trio | .. _trio: https://github.com/python-trio/trio | ||||||
| .. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements | .. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements | ||||||
|  | @ -672,32 +604,24 @@ channel`_! | ||||||
| .. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s | .. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s | ||||||
| .. _trio gitter channel: https://gitter.im/python-trio/general | .. _trio gitter channel: https://gitter.im/python-trio/general | ||||||
| .. _matrix channel: https://matrix.to/#/!tractor:matrix.org | .. _matrix channel: https://matrix.to/#/!tractor:matrix.org | ||||||
| .. _broadcasting: https://github.com/goodboy/tractor/pull/229 |  | ||||||
| .. _modern procotol: https://en.wikipedia.org/wiki/Rendezvous_protocol |  | ||||||
| .. _pdbp: https://github.com/mdmintz/pdbp | .. _pdbp: https://github.com/mdmintz/pdbp | ||||||
| .. _pdb++: https://github.com/pdbpp/pdbpp | .. _pdb++: https://github.com/pdbpp/pdbpp | ||||||
| .. _cheap or nasty: https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern | .. _guest mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||||
| .. _(un)protocol: https://zguide.zeromq.org/docs/chapter7/#Unprotocols |  | ||||||
| .. _discovery: https://zguide.zeromq.org/docs/chapter8/#Discovery |  | ||||||
| .. _modern protocol: https://en.wikipedia.org/wiki/Rendezvous_protocol |  | ||||||
| .. _messages: https://en.wikipedia.org/wiki/Message_passing | .. _messages: https://en.wikipedia.org/wiki/Message_passing | ||||||
| .. _trio docs: https://trio.readthedocs.io/en/latest/ | .. _trio docs: https://trio.readthedocs.io/en/latest/ | ||||||
| .. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/ | .. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/ | ||||||
| .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||||
| .. _SC: https://en.wikipedia.org/wiki/Structured_concurrency | .. _structured chadcurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||||
| .. _libdill-docs: https://sustrik.github.io/libdill/structured-concurrency.html | .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||||
| .. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony | .. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony | ||||||
| .. _async generators: https://www.python.org/dev/peps/pep-0525/ | .. _async generators: https://www.python.org/dev/peps/pep-0525/ | ||||||
| .. _trio-parallel: https://github.com/richardsheridan/trio-parallel | .. _trio-parallel: https://github.com/richardsheridan/trio-parallel | ||||||
| .. _uv: https://docs.astral.sh/uv/ |  | ||||||
| .. _msgspec: https://jcristharif.com/msgspec/ | .. _msgspec: https://jcristharif.com/msgspec/ | ||||||
| .. _guest: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | .. _guest-mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||||
| 
 | 
 | ||||||
| .. | 
 | ||||||
|    NOTE, on generating badge links from the UI | .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fgoodboy%2Ftractor%2Fbadge&style=popout-square | ||||||
|    https://docs.github.com/en/actions/how-tos/monitoring-and-troubleshooting-workflows/monitoring-workflows/adding-a-workflow-status-badge?ref=gitguardian-blog-automated-secrets-detection#using-the-ui |     :target: https://actions-badge.atrox.dev/goodboy/tractor/goto | ||||||
| .. |gh_actions| image:: https://github.com/goodboy/tractor/actions/workflows/ci.yml/badge.svg?branch=main |  | ||||||
|     :target: https://github.com/goodboy/tractor/actions/workflows/ci.yml |  | ||||||
| 
 | 
 | ||||||
| .. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest | .. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest | ||||||
|     :target: https://tractor.readthedocs.io/en/latest/?badge=latest |     :target: https://tractor.readthedocs.io/en/latest/?badge=latest | ||||||
|  |  | ||||||
|  | @ -16,18 +16,80 @@ from tractor import ( | ||||||
|     ContextCancelled, |     ContextCancelled, | ||||||
|     MsgStream, |     MsgStream, | ||||||
|     _testing, |     _testing, | ||||||
|     trionics, |  | ||||||
| ) | ) | ||||||
| import trio | import trio | ||||||
| import pytest | import pytest | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | async def break_ipc( | ||||||
|  |     stream: MsgStream, | ||||||
|  |     method: str|None = None, | ||||||
|  |     pre_close: bool = False, | ||||||
|  | 
 | ||||||
|  |     def_method: str = 'eof', | ||||||
|  | 
 | ||||||
|  | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     XXX: close the channel right after an error is raised | ||||||
|  |     purposely breaking the IPC transport to make sure the parent | ||||||
|  |     doesn't get stuck in debug or hang on the connection join. | ||||||
|  |     this more or less simulates an infinite msg-receive hang on | ||||||
|  |     the other end. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # close channel via IPC prot msging before | ||||||
|  |     # any transport breakage | ||||||
|  |     if pre_close: | ||||||
|  |         await stream.aclose() | ||||||
|  | 
 | ||||||
|  |     method: str = method or def_method | ||||||
|  |     print( | ||||||
|  |         '#################################\n' | ||||||
|  |         'Simulating CHILD-side IPC BREAK!\n' | ||||||
|  |         f'method: {method}\n' | ||||||
|  |         f'pre `.aclose()`: {pre_close}\n' | ||||||
|  |         '#################################\n' | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     match method: | ||||||
|  |         case 'trans_aclose': | ||||||
|  |             await stream._ctx.chan.transport.stream.aclose() | ||||||
|  | 
 | ||||||
|  |         case 'eof': | ||||||
|  |             await stream._ctx.chan.transport.stream.send_eof() | ||||||
|  | 
 | ||||||
|  |         case 'msg': | ||||||
|  |             await stream._ctx.chan.send(None) | ||||||
|  | 
 | ||||||
|  |         # TODO: the actual real-world simulated cases like | ||||||
|  |         # transport layer hangs and/or lower layer 2-gens type | ||||||
|  |         # scenarios.. | ||||||
|  |         # | ||||||
|  |         # -[ ] already have some issues for this general testing | ||||||
|  |         # area: | ||||||
|  |         #  - https://github.com/goodboy/tractor/issues/97 | ||||||
|  |         #  - https://github.com/goodboy/tractor/issues/124 | ||||||
|  |         #   - PR from @guille: | ||||||
|  |         #     https://github.com/goodboy/tractor/pull/149 | ||||||
|  |         # case 'hang': | ||||||
|  |         # TODO: framework research: | ||||||
|  |         # | ||||||
|  |         # - https://github.com/GuoTengda1993/pynetem | ||||||
|  |         # - https://github.com/shopify/toxiproxy | ||||||
|  |         # - https://manpages.ubuntu.com/manpages/trusty/man1/wirefilter.1.html | ||||||
|  | 
 | ||||||
|  |         case _: | ||||||
|  |             raise RuntimeError( | ||||||
|  |                 f'IPC break method unsupported: {method}' | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| async def break_ipc_then_error( | async def break_ipc_then_error( | ||||||
|     stream: MsgStream, |     stream: MsgStream, | ||||||
|     break_ipc_with: str|None = None, |     break_ipc_with: str|None = None, | ||||||
|     pre_close: bool = False, |     pre_close: bool = False, | ||||||
| ): | ): | ||||||
|     await _testing.break_ipc( |     await break_ipc( | ||||||
|         stream=stream, |         stream=stream, | ||||||
|         method=break_ipc_with, |         method=break_ipc_with, | ||||||
|         pre_close=pre_close, |         pre_close=pre_close, | ||||||
|  | @ -59,31 +121,25 @@ async def recv_and_spawn_net_killers( | ||||||
|     Receive stream msgs and spawn some IPC killers mid-stream. |     Receive stream msgs and spawn some IPC killers mid-stream. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     broke_ipc: bool = False |  | ||||||
|     await ctx.started() |     await ctx.started() | ||||||
|     async with ( |     async with ( | ||||||
|         ctx.open_stream() as stream, |         ctx.open_stream() as stream, | ||||||
|         trionics.collapse_eg(), |         trio.open_nursery() as n, | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |     ): | ||||||
|         async for i in stream: |         async for i in stream: | ||||||
|             print(f'child echoing {i}') |             print(f'child echoing {i}') | ||||||
|             if not broke_ipc: |             await stream.send(i) | ||||||
|                 await stream.send(i) |  | ||||||
|             else: |  | ||||||
|                 await trio.sleep(0.01) |  | ||||||
| 
 | 
 | ||||||
|             if ( |             if ( | ||||||
|                 break_ipc_after |                 break_ipc_after | ||||||
|                 and |                 and | ||||||
|                 i >= break_ipc_after |                 i >= break_ipc_after | ||||||
|             ): |             ): | ||||||
|                 broke_ipc = True |                 n.start_soon( | ||||||
|                 tn.start_soon( |  | ||||||
|                     iter_ipc_stream, |                     iter_ipc_stream, | ||||||
|                     stream, |                     stream, | ||||||
|                 ) |                 ) | ||||||
|                 tn.start_soon( |                 n.start_soon( | ||||||
|                     partial( |                     partial( | ||||||
|                         break_ipc_then_error, |                         break_ipc_then_error, | ||||||
|                         stream=stream, |                         stream=stream, | ||||||
|  | @ -120,7 +176,6 @@ async def main( | ||||||
|     break_parent_ipc_after: int|bool = False, |     break_parent_ipc_after: int|bool = False, | ||||||
|     break_child_ipc_after: int|bool = False, |     break_child_ipc_after: int|bool = False, | ||||||
|     pre_close: bool = False, |     pre_close: bool = False, | ||||||
|     tpt_proto: str = 'tcp', |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|  | @ -132,7 +187,6 @@ async def main( | ||||||
|             # a hang since it never engages due to broken IPC |             # a hang since it never engages due to broken IPC | ||||||
|             debug_mode=debug_mode, |             debug_mode=debug_mode, | ||||||
|             loglevel=loglevel, |             loglevel=loglevel, | ||||||
|             enable_transports=[tpt_proto], |  | ||||||
| 
 | 
 | ||||||
|         ) as an, |         ) as an, | ||||||
|     ): |     ): | ||||||
|  | @ -147,8 +201,7 @@ async def main( | ||||||
|             _testing.expect_ctxc( |             _testing.expect_ctxc( | ||||||
|                 yay=( |                 yay=( | ||||||
|                     break_parent_ipc_after |                     break_parent_ipc_after | ||||||
|                     or |                     or break_child_ipc_after | ||||||
|                     break_child_ipc_after |  | ||||||
|                 ), |                 ), | ||||||
|                 # TODO: we CAN'T remove this right? |                 # TODO: we CAN'T remove this right? | ||||||
|                 # since we need the ctxc to bubble up from either |                 # since we need the ctxc to bubble up from either | ||||||
|  | @ -189,13 +242,14 @@ async def main( | ||||||
|                         # await stream._ctx.chan.send(None) |                         # await stream._ctx.chan.send(None) | ||||||
|                         # await stream._ctx.chan.transport.stream.send_eof() |                         # await stream._ctx.chan.transport.stream.send_eof() | ||||||
|                         await stream._ctx.chan.transport.stream.aclose() |                         await stream._ctx.chan.transport.stream.aclose() | ||||||
|  | 
 | ||||||
|                         ipc_break_sent = True |                         ipc_break_sent = True | ||||||
| 
 | 
 | ||||||
|                     # it actually breaks right here in the |                     # it actually breaks right here in the | ||||||
|                     # mp_spawn/forkserver backends and thus the |                     # mp_spawn/forkserver backends and thus the zombie | ||||||
|                     # zombie reaper never even kicks in? |                     # reaper never even kicks in? | ||||||
|  |                     print(f'parent sending {i}') | ||||||
|                     try: |                     try: | ||||||
|                         print(f'parent sending {i}') |  | ||||||
|                         await stream.send(i) |                         await stream.send(i) | ||||||
|                     except ContextCancelled as ctxc: |                     except ContextCancelled as ctxc: | ||||||
|                         print( |                         print( | ||||||
|  | @ -208,13 +262,6 @@ async def main( | ||||||
|                         # TODO: is this needed or no? |                         # TODO: is this needed or no? | ||||||
|                         raise |                         raise | ||||||
| 
 | 
 | ||||||
|                     except trio.ClosedResourceError: |  | ||||||
|                         # NOTE: don't send if we already broke the |  | ||||||
|                         # connection to avoid raising a closed-error |  | ||||||
|                         # such that we drop through to the ctl-c |  | ||||||
|                         # mashing by user. |  | ||||||
|                         await trio.sleep(0.01) |  | ||||||
| 
 |  | ||||||
|                     # timeout: int = 1 |                     # timeout: int = 1 | ||||||
|                     # with trio.move_on_after(timeout) as cs: |                     # with trio.move_on_after(timeout) as cs: | ||||||
|                     async with stuff_hangin_ctlc() as timeout: |                     async with stuff_hangin_ctlc() as timeout: | ||||||
|  |  | ||||||
|  | @ -1,136 +0,0 @@ | ||||||
| ''' |  | ||||||
| Examples of using the builtin `breakpoint()` from an `asyncio.Task` |  | ||||||
| running in a subactor spawned with `infect_asyncio=True`. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import asyncio |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     to_asyncio, |  | ||||||
|     Portal, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def aio_sleep_forever(): |  | ||||||
|     await asyncio.sleep(float('inf')) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def bp_then_error( |  | ||||||
|     to_trio: trio.MemorySendChannel, |  | ||||||
|     from_trio: asyncio.Queue, |  | ||||||
| 
 |  | ||||||
|     raise_after_bp: bool = True, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     # sync with `trio`-side (caller) task |  | ||||||
|     to_trio.send_nowait('start') |  | ||||||
| 
 |  | ||||||
|     # NOTE: what happens here inside the hook needs some refinement.. |  | ||||||
|     # => seems like it's still `.debug._set_trace()` but |  | ||||||
|     #    we set `Lock.local_task_in_debug = 'sync'`, we probably want |  | ||||||
|     #    some further, at least, meta-data about the task/actor in debug |  | ||||||
|     #    in terms of making it clear it's `asyncio` mucking about. |  | ||||||
|     breakpoint()  # asyncio-side |  | ||||||
| 
 |  | ||||||
|     # short checkpoint / delay |  | ||||||
|     await asyncio.sleep(0.5)  # asyncio-side |  | ||||||
| 
 |  | ||||||
|     if raise_after_bp: |  | ||||||
|         raise ValueError('asyncio side error!') |  | ||||||
| 
 |  | ||||||
|     # TODO: test case with this so that it gets cancelled? |  | ||||||
|     else: |  | ||||||
|         # XXX NOTE: this is required in order to get the SIGINT-ignored |  | ||||||
|         # hang case documented in the module script section! |  | ||||||
|         await aio_sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def trio_ctx( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     bp_before_started: bool = False, |  | ||||||
| ): |  | ||||||
| 
 |  | ||||||
|     # this will block until the ``asyncio`` task sends a "first" |  | ||||||
|     # message, see first line in above func. |  | ||||||
|     async with ( |  | ||||||
|         to_asyncio.open_channel_from( |  | ||||||
|             bp_then_error, |  | ||||||
|             # raise_after_bp=not bp_before_started, |  | ||||||
|         ) as (first, chan), |  | ||||||
| 
 |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
|         assert first == 'start' |  | ||||||
| 
 |  | ||||||
|         if bp_before_started: |  | ||||||
|             await tractor.pause()  # trio-side |  | ||||||
| 
 |  | ||||||
|         await ctx.started(first)  # trio-side |  | ||||||
| 
 |  | ||||||
|         tn.start_soon( |  | ||||||
|             to_asyncio.run_task, |  | ||||||
|             aio_sleep_forever, |  | ||||||
|         ) |  | ||||||
|         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     bps_all_over: bool = True, |  | ||||||
| 
 |  | ||||||
|     # TODO, WHICH OF THESE HAZ BUGZ? |  | ||||||
|     cancel_from_root: bool = False, |  | ||||||
|     err_from_root: bool = False, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|         maybe_enable_greenback=True, |  | ||||||
|         # loglevel='devx', |  | ||||||
|     ) as an: |  | ||||||
|         ptl: Portal = await an.start_actor( |  | ||||||
|             'aio_daemon', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             infect_asyncio=True, |  | ||||||
|             debug_mode=True, |  | ||||||
|             # loglevel='cancel', |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         async with ptl.open_context( |  | ||||||
|             trio_ctx, |  | ||||||
|             bp_before_started=bps_all_over, |  | ||||||
|         ) as (ctx, first): |  | ||||||
| 
 |  | ||||||
|             assert first == 'start' |  | ||||||
| 
 |  | ||||||
|             # pause in parent to ensure no cross-actor |  | ||||||
|             # locking problems exist! |  | ||||||
|             await tractor.pause()  # trio-root |  | ||||||
| 
 |  | ||||||
|             if cancel_from_root: |  | ||||||
|                 await ctx.cancel() |  | ||||||
| 
 |  | ||||||
|             if err_from_root: |  | ||||||
|                 assert 0 |  | ||||||
|             else: |  | ||||||
|                 await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|         # TODO: case where we cancel from trio-side while asyncio task |  | ||||||
|         # has debugger lock? |  | ||||||
|         # await ptl.cancel_actor() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
| 
 |  | ||||||
|     # works fine B) |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
|     # will hang and ignores SIGINT !! |  | ||||||
|     # NOTE: you'll need to send a SIGQUIT (via ctl-\) to kill it |  | ||||||
|     # manually.. |  | ||||||
|     # trio.run(main, True) |  | ||||||
|  | @ -1,5 +1,5 @@ | ||||||
| ''' | ''' | ||||||
| Fast fail test with a `Context`. | Fast fail test with a context. | ||||||
| 
 | 
 | ||||||
| Ensure the partially initialized sub-actor process | Ensure the partially initialized sub-actor process | ||||||
| doesn't cause a hang on error/cancel of the parent | doesn't cause a hang on error/cancel of the parent | ||||||
|  |  | ||||||
|  | @ -4,15 +4,9 @@ import trio | ||||||
| 
 | 
 | ||||||
| async def breakpoint_forever(): | async def breakpoint_forever(): | ||||||
|     "Indefinitely re-enter debugger in child actor." |     "Indefinitely re-enter debugger in child actor." | ||||||
|     try: |     while True: | ||||||
|         while True: |         yield 'yo' | ||||||
|             yield 'yo' |         await tractor.breakpoint() | ||||||
|             await tractor.pause() |  | ||||||
|     except BaseException: |  | ||||||
|         tractor.log.get_console_log().exception( |  | ||||||
|             'Cancelled while trying to enter pause point!' |  | ||||||
|         ) |  | ||||||
|         raise |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def name_error(): | async def name_error(): | ||||||
|  | @ -21,14 +15,11 @@ async def name_error(): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     ''' |     """Test breakpoint in a streaming actor. | ||||||
|     Test breakpoint in a streaming actor. |     """ | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='cancel', |         loglevel='error', | ||||||
|         # loglevel='devx', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         p0 = await n.start_actor('bp_forever', enable_modules=[__name__]) |         p0 = await n.start_actor('bp_forever', enable_modules=[__name__]) | ||||||
|  | @ -41,7 +32,7 @@ async def main(): | ||||||
|             try: |             try: | ||||||
|                 await p1.run(name_error) |                 await p1.run(name_error) | ||||||
|             except tractor.RemoteActorError as rae: |             except tractor.RemoteActorError as rae: | ||||||
|                 assert rae.boxed_type is NameError |                 assert rae.type is NameError | ||||||
| 
 | 
 | ||||||
|             async for i in stream: |             async for i in stream: | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -10,7 +10,7 @@ async def name_error(): | ||||||
| async def breakpoint_forever(): | async def breakpoint_forever(): | ||||||
|     "Indefinitely re-enter debugger in child actor." |     "Indefinitely re-enter debugger in child actor." | ||||||
|     while True: |     while True: | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
|         # NOTE: if the test never sent 'q'/'quit' commands |         # NOTE: if the test never sent 'q'/'quit' commands | ||||||
|         # on the pdb repl, without this checkpoint line the |         # on the pdb repl, without this checkpoint line the | ||||||
|  | @ -45,7 +45,6 @@ async def spawn_until(depth=0): | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: notes on the new boxed-relayed errors through proxy actors |  | ||||||
| async def main(): | async def main(): | ||||||
|     """The main ``tractor`` routine. |     """The main ``tractor`` routine. | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -40,7 +40,7 @@ async def main(): | ||||||
|     """ |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='devx', |         # loglevel='cancel', | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # spawn both actors |         # spawn both actors | ||||||
|  |  | ||||||
|  | @ -6,7 +6,7 @@ async def breakpoint_forever(): | ||||||
|     "Indefinitely re-enter debugger in child actor." |     "Indefinitely re-enter debugger in child actor." | ||||||
|     while True: |     while True: | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def name_error(): | async def name_error(): | ||||||
|  | @ -38,7 +38,6 @@ async def main(): | ||||||
|     """ |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='runtime', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # Spawn both actors, don't bother with collecting results |         # Spawn both actors, don't bother with collecting results | ||||||
|  |  | ||||||
|  | @ -23,6 +23,5 @@ async def main(): | ||||||
|             n.start_soon(debug_actor.run, die) |             n.start_soon(debug_actor.run, die) | ||||||
|             n.start_soon(crash_boi.run, die) |             n.start_soon(crash_boi.run, die) | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     trio.run(main) |     trio.run(main) | ||||||
|  |  | ||||||
|  | @ -1,56 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def name_error( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Raise a `NameError`, catch it and enter `.post_mortem()`, then |  | ||||||
|     expect the `._rpc._invoke()` crash handler to also engage. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     try: |  | ||||||
|         getattr(doggypants)  # noqa (on purpose) |  | ||||||
|     except NameError: |  | ||||||
|         await tractor.post_mortem() |  | ||||||
|         raise |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     ''' |  | ||||||
|     Test 3 `PdbREPL` entries: |  | ||||||
|       - one in the child due to manual `.post_mortem()`, |  | ||||||
|       - another in the child due to runtime RPC crash handling. |  | ||||||
|       - final one here in parent from the RAE. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # XXX NOTE: ideally the REPL arrives at this frame in the parent |  | ||||||
|     # ONE UP FROM the inner ctx block below! |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|         # loglevel='cancel', |  | ||||||
|     ) as an: |  | ||||||
|         p: tractor.Portal = await an.start_actor( |  | ||||||
|             'child', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # XXX should raise `RemoteActorError[NameError]` |  | ||||||
|         # AND be the active frame when REPL enters! |  | ||||||
|         try: |  | ||||||
|             async with p.open_context(name_error) as (ctx, first): |  | ||||||
|                 assert first |  | ||||||
|         except tractor.RemoteActorError as rae: |  | ||||||
|             assert rae.boxed_type is NameError |  | ||||||
| 
 |  | ||||||
|             # manually handle in root's parent task |  | ||||||
|             await tractor.post_mortem() |  | ||||||
|             raise |  | ||||||
|         else: |  | ||||||
|             raise RuntimeError('IPC ctx should have remote errored!?') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -4,55 +4,21 @@ import sys | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| # ensure mod-path is correct! |  | ||||||
| from tractor.devx.debug import ( |  | ||||||
|     _sync_pause_from_builtin as _sync_pause_from_builtin, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| async def main() -> None: | async def main() -> None: | ||||||
|  |     async with tractor.open_nursery(debug_mode=True) as an: | ||||||
| 
 | 
 | ||||||
|     # intially unset, no entry. |         assert os.environ['PYTHONBREAKPOINT'] == 'tractor._debug._set_trace' | ||||||
|     orig_pybp_var: int = os.environ.get('PYTHONBREAKPOINT') |  | ||||||
|     assert orig_pybp_var in {None, "0"} |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|         loglevel='devx', |  | ||||||
|         maybe_enable_greenback=True, |  | ||||||
|         # ^XXX REQUIRED to enable `breakpoint()` support (from sync |  | ||||||
|         # fns) and thus required here to avoid an assertion err |  | ||||||
|         # on the next line |  | ||||||
|     ): |  | ||||||
|         assert ( |  | ||||||
|             (pybp_var := os.environ['PYTHONBREAKPOINT']) |  | ||||||
|             == |  | ||||||
|             'tractor.devx.debug._sync_pause_from_builtin' |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         # TODO: an assert that verifies the hook has indeed been, hooked |         # TODO: an assert that verifies the hook has indeed been, hooked | ||||||
|         # XD |         # XD | ||||||
|         assert ( |         assert sys.breakpointhook is not tractor._debug._set_trace | ||||||
|             (pybp_hook := sys.breakpointhook) |  | ||||||
|             is not tractor.devx.debug._set_trace |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         print( |         breakpoint() | ||||||
|             f'$PYTHONOBREAKPOINT: {pybp_var!r}\n' |  | ||||||
|             f'`sys.breakpointhook`: {pybp_hook!r}\n' |  | ||||||
|         ) |  | ||||||
|         breakpoint()  # first bp, tractor hook set. |  | ||||||
| 
 | 
 | ||||||
|     # XXX AFTER EXIT (of actor-runtime) verify the hook is unset.. |     # TODO: an assert that verifies the hook is unhooked.. | ||||||
|     # |  | ||||||
|     # YES, this is weird but it's how stdlib docs say to do it.. |  | ||||||
|     # https://docs.python.org/3/library/sys.html#sys.breakpointhook |  | ||||||
|     assert os.environ.get('PYTHONBREAKPOINT') is orig_pybp_var |  | ||||||
|     assert sys.breakpointhook |     assert sys.breakpointhook | ||||||
| 
 |     breakpoint() | ||||||
|     # now ensure a regular builtin pause still works |  | ||||||
|     breakpoint()  # last bp, stdlib hook restored |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     trio.run(main) |     trio.run(main) | ||||||
|  |  | ||||||
|  | @ -10,7 +10,7 @@ async def main(): | ||||||
| 
 | 
 | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
| 
 | 
 | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -2,16 +2,13 @@ import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main( | async def main(): | ||||||
|     registry_addrs: tuple[str, int]|None = None |  | ||||||
| ): |  | ||||||
| 
 | 
 | ||||||
|     async with tractor.open_root_actor( |     async with tractor.open_root_actor( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='runtime', |  | ||||||
|     ): |     ): | ||||||
|         while True: |         while True: | ||||||
|             await tractor.pause() |             await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|  |  | ||||||
|  | @ -24,9 +24,10 @@ async def spawn_until(depth=0): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     ''' |     """The main ``tractor`` routine. | ||||||
|     The process tree should look as approximately as follows when the | 
 | ||||||
|     debugger first engages: |     The process tree should look as approximately as follows when the debugger | ||||||
|  |     first engages: | ||||||
| 
 | 
 | ||||||
|     python examples/debugging/multi_nested_subactors_bp_forever.py |     python examples/debugging/multi_nested_subactors_bp_forever.py | ||||||
|     ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...) |     ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...) | ||||||
|  | @ -36,11 +37,10 @@ async def main(): | ||||||
|     └─ python -m tractor._child --uid ('spawner0', '1d42012b ...) |     └─ python -m tractor._child --uid ('spawner0', '1d42012b ...) | ||||||
|        └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...) |        └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...) | ||||||
| 
 | 
 | ||||||
|     ''' |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='devx', |         loglevel='warning' | ||||||
|         enable_transports=['uds'], |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # spawn both actors |         # spawn both actors | ||||||
|  |  | ||||||
|  | @ -1,35 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     async with tractor.open_root_actor( |  | ||||||
|         debug_mode=True, |  | ||||||
|         loglevel='cancel', |  | ||||||
|     ) as _root: |  | ||||||
| 
 |  | ||||||
|         # manually trigger self-cancellation and wait |  | ||||||
|         # for it to fully trigger. |  | ||||||
|         _root.cancel_soon() |  | ||||||
|         await _root._cancel_complete.wait() |  | ||||||
|         print('root cancelled') |  | ||||||
| 
 |  | ||||||
|         # now ensure we can still use the REPL |  | ||||||
|         try: |  | ||||||
|             await tractor.pause() |  | ||||||
|         except trio.Cancelled as _taskc: |  | ||||||
|             assert (root_cs := _root._root_tn.cancel_scope).cancel_called |  | ||||||
|             # NOTE^^ above logic but inside `open_root_actor()` and |  | ||||||
|             # passed to the `shield=` expression is effectively what |  | ||||||
|             # we're testing here! |  | ||||||
|             await tractor.pause(shield=root_cs.cancel_called) |  | ||||||
| 
 |  | ||||||
|         # XXX, if shield logic *is wrong* inside `open_root_actor()`'s |  | ||||||
|         # crash-handler block this should never be interacted, |  | ||||||
|         # instead `trio.Cancelled` would be bubbled up: the original |  | ||||||
|         # BUG. |  | ||||||
|         assert 0 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,84 +0,0 @@ | ||||||
| ''' |  | ||||||
| Verify we can dump a `stackscope` tree on a hang. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import os |  | ||||||
| import signal |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def start_n_shield_hang( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     # actor: tractor.Actor = tractor.current_actor() |  | ||||||
| 
 |  | ||||||
|     # sync to parent-side task |  | ||||||
|     await ctx.started(os.getpid()) |  | ||||||
| 
 |  | ||||||
|     print('Entering shield sleep..') |  | ||||||
|     with trio.CancelScope(shield=True): |  | ||||||
|         await trio.sleep_forever()  # in subactor |  | ||||||
| 
 |  | ||||||
|     # XXX NOTE ^^^ since this shields, we expect |  | ||||||
|     # the zombie reaper (aka T800) to engage on |  | ||||||
|     # SIGINT from the user and eventually hard-kill |  | ||||||
|     # this subprocess! |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     from_test: bool = False, |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         tractor.open_nursery( |  | ||||||
|             debug_mode=True, |  | ||||||
|             enable_stack_on_sig=True, |  | ||||||
|             # maybe_enable_greenback=False, |  | ||||||
|             loglevel='devx', |  | ||||||
|             enable_transports=['uds'], |  | ||||||
|         ) as an, |  | ||||||
|     ): |  | ||||||
|         ptl: tractor.Portal  = await an.start_actor( |  | ||||||
|             'hanger', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             debug_mode=True, |  | ||||||
|         ) |  | ||||||
|         async with ptl.open_context( |  | ||||||
|             start_n_shield_hang, |  | ||||||
|         ) as (ctx, cpid): |  | ||||||
| 
 |  | ||||||
|             _, proc, _ = an._children[ptl.chan.uid] |  | ||||||
|             assert cpid == proc.pid |  | ||||||
| 
 |  | ||||||
|             print( |  | ||||||
|                 'Yo my child hanging..?\n' |  | ||||||
|                 # "i'm a user who wants to see a `stackscope` tree!\n" |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # XXX simulate the wrapping test's "user actions" |  | ||||||
|             # (i.e. if a human didn't run this manually but wants to |  | ||||||
|             # know what they should do to reproduce test behaviour) |  | ||||||
|             if from_test: |  | ||||||
|                 print( |  | ||||||
|                     f'Sending SIGUSR1 to {cpid!r}!\n' |  | ||||||
|                 ) |  | ||||||
|                 os.kill( |  | ||||||
|                     cpid, |  | ||||||
|                     signal.SIGUSR1, |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 # simulate user cancelling program |  | ||||||
|                 await trio.sleep(0.5) |  | ||||||
|                 os.kill( |  | ||||||
|                     os.getpid(), |  | ||||||
|                     signal.SIGINT, |  | ||||||
|                 ) |  | ||||||
|             else: |  | ||||||
|                 # actually let user send the ctl-c |  | ||||||
|                 await trio.sleep_forever()  # in root |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,88 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def cancellable_pause_loop( |  | ||||||
|     task_status: trio.TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED |  | ||||||
| ): |  | ||||||
|     with trio.CancelScope() as cs: |  | ||||||
|         task_status.started(cs) |  | ||||||
|         for _ in range(3): |  | ||||||
|             try: |  | ||||||
|                 # ON first entry, there is no level triggered |  | ||||||
|                 # cancellation yet, so this cp does a parent task |  | ||||||
|                 # ctx-switch so that this scope raises for the NEXT |  | ||||||
|                 # checkpoint we hit. |  | ||||||
|                 await trio.lowlevel.checkpoint() |  | ||||||
|                 await tractor.pause() |  | ||||||
| 
 |  | ||||||
|                 cs.cancel() |  | ||||||
| 
 |  | ||||||
|                 # parent should have called `cs.cancel()` by now |  | ||||||
|                 await trio.lowlevel.checkpoint() |  | ||||||
| 
 |  | ||||||
|             except trio.Cancelled: |  | ||||||
|                 print('INSIDE SHIELDED PAUSE') |  | ||||||
|                 await tractor.pause(shield=True) |  | ||||||
|         else: |  | ||||||
|             # should raise it again, bubbling up to parent |  | ||||||
|             print('BUBBLING trio.Cancelled to parent task-nursery') |  | ||||||
|             await trio.lowlevel.checkpoint() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def pm_on_cancelled(): |  | ||||||
|     async with trio.open_nursery() as tn: |  | ||||||
|         tn.cancel_scope.cancel() |  | ||||||
|         try: |  | ||||||
|             await trio.sleep_forever() |  | ||||||
|         except trio.Cancelled: |  | ||||||
|             # should also raise `Cancelled` since |  | ||||||
|             # we didn't pass `shield=True`. |  | ||||||
|             try: |  | ||||||
|                 await tractor.post_mortem(hide_tb=False) |  | ||||||
|             except trio.Cancelled as taskc: |  | ||||||
| 
 |  | ||||||
|                 # should enter just fine, in fact it should |  | ||||||
|                 # be debugging the internals of the previous |  | ||||||
|                 # sin-shield call above Bo |  | ||||||
|                 await tractor.post_mortem( |  | ||||||
|                     hide_tb=False, |  | ||||||
|                     shield=True, |  | ||||||
|                 ) |  | ||||||
|                 raise taskc |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             raise RuntimeError('Dint cancel as expected!?') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def cancelled_before_pause( |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that using a shielded pause works despite surrounding |  | ||||||
|     cancellation called state in the calling task. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async with trio.open_nursery() as tn: |  | ||||||
|         cs: trio.CancelScope = await tn.start(cancellable_pause_loop) |  | ||||||
|         await trio.sleep(0.1) |  | ||||||
| 
 |  | ||||||
|     assert cs.cancelled_caught |  | ||||||
| 
 |  | ||||||
|     await pm_on_cancelled() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|     ) as n: |  | ||||||
|         portal: tractor.Portal = await n.run_in_actor( |  | ||||||
|             cancelled_before_pause, |  | ||||||
|         ) |  | ||||||
|         await portal.result() |  | ||||||
| 
 |  | ||||||
|         # ensure the same works in the root actor! |  | ||||||
|         await pm_on_cancelled() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -4,9 +4,9 @@ import trio | ||||||
| 
 | 
 | ||||||
| async def gen(): | async def gen(): | ||||||
|     yield 'yo' |     yield 'yo' | ||||||
|     await tractor.pause() |     await tractor.breakpoint() | ||||||
|     yield 'yo' |     yield 'yo' | ||||||
|     await tractor.pause() |     await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -15,7 +15,7 @@ async def just_bp( | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     await ctx.started() |     await ctx.started() | ||||||
|     await tractor.pause() |     await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
|     # TODO: bps and errors in this call.. |     # TODO: bps and errors in this call.. | ||||||
|     async for val in gen(): |     async for val in gen(): | ||||||
|  | @ -33,11 +33,8 @@ async def just_bp( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         enable_transports=['uds'], |  | ||||||
|         loglevel='devx', |  | ||||||
|     ) as n: |     ) as n: | ||||||
|         p = await n.start_actor( |         p = await n.start_actor( | ||||||
|             'bp_boi', |             'bp_boi', | ||||||
|  |  | ||||||
|  | @ -3,20 +3,17 @@ import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def breakpoint_forever(): | async def breakpoint_forever(): | ||||||
|     ''' |     """Indefinitely re-enter debugger in child actor. | ||||||
|     Indefinitely re-enter debugger in child actor. |     """ | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     while True: |     while True: | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
| 
 | 
 | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='cancel', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         portal = await n.run_in_actor( |         portal = await n.run_in_actor( | ||||||
|  |  | ||||||
|  | @ -3,26 +3,16 @@ import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def name_error(): | async def name_error(): | ||||||
|     getattr(doggypants)  # noqa (on purpose) |     getattr(doggypants) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='transport', |     ) as n: | ||||||
|     ) as an: |  | ||||||
| 
 | 
 | ||||||
|         # TODO: ideally the REPL arrives at this frame in the parent, |         portal = await n.run_in_actor(name_error) | ||||||
|         # ABOVE the @api_frame of `Portal.run_in_actor()` (which |         await portal.result() | ||||||
|         # should eventually not even be a portal method ... XD) |  | ||||||
|         # await tractor.pause() |  | ||||||
|         p: tractor.Portal = await an.run_in_actor(name_error) |  | ||||||
| 
 |  | ||||||
|         # with this style, should raise on this line |  | ||||||
|         await p.result() |  | ||||||
| 
 |  | ||||||
|         # with this alt style should raise at `open_nusery()` |  | ||||||
|         # return await p.result() |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|  |  | ||||||
|  | @ -1,169 +0,0 @@ | ||||||
| from functools import partial |  | ||||||
| import time |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| # TODO: only import these when not running from test harness? |  | ||||||
| # can we detect `pexpect` usage maybe? |  | ||||||
| # from tractor.devx.debug import ( |  | ||||||
| #     get_lock, |  | ||||||
| #     get_debug_req, |  | ||||||
| # ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def sync_pause( |  | ||||||
|     use_builtin: bool = False, |  | ||||||
|     error: bool = False, |  | ||||||
|     hide_tb: bool = True, |  | ||||||
|     pre_sleep: float|None = None, |  | ||||||
| ): |  | ||||||
|     if pre_sleep: |  | ||||||
|         time.sleep(pre_sleep) |  | ||||||
| 
 |  | ||||||
|     if use_builtin: |  | ||||||
|         breakpoint(hide_tb=hide_tb) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         # TODO: maybe for testing some kind of cm style interface |  | ||||||
|         # where the `._set_trace()` call doesn't happen until block |  | ||||||
|         # exit? |  | ||||||
|         # assert get_lock().ctx_in_debug is None |  | ||||||
|         # assert get_debug_req().repl is None |  | ||||||
|         tractor.pause_from_sync() |  | ||||||
|         # assert get_debug_req().repl is None |  | ||||||
| 
 |  | ||||||
|     if error: |  | ||||||
|         raise RuntimeError('yoyo sync code error') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def start_n_sync_pause( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     actor: tractor.Actor = tractor.current_actor() |  | ||||||
| 
 |  | ||||||
|     # sync to parent-side task |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     print(f'Entering `sync_pause()` in subactor: {actor.uid}\n') |  | ||||||
|     sync_pause() |  | ||||||
|     print(f'Exited `sync_pause()` in subactor: {actor.uid}\n') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main() -> None: |  | ||||||
|     async with ( |  | ||||||
|         tractor.open_nursery( |  | ||||||
|             debug_mode=True, |  | ||||||
|             maybe_enable_greenback=True, |  | ||||||
|             enable_stack_on_sig=True, |  | ||||||
|             # loglevel='warning', |  | ||||||
|             # loglevel='devx', |  | ||||||
|         ) as an, |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
|         # just from root task |  | ||||||
|         sync_pause() |  | ||||||
| 
 |  | ||||||
|         p: tractor.Portal  = await an.start_actor( |  | ||||||
|             'subactor', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             # infect_asyncio=True, |  | ||||||
|             debug_mode=True, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # TODO: 3 sub-actor usage cases: |  | ||||||
|         # -[x] via a `.open_context()` |  | ||||||
|         # -[ ] via a `.run_in_actor()` call |  | ||||||
|         # -[ ] via a `.run()` |  | ||||||
|         # -[ ] via a `.to_thread.run_sync()` in subactor |  | ||||||
|         async with p.open_context( |  | ||||||
|             start_n_sync_pause, |  | ||||||
|         ) as (ctx, first): |  | ||||||
|             assert first is None |  | ||||||
| 
 |  | ||||||
|             # TODO: handle bg-thread-in-root-actor special cases! |  | ||||||
|             # |  | ||||||
|             # there are a couple very subtle situations possible here |  | ||||||
|             # and they are likely to become more important as cpython |  | ||||||
|             # moves to support no-GIL. |  | ||||||
|             # |  | ||||||
|             # Cases: |  | ||||||
|             # 1. root-actor bg-threads that call `.pause_from_sync()` |  | ||||||
|             #   whilst an in-tree subactor also is using ` .pause()`. |  | ||||||
|             # |_ since the root-actor bg thread can not |  | ||||||
|             #   `Lock._debug_lock.acquire_nowait()` without running |  | ||||||
|             #   a `trio.Task`, AND because the |  | ||||||
|             #   `PdbREPL.set_continue()` is called from that |  | ||||||
|             #   bg-thread, we can not `._debug_lock.release()` |  | ||||||
|             #   either! |  | ||||||
|             #  |_ this results in no actor-tree `Lock` being used |  | ||||||
|             #    on behalf of the bg-thread and thus the subactor's |  | ||||||
|             #    task and the thread trying to to use stdio |  | ||||||
|             #    simultaneously which results in the classic TTY |  | ||||||
|             #    clobbering! |  | ||||||
|             # |  | ||||||
|             # 2. mutiple sync-bg-threads that call |  | ||||||
|             #   `.pause_from_sync()` where one is scheduled via |  | ||||||
|             #   `Nursery.start_soon(to_thread.run_sync)` in a bg |  | ||||||
|             #   task. |  | ||||||
|             # |  | ||||||
|             #   Due to the GIL, the threads never truly try to step |  | ||||||
|             #   through the REPL simultaneously, BUT their `logging` |  | ||||||
|             #   and traceback outputs are interleaved since the GIL |  | ||||||
|             #   (seemingly) on every REPL-input from the user |  | ||||||
|             #   switches threads.. |  | ||||||
|             # |  | ||||||
|             #   Soo, the context switching semantics of the GIL |  | ||||||
|             #   result in a very confusing and messy interaction UX |  | ||||||
|             #   since eval and (tb) print output is NOT synced to |  | ||||||
|             #   each REPL-cycle (like we normally make it via |  | ||||||
|             #   a `.set_continue()` callback triggering the |  | ||||||
|             #   `Lock.release()`). Ideally we can solve this |  | ||||||
|             #   usability issue NOW because this will of course be |  | ||||||
|             #   that much more important when eventually there is no |  | ||||||
|             #   GIL! |  | ||||||
| 
 |  | ||||||
|             # XXX should cause double REPL entry and thus TTY |  | ||||||
|             # clobbering due to case 1. above! |  | ||||||
|             tn.start_soon( |  | ||||||
|                 partial( |  | ||||||
|                     trio.to_thread.run_sync, |  | ||||||
|                     partial( |  | ||||||
|                         sync_pause, |  | ||||||
|                         use_builtin=False, |  | ||||||
|                         # pre_sleep=0.5, |  | ||||||
|                     ), |  | ||||||
|                     abandon_on_cancel=True, |  | ||||||
|                     thread_name='start_soon_root_bg_thread', |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             await tractor.pause() |  | ||||||
| 
 |  | ||||||
|             # XXX should cause double REPL entry and thus TTY |  | ||||||
|             # clobbering due to case 2. above! |  | ||||||
|             await trio.to_thread.run_sync( |  | ||||||
|                 partial( |  | ||||||
|                     sync_pause, |  | ||||||
|                     # NOTE this already works fine since in the new |  | ||||||
|                     # thread the `breakpoint()` built-in is never |  | ||||||
|                     # overloaded, thus NO locking is used, HOWEVER |  | ||||||
|                     # the case 2. from above still exists! |  | ||||||
|                     use_builtin=True, |  | ||||||
|                 ), |  | ||||||
|                 # TODO: with this `False` we can hang!??! |  | ||||||
|                 # abandon_on_cancel=False, |  | ||||||
|                 abandon_on_cancel=True, |  | ||||||
|                 thread_name='inline_root_bg_thread', |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         await ctx.cancel() |  | ||||||
| 
 |  | ||||||
|         # TODO: case where we cancel from trio-side while asyncio task |  | ||||||
|         # has debugger lock? |  | ||||||
|         await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,11 +1,6 @@ | ||||||
| import time | import time | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor import ( |  | ||||||
|     ActorNursery, |  | ||||||
|     MsgStream, |  | ||||||
|     Portal, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # this is the first 2 actors, streamer_1 and streamer_2 | # this is the first 2 actors, streamer_1 and streamer_2 | ||||||
|  | @ -17,18 +12,14 @@ async def stream_data(seed): | ||||||
| 
 | 
 | ||||||
| # this is the third actor; the aggregator | # this is the third actor; the aggregator | ||||||
| async def aggregate(seed): | async def aggregate(seed): | ||||||
|     ''' |     """Ensure that the two streams we receive match but only stream | ||||||
|     Ensure that the two streams we receive match but only stream |  | ||||||
|     a single set of values to the parent. |     a single set of values to the parent. | ||||||
| 
 |     """ | ||||||
|     ''' |     async with tractor.open_nursery() as nursery: | ||||||
|     an: ActorNursery |         portals = [] | ||||||
|     async with tractor.open_nursery() as an: |  | ||||||
|         portals: list[Portal] = [] |  | ||||||
|         for i in range(1, 3): |         for i in range(1, 3): | ||||||
| 
 |             # fork point | ||||||
|             # fork/spawn call |             portal = await nursery.start_actor( | ||||||
|             portal = await an.start_actor( |  | ||||||
|                 name=f'streamer_{i}', |                 name=f'streamer_{i}', | ||||||
|                 enable_modules=[__name__], |                 enable_modules=[__name__], | ||||||
|             ) |             ) | ||||||
|  | @ -52,11 +43,7 @@ async def aggregate(seed): | ||||||
|         async with trio.open_nursery() as n: |         async with trio.open_nursery() as n: | ||||||
| 
 | 
 | ||||||
|             for portal in portals: |             for portal in portals: | ||||||
|                 n.start_soon( |                 n.start_soon(push_to_chan, portal, send_chan.clone()) | ||||||
|                     push_to_chan, |  | ||||||
|                     portal, |  | ||||||
|                     send_chan.clone(), |  | ||||||
|                 ) |  | ||||||
| 
 | 
 | ||||||
|             # close this local task's reference to send side |             # close this local task's reference to send side | ||||||
|             await send_chan.aclose() |             await send_chan.aclose() | ||||||
|  | @ -73,7 +60,7 @@ async def aggregate(seed): | ||||||
| 
 | 
 | ||||||
|             print("FINISHED ITERATING in aggregator") |             print("FINISHED ITERATING in aggregator") | ||||||
| 
 | 
 | ||||||
|         await an.cancel() |         await nursery.cancel() | ||||||
|         print("WAITING on `ActorNursery` to finish") |         print("WAITING on `ActorNursery` to finish") | ||||||
|     print("AGGREGATOR COMPLETE!") |     print("AGGREGATOR COMPLETE!") | ||||||
| 
 | 
 | ||||||
|  | @ -88,21 +75,18 @@ async def main() -> list[int]: | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # yes, a nursery which spawns `trio`-"actors" B) |     # yes, a nursery which spawns `trio`-"actors" B) | ||||||
|     an: ActorNursery |     nursery: tractor.ActorNursery | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery() as nursery: | ||||||
|         loglevel='cancel', |  | ||||||
|         # debug_mode=True, |  | ||||||
|     ) as an: |  | ||||||
| 
 | 
 | ||||||
|         seed = int(1e3) |         seed = int(1e3) | ||||||
|         pre_start = time.time() |         pre_start = time.time() | ||||||
| 
 | 
 | ||||||
|         portal: Portal = await an.start_actor( |         portal: tractor.Portal = await nursery.start_actor( | ||||||
|             name='aggregator', |             name='aggregator', | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         stream: MsgStream |         stream: tractor.MsgStream | ||||||
|         async with portal.open_stream_from( |         async with portal.open_stream_from( | ||||||
|             aggregate, |             aggregate, | ||||||
|             seed=seed, |             seed=seed, | ||||||
|  | @ -111,12 +95,11 @@ async def main() -> list[int]: | ||||||
|             start = time.time() |             start = time.time() | ||||||
|             # the portal call returns exactly what you'd expect |             # the portal call returns exactly what you'd expect | ||||||
|             # as if the remote "aggregate" function was called locally |             # as if the remote "aggregate" function was called locally | ||||||
|             result_stream: list[int] = [] |             result_stream = [] | ||||||
|             async for value in stream: |             async for value in stream: | ||||||
|                 result_stream.append(value) |                 result_stream.append(value) | ||||||
| 
 | 
 | ||||||
|         cancelled: bool = await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
|         assert cancelled |  | ||||||
| 
 | 
 | ||||||
|         print(f"STREAM TIME = {time.time() - start}") |         print(f"STREAM TIME = {time.time() - start}") | ||||||
|         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") |         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") | ||||||
|  |  | ||||||
|  | @ -3,18 +3,20 @@ import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def sleepy_jane() -> None: | async def sleepy_jane(): | ||||||
|     uid: tuple = tractor.current_actor().uid |     uid = tractor.current_actor().uid | ||||||
|     print(f'Yo i am actor {uid}') |     print(f'Yo i am actor {uid}') | ||||||
|     await trio.sleep_forever() |     await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     ''' |     ''' | ||||||
|     Spawn a flat actor cluster, with one process per detected core. |     Spawn a flat actor cluster, with one process per | ||||||
|  |     detected core. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     portal_map: dict[str, tractor.Portal] |     portal_map: dict[str, tractor.Portal] | ||||||
|  |     results: dict[str, str] | ||||||
| 
 | 
 | ||||||
|     # look at this hip new syntax! |     # look at this hip new syntax! | ||||||
|     async with ( |     async with ( | ||||||
|  | @ -23,15 +25,11 @@ async def main(): | ||||||
|             modules=[__name__] |             modules=[__name__] | ||||||
|         ) as portal_map, |         ) as portal_map, | ||||||
| 
 | 
 | ||||||
|         tractor.trionics.collapse_eg(), |         trio.open_nursery() as n, | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |     ): | ||||||
| 
 | 
 | ||||||
|         for (name, portal) in portal_map.items(): |         for (name, portal) in portal_map.items(): | ||||||
|             tn.start_soon( |             n.start_soon(portal.run, sleepy_jane) | ||||||
|                 portal.run, |  | ||||||
|                 sleepy_jane, |  | ||||||
|             ) |  | ||||||
| 
 | 
 | ||||||
|         await trio.sleep(0.5) |         await trio.sleep(0.5) | ||||||
| 
 | 
 | ||||||
|  | @ -43,4 +41,4 @@ if __name__ == '__main__': | ||||||
|     try: |     try: | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
|     except KeyboardInterrupt: |     except KeyboardInterrupt: | ||||||
|         print('trio cancelled by KBI') |         pass | ||||||
|  |  | ||||||
|  | @ -9,7 +9,7 @@ async def main(service_name): | ||||||
|     async with tractor.open_nursery() as an: |     async with tractor.open_nursery() as an: | ||||||
|         await an.start_actor(service_name) |         await an.start_actor(service_name) | ||||||
| 
 | 
 | ||||||
|         async with tractor.get_registry() as portal: |         async with tractor.get_arbiter('127.0.0.1', 1616) as portal: | ||||||
|             print(f"Arbiter is listening on {portal.channel}") |             print(f"Arbiter is listening on {portal.channel}") | ||||||
| 
 | 
 | ||||||
|         async with tractor.wait_for_actor(service_name) as sockaddr: |         async with tractor.wait_for_actor(service_name) as sockaddr: | ||||||
|  |  | ||||||
|  | @ -1,85 +0,0 @@ | ||||||
| from contextlib import ( |  | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| log = tractor.log.get_logger( |  | ||||||
|     name=__name__ |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| _lock: trio.Lock|None = None |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def acquire_singleton_lock( |  | ||||||
| ) -> None: |  | ||||||
|     global _lock |  | ||||||
|     if _lock is None: |  | ||||||
|         log.info('Allocating LOCK') |  | ||||||
|         _lock = trio.Lock() |  | ||||||
| 
 |  | ||||||
|     log.info('TRYING TO LOCK ACQUIRE') |  | ||||||
|     async with _lock: |  | ||||||
|         log.info('ACQUIRED') |  | ||||||
|         yield _lock |  | ||||||
| 
 |  | ||||||
|     log.info('RELEASED') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def hold_lock_forever( |  | ||||||
|     task_status=trio.TASK_STATUS_IGNORED |  | ||||||
| ): |  | ||||||
|     async with ( |  | ||||||
|         tractor.trionics.maybe_raise_from_masking_exc(), |  | ||||||
|         acquire_singleton_lock() as lock, |  | ||||||
|     ): |  | ||||||
|         task_status.started(lock) |  | ||||||
|         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     ignore_special_cases: bool, |  | ||||||
|     loglevel: str = 'info', |  | ||||||
|     debug_mode: bool = True, |  | ||||||
| ): |  | ||||||
|     async with ( |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
| 
 |  | ||||||
|         # tractor.trionics.maybe_raise_from_masking_exc() |  | ||||||
|         # ^^^ XXX NOTE, interestingly putting the unmasker |  | ||||||
|         # here does not exhibit the same behaviour ?? |  | ||||||
|     ): |  | ||||||
|         if not ignore_special_cases: |  | ||||||
|             from tractor.trionics import _taskc |  | ||||||
|             _taskc._mask_cases.clear() |  | ||||||
| 
 |  | ||||||
|         _lock = await tn.start( |  | ||||||
|             hold_lock_forever, |  | ||||||
|         ) |  | ||||||
|         with trio.move_on_after(0.2): |  | ||||||
|             await tn.start( |  | ||||||
|                 hold_lock_forever, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         tn.cancel_scope.cancel() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # XXX, manual test as script |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     tractor.log.get_console_log(level='info') |  | ||||||
|     for case in [True, False]: |  | ||||||
|         log.info( |  | ||||||
|             f'\n' |  | ||||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' |  | ||||||
|             f'ignore_special_cases: {case!r}\n' |  | ||||||
|         ) |  | ||||||
|         trio.run(partial( |  | ||||||
|             main, |  | ||||||
|             ignore_special_cases=case, |  | ||||||
|             loglevel='info', |  | ||||||
|         )) |  | ||||||
|  | @ -1,195 +0,0 @@ | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
|     # TODO, any diff in async case(s)?? |  | ||||||
|     # asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| log = tractor.log.get_logger( |  | ||||||
|     name=__name__ |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @cm |  | ||||||
| def teardown_on_exc( |  | ||||||
|     raise_from_handler: bool = False, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     You could also have a teardown handler which catches any exc and |  | ||||||
|     does some required teardown. In this case the problem is |  | ||||||
|     compounded UNLESS you ensure the handler's scope is OUTSIDE the |  | ||||||
|     `ux.aclose()`.. that is in the caller's enclosing scope. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     try: |  | ||||||
|         yield |  | ||||||
|     except BaseException as _berr: |  | ||||||
|         berr = _berr |  | ||||||
|         log.exception( |  | ||||||
|             f'Handling termination teardown in child due to,\n' |  | ||||||
|             f'{berr!r}\n' |  | ||||||
|         ) |  | ||||||
|         if raise_from_handler: |  | ||||||
|             # XXX teardown ops XXX |  | ||||||
|             # on termination these steps say need to be run to |  | ||||||
|             # ensure wider system consistency (like the state of |  | ||||||
|             # remote connections/services). |  | ||||||
|             # |  | ||||||
|             # HOWEVER, any bug in this teardown code is also |  | ||||||
|             # masked by the `tx.aclose()`! |  | ||||||
|             # this is also true if `_tn.cancel_scope` is |  | ||||||
|             # `.cancel_called` by the parent in a graceful |  | ||||||
|             # request case.. |  | ||||||
| 
 |  | ||||||
|             # simulate a bug in teardown handler. |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 'woopsie teardown bug!' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         raise  # no teardown bug. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def finite_stream_to_rent( |  | ||||||
|     tx: trio.abc.SendChannel, |  | ||||||
|     child_errors_mid_stream: bool, |  | ||||||
|     raise_unmasked: bool, |  | ||||||
| 
 |  | ||||||
|     task_status: trio.TaskStatus[ |  | ||||||
|         trio.CancelScope, |  | ||||||
|     ] = trio.TASK_STATUS_IGNORED, |  | ||||||
| ): |  | ||||||
|     async with ( |  | ||||||
|         # XXX without this unmasker the mid-streaming RTE is never |  | ||||||
|         # reported since it is masked by the `tx.aclose()` |  | ||||||
|         # call which in turn raises `Cancelled`! |  | ||||||
|         # |  | ||||||
|         # NOTE, this is WITHOUT doing any exception handling |  | ||||||
|         # inside the child  task! |  | ||||||
|         # |  | ||||||
|         # TODO, uncomment next LoC to see the supprsessed beg[RTE]! |  | ||||||
|         tractor.trionics.maybe_raise_from_masking_exc( |  | ||||||
|             raise_unmasked=raise_unmasked, |  | ||||||
|         ), |  | ||||||
| 
 |  | ||||||
|         tx as tx,  # .aclose() is the guilty masker chkpt! |  | ||||||
| 
 |  | ||||||
|         # XXX, this ONLY matters in the |  | ||||||
|         # `child_errors_mid_stream=False` case oddly!? |  | ||||||
|         # THAT IS, if no tn is opened in that case then the |  | ||||||
|         # test will not fail; it raises the RTE correctly? |  | ||||||
|         # |  | ||||||
|         # -> so it seems this new scope somehow affects the form of |  | ||||||
|         #    eventual in the parent EG? |  | ||||||
|         tractor.trionics.maybe_open_nursery( |  | ||||||
|             nursery=( |  | ||||||
|                 None |  | ||||||
|                 if not child_errors_mid_stream |  | ||||||
|                 else True |  | ||||||
|             ), |  | ||||||
|         ) as _tn, |  | ||||||
|     ): |  | ||||||
|         # pass our scope back to parent for supervision\ |  | ||||||
|         # control. |  | ||||||
|         cs: trio.CancelScope|None = ( |  | ||||||
|             None |  | ||||||
|             if _tn is True |  | ||||||
|             else _tn.cancel_scope |  | ||||||
|         ) |  | ||||||
|         task_status.started(cs) |  | ||||||
| 
 |  | ||||||
|         with teardown_on_exc( |  | ||||||
|             raise_from_handler=not child_errors_mid_stream, |  | ||||||
|         ): |  | ||||||
|             for i in range(100): |  | ||||||
|                 log.debug( |  | ||||||
|                     f'Child tx {i!r}\n' |  | ||||||
|                 ) |  | ||||||
|                 if ( |  | ||||||
|                     child_errors_mid_stream |  | ||||||
|                     and |  | ||||||
|                     i == 66 |  | ||||||
|                 ): |  | ||||||
|                     # oh wait but WOOPS there's a bug |  | ||||||
|                     # in that teardown code!? |  | ||||||
|                     raise RuntimeError( |  | ||||||
|                         'woopsie, a mid-streaming bug!?' |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|                 await tx.send(i) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     # TODO! toggle this for the 2 cases! |  | ||||||
|     # 1. child errors mid-stream while parent is also requesting |  | ||||||
|     #   (graceful) cancel of that child streamer. |  | ||||||
|     # |  | ||||||
|     # 2. child contains a teardown handler which contains a |  | ||||||
|     #   bug and raises. |  | ||||||
|     # |  | ||||||
|     child_errors_mid_stream: bool, |  | ||||||
| 
 |  | ||||||
|     raise_unmasked: bool = False, |  | ||||||
|     loglevel: str = 'info', |  | ||||||
| ): |  | ||||||
|     tractor.log.get_console_log(level=loglevel) |  | ||||||
| 
 |  | ||||||
|     # the `.aclose()` being checkpoints on these |  | ||||||
|     # is the source of the problem.. |  | ||||||
|     tx, rx = trio.open_memory_channel(1) |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         tractor.trionics.collapse_eg(), |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|         rx as rx, |  | ||||||
|     ): |  | ||||||
|         _child_cs = await tn.start( |  | ||||||
|             partial( |  | ||||||
|                 finite_stream_to_rent, |  | ||||||
|                 child_errors_mid_stream=child_errors_mid_stream, |  | ||||||
|                 raise_unmasked=raise_unmasked, |  | ||||||
|                 tx=tx, |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         async for msg in rx: |  | ||||||
|             log.debug( |  | ||||||
|                 f'Rent rx {msg!r}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # simulate some external cancellation |  | ||||||
|             # request **JUST BEFORE** the child errors. |  | ||||||
|             if msg == 65: |  | ||||||
|                 log.cancel( |  | ||||||
|                     f'Cancelling parent on,\n' |  | ||||||
|                     f'msg={msg}\n' |  | ||||||
|                     f'\n' |  | ||||||
|                     f'Simulates OOB cancel request!\n' |  | ||||||
|                 ) |  | ||||||
|                 tn.cancel_scope.cancel() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # XXX, manual test as script |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     tractor.log.get_console_log(level='info') |  | ||||||
|     for case in [True, False]: |  | ||||||
|         log.info( |  | ||||||
|             f'\n' |  | ||||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' |  | ||||||
|             f'child_errors_midstream: {case!r}\n' |  | ||||||
|         ) |  | ||||||
|         try: |  | ||||||
|             trio.run(partial( |  | ||||||
|                 main, |  | ||||||
|                 child_errors_mid_stream=case, |  | ||||||
|                 # raise_unmasked=True, |  | ||||||
|                 loglevel='info', |  | ||||||
|             )) |  | ||||||
|         except Exception as _exc: |  | ||||||
|             exc = _exc |  | ||||||
|             log.exception( |  | ||||||
|                 'Should have raised an RTE or Cancelled?\n' |  | ||||||
|             ) |  | ||||||
|             breakpoint() |  | ||||||
|  | @ -1,18 +0,0 @@ | ||||||
| First generate a built disti: |  | ||||||
| 
 |  | ||||||
| ``` |  | ||||||
| python -m pip install --upgrade build |  | ||||||
| python -m build --sdist --outdir dist/alpha5/ |  | ||||||
| ``` |  | ||||||
| 
 |  | ||||||
| Then try a test ``pypi`` upload: |  | ||||||
| 
 |  | ||||||
| ``` |  | ||||||
| python -m twine upload --repository testpypi dist/alpha5/* |  | ||||||
| ``` |  | ||||||
| 
 |  | ||||||
| The push to `pypi` for realz. |  | ||||||
| 
 |  | ||||||
| ``` |  | ||||||
| python -m twine upload --repository testpypi dist/alpha5/* |  | ||||||
| ``` |  | ||||||
							
								
								
									
										126
									
								
								pyproject.toml
								
								
								
								
							
							
						
						
									
										126
									
								
								pyproject.toml
								
								
								
								
							|  | @ -1,117 +1,3 @@ | ||||||
| [build-system] |  | ||||||
| requires = ["hatchling"] |  | ||||||
| build-backend = "hatchling.build" |  | ||||||
| 
 |  | ||||||
| # ------ build-system ------ |  | ||||||
| 
 |  | ||||||
| [project] |  | ||||||
| name = "tractor" |  | ||||||
| version = "0.1.0a6dev0" |  | ||||||
| description = 'structured concurrent `trio`-"actors"' |  | ||||||
| authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }] |  | ||||||
| requires-python = ">= 3.11" |  | ||||||
| readme = "docs/README.rst" |  | ||||||
| license = "AGPL-3.0-or-later" |  | ||||||
| keywords = [ |  | ||||||
|   "trio", |  | ||||||
|   "async", |  | ||||||
|   "concurrency", |  | ||||||
|   "structured concurrency", |  | ||||||
|   "actor model", |  | ||||||
|   "distributed", |  | ||||||
|   "multiprocessing", |  | ||||||
| ] |  | ||||||
| classifiers = [ |  | ||||||
|   "Development Status :: 3 - Alpha", |  | ||||||
|   "Operating System :: POSIX :: Linux", |  | ||||||
|   "Framework :: Trio", |  | ||||||
|   "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", |  | ||||||
|   "Programming Language :: Python :: Implementation :: CPython", |  | ||||||
|   "Programming Language :: Python :: 3 :: Only", |  | ||||||
|   "Programming Language :: Python :: 3.11", |  | ||||||
|   "Topic :: System :: Distributed Computing", |  | ||||||
| ] |  | ||||||
| dependencies = [ |  | ||||||
|   # trio runtime and friends |  | ||||||
|   # (poetry) proper range specs, |  | ||||||
|   # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 |  | ||||||
|   # TODO, for 3.13 we must go go `0.27` which means we have to |  | ||||||
|   # disable strict egs or port to handling them internally! |  | ||||||
|   "trio>0.27", |  | ||||||
|   "tricycle>=0.4.1,<0.5", |  | ||||||
|   "wrapt>=1.16.0,<2", |  | ||||||
|   "colorlog>=6.8.2,<7", |  | ||||||
|   # built-in multi-actor `pdb` REPL |  | ||||||
|   "pdbp>=1.6,<2", # windows only (from `pdbp`) |  | ||||||
|   # typed IPC msging |  | ||||||
|   "msgspec>=0.19.0", |  | ||||||
|   "cffi>=1.17.1", |  | ||||||
|   "bidict>=0.23.1", |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| # ------ project ------ |  | ||||||
| 
 |  | ||||||
| [dependency-groups] |  | ||||||
| dev = [ |  | ||||||
|   # test suite |  | ||||||
|   # TODO: maybe some of these layout choices? |  | ||||||
|   # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules |  | ||||||
|   "pytest>=8.3.5", |  | ||||||
|   "pexpect>=4.9.0,<5", |  | ||||||
|   # `tractor.devx` tooling |  | ||||||
|   "greenback>=1.2.1,<2", |  | ||||||
|   "stackscope>=0.2.2,<0.3", |  | ||||||
|   # ^ requires this? |  | ||||||
|   "typing-extensions>=4.14.1", |  | ||||||
| 
 |  | ||||||
|   "pyperclip>=1.9.0", |  | ||||||
|   "prompt-toolkit>=3.0.50", |  | ||||||
|   "xonsh>=0.19.2", |  | ||||||
|   "psutil>=7.0.0", |  | ||||||
| ] |  | ||||||
| # TODO, add these with sane versions; were originally in |  | ||||||
| # `requirements-docs.txt`.. |  | ||||||
| # docs = [ |  | ||||||
| #   "sphinx>=" |  | ||||||
| #   "sphinx_book_theme>=" |  | ||||||
| # ] |  | ||||||
| 
 |  | ||||||
| # ------ dependency-groups ------ |  | ||||||
| 
 |  | ||||||
| # ------ dependency-groups ------ |  | ||||||
| 
 |  | ||||||
| [tool.uv.sources] |  | ||||||
| # XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)` |  | ||||||
| # for the `pp` alias.. |  | ||||||
| # pdbp = { path = "../pdbp", editable = true } |  | ||||||
| 
 |  | ||||||
| # ------ tool.uv.sources ------ |  | ||||||
| # TODO, distributed (multi-host) extensions |  | ||||||
| # linux kernel networking |  | ||||||
| # 'pyroute2 |  | ||||||
| 
 |  | ||||||
| # ------ tool.uv.sources ------ |  | ||||||
| 
 |  | ||||||
| [tool.uv] |  | ||||||
| # XXX NOTE, prefer the sys python bc apparently the distis from |  | ||||||
| # `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s |  | ||||||
| # likely due to linking against `libedit` over `readline`.. |  | ||||||
| # |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions |  | ||||||
| # |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux |  | ||||||
| # |  | ||||||
| # https://docs.astral.sh/uv/reference/settings/#python-preference |  | ||||||
| python-preference = 'system' |  | ||||||
| 
 |  | ||||||
| # ------ tool.uv ------ |  | ||||||
| 
 |  | ||||||
| [tool.hatch.build.targets.sdist] |  | ||||||
| include = ["tractor"] |  | ||||||
| 
 |  | ||||||
| [tool.hatch.build.targets.wheel] |  | ||||||
| include = ["tractor"] |  | ||||||
| 
 |  | ||||||
| # ------ tool.hatch ------ |  | ||||||
| 
 |  | ||||||
| [tool.towncrier] | [tool.towncrier] | ||||||
| package = "tractor" | package = "tractor" | ||||||
| filename = "NEWS.rst" | filename = "NEWS.rst" | ||||||
|  | @ -121,27 +7,26 @@ title_format = "tractor {version} ({project_date})" | ||||||
| template = "nooz/_template.rst" | template = "nooz/_template.rst" | ||||||
| all_bullets = true | all_bullets = true | ||||||
| 
 | 
 | ||||||
| [[tool.towncrier.type]] |   [[tool.towncrier.type]] | ||||||
|   directory = "feature" |   directory = "feature" | ||||||
|   name = "Features" |   name = "Features" | ||||||
|   showcontent = true |   showcontent = true | ||||||
| 
 | 
 | ||||||
| [[tool.towncrier.type]] |   [[tool.towncrier.type]] | ||||||
|   directory = "bugfix" |   directory = "bugfix" | ||||||
|   name = "Bug Fixes" |   name = "Bug Fixes" | ||||||
|   showcontent = true |   showcontent = true | ||||||
| 
 | 
 | ||||||
| [[tool.towncrier.type]] |   [[tool.towncrier.type]] | ||||||
|   directory = "doc" |   directory = "doc" | ||||||
|   name = "Improved Documentation" |   name = "Improved Documentation" | ||||||
|   showcontent = true |   showcontent = true | ||||||
| 
 | 
 | ||||||
| [[tool.towncrier.type]] |   [[tool.towncrier.type]] | ||||||
|   directory = "trivial" |   directory = "trivial" | ||||||
|   name = "Trivial/Internal Changes" |   name = "Trivial/Internal Changes" | ||||||
|   showcontent = true |   showcontent = true | ||||||
| 
 | 
 | ||||||
| # ------ tool.towncrier ------ |  | ||||||
| 
 | 
 | ||||||
| [tool.pytest.ini_options] | [tool.pytest.ini_options] | ||||||
| minversion = '6.0' | minversion = '6.0' | ||||||
|  | @ -157,8 +42,7 @@ addopts = [ | ||||||
|   '--show-capture=no', |   '--show-capture=no', | ||||||
| ] | ] | ||||||
| log_cli = false | log_cli = false | ||||||
|  | 
 | ||||||
| # TODO: maybe some of these layout choices? | # TODO: maybe some of these layout choices? | ||||||
| # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||||
| # pythonpath = "src" | # pythonpath = "src" | ||||||
| 
 |  | ||||||
| # ------ tool.pytest ------ |  | ||||||
|  |  | ||||||
|  | @ -1,8 +0,0 @@ | ||||||
| # vim: ft=ini |  | ||||||
| # pytest.ini for tractor |  | ||||||
| 
 |  | ||||||
| [pytest] |  | ||||||
| # don't show frickin captured logs AGAIN in the report.. |  | ||||||
| addopts = --show-capture='no' |  | ||||||
| log_cli = false |  | ||||||
| ; minversion = 6.0 |  | ||||||
|  | @ -0,0 +1,2 @@ | ||||||
|  | sphinx | ||||||
|  | sphinx_book_theme | ||||||
|  | @ -0,0 +1,8 @@ | ||||||
|  | pytest | ||||||
|  | pytest-trio | ||||||
|  | pytest-timeout | ||||||
|  | pdbp | ||||||
|  | mypy | ||||||
|  | trio_typing | ||||||
|  | pexpect | ||||||
|  | towncrier | ||||||
							
								
								
									
										82
									
								
								ruff.toml
								
								
								
								
							
							
						
						
									
										82
									
								
								ruff.toml
								
								
								
								
							|  | @ -1,82 +0,0 @@ | ||||||
| # from default `ruff.toml` @ |  | ||||||
| # https://docs.astral.sh/ruff/configuration/ |  | ||||||
| 
 |  | ||||||
| # Exclude a variety of commonly ignored directories. |  | ||||||
| exclude = [ |  | ||||||
|     ".bzr", |  | ||||||
|     ".direnv", |  | ||||||
|     ".eggs", |  | ||||||
|     ".git", |  | ||||||
|     ".git-rewrite", |  | ||||||
|     ".hg", |  | ||||||
|     ".ipynb_checkpoints", |  | ||||||
|     ".mypy_cache", |  | ||||||
|     ".nox", |  | ||||||
|     ".pants.d", |  | ||||||
|     ".pyenv", |  | ||||||
|     ".pytest_cache", |  | ||||||
|     ".pytype", |  | ||||||
|     ".ruff_cache", |  | ||||||
|     ".svn", |  | ||||||
|     ".tox", |  | ||||||
|     ".venv", |  | ||||||
|     ".vscode", |  | ||||||
|     "__pypackages__", |  | ||||||
|     "_build", |  | ||||||
|     "buck-out", |  | ||||||
|     "build", |  | ||||||
|     "dist", |  | ||||||
|     "node_modules", |  | ||||||
|     "site-packages", |  | ||||||
|     "venv", |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| # Same as Black. |  | ||||||
| line-length = 88 |  | ||||||
| indent-width = 4 |  | ||||||
| 
 |  | ||||||
| # Assume Python 3.9 |  | ||||||
| target-version = "py311" |  | ||||||
| 
 |  | ||||||
| [lint] |  | ||||||
| # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`)  codes by default. |  | ||||||
| # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or |  | ||||||
| # McCabe complexity (`C901`) by default. |  | ||||||
| select = ["E4", "E7", "E9", "F"] |  | ||||||
| ignore = [ |  | ||||||
|   'E402',  # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/ |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| # Allow fix for all enabled rules (when `--fix`) is provided. |  | ||||||
| fixable = ["ALL"] |  | ||||||
| unfixable = [] |  | ||||||
| 
 |  | ||||||
| # Allow unused variables when underscore-prefixed. |  | ||||||
| # dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" |  | ||||||
| 
 |  | ||||||
| [format] |  | ||||||
| # Use single quotes in `ruff format`. |  | ||||||
| quote-style = "single" |  | ||||||
| 
 |  | ||||||
| # Like Black, indent with spaces, rather than tabs. |  | ||||||
| indent-style = "space" |  | ||||||
| 
 |  | ||||||
| # Like Black, respect magic trailing commas. |  | ||||||
| skip-magic-trailing-comma = false |  | ||||||
| 
 |  | ||||||
| # Like Black, automatically detect the appropriate line ending. |  | ||||||
| line-ending = "auto" |  | ||||||
| 
 |  | ||||||
| # Enable auto-formatting of code examples in docstrings. Markdown, |  | ||||||
| # reStructuredText code/literal blocks and doctests are all supported. |  | ||||||
| # |  | ||||||
| # This is currently disabled by default, but it is planned for this |  | ||||||
| # to be opt-out in the future. |  | ||||||
| docstring-code-format = false |  | ||||||
| 
 |  | ||||||
| # Set the line length limit used when formatting code snippets in |  | ||||||
| # docstrings. |  | ||||||
| # |  | ||||||
| # This only has an effect when the `docstring-code-format` setting is |  | ||||||
| # enabled. |  | ||||||
| docstring-code-line-length = "dynamic" |  | ||||||
|  | @ -0,0 +1,101 @@ | ||||||
|  | #!/usr/bin/env python | ||||||
|  | # | ||||||
|  | # tractor: structured concurrent "actors". | ||||||
|  | # | ||||||
|  | # Copyright 2018-eternity Tyler Goodlet. | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | from setuptools import setup | ||||||
|  | 
 | ||||||
|  | with open('docs/README.rst', encoding='utf-8') as f: | ||||||
|  |     readme = f.read() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | setup( | ||||||
|  |     name="tractor", | ||||||
|  |     version='0.1.0a6dev0',  # alpha zone | ||||||
|  |     description='structured concurrrent `trio`-"actors"', | ||||||
|  |     long_description=readme, | ||||||
|  |     license='AGPLv3', | ||||||
|  |     author='Tyler Goodlet', | ||||||
|  |     maintainer='Tyler Goodlet', | ||||||
|  |     maintainer_email='goodboy_foss@protonmail.com', | ||||||
|  |     url='https://github.com/goodboy/tractor', | ||||||
|  |     platforms=['linux', 'windows'], | ||||||
|  |     packages=[ | ||||||
|  |         'tractor', | ||||||
|  |         'tractor.experimental',  # wacky ideas | ||||||
|  |         'tractor.trionics',  # trio extensions | ||||||
|  |         'tractor.msg',  # lowlevel data types | ||||||
|  |     ], | ||||||
|  |     install_requires=[ | ||||||
|  | 
 | ||||||
|  |         # trio related | ||||||
|  |         # proper range spec: | ||||||
|  |         # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||||
|  |         'trio >= 0.24', | ||||||
|  | 
 | ||||||
|  |         # 'async_generator',  # in stdlib mostly! | ||||||
|  |         # 'trio_typing',  # trio==0.23.0 has type hints! | ||||||
|  |         # 'exceptiongroup',  # in stdlib as of 3.11! | ||||||
|  | 
 | ||||||
|  |         # tooling | ||||||
|  |         'tricycle', | ||||||
|  |         'trio_typing', | ||||||
|  |         'colorlog', | ||||||
|  |         'wrapt', | ||||||
|  | 
 | ||||||
|  |         # IPC serialization | ||||||
|  |         'msgspec', | ||||||
|  | 
 | ||||||
|  |         # debug mode REPL | ||||||
|  |         'pdbp', | ||||||
|  | 
 | ||||||
|  |         # pip ref docs on these specs: | ||||||
|  |         # https://pip.pypa.io/en/stable/reference/requirement-specifiers/#examples | ||||||
|  |         # and pep: | ||||||
|  |         # https://peps.python.org/pep-0440/#version-specifiers | ||||||
|  | 
 | ||||||
|  |         # windows deps workaround for ``pdbpp`` | ||||||
|  |         # https://github.com/pdbpp/pdbpp/issues/498 | ||||||
|  |         # https://github.com/pdbpp/fancycompleter/issues/37 | ||||||
|  |         'pyreadline3 ; platform_system == "Windows"', | ||||||
|  | 
 | ||||||
|  |     ], | ||||||
|  |     tests_require=['pytest'], | ||||||
|  |     python_requires=">=3.10", | ||||||
|  |     keywords=[ | ||||||
|  |         'trio', | ||||||
|  |         'async', | ||||||
|  |         'concurrency', | ||||||
|  |         'structured concurrency', | ||||||
|  |         'actor model', | ||||||
|  |         'distributed', | ||||||
|  |         'multiprocessing' | ||||||
|  |     ], | ||||||
|  |     classifiers=[ | ||||||
|  |         "Development Status :: 3 - Alpha", | ||||||
|  |         "Operating System :: POSIX :: Linux", | ||||||
|  |         "Operating System :: Microsoft :: Windows", | ||||||
|  |         "Framework :: Trio", | ||||||
|  |         "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||||
|  |         "Programming Language :: Python :: Implementation :: CPython", | ||||||
|  |         "Programming Language :: Python :: 3 :: Only", | ||||||
|  |         "Programming Language :: Python :: 3.10", | ||||||
|  |         "Intended Audience :: Science/Research", | ||||||
|  |         "Intended Audience :: Developers", | ||||||
|  |         "Topic :: System :: Distributed Computing", | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | @ -1,27 +1,24 @@ | ||||||
| """ | """ | ||||||
| Top level of the testing suites! | ``tractor`` testing!! | ||||||
| 
 |  | ||||||
| """ | """ | ||||||
| from __future__ import annotations |  | ||||||
| import sys | import sys | ||||||
| import subprocess | import subprocess | ||||||
| import os | import os | ||||||
|  | import random | ||||||
| import signal | import signal | ||||||
| import platform | import platform | ||||||
| import time | import time | ||||||
| 
 | 
 | ||||||
| import pytest | import pytest | ||||||
|  | import tractor | ||||||
| from tractor._testing import ( | from tractor._testing import ( | ||||||
|     examples_dir as examples_dir, |     examples_dir as examples_dir, | ||||||
|     tractor_test as tractor_test, |     tractor_test as tractor_test, | ||||||
|     expect_ctxc as expect_ctxc, |     expect_ctxc as expect_ctxc, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| pytest_plugins: list[str] = [ | # TODO: include wtv plugin(s) we build in `._testing.pytest`? | ||||||
|     'pytester', | pytest_plugins = ['pytester'] | ||||||
|     'tractor._testing.pytest', |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | ||||||
| if platform.system() == 'Windows': | if platform.system() == 'Windows': | ||||||
|  | @ -33,11 +30,7 @@ else: | ||||||
|     _KILL_SIGNAL = signal.SIGKILL |     _KILL_SIGNAL = signal.SIGKILL | ||||||
|     _INT_SIGNAL = signal.SIGINT |     _INT_SIGNAL = signal.SIGINT | ||||||
|     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value |     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value | ||||||
|     _PROC_SPAWN_WAIT = ( |     _PROC_SPAWN_WAIT = 0.6 if sys.version_info < (3, 7) else 0.4 | ||||||
|         0.6 |  | ||||||
|         if sys.version_info < (3, 7) |  | ||||||
|         else 0.4 |  | ||||||
|     ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| no_windows = pytest.mark.skipif( | no_windows = pytest.mark.skipif( | ||||||
|  | @ -46,23 +39,26 @@ no_windows = pytest.mark.skipif( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def pytest_addoption( | def pytest_addoption(parser): | ||||||
|     parser: pytest.Parser, |  | ||||||
| ): |  | ||||||
|     # ?TODO? should this be exposed from our `._testing.pytest` |  | ||||||
|     # plugin or should we make it more explicit with `--tl` for |  | ||||||
|     # tractor logging like we do in other client projects? |  | ||||||
|     parser.addoption( |     parser.addoption( | ||||||
|         "--ll", |         "--ll", action="store", dest='loglevel', | ||||||
|         action="store", |  | ||||||
|         dest='loglevel', |  | ||||||
|         default='ERROR', help="logging level to set when testing" |         default='ERROR', help="logging level to set when testing" | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|  |     parser.addoption( | ||||||
|  |         "--spawn-backend", action="store", dest='spawn_backend', | ||||||
|  |         default='trio', | ||||||
|  |         help="Processing spawning backend to use for test run", | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def pytest_configure(config): | ||||||
|  |     backend = config.option.spawn_backend | ||||||
|  |     tractor._spawn.try_set_start_method(backend) | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.fixture(scope='session', autouse=True) | @pytest.fixture(scope='session', autouse=True) | ||||||
| def loglevel(request): | def loglevel(request): | ||||||
|     import tractor |  | ||||||
|     orig = tractor.log._default_loglevel |     orig = tractor.log._default_loglevel | ||||||
|     level = tractor.log._default_loglevel = request.config.option.loglevel |     level = tractor.log._default_loglevel = request.config.option.loglevel | ||||||
|     tractor.log.get_console_log(level) |     tractor.log.get_console_log(level) | ||||||
|  | @ -70,148 +66,109 @@ def loglevel(request): | ||||||
|     tractor.log._default_loglevel = orig |     tractor.log._default_loglevel = orig | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @pytest.fixture(scope='session') | ||||||
|  | def spawn_backend(request) -> str: | ||||||
|  |     return request.config.option.spawn_backend | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| _ci_env: bool = os.environ.get('CI', False) | _ci_env: bool = os.environ.get('CI', False) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.fixture(scope='session') | @pytest.fixture(scope='session') | ||||||
| def ci_env() -> bool: | def ci_env() -> bool: | ||||||
|     ''' |     ''' | ||||||
|     Detect CI environment. |     Detect CI envoirment. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     return _ci_env |     return _ci_env | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def sig_prog( | # TODO: also move this to `._testing` for now? | ||||||
|     proc: subprocess.Popen, | # -[ ] possibly generalize and re-use for multi-tree spawning | ||||||
|     sig: int, | #    along with the new stuff for multi-addrs in distribute_dis | ||||||
|     canc_timeout: float = 0.1, | #    branch? | ||||||
| ) -> int: | # | ||||||
|  | # choose randomly at import time | ||||||
|  | _reg_addr: tuple[str, int] = ( | ||||||
|  |     '127.0.0.1', | ||||||
|  |     random.randint(1000, 9999), | ||||||
|  | ) | ||||||
|  | _arb_addr = _reg_addr | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.fixture(scope='session') | ||||||
|  | def arb_addr(): | ||||||
|  |     return _arb_addr | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def pytest_generate_tests(metafunc): | ||||||
|  |     spawn_backend = metafunc.config.option.spawn_backend | ||||||
|  | 
 | ||||||
|  |     if not spawn_backend: | ||||||
|  |         # XXX some weird windows bug with `pytest`? | ||||||
|  |         spawn_backend = 'trio' | ||||||
|  | 
 | ||||||
|  |     # TODO: maybe just use the literal `._spawn.SpawnMethodKey`? | ||||||
|  |     assert spawn_backend in ( | ||||||
|  |         'mp_spawn', | ||||||
|  |         'mp_forkserver', | ||||||
|  |         'trio', | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # NOTE: used to be used to dyanmically parametrize tests for when | ||||||
|  |     # you just passed --spawn-backend=`mp` on the cli, but now we expect | ||||||
|  |     # that cli input to be manually specified, BUT, maybe we'll do | ||||||
|  |     # something like this again in the future? | ||||||
|  |     if 'start_method' in metafunc.fixturenames: | ||||||
|  |         metafunc.parametrize("start_method", [spawn_backend], scope='module') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def sig_prog(proc, sig): | ||||||
|     "Kill the actor-process with ``sig``." |     "Kill the actor-process with ``sig``." | ||||||
|     proc.send_signal(sig) |     proc.send_signal(sig) | ||||||
|     time.sleep(canc_timeout) |     time.sleep(0.1) | ||||||
|     if not proc.poll(): |     if not proc.poll(): | ||||||
|         # TODO: why sometimes does SIGINT not work on teardown? |         # TODO: why sometimes does SIGINT not work on teardown? | ||||||
|         # seems to happen only when trace logging enabled? |         # seems to happen only when trace logging enabled? | ||||||
|         proc.send_signal(_KILL_SIGNAL) |         proc.send_signal(_KILL_SIGNAL) | ||||||
|     ret: int = proc.wait() |     ret = proc.wait() | ||||||
|     assert ret |     assert ret | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: factor into @cm and move to `._testing`? | # TODO: factor into @cm and move to `._testing`? | ||||||
| @pytest.fixture | @pytest.fixture | ||||||
| def daemon( | def daemon( | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |     loglevel: str, | ||||||
|     testdir: pytest.Pytester, |     testdir, | ||||||
|     reg_addr: tuple[str, int], |     arb_addr: tuple[str, int], | ||||||
|     tpt_proto: str, | ): | ||||||
| 
 |  | ||||||
| ) -> subprocess.Popen: |  | ||||||
|     ''' |     ''' | ||||||
|     Run a daemon root actor as a separate actor-process tree and |     Run a daemon actor as a "remote arbiter". | ||||||
|     "remote registrar" for discovery-protocol related tests. |  | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     if loglevel in ('trace', 'debug'): |     if loglevel in ('trace', 'debug'): | ||||||
|         # XXX: too much logging will lock up the subproc (smh) |         # too much logging will lock up the subproc (smh) | ||||||
|         loglevel: str = 'info' |         loglevel = 'info' | ||||||
| 
 | 
 | ||||||
|     code: str = ( |     cmdargs = [ | ||||||
|         "import tractor; " |         sys.executable, '-c', | ||||||
|         "tractor.run_daemon([], " |         "import tractor; tractor.run_daemon([], registry_addr={}, loglevel={})" | ||||||
|         "registry_addrs={reg_addrs}, " |         .format( | ||||||
|         "debug_mode={debug_mode}, " |             arb_addr, | ||||||
|         "loglevel={ll})" |             "'{}'".format(loglevel) if loglevel else None) | ||||||
|     ).format( |  | ||||||
|         reg_addrs=str([reg_addr]), |  | ||||||
|         ll="'{}'".format(loglevel) if loglevel else None, |  | ||||||
|         debug_mode=debug_mode, |  | ||||||
|     ) |  | ||||||
|     cmd: list[str] = [ |  | ||||||
|         sys.executable, |  | ||||||
|         '-c', code, |  | ||||||
|     ] |     ] | ||||||
|     # breakpoint() |     kwargs = dict() | ||||||
|     kwargs = {} |  | ||||||
|     if platform.system() == 'Windows': |     if platform.system() == 'Windows': | ||||||
|         # without this, tests hang on windows forever |         # without this, tests hang on windows forever | ||||||
|         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP |         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP | ||||||
| 
 | 
 | ||||||
|     proc: subprocess.Popen = testdir.popen( |     proc = testdir.popen( | ||||||
|         cmd, |         cmdargs, | ||||||
|  |         stdout=subprocess.PIPE, | ||||||
|  |         stderr=subprocess.PIPE, | ||||||
|         **kwargs, |         **kwargs, | ||||||
|     ) |     ) | ||||||
| 
 |  | ||||||
|     # UDS sockets are **really** fast to bind()/listen()/connect() |  | ||||||
|     # so it's often required that we delay a bit more starting |  | ||||||
|     # the first actor-tree.. |  | ||||||
|     if tpt_proto == 'uds': |  | ||||||
|         global _PROC_SPAWN_WAIT |  | ||||||
|         _PROC_SPAWN_WAIT = 0.6 |  | ||||||
| 
 |  | ||||||
|     time.sleep(_PROC_SPAWN_WAIT) |  | ||||||
| 
 |  | ||||||
|     assert not proc.returncode |     assert not proc.returncode | ||||||
|  |     time.sleep(_PROC_SPAWN_WAIT) | ||||||
|     yield proc |     yield proc | ||||||
|     sig_prog(proc, _INT_SIGNAL) |     sig_prog(proc, _INT_SIGNAL) | ||||||
| 
 |  | ||||||
|     # XXX! yeah.. just be reaaal careful with this bc sometimes it |  | ||||||
|     # can lock up on the `_io.BufferedReader` and hang.. |  | ||||||
|     stderr: str = proc.stderr.read().decode() |  | ||||||
|     if stderr: |  | ||||||
|         print( |  | ||||||
|             f'Daemon actor tree produced STDERR:\n' |  | ||||||
|             f'{proc.args}\n' |  | ||||||
|             f'\n' |  | ||||||
|             f'{stderr}\n' |  | ||||||
|         ) |  | ||||||
|     if proc.returncode != -2: |  | ||||||
|         raise RuntimeError( |  | ||||||
|             'Daemon actor tree failed !?\n' |  | ||||||
|             f'{proc.args}\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # @pytest.fixture(autouse=True) |  | ||||||
| # def shared_last_failed(pytestconfig): |  | ||||||
| #     val = pytestconfig.cache.get("example/value", None) |  | ||||||
| #     breakpoint() |  | ||||||
| #     if val is None: |  | ||||||
| #         pytestconfig.cache.set("example/value", val) |  | ||||||
| #     return val |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO: a way to let test scripts (like from `examples/`) |  | ||||||
| # guarantee they won't `registry_addrs` collide! |  | ||||||
| # -[ ] maybe use some kinda standard `def main()` arg-spec that |  | ||||||
| #     we can introspect from a fixture that is called from the test |  | ||||||
| #     body? |  | ||||||
| # -[ ] test and figure out typing for below prototype! Bp |  | ||||||
| # |  | ||||||
| # @pytest.fixture |  | ||||||
| # def set_script_runtime_args( |  | ||||||
| #     reg_addr: tuple, |  | ||||||
| # ) -> Callable[[...], None]: |  | ||||||
| 
 |  | ||||||
| #     def import_n_partial_in_args_n_triorun( |  | ||||||
| #         script: Path,  # under examples? |  | ||||||
| #         **runtime_args, |  | ||||||
| #     ) -> Callable[[], Any]:  # a `partial`-ed equiv of `trio.run()` |  | ||||||
| 
 |  | ||||||
| #         # NOTE, below is taken from |  | ||||||
| #         # `.test_advanced_faults.test_ipc_channel_break_during_stream` |  | ||||||
| #         mod: ModuleType = import_path( |  | ||||||
| #             examples_dir() / 'advanced_faults' |  | ||||||
| #             / 'ipc_failure_during_stream.py', |  | ||||||
| #             root=examples_dir(), |  | ||||||
| #             consider_namespace_packages=False, |  | ||||||
| #         ) |  | ||||||
| #         return partial( |  | ||||||
| #             trio.run, |  | ||||||
| #             partial( |  | ||||||
| #                 mod.main, |  | ||||||
| #                 **runtime_args, |  | ||||||
| #             ) |  | ||||||
| #         ) |  | ||||||
| #     return import_n_partial_in_args_n_triorun |  | ||||||
|  |  | ||||||
|  | @ -1,253 +0,0 @@ | ||||||
| ''' |  | ||||||
| `tractor.devx.*` tooling sub-pkg test space. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| import time |  | ||||||
| from typing import ( |  | ||||||
|     Callable, |  | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| from pexpect.exceptions import ( |  | ||||||
|     TIMEOUT, |  | ||||||
| ) |  | ||||||
| from pexpect.spawnbase import SpawnBase |  | ||||||
| 
 |  | ||||||
| from tractor._testing import ( |  | ||||||
|     mk_cmd, |  | ||||||
| ) |  | ||||||
| from tractor.devx.debug import ( |  | ||||||
|     _pause_msg as _pause_msg, |  | ||||||
|     _crash_msg as _crash_msg, |  | ||||||
|     _repl_fail_msg as _repl_fail_msg, |  | ||||||
|     _ctlc_ignore_header as _ctlc_ignore_header, |  | ||||||
| ) |  | ||||||
| from ..conftest import ( |  | ||||||
|     _ci_env, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from pexpect import pty_spawn |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # a fn that sub-instantiates a `pexpect.spawn()` |  | ||||||
| # and returns it. |  | ||||||
| type PexpectSpawner = Callable[[str], pty_spawn.spawn] |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture |  | ||||||
| def spawn( |  | ||||||
|     start_method: str, |  | ||||||
|     testdir: pytest.Pytester, |  | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
| 
 |  | ||||||
| ) -> PexpectSpawner: |  | ||||||
|     ''' |  | ||||||
|     Use the `pexpect` module shipped via `testdir.spawn()` to |  | ||||||
|     run an `./examples/..` script by name. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if start_method != 'trio': |  | ||||||
|         pytest.skip( |  | ||||||
|             '`pexpect` based tests only supported on `trio` backend' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     def unset_colors(): |  | ||||||
|         ''' |  | ||||||
|         Python 3.13 introduced colored tracebacks that break patt |  | ||||||
|         matching, |  | ||||||
| 
 |  | ||||||
|         https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS |  | ||||||
|         https://docs.python.org/3/using/cmdline.html#using-on-controlling-color |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         import os |  | ||||||
|         os.environ['PYTHON_COLORS'] = '0' |  | ||||||
| 
 |  | ||||||
|     def _spawn( |  | ||||||
|         cmd: str, |  | ||||||
|         **mkcmd_kwargs, |  | ||||||
|     ) -> pty_spawn.spawn: |  | ||||||
|         unset_colors() |  | ||||||
|         return testdir.spawn( |  | ||||||
|             cmd=mk_cmd( |  | ||||||
|                 cmd, |  | ||||||
|                 **mkcmd_kwargs, |  | ||||||
|             ), |  | ||||||
|             expect_timeout=3, |  | ||||||
|             # preexec_fn=unset_colors, |  | ||||||
|             # ^TODO? get `pytest` core to expose underlying |  | ||||||
|             # `pexpect.spawn()` stuff? |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # such that test-dep can pass input script name. |  | ||||||
|     return _spawn  # the `PexpectSpawner`, type alias. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture( |  | ||||||
|     params=[False, True], |  | ||||||
|     ids='ctl-c={}'.format, |  | ||||||
| ) |  | ||||||
| def ctlc( |  | ||||||
|     request, |  | ||||||
|     ci_env: bool, |  | ||||||
| 
 |  | ||||||
| ) -> bool: |  | ||||||
| 
 |  | ||||||
|     use_ctlc = request.param |  | ||||||
| 
 |  | ||||||
|     node = request.node |  | ||||||
|     markers = node.own_markers |  | ||||||
|     for mark in markers: |  | ||||||
|         if mark.name == 'has_nested_actors': |  | ||||||
|             pytest.skip( |  | ||||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' |  | ||||||
|                 f'The test can sometimes run fine locally but until' |  | ||||||
|                 ' we solve' 'this issue this CI test will be xfail:\n' |  | ||||||
|                 'https://github.com/goodboy/tractor/issues/320' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         if mark.name == 'ctlcs_bish': |  | ||||||
|             pytest.skip( |  | ||||||
|                 f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n' |  | ||||||
|                 f'The test and/or underlying example script can *sometimes* run fine ' |  | ||||||
|                 f'locally but more then likely until the cpython peeps get their sh#$ together, ' |  | ||||||
|                 f'this test will definitely not behave like `trio` under SIGINT..\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     if use_ctlc: |  | ||||||
|         # XXX: disable pygments highlighting for auto-tests |  | ||||||
|         # since some envs (like actions CI) will struggle |  | ||||||
|         # the the added color-char encoding.. |  | ||||||
|         from tractor.devx.debug import TractorConfig |  | ||||||
|         TractorConfig.use_pygements = False |  | ||||||
| 
 |  | ||||||
|     yield use_ctlc |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def expect( |  | ||||||
|     child, |  | ||||||
| 
 |  | ||||||
|     # normally a `pdb` prompt by default |  | ||||||
|     patt: str, |  | ||||||
| 
 |  | ||||||
|     **kwargs, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Expect wrapper that prints last seen console |  | ||||||
|     data before failing. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     try: |  | ||||||
|         child.expect( |  | ||||||
|             patt, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|     except TIMEOUT: |  | ||||||
|         before = str(child.before.decode()) |  | ||||||
|         print(before) |  | ||||||
|         raise |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| PROMPT = r"\(Pdb\+\)" |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def in_prompt_msg( |  | ||||||
|     child: SpawnBase, |  | ||||||
|     parts: list[str], |  | ||||||
| 
 |  | ||||||
|     pause_on_false: bool = False, |  | ||||||
|     err_on_false: bool = False, |  | ||||||
|     print_prompt_on_false: bool = True, |  | ||||||
| 
 |  | ||||||
| ) -> bool: |  | ||||||
|     ''' |  | ||||||
|     Predicate check if (the prompt's) std-streams output has all |  | ||||||
|     `str`-parts in it. |  | ||||||
| 
 |  | ||||||
|     Can be used in test asserts for bulk matching expected |  | ||||||
|     log/REPL output for a given `pdb` interact point. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     __tracebackhide__: bool = False |  | ||||||
| 
 |  | ||||||
|     before: str = str(child.before.decode()) |  | ||||||
|     for part in parts: |  | ||||||
|         if part not in before: |  | ||||||
|             if pause_on_false: |  | ||||||
|                 import pdbp |  | ||||||
|                 pdbp.set_trace() |  | ||||||
| 
 |  | ||||||
|             if print_prompt_on_false: |  | ||||||
|                 print(before) |  | ||||||
| 
 |  | ||||||
|             if err_on_false: |  | ||||||
|                 raise ValueError( |  | ||||||
|                     f'Could not find pattern in `before` output?\n' |  | ||||||
|                     f'part: {part!r}\n' |  | ||||||
|                 ) |  | ||||||
|             return False |  | ||||||
| 
 |  | ||||||
|     return True |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO: todo support terminal color-chars stripping so we can match |  | ||||||
| # against call stack frame output from the the 'll' command the like! |  | ||||||
| # -[ ] SO answer for stipping ANSI codes: https://stackoverflow.com/a/14693789 |  | ||||||
| def assert_before( |  | ||||||
|     child: SpawnBase, |  | ||||||
|     patts: list[str], |  | ||||||
| 
 |  | ||||||
|     **kwargs, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     __tracebackhide__: bool = False |  | ||||||
| 
 |  | ||||||
|     assert in_prompt_msg( |  | ||||||
|         child=child, |  | ||||||
|         parts=patts, |  | ||||||
| 
 |  | ||||||
|         # since this is an "assert" helper ;) |  | ||||||
|         err_on_false=True, |  | ||||||
|         **kwargs |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def do_ctlc( |  | ||||||
|     child, |  | ||||||
|     count: int = 3, |  | ||||||
|     delay: float = 0.1, |  | ||||||
|     patt: str|None = None, |  | ||||||
| 
 |  | ||||||
|     # expect repl UX to reprint the prompt after every |  | ||||||
|     # ctrl-c send. |  | ||||||
|     # XXX: no idea but, in CI this never seems to work even on 3.10 so |  | ||||||
|     # needs some further investigation potentially... |  | ||||||
|     expect_prompt: bool = not _ci_env, |  | ||||||
| 
 |  | ||||||
| ) -> str|None: |  | ||||||
| 
 |  | ||||||
|     before: str|None = None |  | ||||||
| 
 |  | ||||||
|     # make sure ctl-c sends don't do anything but repeat output |  | ||||||
|     for _ in range(count): |  | ||||||
|         time.sleep(delay) |  | ||||||
|         child.sendcontrol('c') |  | ||||||
| 
 |  | ||||||
|         # TODO: figure out why this makes CI fail.. |  | ||||||
|         # if you run this test manually it works just fine.. |  | ||||||
|         if expect_prompt: |  | ||||||
|             time.sleep(delay) |  | ||||||
|             child.expect(PROMPT) |  | ||||||
|             before = str(child.before.decode()) |  | ||||||
|             time.sleep(delay) |  | ||||||
| 
 |  | ||||||
|             if patt: |  | ||||||
|                 # should see the last line on console |  | ||||||
|                 assert patt in before |  | ||||||
| 
 |  | ||||||
|     # return the console content up to the final prompt |  | ||||||
|     return before |  | ||||||
|  | @ -1,381 +0,0 @@ | ||||||
| ''' |  | ||||||
| That "foreign loop/thread" debug REPL support better ALSO WORK! |  | ||||||
| 
 |  | ||||||
| Same as `test_native_pause.py`. |  | ||||||
| All these tests can be understood (somewhat) by running the |  | ||||||
| equivalent `examples/debugging/` scripts manually. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
| ) |  | ||||||
| # from functools import partial |  | ||||||
| # import itertools |  | ||||||
| import time |  | ||||||
| # from typing import ( |  | ||||||
| #     Iterator, |  | ||||||
| # ) |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| from pexpect.exceptions import ( |  | ||||||
|     TIMEOUT, |  | ||||||
|     EOF, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from .conftest import ( |  | ||||||
|     # _ci_env, |  | ||||||
|     do_ctlc, |  | ||||||
|     PROMPT, |  | ||||||
|     # expect, |  | ||||||
|     in_prompt_msg, |  | ||||||
|     assert_before, |  | ||||||
|     _pause_msg, |  | ||||||
|     _crash_msg, |  | ||||||
|     _ctlc_ignore_header, |  | ||||||
|     # _repl_fail_msg, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| @cm |  | ||||||
| def maybe_expect_timeout( |  | ||||||
|     ctlc: bool = False, |  | ||||||
| ) -> None: |  | ||||||
|     try: |  | ||||||
|         yield |  | ||||||
|     except TIMEOUT: |  | ||||||
|         # breakpoint() |  | ||||||
|         if ctlc: |  | ||||||
|             pytest.xfail( |  | ||||||
|                 'Some kinda redic threading SIGINT bug i think?\n' |  | ||||||
|                 'See the notes in `examples/debugging/sync_bp.py`..\n' |  | ||||||
|             ) |  | ||||||
|         raise |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.ctlcs_bish |  | ||||||
| def test_pause_from_sync( |  | ||||||
|     spawn, |  | ||||||
|     ctlc: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify we can use the `pdbp` REPL from sync functions AND from |  | ||||||
|     any thread spawned with `trio.to_thread.run_sync()`. |  | ||||||
| 
 |  | ||||||
|     `examples/debugging/sync_bp.py` |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     child = spawn('sync_bp') |  | ||||||
| 
 |  | ||||||
|     # first `sync_pause()` after nurseries open |  | ||||||
|     child.expect(PROMPT) |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             # pre-prompt line |  | ||||||
|             _pause_msg, |  | ||||||
|             "<Task '__main__.main'", |  | ||||||
|             "('root'", |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
|     if ctlc: |  | ||||||
|         do_ctlc(child) |  | ||||||
|         # ^NOTE^ subactor not spawned yet; don't need extra delay. |  | ||||||
| 
 |  | ||||||
|     child.sendline('c') |  | ||||||
| 
 |  | ||||||
|     # first `await tractor.pause()` inside `p.open_context()` body |  | ||||||
|     child.expect(PROMPT) |  | ||||||
| 
 |  | ||||||
|     # XXX shouldn't see gb loaded message with PDB loglevel! |  | ||||||
|     # assert not in_prompt_msg( |  | ||||||
|     #     child, |  | ||||||
|     #     ['`greenback` portal opened!'], |  | ||||||
|     # ) |  | ||||||
|     # should be same root task |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             _pause_msg, |  | ||||||
|             "<Task '__main__.main'", |  | ||||||
|             "('root'", |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     if ctlc: |  | ||||||
|         do_ctlc( |  | ||||||
|             child, |  | ||||||
|             # NOTE: setting this to 0 (or some other sufficient |  | ||||||
|             # small val) can cause the test to fail since the |  | ||||||
|             # `subactor` suffers a race where the root/parent |  | ||||||
|             # sends an actor-cancel prior to it hitting its pause |  | ||||||
|             # point; by def the value is 0.1 |  | ||||||
|             delay=0.4, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # XXX, fwiw without a brief sleep here the SIGINT might actually |  | ||||||
|     # trigger "subactor" cancellation by its parent  before the |  | ||||||
|     # shield-handler is engaged. |  | ||||||
|     # |  | ||||||
|     # => similar to the `delay` input to `do_ctlc()` below, setting |  | ||||||
|     # this too low can cause the test to fail since the `subactor` |  | ||||||
|     # suffers a race where the root/parent sends an actor-cancel |  | ||||||
|     # prior to the context task hitting its pause point (and thus |  | ||||||
|     # engaging the `sigint_shield()` handler in time); this value |  | ||||||
|     # seems be good enuf? |  | ||||||
|     time.sleep(0.6) |  | ||||||
| 
 |  | ||||||
|     # one of the bg thread or subactor should have |  | ||||||
|     # `Lock.acquire()`-ed |  | ||||||
|     # (NOT both, which will result in REPL clobbering!) |  | ||||||
|     attach_patts: dict[str, list[str]] = { |  | ||||||
|         'subactor': [ |  | ||||||
|             "'start_n_sync_pause'", |  | ||||||
|             "('subactor'", |  | ||||||
|         ], |  | ||||||
|         'inline_root_bg_thread': [ |  | ||||||
|             "<Thread(inline_root_bg_thread", |  | ||||||
|             "('root'", |  | ||||||
|         ], |  | ||||||
|         'start_soon_root_bg_thread': [ |  | ||||||
|             "<Thread(start_soon_root_bg_thread", |  | ||||||
|             "('root'", |  | ||||||
|         ], |  | ||||||
|     } |  | ||||||
|     conts: int = 0  # for debugging below matching logic on failure |  | ||||||
|     while attach_patts: |  | ||||||
|         child.sendline('c') |  | ||||||
|         conts += 1 |  | ||||||
|         child.expect(PROMPT) |  | ||||||
|         before = str(child.before.decode()) |  | ||||||
|         for key in attach_patts: |  | ||||||
|             if key in before: |  | ||||||
|                 attach_key: str = key |  | ||||||
|                 expected_patts: str = attach_patts.pop(key) |  | ||||||
|                 assert_before( |  | ||||||
|                     child, |  | ||||||
|                     [_pause_msg] |  | ||||||
|                     + |  | ||||||
|                     expected_patts |  | ||||||
|                 ) |  | ||||||
|                 break |  | ||||||
|         else: |  | ||||||
|             pytest.fail( |  | ||||||
|                 f'No keys found?\n\n' |  | ||||||
|                 f'{attach_patts.keys()}\n\n' |  | ||||||
|                 f'{before}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         # ensure no other task/threads engaged a REPL |  | ||||||
|         # at the same time as the one that was detected above. |  | ||||||
|         for key, other_patts in attach_patts.copy().items(): |  | ||||||
|             assert not in_prompt_msg( |  | ||||||
|                 child, |  | ||||||
|                 other_patts, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         if ctlc: |  | ||||||
|             do_ctlc( |  | ||||||
|                 child, |  | ||||||
|                 patt=attach_key, |  | ||||||
|                 # NOTE same as comment above |  | ||||||
|                 delay=0.4, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     child.sendline('c') |  | ||||||
| 
 |  | ||||||
|     # XXX TODO, weird threading bug it seems despite the |  | ||||||
|     # `abandon_on_cancel: bool` setting to |  | ||||||
|     # `trio.to_thread.run_sync()`.. |  | ||||||
|     with maybe_expect_timeout( |  | ||||||
|         ctlc=ctlc, |  | ||||||
|     ): |  | ||||||
|         child.expect(EOF) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def expect_any_of( |  | ||||||
|     attach_patts: dict[str, list[str]], |  | ||||||
|     child,   # what type? |  | ||||||
|     ctlc: bool = False, |  | ||||||
|     prompt: str = _ctlc_ignore_header, |  | ||||||
|     ctlc_delay: float = .4, |  | ||||||
| 
 |  | ||||||
| ) -> list[str]: |  | ||||||
|     ''' |  | ||||||
|     Receive any of a `list[str]` of patterns provided in |  | ||||||
|     `attach_patts`. |  | ||||||
| 
 |  | ||||||
|     Used to test racing prompts from multiple actors and/or |  | ||||||
|     tasks using a common root process' `pdbp` REPL. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     assert attach_patts |  | ||||||
| 
 |  | ||||||
|     child.expect(PROMPT) |  | ||||||
|     before = str(child.before.decode()) |  | ||||||
| 
 |  | ||||||
|     for attach_key in attach_patts: |  | ||||||
|         if attach_key in before: |  | ||||||
|             expected_patts: str = attach_patts.pop(attach_key) |  | ||||||
|             assert_before( |  | ||||||
|                 child, |  | ||||||
|                 expected_patts |  | ||||||
|             ) |  | ||||||
|             break  # from for |  | ||||||
|     else: |  | ||||||
|         pytest.fail( |  | ||||||
|             f'No keys found?\n\n' |  | ||||||
|             f'{attach_patts.keys()}\n\n' |  | ||||||
|             f'{before}\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # ensure no other task/threads engaged a REPL |  | ||||||
|     # at the same time as the one that was detected above. |  | ||||||
|     for key, other_patts in attach_patts.copy().items(): |  | ||||||
|         assert not in_prompt_msg( |  | ||||||
|             child, |  | ||||||
|             other_patts, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     if ctlc: |  | ||||||
|         do_ctlc( |  | ||||||
|             child, |  | ||||||
|             patt=prompt, |  | ||||||
|             # NOTE same as comment above |  | ||||||
|             delay=ctlc_delay, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     return expected_patts |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.ctlcs_bish |  | ||||||
| def test_sync_pause_from_aio_task( |  | ||||||
|     spawn, |  | ||||||
| 
 |  | ||||||
|     ctlc: bool |  | ||||||
|     # ^TODO, fix for `asyncio`!! |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify we can use the `pdbp` REPL from an `asyncio.Task` spawned using |  | ||||||
|     APIs in `.to_asyncio`. |  | ||||||
| 
 |  | ||||||
|     `examples/debugging/asycio_bp.py` |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     child = spawn('asyncio_bp') |  | ||||||
| 
 |  | ||||||
|     # RACE on whether trio/asyncio task bps first |  | ||||||
|     attach_patts: dict[str, list[str]] = { |  | ||||||
| 
 |  | ||||||
|         # first pause in guest-mode (aka "infecting") |  | ||||||
|         # `trio.Task`. |  | ||||||
|         'trio-side': [ |  | ||||||
|             _pause_msg, |  | ||||||
|             "<Task 'trio_ctx'", |  | ||||||
|             "('aio_daemon'", |  | ||||||
|         ], |  | ||||||
| 
 |  | ||||||
|         # `breakpoint()` from `asyncio.Task`. |  | ||||||
|         'asyncio-side': [ |  | ||||||
|             _pause_msg, |  | ||||||
|             "<Task pending name='Task-2' coro=<greenback_shim()", |  | ||||||
|             "('aio_daemon'", |  | ||||||
|         ], |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     while attach_patts: |  | ||||||
|         expect_any_of( |  | ||||||
|             attach_patts=attach_patts, |  | ||||||
|             child=child, |  | ||||||
|             ctlc=ctlc, |  | ||||||
|         ) |  | ||||||
|         child.sendline('c') |  | ||||||
| 
 |  | ||||||
|     # NOW in race order, |  | ||||||
|     # - the asyncio-task will error |  | ||||||
|     # - the root-actor parent task will pause |  | ||||||
|     # |  | ||||||
|     attach_patts: dict[str, list[str]] = { |  | ||||||
| 
 |  | ||||||
|         # error raised in `asyncio.Task` |  | ||||||
|         "raise ValueError('asyncio side error!')": [ |  | ||||||
|             _crash_msg, |  | ||||||
|             "<Task 'trio_ctx'", |  | ||||||
|             "@ ('aio_daemon'", |  | ||||||
|             "ValueError: asyncio side error!", |  | ||||||
| 
 |  | ||||||
|             # XXX, we no longer show this frame by default! |  | ||||||
|             # 'return await chan.receive()',  # `.to_asyncio` impl internals in tb |  | ||||||
|         ], |  | ||||||
| 
 |  | ||||||
|         # parent-side propagation via actor-nursery/portal |  | ||||||
|         # "tractor._exceptions.RemoteActorError: remote task raised a 'ValueError'": [ |  | ||||||
|         "remote task raised a 'ValueError'": [ |  | ||||||
|             _crash_msg, |  | ||||||
|             "src_uid=('aio_daemon'", |  | ||||||
|             "('aio_daemon'", |  | ||||||
|         ], |  | ||||||
| 
 |  | ||||||
|         # a final pause in root-actor |  | ||||||
|         "<Task '__main__.main'": [ |  | ||||||
|             _pause_msg, |  | ||||||
|             "<Task '__main__.main'", |  | ||||||
|             "('root'", |  | ||||||
|         ], |  | ||||||
|     } |  | ||||||
|     while attach_patts: |  | ||||||
|         expect_any_of( |  | ||||||
|             attach_patts=attach_patts, |  | ||||||
|             child=child, |  | ||||||
|             ctlc=ctlc, |  | ||||||
|         ) |  | ||||||
|         child.sendline('c') |  | ||||||
| 
 |  | ||||||
|     assert not attach_patts |  | ||||||
| 
 |  | ||||||
|     # final boxed error propagates to root |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             _crash_msg, |  | ||||||
|             "<Task '__main__.main'", |  | ||||||
|             "('root'", |  | ||||||
|             "remote task raised a 'ValueError'", |  | ||||||
|             "ValueError: asyncio side error!", |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     if ctlc: |  | ||||||
|         do_ctlc( |  | ||||||
|             child, |  | ||||||
|             # NOTE: setting this to 0 (or some other sufficient |  | ||||||
|             # small val) can cause the test to fail since the |  | ||||||
|             # `subactor` suffers a race where the root/parent |  | ||||||
|             # sends an actor-cancel prior to it hitting its pause |  | ||||||
|             # point; by def the value is 0.1 |  | ||||||
|             delay=0.4, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     child.sendline('c') |  | ||||||
|     # with maybe_expect_timeout(): |  | ||||||
|     child.expect(EOF) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_sync_pause_from_non_greenbacked_aio_task(): |  | ||||||
|     ''' |  | ||||||
|     Where the `breakpoint()` caller task is NOT spawned by |  | ||||||
|     `tractor.to_asyncio` and thus never activates |  | ||||||
|     a `greenback.ensure_portal()` beforehand, presumably bc the task |  | ||||||
|     was started by some lib/dep as in often seen in the field. |  | ||||||
| 
 |  | ||||||
|     Ensure sync pausing works when the pause is in, |  | ||||||
| 
 |  | ||||||
|     - the root actor running in infected-mode? |  | ||||||
|       |_ since we don't need any IPC to acquire the debug lock? |  | ||||||
|       |_ is there some way to handle this like the non-main-thread case? |  | ||||||
| 
 |  | ||||||
|     All other cases need to error out appropriately right? |  | ||||||
| 
 |  | ||||||
|     - for any subactor we can't avoid needing the repl lock.. |  | ||||||
|       |_ is there a way to hook into `asyncio.ensure_future(obj)`? |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     pass |  | ||||||
|  | @ -1,306 +0,0 @@ | ||||||
| ''' |  | ||||||
| That "native" runtime-hackin toolset better be dang useful! |  | ||||||
| 
 |  | ||||||
| Verify the funtion of a variety of "developer-experience" tools we |  | ||||||
| offer from the `.devx` sub-pkg: |  | ||||||
| 
 |  | ||||||
| - use of the lovely `stackscope` for dumping actor `trio`-task trees |  | ||||||
|   during operation and hangs. |  | ||||||
| 
 |  | ||||||
| TODO: |  | ||||||
| - demonstration of `CallerInfo` call stack frame filtering such that |  | ||||||
|   for logging and REPL purposes a user sees exactly the layers needed |  | ||||||
|   when debugging a problem inside the stack vs. in their app. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
| ) |  | ||||||
| import os |  | ||||||
| import signal |  | ||||||
| import time |  | ||||||
| from typing import ( |  | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from .conftest import ( |  | ||||||
|     expect, |  | ||||||
|     assert_before, |  | ||||||
|     in_prompt_msg, |  | ||||||
|     PROMPT, |  | ||||||
|     _pause_msg, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| from pexpect.exceptions import ( |  | ||||||
|     # TIMEOUT, |  | ||||||
|     EOF, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from ..conftest import PexpectSpawner |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_shield_pause( |  | ||||||
|     spawn: PexpectSpawner, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify the `tractor.pause()/.post_mortem()` API works inside an |  | ||||||
|     already cancelled `trio.CancelScope` and that you can step to the |  | ||||||
|     next checkpoint wherein the cancelled will get raised. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     child = spawn( |  | ||||||
|         'shield_hang_in_sub' |  | ||||||
|     ) |  | ||||||
|     expect( |  | ||||||
|         child, |  | ||||||
|         'Yo my child hanging..?', |  | ||||||
|     ) |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             'Entering shield sleep..', |  | ||||||
|             'Enabling trace-trees on `SIGUSR1` since `stackscope` is installed @', |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     script_pid: int = child.pid |  | ||||||
|     print( |  | ||||||
|         f'Sending SIGUSR1 to {script_pid}\n' |  | ||||||
|         f'(kill -s SIGUSR1 {script_pid})\n' |  | ||||||
|     ) |  | ||||||
|     os.kill( |  | ||||||
|         script_pid, |  | ||||||
|         signal.SIGUSR1, |  | ||||||
|     ) |  | ||||||
|     time.sleep(0.2) |  | ||||||
|     expect( |  | ||||||
|         child, |  | ||||||
|         # end-of-tree delimiter |  | ||||||
|         "end-of-\('root'", |  | ||||||
|     ) |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             # 'Srying to dump `stackscope` tree..', |  | ||||||
|             # 'Dumping `stackscope` tree for actor', |  | ||||||
|             "('root'",  # uid line |  | ||||||
| 
 |  | ||||||
|             # TODO!? this used to show? |  | ||||||
|             # -[ ] mk reproducable for @oremanj? |  | ||||||
|             # |  | ||||||
|             # parent block point (non-shielded) |  | ||||||
|             # 'await trio.sleep_forever()  # in root', |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
|     expect( |  | ||||||
|         child, |  | ||||||
|         # end-of-tree delimiter |  | ||||||
|         "end-of-\('hanger'", |  | ||||||
|     ) |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             # relay to the sub should be reported |  | ||||||
|             'Relaying `SIGUSR1`[10] to sub-actor', |  | ||||||
| 
 |  | ||||||
|             "('hanger'",  # uid line |  | ||||||
| 
 |  | ||||||
|             # TODO!? SEE ABOVE |  | ||||||
|             # hanger LOC where it's shield-halted |  | ||||||
|             # 'await trio.sleep_forever()  # in subactor', |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # simulate the user sending a ctl-c to the hanging program. |  | ||||||
|     # this should result in the terminator kicking in since |  | ||||||
|     # the sub is shield blocking and can't respond to SIGINT. |  | ||||||
|     os.kill( |  | ||||||
|         child.pid, |  | ||||||
|         signal.SIGINT, |  | ||||||
|     ) |  | ||||||
|     from tractor._supervise import _shutdown_msg |  | ||||||
|     expect( |  | ||||||
|         child, |  | ||||||
|         # 'Shutting down actor runtime', |  | ||||||
|         _shutdown_msg, |  | ||||||
|         timeout=6, |  | ||||||
|     ) |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             'raise KeyboardInterrupt', |  | ||||||
|             # 'Shutting down actor runtime', |  | ||||||
|             '#T-800 deployed to collect zombie B0', |  | ||||||
|             "'--uid', \"('hanger',", |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_breakpoint_hook_restored( |  | ||||||
|     spawn: PexpectSpawner, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Ensures our actor runtime sets a custom `breakpoint()` hook |  | ||||||
|     on open then restores the stdlib's default on close. |  | ||||||
| 
 |  | ||||||
|     The hook state validation is done via `assert`s inside the |  | ||||||
|     invoked script with only `breakpoint()` (not `tractor.pause()`) |  | ||||||
|     calls used. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     child = spawn('restore_builtin_breakpoint') |  | ||||||
| 
 |  | ||||||
|     child.expect(PROMPT) |  | ||||||
|     try: |  | ||||||
|         assert_before( |  | ||||||
|             child, |  | ||||||
|             [ |  | ||||||
|                 _pause_msg, |  | ||||||
|                 "<Task '__main__.main'", |  | ||||||
|                 "('root'", |  | ||||||
|                 "first bp, tractor hook set", |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|     # XXX if the above raises `AssertionError`, without sending |  | ||||||
|     # the final 'continue' cmd to the REPL-active sub-process, |  | ||||||
|     # we'll hang waiting for that pexpect instance to terminate.. |  | ||||||
|     finally: |  | ||||||
|         child.sendline('c') |  | ||||||
| 
 |  | ||||||
|     child.expect(PROMPT) |  | ||||||
|     assert_before( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             "last bp, stdlib hook restored", |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # since the stdlib hook was already restored there should be NO |  | ||||||
|     # `tractor` `log.pdb()` content from console! |  | ||||||
|     assert not in_prompt_msg( |  | ||||||
|         child, |  | ||||||
|         [ |  | ||||||
|             _pause_msg, |  | ||||||
|             "<Task '__main__.main'", |  | ||||||
|             "('root'", |  | ||||||
|         ], |  | ||||||
|     ) |  | ||||||
|     child.sendline('c') |  | ||||||
|     child.expect(EOF) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _to_raise = Exception('Triggering a crash') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'to_raise', |  | ||||||
|     [ |  | ||||||
|         None, |  | ||||||
|         _to_raise, |  | ||||||
|         RuntimeError('Never crash handle this!'), |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'raise_on_exit', |  | ||||||
|     [ |  | ||||||
|         True, |  | ||||||
|         [type(_to_raise)], |  | ||||||
|         False, |  | ||||||
|     ] |  | ||||||
| ) |  | ||||||
| def test_crash_handler_cms( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     to_raise: Exception, |  | ||||||
|     raise_on_exit: bool|list[Exception], |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify the `.devx.open_crash_handler()` API(s) by also |  | ||||||
|     (conveniently enough) tesing its `repl_fixture: ContextManager` |  | ||||||
|     param support which for this suite allows use to avoid use of |  | ||||||
|     a `pexpect`-style-test since we use the fixture to avoid actually |  | ||||||
|     entering `PdbpREPL.iteract()` :smirk: |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     import tractor |  | ||||||
|     # import trio |  | ||||||
| 
 |  | ||||||
|     # state flags |  | ||||||
|     repl_acquired: bool = False |  | ||||||
|     repl_released: bool = False |  | ||||||
| 
 |  | ||||||
|     @cm |  | ||||||
|     def block_repl_ux( |  | ||||||
|         repl: tractor.devx.debug.PdbREPL, |  | ||||||
|         maybe_bxerr: ( |  | ||||||
|             tractor.devx._debug.BoxedMaybeException |  | ||||||
|             |None |  | ||||||
|         ) = None, |  | ||||||
|         enter_repl: bool = True, |  | ||||||
| 
 |  | ||||||
|     ) -> bool: |  | ||||||
|         ''' |  | ||||||
|         Set pre/post-REPL state vars and bypass actual conole |  | ||||||
|         interaction. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         nonlocal repl_acquired, repl_released |  | ||||||
| 
 |  | ||||||
|         # task: trio.Task = trio.lowlevel.current_task() |  | ||||||
|         # print(f'pre-REPL active_task={task.name}') |  | ||||||
| 
 |  | ||||||
|         print('pre-REPL') |  | ||||||
|         repl_acquired = True |  | ||||||
|         yield False  # never actually .interact() |  | ||||||
|         print('post-REPL') |  | ||||||
|         repl_released = True |  | ||||||
| 
 |  | ||||||
|     try: |  | ||||||
|         # TODO, with runtime's `debug_mode` setting |  | ||||||
|         # -[ ] need to open runtime tho obvi.. |  | ||||||
|         # |  | ||||||
|         # with tractor.devx.maybe_open_crash_handler( |  | ||||||
|         #     pdb=True, |  | ||||||
| 
 |  | ||||||
|         with tractor.devx.open_crash_handler( |  | ||||||
|             raise_on_exit=raise_on_exit, |  | ||||||
|             repl_fixture=block_repl_ux |  | ||||||
|         ) as bxerr: |  | ||||||
|             if to_raise is not None: |  | ||||||
|                 raise to_raise |  | ||||||
| 
 |  | ||||||
|     except Exception as _exc: |  | ||||||
|         exc = _exc |  | ||||||
|         if ( |  | ||||||
|             raise_on_exit is True |  | ||||||
|             or |  | ||||||
|             type(to_raise) in raise_on_exit |  | ||||||
|         ): |  | ||||||
|             assert ( |  | ||||||
|                 exc |  | ||||||
|                 is |  | ||||||
|                 to_raise |  | ||||||
|                 is |  | ||||||
|                 bxerr.value |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             raise |  | ||||||
|     else: |  | ||||||
|         assert ( |  | ||||||
|             to_raise is None |  | ||||||
|             or |  | ||||||
|             not raise_on_exit |  | ||||||
|             or |  | ||||||
|             type(to_raise) not in raise_on_exit |  | ||||||
|         ) |  | ||||||
|         assert bxerr.value is to_raise |  | ||||||
| 
 |  | ||||||
|     assert bxerr.raise_on_exit == raise_on_exit |  | ||||||
| 
 |  | ||||||
|     if to_raise is not None: |  | ||||||
|         assert repl_acquired |  | ||||||
|         assert repl_released |  | ||||||
|  | @ -1,4 +0,0 @@ | ||||||
| ''' |  | ||||||
| `tractor.ipc` subsystem(s)/unit testing suites. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
|  | @ -1,114 +0,0 @@ | ||||||
| ''' |  | ||||||
| Unit-ish tests for specific IPC transport protocol backends. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| from pathlib import Path |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Actor, |  | ||||||
|     _state, |  | ||||||
|     _addr, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture |  | ||||||
| def bindspace_dir_str() -> str: |  | ||||||
| 
 |  | ||||||
|     rt_dir: Path = tractor._state.get_rt_dir() |  | ||||||
|     bs_dir: Path = rt_dir / 'doggy' |  | ||||||
|     bs_dir_str: str = str(bs_dir) |  | ||||||
|     assert not bs_dir.is_dir() |  | ||||||
| 
 |  | ||||||
|     yield bs_dir_str |  | ||||||
| 
 |  | ||||||
|     # delete it on suite teardown. |  | ||||||
|     # ?TODO? should we support this internally |  | ||||||
|     # or is leaking it ok? |  | ||||||
|     if bs_dir.is_dir(): |  | ||||||
|         bs_dir.rmdir() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_uds_bindspace_created_implicitly( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     bindspace_dir_str: str, |  | ||||||
| ): |  | ||||||
|     registry_addr: tuple = ( |  | ||||||
|         f'{bindspace_dir_str}', |  | ||||||
|         'registry@doggy.sock', |  | ||||||
|     ) |  | ||||||
|     bs_dir_str: str = registry_addr[0] |  | ||||||
| 
 |  | ||||||
|     # XXX, ensure bindspace-dir DNE beforehand! |  | ||||||
|     assert not Path(bs_dir_str).is_dir() |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             enable_transports=['uds'], |  | ||||||
|             registry_addrs=[registry_addr], |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|         ) as _an: |  | ||||||
| 
 |  | ||||||
|             # XXX MUST be created implicitly by |  | ||||||
|             # `.ipc._uds.start_listener()`! |  | ||||||
|             assert Path(bs_dir_str).is_dir() |  | ||||||
| 
 |  | ||||||
|             root: Actor = tractor.current_actor() |  | ||||||
|             assert root.is_registrar |  | ||||||
| 
 |  | ||||||
|             assert registry_addr in root.reg_addrs |  | ||||||
|             assert ( |  | ||||||
|                 registry_addr |  | ||||||
|                 in |  | ||||||
|                 _state._runtime_vars['_registry_addrs'] |  | ||||||
|             ) |  | ||||||
|             assert ( |  | ||||||
|                 _addr.wrap_address(registry_addr) |  | ||||||
|                 in |  | ||||||
|                 root.registry_addrs |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_uds_double_listen_raises_connerr( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     bindspace_dir_str: str, |  | ||||||
| ): |  | ||||||
|     registry_addr: tuple = ( |  | ||||||
|         f'{bindspace_dir_str}', |  | ||||||
|         'registry@doggy.sock', |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             enable_transports=['uds'], |  | ||||||
|             registry_addrs=[registry_addr], |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|         ) as _an: |  | ||||||
| 
 |  | ||||||
|             # runtime up |  | ||||||
|             root: Actor = tractor.current_actor() |  | ||||||
| 
 |  | ||||||
|             from tractor.ipc._uds import ( |  | ||||||
|                 start_listener, |  | ||||||
|                 UDSAddress, |  | ||||||
|             ) |  | ||||||
|             ya_bound_addr: UDSAddress = root.registry_addrs[0] |  | ||||||
|             try: |  | ||||||
|                 await start_listener( |  | ||||||
|                     addr=ya_bound_addr, |  | ||||||
|                 ) |  | ||||||
|             except ConnectionError as connerr: |  | ||||||
|                 assert type(src_exc := connerr.__context__) is OSError |  | ||||||
|                 assert 'Address already in use' in src_exc.args |  | ||||||
|                 # complete, exit test. |  | ||||||
| 
 |  | ||||||
|             else: |  | ||||||
|                 pytest.fail('It dint raise a connerr !?') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,95 +0,0 @@ | ||||||
| ''' |  | ||||||
| Verify the `enable_transports` param drives various |  | ||||||
| per-root/sub-actor IPC endpoint/server settings. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Actor, |  | ||||||
|     Portal, |  | ||||||
|     ipc, |  | ||||||
|     msg, |  | ||||||
|     _state, |  | ||||||
|     _addr, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def chk_tpts( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     tpt_proto_key: str, |  | ||||||
| ): |  | ||||||
|     rtvars = _state._runtime_vars |  | ||||||
|     assert ( |  | ||||||
|         tpt_proto_key |  | ||||||
|         in |  | ||||||
|         rtvars['_enable_tpts'] |  | ||||||
|     ) |  | ||||||
|     actor: Actor = tractor.current_actor() |  | ||||||
|     spec: msg.types.SpawnSpec = actor._spawn_spec |  | ||||||
|     assert spec._runtime_vars == rtvars |  | ||||||
| 
 |  | ||||||
|     # ensure individual IPC ep-addr types |  | ||||||
|     serv: ipc._server.Server = actor.ipc_server |  | ||||||
|     addr: ipc._types.Address |  | ||||||
|     for addr in serv.addrs: |  | ||||||
|         assert addr.proto_key == tpt_proto_key |  | ||||||
| 
 |  | ||||||
|     # Actor delegate-props enforcement |  | ||||||
|     assert ( |  | ||||||
|         actor.accept_addrs |  | ||||||
|         == |  | ||||||
|         serv.accept_addrs |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     await ctx.started(serv.accept_addrs) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO, parametrize over mis-matched-proto-typed `registry_addrs` |  | ||||||
| # since i seems to work in `piker` but not exactly sure if both tcp |  | ||||||
| # & uds are being deployed then? |  | ||||||
| # |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'tpt_proto_key', |  | ||||||
|     ['tcp', 'uds'], |  | ||||||
|     ids=lambda item: f'ipc_tpt={item!r}' |  | ||||||
| ) |  | ||||||
| def test_root_passes_tpt_to_sub( |  | ||||||
|     tpt_proto_key: str, |  | ||||||
|     reg_addr: tuple, |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             enable_transports=[tpt_proto_key], |  | ||||||
|             registry_addrs=[reg_addr], |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|         ) as an: |  | ||||||
| 
 |  | ||||||
|             assert ( |  | ||||||
|                 tpt_proto_key |  | ||||||
|                 in |  | ||||||
|                 _state._runtime_vars['_enable_tpts'] |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             ptl: Portal = await an.start_actor( |  | ||||||
|                 name='sub', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             async with ptl.open_context( |  | ||||||
|                 chk_tpts, |  | ||||||
|                 tpt_proto_key=tpt_proto_key, |  | ||||||
|             ) as (ctx, accept_addrs): |  | ||||||
| 
 |  | ||||||
|                 uw_addr: tuple |  | ||||||
|                 for uw_addr in accept_addrs: |  | ||||||
|                     addr = _addr.wrap_address(uw_addr) |  | ||||||
|                     assert addr.is_valid |  | ||||||
| 
 |  | ||||||
|             # shudown sub-actor(s) |  | ||||||
|             await an.cancel() |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,72 +0,0 @@ | ||||||
| ''' |  | ||||||
| High-level `.ipc._server` unit tests. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| from tractor import ( |  | ||||||
|     devx, |  | ||||||
|     ipc, |  | ||||||
|     log, |  | ||||||
| ) |  | ||||||
| from tractor._testing.addr import ( |  | ||||||
|     get_rando_addr, |  | ||||||
| ) |  | ||||||
| # TODO, use/check-roundtripping with some of these wrapper types? |  | ||||||
| # |  | ||||||
| # from .._addr import Address |  | ||||||
| # from ._chan import Channel |  | ||||||
| # from ._transport import MsgTransport |  | ||||||
| # from ._uds import UDSAddress |  | ||||||
| # from ._tcp import TCPAddress |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     '_tpt_proto', |  | ||||||
|     ['uds', 'tcp'] |  | ||||||
| ) |  | ||||||
| def test_basic_ipc_server( |  | ||||||
|     _tpt_proto: str, |  | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |  | ||||||
| ): |  | ||||||
| 
 |  | ||||||
|     # so we see the socket-listener reporting on console |  | ||||||
|     log.get_console_log("INFO") |  | ||||||
| 
 |  | ||||||
|     rando_addr: tuple = get_rando_addr( |  | ||||||
|         tpt_proto=_tpt_proto, |  | ||||||
|     ) |  | ||||||
|     async def main(): |  | ||||||
|         async with ipc._server.open_ipc_server() as server: |  | ||||||
| 
 |  | ||||||
|             assert ( |  | ||||||
|                 server._parent_tn |  | ||||||
|                 and |  | ||||||
|                 server._parent_tn is server._stream_handler_tn |  | ||||||
|             ) |  | ||||||
|             assert server._no_more_peers.is_set() |  | ||||||
| 
 |  | ||||||
|             eps: list[ipc._server.Endpoint] = await server.listen_on( |  | ||||||
|                 accept_addrs=[rando_addr], |  | ||||||
|                 stream_handler_nursery=None, |  | ||||||
|             ) |  | ||||||
|             assert ( |  | ||||||
|                 len(eps) == 1 |  | ||||||
|                 and |  | ||||||
|                 (ep := eps[0])._listener |  | ||||||
|                 and |  | ||||||
|                 not ep.peer_tpts |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             server._parent_tn.cancel_scope.cancel() |  | ||||||
| 
 |  | ||||||
|         # !TODO! actually make a bg-task connection from a client |  | ||||||
|         # using `ipc._chan._connect_chan()` |  | ||||||
| 
 |  | ||||||
|     with devx.maybe_open_crash_handler( |  | ||||||
|         pdb=debug_mode, |  | ||||||
|     ): |  | ||||||
|         trio.run(main) |  | ||||||
|  | @ -3,6 +3,7 @@ Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la | ||||||
| cancelacion?.. | cancelacion?.. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
|  | import itertools | ||||||
| from functools import partial | from functools import partial | ||||||
| from types import ModuleType | from types import ModuleType | ||||||
| 
 | 
 | ||||||
|  | @ -10,12 +11,8 @@ import pytest | ||||||
| from _pytest.pathlib import import_path | from _pytest.pathlib import import_path | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor import ( |  | ||||||
|     TransportClosed, |  | ||||||
| ) |  | ||||||
| from tractor._testing import ( | from tractor._testing import ( | ||||||
|     examples_dir, |     examples_dir, | ||||||
|     break_ipc, |  | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -77,7 +74,6 @@ def test_ipc_channel_break_during_stream( | ||||||
|     spawn_backend: str, |     spawn_backend: str, | ||||||
|     ipc_break: dict|None, |     ipc_break: dict|None, | ||||||
|     pre_aclose_msgstream: bool, |     pre_aclose_msgstream: bool, | ||||||
|     tpt_proto: str, |  | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Ensure we can have an IPC channel break its connection during |     Ensure we can have an IPC channel break its connection during | ||||||
|  | @ -94,22 +90,17 @@ def test_ipc_channel_break_during_stream( | ||||||
| 
 | 
 | ||||||
|         # non-`trio` spawners should never hit the hang condition that |         # non-`trio` spawners should never hit the hang condition that | ||||||
|         # requires the user to do ctl-c to cancel the actor tree. |         # requires the user to do ctl-c to cancel the actor tree. | ||||||
|         # expect_final_exc = trio.ClosedResourceError |         expect_final_exc = trio.ClosedResourceError | ||||||
|         expect_final_exc = TransportClosed |  | ||||||
| 
 | 
 | ||||||
|     mod: ModuleType = import_path( |     mod: ModuleType = import_path( | ||||||
|         examples_dir() / 'advanced_faults' |         examples_dir() / 'advanced_faults' / 'ipc_failure_during_stream.py', | ||||||
|         / 'ipc_failure_during_stream.py', |  | ||||||
|         root=examples_dir(), |         root=examples_dir(), | ||||||
|         consider_namespace_packages=False, |  | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     # by def we expect KBI from user after a simulated "hang |     # by def we expect KBI from user after a simulated "hang | ||||||
|     # period" wherein the user eventually hits ctl-c to kill the |     # period" wherein the user eventually hits ctl-c to kill the | ||||||
|     # root-actor tree. |     # root-actor tree. | ||||||
|     expect_final_exc: BaseException = KeyboardInterrupt |     expect_final_exc: BaseException = KeyboardInterrupt | ||||||
|     expect_final_cause: BaseException|None = None |  | ||||||
| 
 |  | ||||||
|     if ( |     if ( | ||||||
|         # only expect EoC if trans is broken on the child side, |         # only expect EoC if trans is broken on the child side, | ||||||
|         ipc_break['break_child_ipc_after'] is not False |         ipc_break['break_child_ipc_after'] is not False | ||||||
|  | @ -144,9 +135,6 @@ def test_ipc_channel_break_during_stream( | ||||||
|         # a user sending ctl-c by raising a KBI. |         # a user sending ctl-c by raising a KBI. | ||||||
|         if pre_aclose_msgstream: |         if pre_aclose_msgstream: | ||||||
|             expect_final_exc = KeyboardInterrupt |             expect_final_exc = KeyboardInterrupt | ||||||
|             if tpt_proto == 'uds': |  | ||||||
|                 expect_final_exc = TransportClosed |  | ||||||
|                 expect_final_cause = trio.BrokenResourceError |  | ||||||
| 
 | 
 | ||||||
|             # XXX OLD XXX |             # XXX OLD XXX | ||||||
|             # if child calls `MsgStream.aclose()` then expect EoC. |             # if child calls `MsgStream.aclose()` then expect EoC. | ||||||
|  | @ -166,11 +154,7 @@ def test_ipc_channel_break_during_stream( | ||||||
|         if pre_aclose_msgstream: |         if pre_aclose_msgstream: | ||||||
|             expect_final_exc = KeyboardInterrupt |             expect_final_exc = KeyboardInterrupt | ||||||
| 
 | 
 | ||||||
|             if tpt_proto == 'uds': |     # NOTE when the parent IPC side dies (even if the child's does as well | ||||||
|                 expect_final_exc = TransportClosed |  | ||||||
|                 expect_final_cause = trio.BrokenResourceError |  | ||||||
| 
 |  | ||||||
|     # NOTE when the parent IPC side dies (even if the child does as well |  | ||||||
|     # but the child fails BEFORE the parent) we always expect the |     # but the child fails BEFORE the parent) we always expect the | ||||||
|     # IPC layer to raise a closed-resource, NEVER do we expect |     # IPC layer to raise a closed-resource, NEVER do we expect | ||||||
|     # a stop msg since the parent-side ctx apis will error out |     # a stop msg since the parent-side ctx apis will error out | ||||||
|  | @ -182,8 +166,7 @@ def test_ipc_channel_break_during_stream( | ||||||
|         and |         and | ||||||
|         ipc_break['break_child_ipc_after'] is False |         ipc_break['break_child_ipc_after'] is False | ||||||
|     ): |     ): | ||||||
|         expect_final_exc = tractor.TransportClosed |         expect_final_exc = trio.ClosedResourceError | ||||||
|         expect_final_cause = trio.ClosedResourceError |  | ||||||
| 
 | 
 | ||||||
|     # BOTH but, PARENT breaks FIRST |     # BOTH but, PARENT breaks FIRST | ||||||
|     elif ( |     elif ( | ||||||
|  | @ -194,8 +177,7 @@ def test_ipc_channel_break_during_stream( | ||||||
|             ipc_break['break_parent_ipc_after'] |             ipc_break['break_parent_ipc_after'] | ||||||
|         ) |         ) | ||||||
|     ): |     ): | ||||||
|         expect_final_exc = tractor.TransportClosed |         expect_final_exc = trio.ClosedResourceError | ||||||
|         expect_final_cause = trio.ClosedResourceError |  | ||||||
| 
 | 
 | ||||||
|     with pytest.raises( |     with pytest.raises( | ||||||
|         expected_exception=( |         expected_exception=( | ||||||
|  | @ -211,12 +193,11 @@ def test_ipc_channel_break_during_stream( | ||||||
|                     start_method=spawn_backend, |                     start_method=spawn_backend, | ||||||
|                     loglevel=loglevel, |                     loglevel=loglevel, | ||||||
|                     pre_close=pre_aclose_msgstream, |                     pre_close=pre_aclose_msgstream, | ||||||
|                     tpt_proto=tpt_proto, |  | ||||||
|                     **ipc_break, |                     **ipc_break, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|         except KeyboardInterrupt as _kbi: |         except KeyboardInterrupt as kbi: | ||||||
|             kbi = _kbi |             _err = kbi | ||||||
|             if expect_final_exc is not KeyboardInterrupt: |             if expect_final_exc is not KeyboardInterrupt: | ||||||
|                 pytest.fail( |                 pytest.fail( | ||||||
|                     'Rxed unexpected KBI !?\n' |                     'Rxed unexpected KBI !?\n' | ||||||
|  | @ -225,33 +206,16 @@ def test_ipc_channel_break_during_stream( | ||||||
| 
 | 
 | ||||||
|             raise |             raise | ||||||
| 
 | 
 | ||||||
|         except tractor.TransportClosed as _tc: |  | ||||||
|             tc = _tc |  | ||||||
|             if expect_final_exc is KeyboardInterrupt: |  | ||||||
|                 pytest.fail( |  | ||||||
|                     'Unexpected transport failure !?\n' |  | ||||||
|                     f'{repr(tc)}' |  | ||||||
|                 ) |  | ||||||
|             cause: Exception = tc.__cause__ |  | ||||||
|             assert ( |  | ||||||
|                 # type(cause) is trio.ClosedResourceError |  | ||||||
|                 type(cause) is expect_final_cause |  | ||||||
| 
 |  | ||||||
|                 # TODO, should we expect a certain exc-message (per |  | ||||||
|                 # tpt) as well?? |  | ||||||
|                 # and |  | ||||||
|                 # cause.args[0] == 'another task closed this fd' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             raise |  | ||||||
| 
 |  | ||||||
|     # get raw instance from pytest wrapper |     # get raw instance from pytest wrapper | ||||||
|     value = excinfo.value |     value = excinfo.value | ||||||
|     if isinstance(value, ExceptionGroup): |     if isinstance(value, ExceptionGroup): | ||||||
|         excs = value.exceptions |         value = next( | ||||||
|         assert len(excs) == 1 |             itertools.dropwhile( | ||||||
|         final_exc = excs[0] |                 lambda exc: not isinstance(exc, expect_final_exc), | ||||||
|         assert isinstance(final_exc, expect_final_exc) |                 value.exceptions, | ||||||
|  |             ) | ||||||
|  |         ) | ||||||
|  |         assert value | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -260,30 +224,23 @@ async def break_ipc_after_started( | ||||||
| ) -> None: | ) -> None: | ||||||
|     await ctx.started() |     await ctx.started() | ||||||
|     async with ctx.open_stream() as stream: |     async with ctx.open_stream() as stream: | ||||||
| 
 |         await stream.aclose() | ||||||
|         # TODO: make a test which verifies the error |         await trio.sleep(0.2) | ||||||
|         # for this, i.e. raises a `MsgTypeError` |         await ctx.chan.send(None) | ||||||
|         # await ctx.chan.send(None) |  | ||||||
| 
 |  | ||||||
|         await break_ipc( |  | ||||||
|             stream=stream, |  | ||||||
|             pre_close=True, |  | ||||||
|         ) |  | ||||||
|         print('child broke IPC and terminating') |         print('child broke IPC and terminating') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | ||||||
|     ''' |     ''' | ||||||
|     Verify that is a subactor's IPC goes down just after bringing up |     Verify that is a subactor's IPC goes down just after bringing up a stream | ||||||
|     a stream the parent can trigger a SIGINT and the child will be |     the parent can trigger a SIGINT and the child will be reaped out-of-IPC by | ||||||
|     reaped out-of-IPC by the localhost process supervision machinery: |     the localhost process supervision machinery: aka "zombie lord". | ||||||
|     aka "zombie lord". |  | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     async def main(): |     async def main(): | ||||||
|         with trio.fail_after(3): |         with trio.fail_after(3): | ||||||
|             async with tractor.open_nursery() as an: |             async with tractor.open_nursery() as n: | ||||||
|                 portal = await an.start_actor( |                 portal = await n.start_actor( | ||||||
|                     'ipc_breaker', |                     'ipc_breaker', | ||||||
|                     enable_modules=[__name__], |                     enable_modules=[__name__], | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|  | @ -307,13 +307,6 @@ async def inf_streamer( | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         ctx.open_stream() as stream, |         ctx.open_stream() as stream, | ||||||
| 
 |  | ||||||
|         # XXX TODO, INTERESTING CASE!! |  | ||||||
|         # - if we don't collapse the eg then the embedded |  | ||||||
|         # `trio.EndOfChannel` doesn't propagate directly to the above |  | ||||||
|         # .open_stream() parent, resulting in it also raising instead |  | ||||||
|         # of gracefully absorbing as normal.. so how to handle? |  | ||||||
|         tractor.trionics.collapse_eg(), |  | ||||||
|         trio.open_nursery() as tn, |         trio.open_nursery() as tn, | ||||||
|     ): |     ): | ||||||
|         async def close_stream_on_sentinel(): |         async def close_stream_on_sentinel(): | ||||||
|  |  | ||||||
|  | @ -14,7 +14,7 @@ import tractor | ||||||
| from tractor._testing import ( | from tractor._testing import ( | ||||||
|     tractor_test, |     tractor_test, | ||||||
| ) | ) | ||||||
| from .conftest import no_windows | from conftest import no_windows | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def is_win(): | def is_win(): | ||||||
|  | @ -77,7 +77,7 @@ def test_remote_error(reg_addr, args_err): | ||||||
|                 # of this actor nursery. |                 # of this actor nursery. | ||||||
|                 await portal.result() |                 await portal.result() | ||||||
|             except tractor.RemoteActorError as err: |             except tractor.RemoteActorError as err: | ||||||
|                 assert err.boxed_type == errtype |                 assert err.type == errtype | ||||||
|                 print("Look Maa that actor failed hard, hehh") |                 print("Look Maa that actor failed hard, hehh") | ||||||
|                 raise |                 raise | ||||||
| 
 | 
 | ||||||
|  | @ -86,33 +86,20 @@ def test_remote_error(reg_addr, args_err): | ||||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: |         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
| 
 | 
 | ||||||
|         assert excinfo.value.boxed_type == errtype |         assert excinfo.value.type == errtype | ||||||
| 
 | 
 | ||||||
|     else: |     else: | ||||||
|         # the root task will also error on the `Portal.result()` |         # the root task will also error on the `.result()` call | ||||||
|         # call so we expect an error from there AND the child. |         # so we expect an error from there AND the child. | ||||||
|         # |_ tho seems like on new `trio` this doesn't always |         with pytest.raises(BaseExceptionGroup) as excinfo: | ||||||
|         #    happen? |  | ||||||
|         with pytest.raises(( |  | ||||||
|             BaseExceptionGroup, |  | ||||||
|             tractor.RemoteActorError, |  | ||||||
|         )) as excinfo: |  | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
| 
 | 
 | ||||||
|         # ensure boxed errors are `errtype` |         # ensure boxed errors | ||||||
|         err: BaseException = excinfo.value |         for exc in excinfo.value.exceptions: | ||||||
|         if isinstance(err, BaseExceptionGroup): |             assert exc.type == errtype | ||||||
|             suberrs: list[BaseException] = err.exceptions |  | ||||||
|         else: |  | ||||||
|             suberrs: list[BaseException] = [err] |  | ||||||
| 
 |  | ||||||
|         for exc in suberrs: |  | ||||||
|             assert exc.boxed_type == errtype |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_multierror( | def test_multierror(reg_addr): | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
| ): |  | ||||||
|     ''' |     ''' | ||||||
|     Verify we raise a ``BaseExceptionGroup`` out of a nursery where |     Verify we raise a ``BaseExceptionGroup`` out of a nursery where | ||||||
|     more then one actor errors. |     more then one actor errors. | ||||||
|  | @ -130,7 +117,7 @@ def test_multierror( | ||||||
|             try: |             try: | ||||||
|                 await portal2.result() |                 await portal2.result() | ||||||
|             except tractor.RemoteActorError as err: |             except tractor.RemoteActorError as err: | ||||||
|                 assert err.boxed_type is AssertionError |                 assert err.type == AssertionError | ||||||
|                 print("Look Maa that first actor failed hard, hehh") |                 print("Look Maa that first actor failed hard, hehh") | ||||||
|                 raise |                 raise | ||||||
| 
 | 
 | ||||||
|  | @ -182,7 +169,7 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | ||||||
| 
 | 
 | ||||||
|     for exc in exceptions: |     for exc in exceptions: | ||||||
|         assert isinstance(exc, tractor.RemoteActorError) |         assert isinstance(exc, tractor.RemoteActorError) | ||||||
|         assert exc.boxed_type is AssertionError |         assert exc.type == AssertionError | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def do_nothing(): | async def do_nothing(): | ||||||
|  | @ -236,10 +223,7 @@ async def stream_forever(): | ||||||
| async def test_cancel_infinite_streamer(start_method): | async def test_cancel_infinite_streamer(start_method): | ||||||
| 
 | 
 | ||||||
|     # stream for at most 1 seconds |     # stream for at most 1 seconds | ||||||
|     with ( |     with trio.move_on_after(1) as cancel_scope: | ||||||
|         trio.fail_after(4), |  | ||||||
|         trio.move_on_after(1) as cancel_scope |  | ||||||
|     ): |  | ||||||
|         async with tractor.open_nursery() as n: |         async with tractor.open_nursery() as n: | ||||||
|             portal = await n.start_actor( |             portal = await n.start_actor( | ||||||
|                 'donny', |                 'donny', | ||||||
|  | @ -287,32 +271,20 @@ async def test_cancel_infinite_streamer(start_method): | ||||||
|     ], |     ], | ||||||
| ) | ) | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_some_cancels_all( | async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel): | ||||||
|     num_actors_and_errs: tuple, |     """Verify a subset of failed subactors causes all others in | ||||||
|     start_method: str, |  | ||||||
|     loglevel: str, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify a subset of failed subactors causes all others in |  | ||||||
|     the nursery to be cancelled just like the strategy in trio. |     the nursery to be cancelled just like the strategy in trio. | ||||||
| 
 | 
 | ||||||
|     This is the first and only supervisory strategy at the moment. |     This is the first and only supervisory strategy at the moment. | ||||||
| 
 |     """ | ||||||
|     ''' |     num_actors, first_err, err_type, ria_func, da_func = num_actors_and_errs | ||||||
|     ( |  | ||||||
|         num_actors, |  | ||||||
|         first_err, |  | ||||||
|         err_type, |  | ||||||
|         ria_func, |  | ||||||
|         da_func, |  | ||||||
|     ) = num_actors_and_errs |  | ||||||
|     try: |     try: | ||||||
|         async with tractor.open_nursery() as an: |         async with tractor.open_nursery() as n: | ||||||
| 
 | 
 | ||||||
|             # spawn the same number of deamon actors which should be cancelled |             # spawn the same number of deamon actors which should be cancelled | ||||||
|             dactor_portals = [] |             dactor_portals = [] | ||||||
|             for i in range(num_actors): |             for i in range(num_actors): | ||||||
|                 dactor_portals.append(await an.start_actor( |                 dactor_portals.append(await n.start_actor( | ||||||
|                     f'deamon_{i}', |                     f'deamon_{i}', | ||||||
|                     enable_modules=[__name__], |                     enable_modules=[__name__], | ||||||
|                 )) |                 )) | ||||||
|  | @ -322,7 +294,7 @@ async def test_some_cancels_all( | ||||||
|             for i in range(num_actors): |             for i in range(num_actors): | ||||||
|                 # start actor(s) that will fail immediately |                 # start actor(s) that will fail immediately | ||||||
|                 riactor_portals.append( |                 riactor_portals.append( | ||||||
|                     await an.run_in_actor( |                     await n.run_in_actor( | ||||||
|                         func, |                         func, | ||||||
|                         name=f'actor_{i}', |                         name=f'actor_{i}', | ||||||
|                         **kwargs |                         **kwargs | ||||||
|  | @ -338,7 +310,7 @@ async def test_some_cancels_all( | ||||||
|                         await portal.run(func, **kwargs) |                         await portal.run(func, **kwargs) | ||||||
| 
 | 
 | ||||||
|                     except tractor.RemoteActorError as err: |                     except tractor.RemoteActorError as err: | ||||||
|                         assert err.boxed_type == err_type |                         assert err.type == err_type | ||||||
|                         # we only expect this first error to propogate |                         # we only expect this first error to propogate | ||||||
|                         # (all other daemons are cancelled before they |                         # (all other daemons are cancelled before they | ||||||
|                         # can be scheduled) |                         # can be scheduled) | ||||||
|  | @ -352,20 +324,19 @@ async def test_some_cancels_all( | ||||||
| 
 | 
 | ||||||
|         # should error here with a ``RemoteActorError`` or ``MultiError`` |         # should error here with a ``RemoteActorError`` or ``MultiError`` | ||||||
| 
 | 
 | ||||||
|     except first_err as _err: |     except first_err as err: | ||||||
|         err = _err |  | ||||||
|         if isinstance(err, BaseExceptionGroup): |         if isinstance(err, BaseExceptionGroup): | ||||||
|             assert len(err.exceptions) == num_actors |             assert len(err.exceptions) == num_actors | ||||||
|             for exc in err.exceptions: |             for exc in err.exceptions: | ||||||
|                 if isinstance(exc, tractor.RemoteActorError): |                 if isinstance(exc, tractor.RemoteActorError): | ||||||
|                     assert exc.boxed_type == err_type |                     assert exc.type == err_type | ||||||
|                 else: |                 else: | ||||||
|                     assert isinstance(exc, trio.Cancelled) |                     assert isinstance(exc, trio.Cancelled) | ||||||
|         elif isinstance(err, tractor.RemoteActorError): |         elif isinstance(err, tractor.RemoteActorError): | ||||||
|             assert err.boxed_type == err_type |             assert err.type == err_type | ||||||
| 
 | 
 | ||||||
|         assert an.cancelled is True |         assert n.cancelled is True | ||||||
|         assert not an._children |         assert not n._children | ||||||
|     else: |     else: | ||||||
|         pytest.fail("Should have gotten a remote assertion error?") |         pytest.fail("Should have gotten a remote assertion error?") | ||||||
| 
 | 
 | ||||||
|  | @ -441,7 +412,7 @@ async def test_nested_multierrors(loglevel, start_method): | ||||||
|                     elif isinstance(subexc, tractor.RemoteActorError): |                     elif isinstance(subexc, tractor.RemoteActorError): | ||||||
|                         # on windows it seems we can't exactly be sure wtf |                         # on windows it seems we can't exactly be sure wtf | ||||||
|                         # will happen.. |                         # will happen.. | ||||||
|                         assert subexc.boxed_type in ( |                         assert subexc.type in ( | ||||||
|                             tractor.RemoteActorError, |                             tractor.RemoteActorError, | ||||||
|                             trio.Cancelled, |                             trio.Cancelled, | ||||||
|                             BaseExceptionGroup, |                             BaseExceptionGroup, | ||||||
|  | @ -451,7 +422,7 @@ async def test_nested_multierrors(loglevel, start_method): | ||||||
|                         for subsub in subexc.exceptions: |                         for subsub in subexc.exceptions: | ||||||
| 
 | 
 | ||||||
|                             if subsub in (tractor.RemoteActorError,): |                             if subsub in (tractor.RemoteActorError,): | ||||||
|                                 subsub = subsub.boxed_type |                                 subsub = subsub.type | ||||||
| 
 | 
 | ||||||
|                             assert type(subsub) in ( |                             assert type(subsub) in ( | ||||||
|                                 trio.Cancelled, |                                 trio.Cancelled, | ||||||
|  | @ -466,16 +437,16 @@ async def test_nested_multierrors(loglevel, start_method): | ||||||
|                     # we get back the (sent) cancel signal instead |                     # we get back the (sent) cancel signal instead | ||||||
|                     if is_win(): |                     if is_win(): | ||||||
|                         if isinstance(subexc, tractor.RemoteActorError): |                         if isinstance(subexc, tractor.RemoteActorError): | ||||||
|                             assert subexc.boxed_type in ( |                             assert subexc.type in ( | ||||||
|                                 BaseExceptionGroup, |                                 BaseExceptionGroup, | ||||||
|                                 tractor.RemoteActorError |                                 tractor.RemoteActorError | ||||||
|                             ) |                             ) | ||||||
|                         else: |                         else: | ||||||
|                             assert isinstance(subexc, BaseExceptionGroup) |                             assert isinstance(subexc, BaseExceptionGroup) | ||||||
|                     else: |                     else: | ||||||
|                         assert subexc.boxed_type is ExceptionGroup |                         assert subexc.type is ExceptionGroup | ||||||
|                 else: |                 else: | ||||||
|                     assert subexc.boxed_type in ( |                     assert subexc.type in ( | ||||||
|                         tractor.RemoteActorError, |                         tractor.RemoteActorError, | ||||||
|                         trio.Cancelled |                         trio.Cancelled | ||||||
|                     ) |                     ) | ||||||
|  | @ -520,9 +491,7 @@ def test_cancel_via_SIGINT_other_task( | ||||||
|     if is_win():  # smh |     if is_win():  # smh | ||||||
|         timeout += 1 |         timeout += 1 | ||||||
| 
 | 
 | ||||||
|     async def spawn_and_sleep_forever( |     async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED): | ||||||
|         task_status=trio.TASK_STATUS_IGNORED |  | ||||||
|     ): |  | ||||||
|         async with tractor.open_nursery() as tn: |         async with tractor.open_nursery() as tn: | ||||||
|             for i in range(3): |             for i in range(3): | ||||||
|                 await tn.run_in_actor( |                 await tn.run_in_actor( | ||||||
|  | @ -535,15 +504,8 @@ def test_cancel_via_SIGINT_other_task( | ||||||
|     async def main(): |     async def main(): | ||||||
|         # should never timeout since SIGINT should cancel the current program |         # should never timeout since SIGINT should cancel the current program | ||||||
|         with trio.fail_after(timeout): |         with trio.fail_after(timeout): | ||||||
|             async with ( |             async with trio.open_nursery() as n: | ||||||
| 
 |                 await n.start(spawn_and_sleep_forever) | ||||||
|                 # XXX ?TODO? why no work!? |  | ||||||
|                 # tractor.trionics.collapse_eg(), |  | ||||||
|                 trio.open_nursery( |  | ||||||
|                     strict_exception_groups=False, |  | ||||||
|                 ) as tn, |  | ||||||
|             ): |  | ||||||
|                 await tn.start(spawn_and_sleep_forever) |  | ||||||
|                 if 'mp' in spawn_backend: |                 if 'mp' in spawn_backend: | ||||||
|                     time.sleep(0.1) |                     time.sleep(0.1) | ||||||
|                 os.kill(pid, signal.SIGINT) |                 os.kill(pid, signal.SIGINT) | ||||||
|  | @ -554,123 +516,38 @@ def test_cancel_via_SIGINT_other_task( | ||||||
| 
 | 
 | ||||||
| async def spin_for(period=3): | async def spin_for(period=3): | ||||||
|     "Sync sleep." |     "Sync sleep." | ||||||
|     print(f'sync sleeping in sub-sub for {period}\n') |  | ||||||
|     time.sleep(period) |     time.sleep(period) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def spawn_sub_with_sync_blocking_task(): | async def spawn(): | ||||||
|     async with tractor.open_nursery() as an: |     async with tractor.open_nursery() as tn: | ||||||
|         print('starting sync blocking subactor..\n') |         await tn.run_in_actor( | ||||||
|         await an.run_in_actor( |  | ||||||
|             spin_for, |             spin_for, | ||||||
|             name='sleeper', |             name='sleeper', | ||||||
|         ) |         ) | ||||||
|         print('exiting first subactor layer..\n') |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'man_cancel_outer', |  | ||||||
|     [ |  | ||||||
|         False,  # passes if delay != 2 |  | ||||||
| 
 |  | ||||||
|         # always causes an unexpected eg-w-embedded-assert-err? |  | ||||||
|         pytest.param(True, |  | ||||||
|              marks=pytest.mark.xfail( |  | ||||||
|                  reason=( |  | ||||||
|                     'always causes an unexpected eg-w-embedded-assert-err?' |  | ||||||
|                 ) |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| @no_windows | @no_windows | ||||||
| def test_cancel_while_childs_child_in_sync_sleep( | def test_cancel_while_childs_child_in_sync_sleep( | ||||||
|     loglevel: str, |     loglevel, | ||||||
|     start_method: str, |     start_method, | ||||||
|     spawn_backend: str, |     spawn_backend, | ||||||
|     debug_mode: bool, |  | ||||||
|     reg_addr: tuple, |  | ||||||
|     man_cancel_outer: bool, |  | ||||||
| ): | ): | ||||||
|     ''' |     """Verify that a child cancelled while executing sync code is torn | ||||||
|     Verify that a child cancelled while executing sync code is torn |  | ||||||
|     down even when that cancellation is triggered by the parent |     down even when that cancellation is triggered by the parent | ||||||
|     2 nurseries "up". |     2 nurseries "up". | ||||||
| 
 |     """ | ||||||
|     Though the grandchild should stay blocking its actor runtime, its |  | ||||||
|     parent should issue a "zombie reaper" to hard kill it after |  | ||||||
|     sufficient timeout. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if start_method == 'forkserver': |     if start_method == 'forkserver': | ||||||
|         pytest.skip("Forksever sux hard at resuming from sync sleep...") |         pytest.skip("Forksever sux hard at resuming from sync sleep...") | ||||||
| 
 | 
 | ||||||
|     async def main(): |     async def main(): | ||||||
|         # |         with trio.fail_after(2): | ||||||
|         # XXX BIG TODO NOTE XXX |             async with tractor.open_nursery() as tn: | ||||||
|         # |                 await tn.run_in_actor( | ||||||
|         # it seems there's a strange race that can happen |                     spawn, | ||||||
|         # where where the fail-after will trigger outer scope |                     name='spawn', | ||||||
|         # .cancel() which then causes the inner scope to raise, |  | ||||||
|         # |  | ||||||
|         # BaseExceptionGroup('Exceptions from Trio nursery', [ |  | ||||||
|         #   BaseExceptionGroup('Exceptions from Trio nursery', |  | ||||||
|         #   [ |  | ||||||
|         #       Cancelled(), |  | ||||||
|         #       Cancelled(), |  | ||||||
|         #   ] |  | ||||||
|         #   ), |  | ||||||
|         #   AssertionError('assert 0') |  | ||||||
|         # ]) |  | ||||||
|         # |  | ||||||
|         # WHY THIS DOESN'T MAKE SENSE: |  | ||||||
|         # --------------------------- |  | ||||||
|         # - it should raise too-slow-error when too slow.. |  | ||||||
|         #  * verified that using simple-cs and manually cancelling |  | ||||||
|         #    you get same outcome -> indicates that the fail-after |  | ||||||
|         #    can have its TooSlowError overriden! |  | ||||||
|         #  |_ to check this it's easy, simplly decrease the timeout |  | ||||||
|         #     as per the var below. |  | ||||||
|         # |  | ||||||
|         # - when using the manual simple-cs the outcome is different |  | ||||||
|         #   DESPITE the `assert 0` which means regardless of the |  | ||||||
|         #   inner scope effectively failing in the same way, the |  | ||||||
|         #   bubbling up **is NOT the same**. |  | ||||||
|         # |  | ||||||
|         # delays trigger diff outcomes.. |  | ||||||
|         # --------------------------- |  | ||||||
|         # as seen by uncommenting various lines below there is from |  | ||||||
|         # my POV an unexpected outcome due to the delay=2 case. |  | ||||||
|         # |  | ||||||
|         # delay = 1  # no AssertionError in eg, TooSlowError raised. |  | ||||||
|         # delay = 2  # is AssertionError in eg AND no TooSlowError !? |  | ||||||
|         delay = 4  # is AssertionError in eg AND no _cs cancellation. |  | ||||||
| 
 |  | ||||||
|         with trio.fail_after(delay) as _cs: |  | ||||||
|         # with trio.CancelScope() as cs: |  | ||||||
|         # ^XXX^ can be used instead to see same outcome. |  | ||||||
| 
 |  | ||||||
|             async with ( |  | ||||||
|                 # tractor.trionics.collapse_eg(),  # doesn't help |  | ||||||
|                 tractor.open_nursery( |  | ||||||
|                     hide_tb=False, |  | ||||||
|                     debug_mode=debug_mode, |  | ||||||
|                     registry_addrs=[reg_addr], |  | ||||||
|                 ) as an, |  | ||||||
|             ): |  | ||||||
|                 await an.run_in_actor( |  | ||||||
|                     spawn_sub_with_sync_blocking_task, |  | ||||||
|                     name='sync_blocking_sub', |  | ||||||
|                 ) |                 ) | ||||||
|                 await trio.sleep(1) |                 await trio.sleep(1) | ||||||
| 
 |  | ||||||
|                 if man_cancel_outer: |  | ||||||
|                     print('Cancelling manually in root') |  | ||||||
|                     _cs.cancel() |  | ||||||
| 
 |  | ||||||
|                 # trigger exc-srced taskc down |  | ||||||
|                 # the actor tree. |  | ||||||
|                 print('RAISING IN ROOT') |  | ||||||
|                 assert 0 |                 assert 0 | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(AssertionError): |     with pytest.raises(AssertionError): | ||||||
|  | @ -720,12 +597,6 @@ def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon( | ||||||
|                     nurse.start_soon(delayed_kbi) |                     nurse.start_soon(delayed_kbi) | ||||||
| 
 | 
 | ||||||
|                     await p.run(do_nuthin) |                     await p.run(do_nuthin) | ||||||
| 
 |  | ||||||
|         # need to explicitly re-raise the lone kbi..now |  | ||||||
|         except* KeyboardInterrupt as kbi_eg: |  | ||||||
|             assert (len(excs := kbi_eg.exceptions) == 1) |  | ||||||
|             raise excs[0] |  | ||||||
| 
 |  | ||||||
|         finally: |         finally: | ||||||
|             duration = time.time() - start |             duration = time.time() - start | ||||||
|             if duration > timeout: |             if duration > timeout: | ||||||
|  |  | ||||||
|  | @ -95,8 +95,8 @@ async def trio_main( | ||||||
| 
 | 
 | ||||||
|     # stash a "service nursery" as "actor local" (aka a Python global) |     # stash a "service nursery" as "actor local" (aka a Python global) | ||||||
|     global _nursery |     global _nursery | ||||||
|     tn = _nursery |     n = _nursery | ||||||
|     assert tn |     assert n | ||||||
| 
 | 
 | ||||||
|     async def consume_stream(): |     async def consume_stream(): | ||||||
|         async with wrapper_mngr() as stream: |         async with wrapper_mngr() as stream: | ||||||
|  | @ -104,10 +104,10 @@ async def trio_main( | ||||||
|                 print(msg) |                 print(msg) | ||||||
| 
 | 
 | ||||||
|     # run 2 tasks to ensure broadcaster chan use |     # run 2 tasks to ensure broadcaster chan use | ||||||
|     tn.start_soon(consume_stream) |     n.start_soon(consume_stream) | ||||||
|     tn.start_soon(consume_stream) |     n.start_soon(consume_stream) | ||||||
| 
 | 
 | ||||||
|     tn.start_soon(trio_sleep_and_err) |     n.start_soon(trio_sleep_and_err) | ||||||
| 
 | 
 | ||||||
|     await trio.sleep_forever() |     await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|  | @ -117,11 +117,8 @@ async def open_actor_local_nursery( | ||||||
|     ctx: tractor.Context, |     ctx: tractor.Context, | ||||||
| ): | ): | ||||||
|     global _nursery |     global _nursery | ||||||
|     async with ( |     async with trio.open_nursery() as n: | ||||||
|         tractor.trionics.collapse_eg(), |         _nursery = n | ||||||
|         trio.open_nursery() as tn |  | ||||||
|     ): |  | ||||||
|         _nursery = tn |  | ||||||
|         await ctx.started() |         await ctx.started() | ||||||
|         await trio.sleep(10) |         await trio.sleep(10) | ||||||
|         # await trio.sleep(1) |         # await trio.sleep(1) | ||||||
|  | @ -135,7 +132,7 @@ async def open_actor_local_nursery( | ||||||
|         # never yields back.. aka a scenario where the |         # never yields back.. aka a scenario where the | ||||||
|         # ``tractor.context`` task IS NOT in the service n's cancel |         # ``tractor.context`` task IS NOT in the service n's cancel | ||||||
|         # scope. |         # scope. | ||||||
|         tn.cancel_scope.cancel() |         n.cancel_scope.cancel() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|  | @ -160,7 +157,7 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | ||||||
|         async with tractor.open_nursery() as n: |         async with tractor.open_nursery() as n: | ||||||
|             p = await n.start_actor( |             p = await n.start_actor( | ||||||
|                 'nursery_mngr', |                 'nursery_mngr', | ||||||
|                 infect_asyncio=asyncio_mode,  # TODO, is this enabling debug mode? |                 infect_asyncio=asyncio_mode, | ||||||
|                 enable_modules=[__name__], |                 enable_modules=[__name__], | ||||||
|             ) |             ) | ||||||
|             async with ( |             async with ( | ||||||
|  | @ -174,4 +171,4 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | ||||||
| 
 | 
 | ||||||
|     # verify boxed error |     # verify boxed error | ||||||
|     err = excinfo.value |     err = excinfo.value | ||||||
|     assert err.boxed_type is NameError |     assert isinstance(err.type(), NameError) | ||||||
|  |  | ||||||
|  | @ -13,24 +13,26 @@ MESSAGE = 'tractoring at full speed' | ||||||
| def test_empty_mngrs_input_raises() -> None: | def test_empty_mngrs_input_raises() -> None: | ||||||
| 
 | 
 | ||||||
|     async def main(): |     async def main(): | ||||||
|         with trio.fail_after(3): |         with trio.fail_after(1): | ||||||
|             async with ( |             async with ( | ||||||
|                 open_actor_cluster( |                 open_actor_cluster( | ||||||
|                     modules=[__name__], |                     modules=[__name__], | ||||||
| 
 | 
 | ||||||
|                     # NOTE: ensure we can passthrough runtime opts |                     # NOTE: ensure we can passthrough runtime opts | ||||||
|                     loglevel='cancel', |                     loglevel='info', | ||||||
|                     debug_mode=False, |                     # debug_mode=True, | ||||||
| 
 | 
 | ||||||
|                 ) as portals, |                 ) as portals, | ||||||
| 
 | 
 | ||||||
|                 gather_contexts(mngrs=()), |                 gather_contexts( | ||||||
|  |                     # NOTE: it's the use of inline-generator syntax | ||||||
|  |                     # here that causes the empty input. | ||||||
|  |                     mngrs=( | ||||||
|  |                         p.open_context(worker) for p in portals.values() | ||||||
|  |                     ), | ||||||
|  |                 ), | ||||||
|             ): |             ): | ||||||
|                 # should fail before this? |                 assert 0 | ||||||
|                 assert portals |  | ||||||
| 
 |  | ||||||
|                 # test should fail if we mk it here! |  | ||||||
|                 assert 0, 'Should have raised val-err !?' |  | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(ValueError): |     with pytest.raises(ValueError): | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
|  |  | ||||||
|  | @ -6,7 +6,6 @@ sync-opening a ``tractor.Context`` beforehand. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from itertools import count | from itertools import count | ||||||
| import math |  | ||||||
| import platform | import platform | ||||||
| from pprint import pformat | from pprint import pformat | ||||||
| from typing import ( | from typing import ( | ||||||
|  | @ -25,7 +24,6 @@ from tractor._exceptions import ( | ||||||
|     StreamOverrun, |     StreamOverrun, | ||||||
|     ContextCancelled, |     ContextCancelled, | ||||||
| ) | ) | ||||||
| from tractor._state import current_ipc_ctx |  | ||||||
| 
 | 
 | ||||||
| from tractor._testing import ( | from tractor._testing import ( | ||||||
|     tractor_test, |     tractor_test, | ||||||
|  | @ -38,9 +36,9 @@ from tractor._testing import ( | ||||||
| # - standard setup/teardown: | # - standard setup/teardown: | ||||||
| #   ``Portal.open_context()`` starts a new | #   ``Portal.open_context()`` starts a new | ||||||
| #   remote task context in another actor. The target actor's task must | #   remote task context in another actor. The target actor's task must | ||||||
| #   call ``Context.started()`` to unblock this entry on the parent side. | #   call ``Context.started()`` to unblock this entry on the caller side. | ||||||
| #   the child task executes until complete and returns a final value | #   the callee task executes until complete and returns a final value | ||||||
| #   which is delivered to the parent side and retreived via | #   which is delivered to the caller side and retreived via | ||||||
| #   ``Context.result()``. | #   ``Context.result()``. | ||||||
| 
 | 
 | ||||||
| # - cancel termination: | # - cancel termination: | ||||||
|  | @ -145,8 +143,6 @@ async def simple_setup_teardown( | ||||||
|     global _state |     global _state | ||||||
|     _state = True |     _state = True | ||||||
| 
 | 
 | ||||||
|     assert current_ipc_ctx() is ctx |  | ||||||
| 
 |  | ||||||
|     # signal to parent that we're up |     # signal to parent that we're up | ||||||
|     await ctx.started(data + 1) |     await ctx.started(data + 1) | ||||||
| 
 | 
 | ||||||
|  | @ -170,9 +166,9 @@ async def assert_state(value: bool): | ||||||
|     [False, ValueError, KeyboardInterrupt], |     [False, ValueError, KeyboardInterrupt], | ||||||
| ) | ) | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'child_blocks_forever', |     'callee_blocks_forever', | ||||||
|     [False, True], |     [False, True], | ||||||
|     ids=lambda item: f'child_blocks_forever={item}' |     ids=lambda item: f'callee_blocks_forever={item}' | ||||||
| ) | ) | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'pointlessly_open_stream', |     'pointlessly_open_stream', | ||||||
|  | @ -181,7 +177,7 @@ async def assert_state(value: bool): | ||||||
| ) | ) | ||||||
| def test_simple_context( | def test_simple_context( | ||||||
|     error_parent, |     error_parent, | ||||||
|     child_blocks_forever, |     callee_blocks_forever, | ||||||
|     pointlessly_open_stream, |     pointlessly_open_stream, | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|  | @ -204,13 +200,12 @@ def test_simple_context( | ||||||
|                         portal.open_context( |                         portal.open_context( | ||||||
|                             simple_setup_teardown, |                             simple_setup_teardown, | ||||||
|                             data=10, |                             data=10, | ||||||
|                             block_forever=child_blocks_forever, |                             block_forever=callee_blocks_forever, | ||||||
|                         ) as (ctx, sent), |                         ) as (ctx, sent), | ||||||
|                     ): |                     ): | ||||||
|                         assert current_ipc_ctx() is ctx |  | ||||||
|                         assert sent == 11 |                         assert sent == 11 | ||||||
| 
 | 
 | ||||||
|                         if child_blocks_forever: |                         if callee_blocks_forever: | ||||||
|                             await portal.run(assert_state, value=True) |                             await portal.run(assert_state, value=True) | ||||||
|                         else: |                         else: | ||||||
|                             assert await ctx.result() == 'yo' |                             assert await ctx.result() == 'yo' | ||||||
|  | @ -220,7 +215,7 @@ def test_simple_context( | ||||||
|                                 if error_parent: |                                 if error_parent: | ||||||
|                                     raise error_parent |                                     raise error_parent | ||||||
| 
 | 
 | ||||||
|                                 if child_blocks_forever: |                                 if callee_blocks_forever: | ||||||
|                                     await ctx.cancel() |                                     await ctx.cancel() | ||||||
|                                 else: |                                 else: | ||||||
|                                     # in this case the stream will send a |                                     # in this case the stream will send a | ||||||
|  | @ -250,18 +245,18 @@ def test_simple_context( | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
|         except error_parent: |         except error_parent: | ||||||
|             pass |             pass | ||||||
|         except BaseExceptionGroup as beg: |         except trio.MultiError as me: | ||||||
|             # XXX: on windows it seems we may have to expect the group error |             # XXX: on windows it seems we may have to expect the group error | ||||||
|             from tractor.trionics import is_multi_cancelled |             from tractor._exceptions import is_multi_cancelled | ||||||
|             assert is_multi_cancelled(beg) |             assert is_multi_cancelled(me) | ||||||
|     else: |     else: | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'child_returns_early', |     'callee_returns_early', | ||||||
|     [True, False], |     [True, False], | ||||||
|     ids=lambda item: f'child_returns_early={item}' |     ids=lambda item: f'callee_returns_early={item}' | ||||||
| ) | ) | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'cancel_method', |     'cancel_method', | ||||||
|  | @ -273,14 +268,14 @@ def test_simple_context( | ||||||
|     [True, False], |     [True, False], | ||||||
|     ids=lambda item: f'chk_ctx_result_before_exit={item}' |     ids=lambda item: f'chk_ctx_result_before_exit={item}' | ||||||
| ) | ) | ||||||
| def test_parent_cancels( | def test_caller_cancels( | ||||||
|     cancel_method: str, |     cancel_method: str, | ||||||
|     chk_ctx_result_before_exit: bool, |     chk_ctx_result_before_exit: bool, | ||||||
|     child_returns_early: bool, |     callee_returns_early: bool, | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Verify that when the opening side of a context (aka the parent) |     Verify that when the opening side of a context (aka the caller) | ||||||
|     cancels that context, the ctx does not raise a cancelled when |     cancels that context, the ctx does not raise a cancelled when | ||||||
|     either calling `.result()` or on context exit. |     either calling `.result()` or on context exit. | ||||||
| 
 | 
 | ||||||
|  | @ -294,7 +289,7 @@ def test_parent_cancels( | ||||||
| 
 | 
 | ||||||
|         if ( |         if ( | ||||||
|             cancel_method == 'portal' |             cancel_method == 'portal' | ||||||
|             and not child_returns_early |             and not callee_returns_early | ||||||
|         ): |         ): | ||||||
|             try: |             try: | ||||||
|                 res = await ctx.result() |                 res = await ctx.result() | ||||||
|  | @ -318,7 +313,7 @@ def test_parent_cancels( | ||||||
|                 pytest.fail(f'should not have raised ctxc\n{ctxc}') |                 pytest.fail(f'should not have raised ctxc\n{ctxc}') | ||||||
| 
 | 
 | ||||||
|         # we actually get a result |         # we actually get a result | ||||||
|         if child_returns_early: |         if callee_returns_early: | ||||||
|             assert res == 'yo' |             assert res == 'yo' | ||||||
|             assert ctx.outcome is res |             assert ctx.outcome is res | ||||||
|             assert ctx.maybe_error is None |             assert ctx.maybe_error is None | ||||||
|  | @ -362,14 +357,14 @@ def test_parent_cancels( | ||||||
|             ) |             ) | ||||||
|             timeout: float = ( |             timeout: float = ( | ||||||
|                 0.5 |                 0.5 | ||||||
|                 if not child_returns_early |                 if not callee_returns_early | ||||||
|                 else 2 |                 else 2 | ||||||
|             ) |             ) | ||||||
|             with trio.fail_after(timeout): |             with trio.fail_after(timeout): | ||||||
|                 async with ( |                 async with ( | ||||||
|                     expect_ctxc( |                     expect_ctxc( | ||||||
|                         yay=( |                         yay=( | ||||||
|                             not child_returns_early |                             not callee_returns_early | ||||||
|                             and cancel_method == 'portal' |                             and cancel_method == 'portal' | ||||||
|                         ) |                         ) | ||||||
|                     ), |                     ), | ||||||
|  | @ -377,13 +372,13 @@ def test_parent_cancels( | ||||||
|                     portal.open_context( |                     portal.open_context( | ||||||
|                         simple_setup_teardown, |                         simple_setup_teardown, | ||||||
|                         data=10, |                         data=10, | ||||||
|                         block_forever=not child_returns_early, |                         block_forever=not callee_returns_early, | ||||||
|                     ) as (ctx, sent), |                     ) as (ctx, sent), | ||||||
|                 ): |                 ): | ||||||
| 
 | 
 | ||||||
|                     if child_returns_early: |                     if callee_returns_early: | ||||||
|                         # ensure we block long enough before sending |                         # ensure we block long enough before sending | ||||||
|                         # a cancel such that the child has already |                         # a cancel such that the callee has already | ||||||
|                         # returned it's result. |                         # returned it's result. | ||||||
|                         await trio.sleep(0.5) |                         await trio.sleep(0.5) | ||||||
| 
 | 
 | ||||||
|  | @ -421,7 +416,7 @@ def test_parent_cancels( | ||||||
|             #   which should in turn cause `ctx._scope` to |             #   which should in turn cause `ctx._scope` to | ||||||
|             # catch any cancellation? |             # catch any cancellation? | ||||||
|             if ( |             if ( | ||||||
|                 not child_returns_early |                 not callee_returns_early | ||||||
|                 and cancel_method != 'portal' |                 and cancel_method != 'portal' | ||||||
|             ): |             ): | ||||||
|                 assert not ctx._scope.cancelled_caught |                 assert not ctx._scope.cancelled_caught | ||||||
|  | @ -430,11 +425,11 @@ def test_parent_cancels( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # basic stream terminations: | # basic stream terminations: | ||||||
| # - child context closes without using stream | # - callee context closes without using stream | ||||||
| # - parent context closes without using stream | # - caller context closes without using stream | ||||||
| # - parent context calls `Context.cancel()` while streaming | # - caller context calls `Context.cancel()` while streaming | ||||||
| #   is ongoing resulting in child being cancelled | #   is ongoing resulting in callee being cancelled | ||||||
| # - child calls `Context.cancel()` while streaming and parent | # - callee calls `Context.cancel()` while streaming and caller | ||||||
| #   sees stream terminated in `RemoteActorError` | #   sees stream terminated in `RemoteActorError` | ||||||
| 
 | 
 | ||||||
| # TODO: future possible features | # TODO: future possible features | ||||||
|  | @ -443,6 +438,7 @@ def test_parent_cancels( | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def close_ctx_immediately( | async def close_ctx_immediately( | ||||||
|  | 
 | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  | @ -453,24 +449,13 @@ async def close_ctx_immediately( | ||||||
|     async with ctx.open_stream(): |     async with ctx.open_stream(): | ||||||
|         pass |         pass | ||||||
| 
 | 
 | ||||||
|     print('child returning!') |  | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'parent_send_before_receive', |  | ||||||
|     [ |  | ||||||
|         False, |  | ||||||
|         True, |  | ||||||
|     ], |  | ||||||
|     ids=lambda item: f'child_send_before_receive={item}' |  | ||||||
| ) |  | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_child_exits_ctx_after_stream_open( | async def test_callee_closes_ctx_after_stream_open( | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
|     parent_send_before_receive: bool, |  | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     child context closes without using stream. |     callee context closes without using stream. | ||||||
| 
 | 
 | ||||||
|     This should result in a msg sequence |     This should result in a msg sequence | ||||||
|     |_<root>_ |     |_<root>_ | ||||||
|  | @ -484,9 +469,6 @@ async def test_child_exits_ctx_after_stream_open( | ||||||
|     => {'stop': True, 'cid': <str>} |     => {'stop': True, 'cid': <str>} | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     timeout: float = ( |  | ||||||
|         0.5 if not debug_mode else 999 |  | ||||||
|     ) |  | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=debug_mode, |         debug_mode=debug_mode, | ||||||
|     ) as an: |     ) as an: | ||||||
|  | @ -495,7 +477,7 @@ async def test_child_exits_ctx_after_stream_open( | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         with trio.fail_after(timeout): |         with trio.fail_after(0.5): | ||||||
|             async with portal.open_context( |             async with portal.open_context( | ||||||
|                 close_ctx_immediately, |                 close_ctx_immediately, | ||||||
| 
 | 
 | ||||||
|  | @ -507,56 +489,41 @@ async def test_child_exits_ctx_after_stream_open( | ||||||
| 
 | 
 | ||||||
|                 with trio.fail_after(0.4): |                 with trio.fail_after(0.4): | ||||||
|                     async with ctx.open_stream() as stream: |                     async with ctx.open_stream() as stream: | ||||||
|                         if parent_send_before_receive: |  | ||||||
|                             print('sending first msg from parent!') |  | ||||||
|                             await stream.send('yo') |  | ||||||
| 
 | 
 | ||||||
|                         # should fall through since ``StopAsyncIteration`` |                         # should fall through since ``StopAsyncIteration`` | ||||||
|                         # should be raised through translation of |                         # should be raised through translation of | ||||||
|                         # a ``trio.EndOfChannel`` by |                         # a ``trio.EndOfChannel`` by | ||||||
|                         # ``trio.abc.ReceiveChannel.__anext__()`` |                         # ``trio.abc.ReceiveChannel.__anext__()`` | ||||||
|                         msg = 10 |                         async for _ in stream: | ||||||
|                         async for msg in stream: |  | ||||||
|                             # trigger failure if we DO NOT |                             # trigger failure if we DO NOT | ||||||
|                             # get an EOC! |                             # get an EOC! | ||||||
|                             assert 0 |                             assert 0 | ||||||
|                         else: |                         else: | ||||||
|                             # never should get anythinig new from |  | ||||||
|                             # the underlying stream |  | ||||||
|                             assert msg == 10 |  | ||||||
| 
 | 
 | ||||||
|                             # verify stream is now closed |                             # verify stream is now closed | ||||||
|                             try: |                             try: | ||||||
|                                 with trio.fail_after(0.3): |                                 with trio.fail_after(0.3): | ||||||
|                                     print('parent trying to `.receive()` on EoC stream!') |  | ||||||
|                                     await stream.receive() |                                     await stream.receive() | ||||||
|                                     assert 0, 'should have raised eoc!?' |  | ||||||
|                             except trio.EndOfChannel: |                             except trio.EndOfChannel: | ||||||
|                                 print('parent got EoC as expected!') |  | ||||||
|                                 pass |                                 pass | ||||||
|                                 # raise |  | ||||||
| 
 | 
 | ||||||
|                 # TODO: should be just raise the closed resource err |                 # TODO: should be just raise the closed resource err | ||||||
|                 # directly here to enforce not allowing a re-open |                 # directly here to enforce not allowing a re-open | ||||||
|                 # of a stream to the context (at least until a time of |                 # of a stream to the context (at least until a time of | ||||||
|                 # if/when we decide that's a good idea?) |                 # if/when we decide that's a good idea?) | ||||||
|                 try: |                 try: | ||||||
|                     with trio.fail_after(timeout): |                     with trio.fail_after(0.5): | ||||||
|                         async with ctx.open_stream() as stream: |                         async with ctx.open_stream() as stream: | ||||||
|                             pass |                             pass | ||||||
|                 except trio.ClosedResourceError: |                 except trio.ClosedResourceError: | ||||||
|                     pass |                     pass | ||||||
| 
 | 
 | ||||||
|                 # if ctx._rx_chan._state.data: |  | ||||||
|                 #     await tractor.pause() |  | ||||||
| 
 |  | ||||||
|         await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def expect_cancelled( | async def expect_cancelled( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|     send_before_receive: bool = False, |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     global _state |     global _state | ||||||
|  | @ -566,10 +533,6 @@ async def expect_cancelled( | ||||||
| 
 | 
 | ||||||
|     try: |     try: | ||||||
|         async with ctx.open_stream() as stream: |         async with ctx.open_stream() as stream: | ||||||
| 
 |  | ||||||
|             if send_before_receive: |  | ||||||
|                 await stream.send('yo') |  | ||||||
| 
 |  | ||||||
|             async for msg in stream: |             async for msg in stream: | ||||||
|                 await stream.send(msg)  # echo server |                 await stream.send(msg)  # echo server | ||||||
| 
 | 
 | ||||||
|  | @ -596,49 +559,26 @@ async def expect_cancelled( | ||||||
|         raise |         raise | ||||||
| 
 | 
 | ||||||
|     else: |     else: | ||||||
|         assert 0, "child wasn't cancelled !?" |         assert 0, "callee wasn't cancelled !?" | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'child_send_before_receive', |  | ||||||
|     [ |  | ||||||
|         False, |  | ||||||
|         True, |  | ||||||
|     ], |  | ||||||
|     ids=lambda item: f'child_send_before_receive={item}' |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'rent_wait_for_msg', |  | ||||||
|     [ |  | ||||||
|         False, |  | ||||||
|         True, |  | ||||||
|     ], |  | ||||||
|     ids=lambda item: f'rent_wait_for_msg={item}' |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'use_ctx_cancel_method', |     'use_ctx_cancel_method', | ||||||
|     [ |     [False, True], | ||||||
|         False, |  | ||||||
|         'pre_stream', |  | ||||||
|         'post_stream_open', |  | ||||||
|         'post_stream_close', |  | ||||||
|     ], |  | ||||||
|     ids=lambda item: f'use_ctx_cancel_method={item}' |  | ||||||
| ) | ) | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_parent_exits_ctx_after_child_enters_stream( | async def test_caller_closes_ctx_after_callee_opens_stream( | ||||||
|     use_ctx_cancel_method: bool|str, |     use_ctx_cancel_method: bool, | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
|     rent_wait_for_msg: bool, |  | ||||||
|     child_send_before_receive: bool, |  | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Parent-side of IPC context closes without sending on `MsgStream`. |     caller context closes without using/opening stream | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=debug_mode, |         debug_mode=debug_mode, | ||||||
|     ) as an: |     ) as an: | ||||||
|  | 
 | ||||||
|         root: Actor = current_actor() |         root: Actor = current_actor() | ||||||
|         portal = await an.start_actor( |         portal = await an.start_actor( | ||||||
|             'ctx_cancelled', |             'ctx_cancelled', | ||||||
|  | @ -647,52 +587,41 @@ async def test_parent_exits_ctx_after_child_enters_stream( | ||||||
| 
 | 
 | ||||||
|         async with portal.open_context( |         async with portal.open_context( | ||||||
|             expect_cancelled, |             expect_cancelled, | ||||||
|             send_before_receive=child_send_before_receive, |  | ||||||
|         ) as (ctx, sent): |         ) as (ctx, sent): | ||||||
|             assert sent is None |             assert sent is None | ||||||
| 
 | 
 | ||||||
|             await portal.run(assert_state, value=True) |             await portal.run(assert_state, value=True) | ||||||
| 
 | 
 | ||||||
|             # call `ctx.cancel()` explicitly |             # call `ctx.cancel()` explicitly | ||||||
|             if use_ctx_cancel_method == 'pre_stream': |             if use_ctx_cancel_method: | ||||||
|                 await ctx.cancel() |                 await ctx.cancel() | ||||||
| 
 | 
 | ||||||
|                 # NOTE: means the local side `ctx._scope` will |                 # NOTE: means the local side `ctx._scope` will | ||||||
|                 # have been cancelled by an ctxc ack and thus |                 # have been cancelled by an ctxc ack and thus | ||||||
|                 # `._scope.cancelled_caught` should be set. |                 # `._scope.cancelled_caught` should be set. | ||||||
|                 async with ( |                 try: | ||||||
|                     expect_ctxc( |  | ||||||
|                         # XXX: the cause is US since we call |  | ||||||
|                         # `Context.cancel()` just above! |  | ||||||
|                         yay=True, |  | ||||||
| 
 |  | ||||||
|                         # XXX: must be propagated to __aexit__ |  | ||||||
|                         # and should be silently absorbed there |  | ||||||
|                         # since we called `.cancel()` just above ;) |  | ||||||
|                         reraise=True, |  | ||||||
|                     ) as maybe_ctxc, |  | ||||||
|                 ): |  | ||||||
|                     async with ctx.open_stream() as stream: |                     async with ctx.open_stream() as stream: | ||||||
|  |                         async for msg in stream: | ||||||
|  |                             pass | ||||||
| 
 | 
 | ||||||
|                         if rent_wait_for_msg: |                 except tractor.ContextCancelled as ctxc: | ||||||
|                             async for msg in stream: |                     # XXX: the cause is US since we call | ||||||
|                                 print(f'PARENT rx: {msg!r}\n') |                     # `Context.cancel()` just above! | ||||||
|                                 break |                     assert ( | ||||||
|  |                         ctxc.canceller | ||||||
|  |                         == | ||||||
|  |                         current_actor().uid | ||||||
|  |                         == | ||||||
|  |                         root.uid | ||||||
|  |                     ) | ||||||
| 
 | 
 | ||||||
|                         if use_ctx_cancel_method == 'post_stream_open': |                     # XXX: must be propagated to __aexit__ | ||||||
|                             await ctx.cancel() |                     # and should be silently absorbed there | ||||||
|  |                     # since we called `.cancel()` just above ;) | ||||||
|  |                     raise | ||||||
| 
 | 
 | ||||||
|                     if use_ctx_cancel_method == 'post_stream_close': |                 else: | ||||||
|                         await ctx.cancel() |                     assert 0, "Should have context cancelled?" | ||||||
| 
 |  | ||||||
|                 ctxc: tractor.ContextCancelled = maybe_ctxc.value |  | ||||||
|                 assert ( |  | ||||||
|                     ctxc.canceller |  | ||||||
|                     == |  | ||||||
|                     current_actor().uid |  | ||||||
|                     == |  | ||||||
|                     root.uid |  | ||||||
|                 ) |  | ||||||
| 
 | 
 | ||||||
|                 # channel should still be up |                 # channel should still be up | ||||||
|                 assert portal.channel.connected() |                 assert portal.channel.connected() | ||||||
|  | @ -703,20 +632,13 @@ async def test_parent_exits_ctx_after_child_enters_stream( | ||||||
|                     value=False, |                     value=False, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|             # XXX CHILD-BLOCKS case, we SHOULD NOT exit from the |  | ||||||
|             # `.open_context()` before the child has returned, |  | ||||||
|             # errored or been cancelled! |  | ||||||
|             else: |             else: | ||||||
|                 try: |                 try: | ||||||
|                     with trio.fail_after( |                     with trio.fail_after(0.2): | ||||||
|                         0.5  # if not debug_mode else 999 |                         await ctx.result() | ||||||
|                     ): |  | ||||||
|                         res = await ctx.wait_for_result() |  | ||||||
|                         assert res is not tractor._context.Unresolved |  | ||||||
|                         assert 0, "Callee should have blocked!?" |                         assert 0, "Callee should have blocked!?" | ||||||
|                 except trio.TooSlowError: |                 except trio.TooSlowError: | ||||||
|                     # NO-OP -> since already triggered by |                     # NO-OP -> since already called above | ||||||
|                     # `trio.fail_after()` above! |  | ||||||
|                     await ctx.cancel() |                     await ctx.cancel() | ||||||
| 
 | 
 | ||||||
|         # NOTE: local scope should have absorbed the cancellation since |         # NOTE: local scope should have absorbed the cancellation since | ||||||
|  | @ -756,7 +678,7 @@ async def test_parent_exits_ctx_after_child_enters_stream( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_multitask_parent_cancels_from_nonroot_task( | async def test_multitask_caller_cancels_from_nonroot_task( | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|  | @ -808,6 +730,7 @@ async def test_multitask_parent_cancels_from_nonroot_task( | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def cancel_self( | async def cancel_self( | ||||||
|  | 
 | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  | @ -847,11 +770,11 @@ async def cancel_self( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_child_cancels_before_started( | async def test_callee_cancels_before_started( | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Callee calls `Context.cancel()` while streaming and parent |     Callee calls `Context.cancel()` while streaming and caller | ||||||
|     sees stream terminated in `ContextCancelled`. |     sees stream terminated in `ContextCancelled`. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  | @ -872,12 +795,10 @@ async def test_child_cancels_before_started( | ||||||
| 
 | 
 | ||||||
|         # raises a special cancel signal |         # raises a special cancel signal | ||||||
|         except tractor.ContextCancelled as ce: |         except tractor.ContextCancelled as ce: | ||||||
|             _ce = ce  # for debug on crash |             ce.type == trio.Cancelled | ||||||
|             ce.boxed_type == trio.Cancelled |  | ||||||
| 
 | 
 | ||||||
|             # the traceback should be informative |             # the traceback should be informative | ||||||
|             assert 'itself' in ce.tb_str |             assert 'itself' in ce.msgdata['tb_str'] | ||||||
|             assert ce.tb_str == ce.msgdata['tb_str'] |  | ||||||
| 
 | 
 | ||||||
|         # teardown the actor |         # teardown the actor | ||||||
|         await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
|  | @ -898,13 +819,14 @@ async def never_open_stream( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def keep_sending_from_child( | async def keep_sending_from_callee( | ||||||
|  | 
 | ||||||
|     ctx:  Context, |     ctx:  Context, | ||||||
|     msg_buffer_size: int|None = None, |     msg_buffer_size: int|None = None, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     Send endlessly on the child stream. |     Send endlessly on the calleee stream. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     await ctx.started() |     await ctx.started() | ||||||
|  | @ -912,7 +834,7 @@ async def keep_sending_from_child( | ||||||
|         msg_buffer_size=msg_buffer_size, |         msg_buffer_size=msg_buffer_size, | ||||||
|     ) as stream: |     ) as stream: | ||||||
|         for msg in count(): |         for msg in count(): | ||||||
|             print(f'child sending {msg}') |             print(f'callee sending {msg}') | ||||||
|             await stream.send(msg) |             await stream.send(msg) | ||||||
|             await trio.sleep(0.01) |             await trio.sleep(0.01) | ||||||
| 
 | 
 | ||||||
|  | @ -920,13 +842,10 @@ async def keep_sending_from_child( | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'overrun_by', |     'overrun_by', | ||||||
|     [ |     [ | ||||||
|         ('parent', 1, never_open_stream), |         ('caller', 1, never_open_stream), | ||||||
|         ('child', 0, keep_sending_from_child), |         ('callee', 0, keep_sending_from_callee), | ||||||
|     ], |     ], | ||||||
|     ids=[ |     ids='overrun_condition={}'.format, | ||||||
|          ('parent_1buf_never_open_stream'), |  | ||||||
|          ('child_0buf_keep_sending_from_child'), |  | ||||||
|     ] |  | ||||||
| ) | ) | ||||||
| def test_one_end_stream_not_opened( | def test_one_end_stream_not_opened( | ||||||
|     overrun_by: tuple[str, int, Callable], |     overrun_by: tuple[str, int, Callable], | ||||||
|  | @ -950,50 +869,50 @@ def test_one_end_stream_not_opened( | ||||||
|                 enable_modules=[__name__], |                 enable_modules=[__name__], | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             with trio.fail_after(1): |             async with portal.open_context( | ||||||
|                 async with portal.open_context( |                 entrypoint, | ||||||
|                     entrypoint, |             ) as (ctx, sent): | ||||||
|                 ) as (ctx, sent): |                 assert sent is None | ||||||
|                     assert sent is None |  | ||||||
| 
 | 
 | ||||||
|                     if 'parent' in overrunner: |                 if 'caller' in overrunner: | ||||||
|                         async with ctx.open_stream() as stream: |  | ||||||
| 
 | 
 | ||||||
|                             # itersend +1 msg more then the buffer size |                     async with ctx.open_stream() as stream: | ||||||
|                             # to cause the most basic overrun. |  | ||||||
|                             for i in range(buf_size): |  | ||||||
|                                 print(f'sending {i}') |  | ||||||
|                                 await stream.send(i) |  | ||||||
| 
 | 
 | ||||||
|                             else: |                         # itersend +1 msg more then the buffer size | ||||||
|                                 # expect overrun error to be relayed back |                         # to cause the most basic overrun. | ||||||
|                                 # and this sleep interrupted |                         for i in range(buf_size): | ||||||
|                                 await trio.sleep_forever() |                             print(f'sending {i}') | ||||||
|  |                             await stream.send(i) | ||||||
| 
 | 
 | ||||||
|                     else: |                         else: | ||||||
|                         # child overruns parent case so we do nothing here |                             # expect overrun error to be relayed back | ||||||
|                         await trio.sleep_forever() |                             # and this sleep interrupted | ||||||
|  |                             await trio.sleep_forever() | ||||||
|  | 
 | ||||||
|  |                 else: | ||||||
|  |                     # callee overruns caller case so we do nothing here | ||||||
|  |                     await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|             await portal.cancel_actor() |             await portal.cancel_actor() | ||||||
| 
 | 
 | ||||||
|     # 2 overrun cases and the no overrun case (which pushes right up to |     # 2 overrun cases and the no overrun case (which pushes right up to | ||||||
|     # the msg limit) |     # the msg limit) | ||||||
|     if ( |     if ( | ||||||
|         overrunner == 'parent' |         overrunner == 'caller' | ||||||
|     ): |     ): | ||||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: |         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
| 
 | 
 | ||||||
|         assert excinfo.value.boxed_type == StreamOverrun |         assert excinfo.value.type == StreamOverrun | ||||||
| 
 | 
 | ||||||
|     elif overrunner == 'child': |     elif overrunner == 'callee': | ||||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: |         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
| 
 | 
 | ||||||
|         # TODO: embedded remote errors so that we can verify the source |         # TODO: embedded remote errors so that we can verify the source | ||||||
|         # error? the child delivers an error which is an overrun |         # error? the callee delivers an error which is an overrun | ||||||
|         # wrapped in a remote actor error. |         # wrapped in a remote actor error. | ||||||
|         assert excinfo.value.boxed_type == tractor.RemoteActorError |         assert excinfo.value.type == tractor.RemoteActorError | ||||||
| 
 | 
 | ||||||
|     else: |     else: | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
|  | @ -1001,7 +920,8 @@ def test_one_end_stream_not_opened( | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def echo_back_sequence( | async def echo_back_sequence( | ||||||
|     ctx: Context, | 
 | ||||||
|  |     ctx:  Context, | ||||||
|     seq: list[int], |     seq: list[int], | ||||||
|     wait_for_cancel: bool, |     wait_for_cancel: bool, | ||||||
|     allow_overruns_side: str, |     allow_overruns_side: str, | ||||||
|  | @ -1010,12 +930,12 @@ async def echo_back_sequence( | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     Send endlessly on the child stream using a small buffer size |     Send endlessly on the calleee stream using a small buffer size | ||||||
|     setting on the contex to simulate backlogging that would normally |     setting on the contex to simulate backlogging that would normally | ||||||
|     cause overruns. |     cause overruns. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # NOTE: ensure that if the parent is expecting to cancel this task |     # NOTE: ensure that if the caller is expecting to cancel this task | ||||||
|     # that we stay echoing much longer then they are so we don't |     # that we stay echoing much longer then they are so we don't | ||||||
|     # return early instead of receive the cancel msg. |     # return early instead of receive the cancel msg. | ||||||
|     total_batches: int = ( |     total_batches: int = ( | ||||||
|  | @ -1024,7 +944,7 @@ async def echo_back_sequence( | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     await ctx.started() |     await ctx.started() | ||||||
|     # await tractor.pause() |     # await tractor.breakpoint() | ||||||
|     async with ctx.open_stream( |     async with ctx.open_stream( | ||||||
|         msg_buffer_size=msg_buffer_size, |         msg_buffer_size=msg_buffer_size, | ||||||
| 
 | 
 | ||||||
|  | @ -1065,18 +985,18 @@ async def echo_back_sequence( | ||||||
|                 if be_slow: |                 if be_slow: | ||||||
|                     await trio.sleep(0.05) |                     await trio.sleep(0.05) | ||||||
| 
 | 
 | ||||||
|                 print('child waiting on next') |                 print('callee waiting on next') | ||||||
| 
 | 
 | ||||||
|             print(f'child echoing back latest batch\n{batch}') |             print(f'callee echoing back latest batch\n{batch}') | ||||||
|             for msg in batch: |             for msg in batch: | ||||||
|                 print(f'child sending msg\n{msg}') |                 print(f'callee sending msg\n{msg}') | ||||||
|                 await stream.send(msg) |                 await stream.send(msg) | ||||||
| 
 | 
 | ||||||
|     try: |     try: | ||||||
|         return 'yo' |         return 'yo' | ||||||
|     finally: |     finally: | ||||||
|         print( |         print( | ||||||
|             'exiting child with context:\n' |             'exiting callee with context:\n' | ||||||
|             f'{pformat(ctx)}\n' |             f'{pformat(ctx)}\n' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  | @ -1130,68 +1050,59 @@ def test_maybe_allow_overruns_stream( | ||||||
|             debug_mode=debug_mode, |             debug_mode=debug_mode, | ||||||
|         ) as an: |         ) as an: | ||||||
|             portal = await an.start_actor( |             portal = await an.start_actor( | ||||||
|                 'child_sends_forever', |                 'callee_sends_forever', | ||||||
|                 enable_modules=[__name__], |                 enable_modules=[__name__], | ||||||
|                 loglevel=loglevel, |                 loglevel=loglevel, | ||||||
|                 debug_mode=debug_mode, |                 debug_mode=debug_mode, | ||||||
|             ) |             ) | ||||||
|  |             seq = list(range(10)) | ||||||
|  |             async with portal.open_context( | ||||||
|  |                 echo_back_sequence, | ||||||
|  |                 seq=seq, | ||||||
|  |                 wait_for_cancel=cancel_ctx, | ||||||
|  |                 be_slow=(slow_side == 'child'), | ||||||
|  |                 allow_overruns_side=allow_overruns_side, | ||||||
| 
 | 
 | ||||||
|             # stream-sequence batch info with send delay to determine |             ) as (ctx, sent): | ||||||
|             # approx timeout determining whether test has hung. |                 assert sent is None | ||||||
|             total_batches: int = 2 |  | ||||||
|             num_items: int = 10 |  | ||||||
|             seq = list(range(num_items)) |  | ||||||
|             parent_send_delay: float = 0.16 |  | ||||||
|             timeout: float = math.ceil( |  | ||||||
|                 total_batches * num_items * parent_send_delay |  | ||||||
|             ) |  | ||||||
|             with trio.fail_after(timeout): |  | ||||||
|                 async with portal.open_context( |  | ||||||
|                     echo_back_sequence, |  | ||||||
|                     seq=seq, |  | ||||||
|                     wait_for_cancel=cancel_ctx, |  | ||||||
|                     be_slow=(slow_side == 'child'), |  | ||||||
|                     allow_overruns_side=allow_overruns_side, |  | ||||||
| 
 | 
 | ||||||
|                 ) as (ctx, sent): |                 async with ctx.open_stream( | ||||||
|                     assert sent is None |                     msg_buffer_size=1 if slow_side == 'parent' else None, | ||||||
|  |                     allow_overruns=(allow_overruns_side in {'parent', 'both'}), | ||||||
|  |                 ) as stream: | ||||||
| 
 | 
 | ||||||
|                     async with ctx.open_stream( |                     total_batches: int = 2 | ||||||
|                         msg_buffer_size=1 if slow_side == 'parent' else None, |                     for _ in range(total_batches): | ||||||
|                         allow_overruns=(allow_overruns_side in {'parent', 'both'}), |                         for msg in seq: | ||||||
|                     ) as stream: |                             # print(f'root tx {msg}') | ||||||
|  |                             await stream.send(msg) | ||||||
|  |                             if slow_side == 'parent': | ||||||
|  |                                 # NOTE: we make the parent slightly | ||||||
|  |                                 # slower, when it is slow, to make sure | ||||||
|  |                                 # that in the overruns everywhere case | ||||||
|  |                                 await trio.sleep(0.16) | ||||||
| 
 | 
 | ||||||
|                         for _ in range(total_batches): |                         batch = [] | ||||||
|                             for msg in seq: |                         async for msg in stream: | ||||||
|                                 # print(f'root tx {msg}') |                             print(f'root rx {msg}') | ||||||
|                                 await stream.send(msg) |                             batch.append(msg) | ||||||
|                                 if slow_side == 'parent': |                             if batch == seq: | ||||||
|                                     # NOTE: we make the parent slightly |                                 break | ||||||
|                                     # slower, when it is slow, to make sure |  | ||||||
|                                     # that in the overruns everywhere case |  | ||||||
|                                     await trio.sleep(parent_send_delay) |  | ||||||
| 
 |  | ||||||
|                             batch = [] |  | ||||||
|                             async for msg in stream: |  | ||||||
|                                 print(f'root rx {msg}') |  | ||||||
|                                 batch.append(msg) |  | ||||||
|                                 if batch == seq: |  | ||||||
|                                     break |  | ||||||
| 
 |  | ||||||
|                     if cancel_ctx: |  | ||||||
|                         # cancel the remote task |  | ||||||
|                         print('Requesting `ctx.cancel()` in parent!') |  | ||||||
|                         await ctx.cancel() |  | ||||||
| 
 |  | ||||||
|                 res: str|ContextCancelled = await ctx.result() |  | ||||||
| 
 | 
 | ||||||
|                 if cancel_ctx: |                 if cancel_ctx: | ||||||
|                     assert isinstance(res, ContextCancelled) |                     # cancel the remote task | ||||||
|                     assert tuple(res.canceller) == current_actor().uid |                     print('Requesting `ctx.cancel()` in parent!') | ||||||
|  |                     await ctx.cancel() | ||||||
| 
 | 
 | ||||||
|                 else: |             res: str|ContextCancelled = await ctx.result() | ||||||
|                     print(f'RX ROOT SIDE RESULT {res}') | 
 | ||||||
|                     assert res == 'yo' |             if cancel_ctx: | ||||||
|  |                 assert isinstance(res, ContextCancelled) | ||||||
|  |                 assert tuple(res.canceller) == current_actor().uid | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 print(f'RX ROOT SIDE RESULT {res}') | ||||||
|  |                 assert res == 'yo' | ||||||
| 
 | 
 | ||||||
|             # cancel the daemon |             # cancel the daemon | ||||||
|             await portal.cancel_actor() |             await portal.cancel_actor() | ||||||
|  | @ -1220,7 +1131,7 @@ def test_maybe_allow_overruns_stream( | ||||||
|             # NOTE: i tried to isolate to a deterministic case here |             # NOTE: i tried to isolate to a deterministic case here | ||||||
|             # based on timeing, but i was kinda wasted, and i don't |             # based on timeing, but i was kinda wasted, and i don't | ||||||
|             # think it's sane to catch them.. |             # think it's sane to catch them.. | ||||||
|             assert err.boxed_type in ( |             assert err.type in ( | ||||||
|                 tractor.RemoteActorError, |                 tractor.RemoteActorError, | ||||||
|                 StreamOverrun, |                 StreamOverrun, | ||||||
|             ) |             ) | ||||||
|  | @ -1228,12 +1139,11 @@ def test_maybe_allow_overruns_stream( | ||||||
|         elif ( |         elif ( | ||||||
|             slow_side == 'child' |             slow_side == 'child' | ||||||
|         ): |         ): | ||||||
|             assert err.boxed_type == StreamOverrun |             assert err.type == StreamOverrun | ||||||
| 
 | 
 | ||||||
|         elif slow_side == 'parent': |         elif slow_side == 'parent': | ||||||
|             assert err.boxed_type == tractor.RemoteActorError |             assert err.type == tractor.RemoteActorError | ||||||
|             assert 'StreamOverrun' in err.tb_str |             assert 'StreamOverrun' in err.msgdata['tb_str'] | ||||||
|             assert err.tb_str == err.msgdata['tb_str'] |  | ||||||
| 
 | 
 | ||||||
|     else: |     else: | ||||||
|         # if this hits the logic blocks from above are not |         # if this hits the logic blocks from above are not | ||||||
|  |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -7,11 +7,8 @@ import platform | ||||||
| from functools import partial | from functools import partial | ||||||
| import itertools | import itertools | ||||||
| 
 | 
 | ||||||
| import psutil |  | ||||||
| import pytest | import pytest | ||||||
| import subprocess |  | ||||||
| import tractor | import tractor | ||||||
| from tractor.trionics import collapse_eg |  | ||||||
| from tractor._testing import tractor_test | from tractor._testing import tractor_test | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
|  | @ -29,7 +26,7 @@ async def test_reg_then_unreg(reg_addr): | ||||||
|         portal = await n.start_actor('actor', enable_modules=[__name__]) |         portal = await n.start_actor('actor', enable_modules=[__name__]) | ||||||
|         uid = portal.channel.uid |         uid = portal.channel.uid | ||||||
| 
 | 
 | ||||||
|         async with tractor.get_registry(reg_addr) as aportal: |         async with tractor.get_arbiter(*reg_addr) as aportal: | ||||||
|             # this local actor should be the arbiter |             # this local actor should be the arbiter | ||||||
|             assert actor is aportal.actor |             assert actor is aportal.actor | ||||||
| 
 | 
 | ||||||
|  | @ -155,25 +152,15 @@ async def unpack_reg(actor_or_portal): | ||||||
| async def spawn_and_check_registry( | async def spawn_and_check_registry( | ||||||
|     reg_addr: tuple, |     reg_addr: tuple, | ||||||
|     use_signal: bool, |     use_signal: bool, | ||||||
|     debug_mode: bool = False, |  | ||||||
|     remote_arbiter: bool = False, |     remote_arbiter: bool = False, | ||||||
|     with_streaming: bool = False, |     with_streaming: bool = False, | ||||||
|     maybe_daemon: tuple[ |  | ||||||
|         subprocess.Popen, |  | ||||||
|         psutil.Process, |  | ||||||
|     ]|None = None, |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     if maybe_daemon: |  | ||||||
|         popen, proc = maybe_daemon |  | ||||||
|         # breakpoint() |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_root_actor( |     async with tractor.open_root_actor( | ||||||
|         registry_addrs=[reg_addr], |         registry_addrs=[reg_addr], | ||||||
|         debug_mode=debug_mode, |  | ||||||
|     ): |     ): | ||||||
|         async with tractor.get_registry(reg_addr) as portal: |         async with tractor.get_arbiter(*reg_addr) as portal: | ||||||
|             # runtime needs to be up to call this |             # runtime needs to be up to call this | ||||||
|             actor = tractor.current_actor() |             actor = tractor.current_actor() | ||||||
| 
 | 
 | ||||||
|  | @ -189,30 +176,28 @@ async def spawn_and_check_registry( | ||||||
|                 extra = 2  # local root actor + remote arbiter |                 extra = 2  # local root actor + remote arbiter | ||||||
| 
 | 
 | ||||||
|             # ensure current actor is registered |             # ensure current actor is registered | ||||||
|             registry: dict = await get_reg() |             registry = await get_reg() | ||||||
|             assert actor.uid in registry |             assert actor.uid in registry | ||||||
| 
 | 
 | ||||||
|             try: |             try: | ||||||
|                 async with tractor.open_nursery() as an: |                 async with tractor.open_nursery() as n: | ||||||
|                     async with ( |                     async with trio.open_nursery() as trion: | ||||||
|                         collapse_eg(), | 
 | ||||||
|                         trio.open_nursery() as trion, |  | ||||||
|                     ): |  | ||||||
|                         portals = {} |                         portals = {} | ||||||
|                         for i in range(3): |                         for i in range(3): | ||||||
|                             name = f'a{i}' |                             name = f'a{i}' | ||||||
|                             if with_streaming: |                             if with_streaming: | ||||||
|                                 portals[name] = await an.start_actor( |                                 portals[name] = await n.start_actor( | ||||||
|                                     name=name, enable_modules=[__name__]) |                                     name=name, enable_modules=[__name__]) | ||||||
| 
 | 
 | ||||||
|                             else:  # no streaming |                             else:  # no streaming | ||||||
|                                 portals[name] = await an.run_in_actor( |                                 portals[name] = await n.run_in_actor( | ||||||
|                                     trio.sleep_forever, name=name) |                                     trio.sleep_forever, name=name) | ||||||
| 
 | 
 | ||||||
|                         # wait on last actor to come up |                         # wait on last actor to come up | ||||||
|                         async with tractor.wait_for_actor(name): |                         async with tractor.wait_for_actor(name): | ||||||
|                             registry = await get_reg() |                             registry = await get_reg() | ||||||
|                             for uid in an._children: |                             for uid in n._children: | ||||||
|                                 assert uid in registry |                                 assert uid in registry | ||||||
| 
 | 
 | ||||||
|                         assert len(portals) + extra == len(registry) |                         assert len(portals) + extra == len(registry) | ||||||
|  | @ -245,7 +230,6 @@ async def spawn_and_check_registry( | ||||||
| @pytest.mark.parametrize('use_signal', [False, True]) | @pytest.mark.parametrize('use_signal', [False, True]) | ||||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | @pytest.mark.parametrize('with_streaming', [False, True]) | ||||||
| def test_subactors_unregister_on_cancel( | def test_subactors_unregister_on_cancel( | ||||||
|     debug_mode: bool, |  | ||||||
|     start_method, |     start_method, | ||||||
|     use_signal, |     use_signal, | ||||||
|     reg_addr, |     reg_addr, | ||||||
|  | @ -262,7 +246,6 @@ def test_subactors_unregister_on_cancel( | ||||||
|                 spawn_and_check_registry, |                 spawn_and_check_registry, | ||||||
|                 reg_addr, |                 reg_addr, | ||||||
|                 use_signal, |                 use_signal, | ||||||
|                 debug_mode=debug_mode, |  | ||||||
|                 remote_arbiter=False, |                 remote_arbiter=False, | ||||||
|                 with_streaming=with_streaming, |                 with_streaming=with_streaming, | ||||||
|             ), |             ), | ||||||
|  | @ -272,8 +255,7 @@ def test_subactors_unregister_on_cancel( | ||||||
| @pytest.mark.parametrize('use_signal', [False, True]) | @pytest.mark.parametrize('use_signal', [False, True]) | ||||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | @pytest.mark.parametrize('with_streaming', [False, True]) | ||||||
| def test_subactors_unregister_on_cancel_remote_daemon( | def test_subactors_unregister_on_cancel_remote_daemon( | ||||||
|     daemon: subprocess.Popen, |     daemon, | ||||||
|     debug_mode: bool, |  | ||||||
|     start_method, |     start_method, | ||||||
|     use_signal, |     use_signal, | ||||||
|     reg_addr, |     reg_addr, | ||||||
|  | @ -289,13 +271,8 @@ def test_subactors_unregister_on_cancel_remote_daemon( | ||||||
|                 spawn_and_check_registry, |                 spawn_and_check_registry, | ||||||
|                 reg_addr, |                 reg_addr, | ||||||
|                 use_signal, |                 use_signal, | ||||||
|                 debug_mode=debug_mode, |  | ||||||
|                 remote_arbiter=True, |                 remote_arbiter=True, | ||||||
|                 with_streaming=with_streaming, |                 with_streaming=with_streaming, | ||||||
|                 maybe_daemon=( |  | ||||||
|                     daemon, |  | ||||||
|                     psutil.Process(daemon.pid) |  | ||||||
|                 ), |  | ||||||
|             ), |             ), | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  | @ -321,7 +298,7 @@ async def close_chans_before_nursery( | ||||||
|     async with tractor.open_root_actor( |     async with tractor.open_root_actor( | ||||||
|         registry_addrs=[reg_addr], |         registry_addrs=[reg_addr], | ||||||
|     ): |     ): | ||||||
|         async with tractor.get_registry(reg_addr) as aportal: |         async with tractor.get_arbiter(*reg_addr) as aportal: | ||||||
|             try: |             try: | ||||||
|                 get_reg = partial(unpack_reg, aportal) |                 get_reg = partial(unpack_reg, aportal) | ||||||
| 
 | 
 | ||||||
|  | @ -339,12 +316,9 @@ async def close_chans_before_nursery( | ||||||
|                         async with portal2.open_stream_from( |                         async with portal2.open_stream_from( | ||||||
|                             stream_forever |                             stream_forever | ||||||
|                         ) as agen2: |                         ) as agen2: | ||||||
|                             async with ( |                             async with trio.open_nursery() as n: | ||||||
|                                 collapse_eg(), |                                 n.start_soon(streamer, agen1) | ||||||
|                                 trio.open_nursery() as tn, |                                 n.start_soon(cancel, use_signal, .5) | ||||||
|                             ): |  | ||||||
|                                 tn.start_soon(streamer, agen1) |  | ||||||
|                                 tn.start_soon(cancel, use_signal, .5) |  | ||||||
|                                 try: |                                 try: | ||||||
|                                     await streamer(agen2) |                                     await streamer(agen2) | ||||||
|                                 finally: |                                 finally: | ||||||
|  | @ -395,7 +369,7 @@ def test_close_channel_explicit( | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize('use_signal', [False, True]) | @pytest.mark.parametrize('use_signal', [False, True]) | ||||||
| def test_close_channel_explicit_remote_arbiter( | def test_close_channel_explicit_remote_arbiter( | ||||||
|     daemon: subprocess.Popen, |     daemon, | ||||||
|     start_method, |     start_method, | ||||||
|     use_signal, |     use_signal, | ||||||
|     reg_addr, |     reg_addr, | ||||||
|  |  | ||||||
|  | @ -19,8 +19,8 @@ from tractor._testing import ( | ||||||
| @pytest.fixture | @pytest.fixture | ||||||
| def run_example_in_subproc( | def run_example_in_subproc( | ||||||
|     loglevel: str, |     loglevel: str, | ||||||
|     testdir: pytest.Pytester, |     testdir, | ||||||
|     reg_addr: tuple[str, int], |     arb_addr: tuple[str, int], | ||||||
| ): | ): | ||||||
| 
 | 
 | ||||||
|     @contextmanager |     @contextmanager | ||||||
|  | @ -66,9 +66,6 @@ def run_example_in_subproc( | ||||||
|         # due to backpressure!!! |         # due to backpressure!!! | ||||||
|         proc = testdir.popen( |         proc = testdir.popen( | ||||||
|             cmdargs, |             cmdargs, | ||||||
|             stdin=subprocess.PIPE, |  | ||||||
|             stdout=subprocess.PIPE, |  | ||||||
|             stderr=subprocess.PIPE, |  | ||||||
|             **kwargs, |             **kwargs, | ||||||
|         ) |         ) | ||||||
|         assert not proc.returncode |         assert not proc.returncode | ||||||
|  | @ -84,37 +81,27 @@ def run_example_in_subproc( | ||||||
| 
 | 
 | ||||||
|     # walk yields: (dirpath, dirnames, filenames) |     # walk yields: (dirpath, dirnames, filenames) | ||||||
|     [ |     [ | ||||||
|         (p[0], f) |         (p[0], f) for p in os.walk(examples_dir()) for f in p[2] | ||||||
|         for p in os.walk(examples_dir()) |  | ||||||
|         for f in p[2] |  | ||||||
| 
 | 
 | ||||||
|         if ( |         if '__' not in f | ||||||
|             '__' not in f |         and f[0] != '_' | ||||||
|             and f[0] != '_' |         and 'debugging' not in p[0] | ||||||
|             and 'debugging' not in p[0] |         and 'integration' not in p[0] | ||||||
|             and 'integration' not in p[0] |         and 'advanced_faults' not in p[0] | ||||||
|             and 'advanced_faults' not in p[0] |  | ||||||
|             and 'multihost' not in p[0] |  | ||||||
|             and 'trio' not in p[0] |  | ||||||
|         ) |  | ||||||
|     ], |     ], | ||||||
|  | 
 | ||||||
|     ids=lambda t: t[1], |     ids=lambda t: t[1], | ||||||
| ) | ) | ||||||
| def test_example( | def test_example(run_example_in_subproc, example_script): | ||||||
|     run_example_in_subproc, |     """Load and run scripts from this repo's ``examples/`` dir as a user | ||||||
|     example_script, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Load and run scripts from this repo's ``examples/`` dir as a user |  | ||||||
|     would copy and pasing them into their editor. |     would copy and pasing them into their editor. | ||||||
| 
 | 
 | ||||||
|     On windows a little more "finessing" is done to make |     On windows a little more "finessing" is done to make | ||||||
|     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the |     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the | ||||||
|     test directory and invoke the script as a module with ``python -m |     test directory and invoke the script as a module with ``python -m | ||||||
|     test_example``. |     test_example``. | ||||||
| 
 |     """ | ||||||
|     ''' |     ex_file = os.path.join(*example_script) | ||||||
|     ex_file: str = os.path.join(*example_script) |  | ||||||
| 
 | 
 | ||||||
|     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): |     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): | ||||||
|         pytest.skip("2-way streaming example requires py3.9 async with syntax") |         pytest.skip("2-way streaming example requires py3.9 async with syntax") | ||||||
|  | @ -123,14 +110,10 @@ def test_example( | ||||||
|         code = ex.read() |         code = ex.read() | ||||||
| 
 | 
 | ||||||
|         with run_example_in_subproc(code) as proc: |         with run_example_in_subproc(code) as proc: | ||||||
|             err = None |             proc.wait() | ||||||
|             try: |             err, _ = proc.stderr.read(), proc.stdout.read() | ||||||
|                 if not proc.poll(): |             # print(f'STDERR: {err}') | ||||||
|                     _, err = proc.communicate(timeout=15) |             # print(f'STDOUT: {out}') | ||||||
| 
 |  | ||||||
|             except subprocess.TimeoutExpired as e: |  | ||||||
|                 proc.kill() |  | ||||||
|                 err = e.stderr |  | ||||||
| 
 | 
 | ||||||
|             # if we get some gnarly output let's aggregate and raise |             # if we get some gnarly output let's aggregate and raise | ||||||
|             if err: |             if err: | ||||||
|  | @ -144,8 +127,7 @@ def test_example( | ||||||
|                     # shouldn't eventually once we figure out what's |                     # shouldn't eventually once we figure out what's | ||||||
|                     # a better way to be explicit about aio side |                     # a better way to be explicit about aio side | ||||||
|                     # cancels? |                     # cancels? | ||||||
|                     and |                     and 'asyncio.exceptions.CancelledError' not in last_error | ||||||
|                     'asyncio.exceptions.CancelledError' not in last_error |  | ||||||
|                 ): |                 ): | ||||||
|                     raise Exception(errmsg) |                     raise Exception(errmsg) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -1,946 +0,0 @@ | ||||||
| ''' |  | ||||||
| Low-level functional audits for our |  | ||||||
| "capability based messaging"-spec feats. |  | ||||||
| 
 |  | ||||||
| B~) |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
|     # nullcontext, |  | ||||||
| ) |  | ||||||
| import importlib |  | ||||||
| from typing import ( |  | ||||||
|     Any, |  | ||||||
|     Type, |  | ||||||
|     Union, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from msgspec import ( |  | ||||||
|     # structs, |  | ||||||
|     # msgpack, |  | ||||||
|     Raw, |  | ||||||
|     # Struct, |  | ||||||
|     ValidationError, |  | ||||||
| ) |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Actor, |  | ||||||
|     # _state, |  | ||||||
|     MsgTypeError, |  | ||||||
|     Context, |  | ||||||
| ) |  | ||||||
| from tractor.msg import ( |  | ||||||
|     _codec, |  | ||||||
|     _ctxvar_MsgCodec, |  | ||||||
|     _exts, |  | ||||||
| 
 |  | ||||||
|     NamespacePath, |  | ||||||
|     MsgCodec, |  | ||||||
|     MsgDec, |  | ||||||
|     mk_codec, |  | ||||||
|     mk_dec, |  | ||||||
|     apply_codec, |  | ||||||
|     current_codec, |  | ||||||
| ) |  | ||||||
| from tractor.msg.types import ( |  | ||||||
|     log, |  | ||||||
|     Started, |  | ||||||
|     # _payload_msgs, |  | ||||||
|     # PayloadMsg, |  | ||||||
|     # mk_msg_spec, |  | ||||||
| ) |  | ||||||
| from tractor.msg._ops import ( |  | ||||||
|     limit_plds, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| def enc_nsp(obj: Any) -> Any: |  | ||||||
|     actor: Actor = tractor.current_actor( |  | ||||||
|         err_on_no_runtime=False, |  | ||||||
|     ) |  | ||||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid |  | ||||||
|     print(f'{uid} ENC HOOK') |  | ||||||
| 
 |  | ||||||
|     match obj: |  | ||||||
|         # case NamespacePath()|str(): |  | ||||||
|         case NamespacePath(): |  | ||||||
|             encoded: str = str(obj) |  | ||||||
|             print( |  | ||||||
|                 f'----- ENCODING `NamespacePath` as `str` ------\n' |  | ||||||
|                 f'|_obj:{type(obj)!r} = {obj!r}\n' |  | ||||||
|                 f'|_encoded: str = {encoded!r}\n' |  | ||||||
|             ) |  | ||||||
|             # if type(obj) != NamespacePath: |  | ||||||
|             #     breakpoint() |  | ||||||
|             return encoded |  | ||||||
|         case _: |  | ||||||
|             logmsg: str = ( |  | ||||||
|                 f'{uid}\n' |  | ||||||
|                 'FAILED ENCODE\n' |  | ||||||
|                 f'obj-> `{obj}: {type(obj)}`\n' |  | ||||||
|             ) |  | ||||||
|             raise NotImplementedError(logmsg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def dec_nsp( |  | ||||||
|     obj_type: Type, |  | ||||||
|     obj: Any, |  | ||||||
| 
 |  | ||||||
| ) -> Any: |  | ||||||
|     # breakpoint() |  | ||||||
|     actor: Actor = tractor.current_actor( |  | ||||||
|         err_on_no_runtime=False, |  | ||||||
|     ) |  | ||||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid |  | ||||||
|     print( |  | ||||||
|         f'{uid}\n' |  | ||||||
|         'CUSTOM DECODE\n' |  | ||||||
|         f'type-arg-> {obj_type}\n' |  | ||||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n' |  | ||||||
|     ) |  | ||||||
|     nsp = None |  | ||||||
|     # XXX, never happens right? |  | ||||||
|     if obj_type is Raw: |  | ||||||
|         breakpoint() |  | ||||||
| 
 |  | ||||||
|     if ( |  | ||||||
|         obj_type is NamespacePath |  | ||||||
|         and isinstance(obj, str) |  | ||||||
|         and ':' in obj |  | ||||||
|     ): |  | ||||||
|         nsp = NamespacePath(obj) |  | ||||||
|         # TODO: we could built a generic handler using |  | ||||||
|         # JUST matching the obj_type part? |  | ||||||
|         # nsp = obj_type(obj) |  | ||||||
| 
 |  | ||||||
|     if nsp: |  | ||||||
|         print(f'Returning NSP instance: {nsp}') |  | ||||||
|         return nsp |  | ||||||
| 
 |  | ||||||
|     logmsg: str = ( |  | ||||||
|         f'{uid}\n' |  | ||||||
|         'FAILED DECODE\n' |  | ||||||
|         f'type-> {obj_type}\n' |  | ||||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n\n' |  | ||||||
|         f'current codec:\n' |  | ||||||
|         f'{current_codec()}\n' |  | ||||||
|     ) |  | ||||||
|     # TODO: figure out the ignore subsys for this! |  | ||||||
|     # -[ ] option whether to defense-relay backc the msg |  | ||||||
|     #   inside an `Invalid`/`Ignore` |  | ||||||
|     # -[ ] how to make this handling pluggable such that a |  | ||||||
|     #   `Channel`/`MsgTransport` can intercept and process |  | ||||||
|     #   back msgs either via exception handling or some other |  | ||||||
|     #   signal? |  | ||||||
|     log.warning(logmsg) |  | ||||||
|     # NOTE: this delivers the invalid |  | ||||||
|     # value up to `msgspec`'s decoding |  | ||||||
|     # machinery for error raising. |  | ||||||
|     return obj |  | ||||||
|     # raise NotImplementedError(logmsg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def ex_func(*args): |  | ||||||
|     ''' |  | ||||||
|     A mod level func we can ref and load via our `NamespacePath` |  | ||||||
|     python-object pointer `str` subtype. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     print(f'ex_func({args})') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'add_codec_hooks', |  | ||||||
|     [ |  | ||||||
|         True, |  | ||||||
|         False, |  | ||||||
|     ], |  | ||||||
|     ids=['use_codec_hooks', 'no_codec_hooks'], |  | ||||||
| ) |  | ||||||
| def test_custom_extension_types( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     add_codec_hooks: bool |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that a `MsgCodec` (used for encoding all outbound IPC msgs |  | ||||||
|     and decoding all inbound `PayloadMsg`s) and a paired `MsgDec` |  | ||||||
|     (used for decoding the `PayloadMsg.pld: Raw` received within a given |  | ||||||
|     task's ipc `Context` scope) can both send and receive "extension types" |  | ||||||
|     as supported via custom converter hooks passed to `msgspec`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     nsp_pld_dec: MsgDec = mk_dec( |  | ||||||
|         spec=None,  # ONLY support the ext type |  | ||||||
|         dec_hook=dec_nsp if add_codec_hooks else None, |  | ||||||
|         ext_types=[NamespacePath], |  | ||||||
|     ) |  | ||||||
|     nsp_codec: MsgCodec = mk_codec( |  | ||||||
|         # ipc_pld_spec=Raw,  # default! |  | ||||||
| 
 |  | ||||||
|         # NOTE XXX: the encode hook MUST be used no matter what since |  | ||||||
|         # our `NamespacePath` is not any of a `Any` native type nor |  | ||||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know |  | ||||||
|         # how to encode it unless we provide the custom hook. |  | ||||||
|         # |  | ||||||
|         # AGAIN that is, regardless of whether we spec an |  | ||||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) |  | ||||||
|         # how to enc `NamespacePath` (nsp), so we add a custom |  | ||||||
|         # hook to do that ALWAYS. |  | ||||||
|         enc_hook=enc_nsp if add_codec_hooks else None, |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to |  | ||||||
|         # `Decoder`? so it won't work in tandem with the |  | ||||||
|         # `ipc_pld_spec` passed above? |  | ||||||
|         ext_types=[NamespacePath], |  | ||||||
| 
 |  | ||||||
|         # TODO? is it useful to have the `.pld` decoded *prior* to |  | ||||||
|         # the `PldRx`?? like perf or mem related? |  | ||||||
|         # ext_dec=nsp_pld_dec, |  | ||||||
|     ) |  | ||||||
|     if add_codec_hooks: |  | ||||||
|         assert nsp_codec.dec.dec_hook is None |  | ||||||
| 
 |  | ||||||
|         # TODO? if we pass `ext_dec` above? |  | ||||||
|         # assert nsp_codec.dec.dec_hook is dec_nsp |  | ||||||
| 
 |  | ||||||
|         assert nsp_codec.enc.enc_hook is enc_nsp |  | ||||||
| 
 |  | ||||||
|     nsp = NamespacePath.from_ref(ex_func) |  | ||||||
| 
 |  | ||||||
|     try: |  | ||||||
|         nsp_bytes: bytes = nsp_codec.encode(nsp) |  | ||||||
|         nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes) |  | ||||||
|         nsp_rt_sin_msg.load_ref() is ex_func |  | ||||||
|     except TypeError: |  | ||||||
|         if not add_codec_hooks: |  | ||||||
|             pass |  | ||||||
| 
 |  | ||||||
|     try: |  | ||||||
|         msg_bytes: bytes = nsp_codec.encode( |  | ||||||
|             Started( |  | ||||||
|                 cid='cid', |  | ||||||
|                 pld=nsp, |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         # since the ext-type obj should also be set as the msg.pld |  | ||||||
|         assert nsp_bytes in msg_bytes |  | ||||||
|         started_rt: Started = nsp_codec.decode(msg_bytes) |  | ||||||
|         pld: Raw = started_rt.pld |  | ||||||
|         assert isinstance(pld, Raw) |  | ||||||
|         nsp_rt: NamespacePath = nsp_pld_dec.decode(pld) |  | ||||||
|         assert isinstance(nsp_rt, NamespacePath) |  | ||||||
|         # in obj comparison terms they should be the same |  | ||||||
|         assert nsp_rt == nsp |  | ||||||
|         # ensure we've decoded to ext type! |  | ||||||
|         assert nsp_rt.load_ref() is ex_func |  | ||||||
| 
 |  | ||||||
|     except TypeError: |  | ||||||
|         if not add_codec_hooks: |  | ||||||
|             pass |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def sleep_forever_in_sub( |  | ||||||
|     ctx: Context, |  | ||||||
| ) -> None: |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def mk_custom_codec( |  | ||||||
|     add_hooks: bool, |  | ||||||
| 
 |  | ||||||
| ) -> tuple[ |  | ||||||
|     MsgCodec,  # encode to send |  | ||||||
|     MsgDec,  # pld receive-n-decode |  | ||||||
| ]: |  | ||||||
|     ''' |  | ||||||
|     Create custom `msgpack` enc/dec-hooks and set a `Decoder` |  | ||||||
|     which only loads `pld_spec` (like `NamespacePath`) types. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
| 
 |  | ||||||
|     # XXX NOTE XXX: despite defining `NamespacePath` as a type |  | ||||||
|     # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair |  | ||||||
|     # to cast to/from that type on the wire. See the docs: |  | ||||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types |  | ||||||
| 
 |  | ||||||
|     # if pld_spec is Any: |  | ||||||
|     #     pld_spec = Raw |  | ||||||
| 
 |  | ||||||
|     nsp_codec: MsgCodec = mk_codec( |  | ||||||
|         # ipc_pld_spec=Raw,  # default! |  | ||||||
| 
 |  | ||||||
|         # NOTE XXX: the encode hook MUST be used no matter what since |  | ||||||
|         # our `NamespacePath` is not any of a `Any` native type nor |  | ||||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know |  | ||||||
|         # how to encode it unless we provide the custom hook. |  | ||||||
|         # |  | ||||||
|         # AGAIN that is, regardless of whether we spec an |  | ||||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) |  | ||||||
|         # how to enc `NamespacePath` (nsp), so we add a custom |  | ||||||
|         # hook to do that ALWAYS. |  | ||||||
|         enc_hook=enc_nsp if add_hooks else None, |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to |  | ||||||
|         # `Decoder`? so it won't work in tandem with the |  | ||||||
|         # `ipc_pld_spec` passed above? |  | ||||||
|         ext_types=[NamespacePath], |  | ||||||
|     ) |  | ||||||
|     # dec_hook=dec_nsp if add_hooks else None, |  | ||||||
|     return nsp_codec |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'limit_plds_args', |  | ||||||
|     [ |  | ||||||
|         ( |  | ||||||
|             {'dec_hook': None, 'ext_types': None}, |  | ||||||
|             None, |  | ||||||
|         ), |  | ||||||
|         ( |  | ||||||
|             {'dec_hook': dec_nsp, 'ext_types': None}, |  | ||||||
|             TypeError, |  | ||||||
|         ), |  | ||||||
|         ( |  | ||||||
|             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath]}, |  | ||||||
|             None, |  | ||||||
|         ), |  | ||||||
|         ( |  | ||||||
|             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]}, |  | ||||||
|             None, |  | ||||||
|         ), |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'no_hook_no_ext_types', |  | ||||||
|         'only_hook', |  | ||||||
|         'hook_and_ext_types', |  | ||||||
|         'hook_and_ext_types_w_null', |  | ||||||
|     ] |  | ||||||
| ) |  | ||||||
| def test_pld_limiting_usage( |  | ||||||
|     limit_plds_args: tuple[dict, Exception|None], |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify `dec_hook()` and `ext_types` need to either both be |  | ||||||
|     provided or we raise a explanator type-error. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     kwargs, maybe_err = limit_plds_args |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery() as an:  # just to open runtime |  | ||||||
| 
 |  | ||||||
|             # XXX SHOULD NEVER WORK outside an ipc ctx scope! |  | ||||||
|             try: |  | ||||||
|                 with limit_plds(**kwargs): |  | ||||||
|                     pass |  | ||||||
|             except RuntimeError: |  | ||||||
|                 pass |  | ||||||
| 
 |  | ||||||
|             p: tractor.Portal = await an.start_actor( |  | ||||||
|                 'sub', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             async with ( |  | ||||||
|                 p.open_context( |  | ||||||
|                     sleep_forever_in_sub |  | ||||||
|                 ) as (ctx, first), |  | ||||||
|             ): |  | ||||||
|                 try: |  | ||||||
|                     with limit_plds(**kwargs): |  | ||||||
|                         pass |  | ||||||
|                 except maybe_err as exc: |  | ||||||
|                     assert type(exc) is maybe_err |  | ||||||
|                     pass |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def chk_codec_applied( |  | ||||||
|     expect_codec: MsgCodec|None, |  | ||||||
|     enter_value: MsgCodec|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> MsgCodec: |  | ||||||
|     ''' |  | ||||||
|     buncha sanity checks ensuring that the IPC channel's |  | ||||||
|     context-vars are set to the expected codec and that are |  | ||||||
|     ctx-var wrapper APIs match the same. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # TODO: play with tricyle again, bc this is supposed to work |  | ||||||
|     # the way we want? |  | ||||||
|     # |  | ||||||
|     # TreeVar |  | ||||||
|     # task: trio.Task = trio.lowlevel.current_task() |  | ||||||
|     # curr_codec = _ctxvar_MsgCodec.get_in(task) |  | ||||||
| 
 |  | ||||||
|     # ContextVar |  | ||||||
|     # task_ctx: Context = task.context |  | ||||||
|     # assert _ctxvar_MsgCodec in task_ctx |  | ||||||
|     # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec] |  | ||||||
|     if expect_codec is None: |  | ||||||
|         assert enter_value is None |  | ||||||
|         return |  | ||||||
| 
 |  | ||||||
|     # NOTE: currently we use this! |  | ||||||
|     # RunVar |  | ||||||
|     curr_codec: MsgCodec = current_codec() |  | ||||||
|     last_read_codec = _ctxvar_MsgCodec.get() |  | ||||||
|     # assert curr_codec is last_read_codec |  | ||||||
| 
 |  | ||||||
|     assert ( |  | ||||||
|         (same_codec := expect_codec) is |  | ||||||
|         # returned from `mk_codec()` |  | ||||||
| 
 |  | ||||||
|         # yielded value from `apply_codec()` |  | ||||||
| 
 |  | ||||||
|         # read from current task's `contextvars.Context` |  | ||||||
|         curr_codec is |  | ||||||
|         last_read_codec |  | ||||||
| 
 |  | ||||||
|         # the default `msgspec` settings |  | ||||||
|         is not _codec._def_msgspec_codec |  | ||||||
|         is not _codec._def_tractor_codec |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     if enter_value: |  | ||||||
|         assert enter_value is same_codec |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def send_back_values( |  | ||||||
|     ctx: Context, |  | ||||||
|     rent_pld_spec_type_strs: list[str], |  | ||||||
|     add_hooks: bool, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Setup up a custom codec to load instances of `NamespacePath` |  | ||||||
|     and ensure we can round trip a func ref with our parent. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     uid: tuple = tractor.current_actor().uid |  | ||||||
| 
 |  | ||||||
|     # init state in sub-actor should be default |  | ||||||
|     chk_codec_applied( |  | ||||||
|         expect_codec=_codec._def_tractor_codec, |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # load pld spec from input str |  | ||||||
|     rent_pld_spec = _exts.dec_type_union( |  | ||||||
|         rent_pld_spec_type_strs, |  | ||||||
|         mods=[ |  | ||||||
|             importlib.import_module(__name__), |  | ||||||
|         ], |  | ||||||
|     ) |  | ||||||
|     rent_pld_spec_types: set[Type] = _codec.unpack_spec_types( |  | ||||||
|         rent_pld_spec, |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # ONLY add ext-hooks if the rent specified a non-std type! |  | ||||||
|     add_hooks: bool = ( |  | ||||||
|         NamespacePath in rent_pld_spec_types |  | ||||||
|         and |  | ||||||
|         add_hooks |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # same as on parent side config. |  | ||||||
|     nsp_codec: MsgCodec|None = None |  | ||||||
|     if add_hooks: |  | ||||||
|         nsp_codec = mk_codec( |  | ||||||
|             enc_hook=enc_nsp, |  | ||||||
|             ext_types=[NamespacePath], |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     with ( |  | ||||||
|         maybe_apply_codec(nsp_codec) as codec, |  | ||||||
|         limit_plds( |  | ||||||
|             rent_pld_spec, |  | ||||||
|             dec_hook=dec_nsp if add_hooks else None, |  | ||||||
|             ext_types=[NamespacePath]  if add_hooks else None, |  | ||||||
|         ) as pld_dec, |  | ||||||
|     ): |  | ||||||
|         # ?XXX? SHOULD WE NOT be swapping the global codec since it |  | ||||||
|         # breaks `Context.started()` roundtripping checks?? |  | ||||||
|         chk_codec_applied( |  | ||||||
|             expect_codec=nsp_codec, |  | ||||||
|             enter_value=codec, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # ?TODO, mismatch case(s)? |  | ||||||
|         # |  | ||||||
|         # ensure pld spec matches on both sides |  | ||||||
|         ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec |  | ||||||
|         assert pld_dec is ctx_pld_dec |  | ||||||
|         child_pld_spec: Type = pld_dec.spec |  | ||||||
|         child_pld_spec_types: set[Type] = _codec.unpack_spec_types( |  | ||||||
|             child_pld_spec, |  | ||||||
|         ) |  | ||||||
|         assert ( |  | ||||||
|             child_pld_spec_types.issuperset( |  | ||||||
|                 rent_pld_spec_types |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # ?TODO, try loop for each of the types in pld-superset? |  | ||||||
|         # |  | ||||||
|         # for send_value in [ |  | ||||||
|         #     nsp, |  | ||||||
|         #     str(nsp), |  | ||||||
|         #     None, |  | ||||||
|         # ]: |  | ||||||
|         nsp = NamespacePath.from_ref(ex_func) |  | ||||||
|         try: |  | ||||||
|             print( |  | ||||||
|                 f'{uid}: attempting to `.started({nsp})`\n' |  | ||||||
|                 f'\n' |  | ||||||
|                 f'rent_pld_spec: {rent_pld_spec}\n' |  | ||||||
|                 f'child_pld_spec: {child_pld_spec}\n' |  | ||||||
|                 f'codec: {codec}\n' |  | ||||||
|             ) |  | ||||||
|             # await tractor.pause() |  | ||||||
|             await ctx.started(nsp) |  | ||||||
| 
 |  | ||||||
|         except tractor.MsgTypeError as _mte: |  | ||||||
|             mte = _mte |  | ||||||
| 
 |  | ||||||
|             # false -ve case |  | ||||||
|             if add_hooks: |  | ||||||
|                 raise RuntimeError( |  | ||||||
|                     f'EXPECTED to `.started()` value given spec ??\n\n' |  | ||||||
|                     f'child_pld_spec -> {child_pld_spec}\n' |  | ||||||
|                     f'value = {nsp}: {type(nsp)}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             # true -ve case |  | ||||||
|             raise mte |  | ||||||
| 
 |  | ||||||
|         # TODO: maybe we should add our own wrapper error so as to |  | ||||||
|         # be interchange-lib agnostic? |  | ||||||
|         # -[ ] the error type is wtv is raised from the hook so we |  | ||||||
|         #   could also require a type-class of errors for |  | ||||||
|         #   indicating whether the hook-failure can be handled by |  | ||||||
|         #   a nasty-dialog-unprot sub-sys? |  | ||||||
|         except TypeError as typerr: |  | ||||||
|             # false -ve |  | ||||||
|             if add_hooks: |  | ||||||
|                 raise RuntimeError('Should have been able to send `nsp`??') |  | ||||||
| 
 |  | ||||||
|             # true -ve |  | ||||||
|             print('Failed to send `nsp` due to no ext hooks set!') |  | ||||||
|             raise typerr |  | ||||||
| 
 |  | ||||||
|         # now try sending a set of valid and invalid plds to ensure |  | ||||||
|         # the pld spec is respected. |  | ||||||
|         sent: list[Any] = [] |  | ||||||
|         async with ctx.open_stream() as ipc: |  | ||||||
|             print( |  | ||||||
|                 f'{uid}: streaming all pld types to rent..' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # for send_value, expect_send in iter_send_val_items: |  | ||||||
|             for send_value in [ |  | ||||||
|                 nsp, |  | ||||||
|                 str(nsp), |  | ||||||
|                 None, |  | ||||||
|             ]: |  | ||||||
|                 send_type: Type = type(send_value) |  | ||||||
|                 print( |  | ||||||
|                     f'{uid}: SENDING NEXT pld\n' |  | ||||||
|                     f'send_type: {send_type}\n' |  | ||||||
|                     f'send_value: {send_value}\n' |  | ||||||
|                 ) |  | ||||||
|                 try: |  | ||||||
|                     await ipc.send(send_value) |  | ||||||
|                     sent.append(send_value) |  | ||||||
| 
 |  | ||||||
|                 except ValidationError as valerr: |  | ||||||
|                     print(f'{uid} FAILED TO SEND {send_value}!') |  | ||||||
| 
 |  | ||||||
|                     # false -ve |  | ||||||
|                     if add_hooks: |  | ||||||
|                         raise RuntimeError( |  | ||||||
|                             f'EXPECTED to roundtrip value given spec:\n' |  | ||||||
|                             f'rent_pld_spec -> {rent_pld_spec}\n' |  | ||||||
|                             f'child_pld_spec -> {child_pld_spec}\n' |  | ||||||
|                             f'value = {send_value}: {send_type}\n' |  | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                     # true -ve |  | ||||||
|                     raise valerr |  | ||||||
|                     # continue |  | ||||||
| 
 |  | ||||||
|             else: |  | ||||||
|                 print( |  | ||||||
|                     f'{uid}: finished sending all values\n' |  | ||||||
|                     'Should be exiting stream block!\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|         print(f'{uid}: exited streaming block!') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @cm |  | ||||||
| def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None: |  | ||||||
|     if codec is None: |  | ||||||
|         yield None |  | ||||||
|         return |  | ||||||
| 
 |  | ||||||
|     with apply_codec(codec) as codec: |  | ||||||
|         yield codec |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'pld_spec', |  | ||||||
|     [ |  | ||||||
|         Any, |  | ||||||
|         NamespacePath, |  | ||||||
|         NamespacePath|None,  # the "maybe" spec Bo |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'any_type', |  | ||||||
|         'only_nsp_ext', |  | ||||||
|         'maybe_nsp_ext', |  | ||||||
|     ] |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'add_hooks', |  | ||||||
|     [ |  | ||||||
|         True, |  | ||||||
|         False, |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'use_codec_hooks', |  | ||||||
|         'no_codec_hooks', |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| def test_ext_types_over_ipc( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     pld_spec: Union[Type], |  | ||||||
|     add_hooks: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Ensure we can support extension types coverted using |  | ||||||
|     `enc/dec_hook()`s passed to the `.msg.limit_plds()` API |  | ||||||
|     and that sane errors happen when we try do the same without |  | ||||||
|     the codec hooks. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     pld_types: set[Type] = _codec.unpack_spec_types(pld_spec) |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
| 
 |  | ||||||
|         # sanity check the default pld-spec beforehand |  | ||||||
|         chk_codec_applied( |  | ||||||
|             expect_codec=_codec._def_tractor_codec, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # extension type we want to send as msg payload |  | ||||||
|         nsp = NamespacePath.from_ref(ex_func) |  | ||||||
| 
 |  | ||||||
|         # ^NOTE, 2 cases: |  | ||||||
|         # - codec hooks noto added -> decode nsp as `str` |  | ||||||
|         # - codec with hooks -> decode nsp as `NamespacePath` |  | ||||||
|         nsp_codec: MsgCodec|None = None |  | ||||||
|         if ( |  | ||||||
|             NamespacePath in pld_types |  | ||||||
|             and |  | ||||||
|             add_hooks |  | ||||||
|         ): |  | ||||||
|             nsp_codec = mk_codec( |  | ||||||
|                 enc_hook=enc_nsp, |  | ||||||
|                 ext_types=[NamespacePath], |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|         ) as an: |  | ||||||
|             p: tractor.Portal = await an.start_actor( |  | ||||||
|                 'sub', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             with ( |  | ||||||
|                 maybe_apply_codec(nsp_codec) as codec, |  | ||||||
|             ): |  | ||||||
|                 chk_codec_applied( |  | ||||||
|                     expect_codec=nsp_codec, |  | ||||||
|                     enter_value=codec, |  | ||||||
|                 ) |  | ||||||
|                 rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec) |  | ||||||
| 
 |  | ||||||
|                 # XXX should raise an mte (`MsgTypeError`) |  | ||||||
|                 # when `add_hooks == False` bc the input |  | ||||||
|                 # `expect_ipc_send` kwarg has a nsp which can't be |  | ||||||
|                 # serialized! |  | ||||||
|                 # |  | ||||||
|                 # TODO:can we ensure this happens from the |  | ||||||
|                 # `Return`-side (aka the sub) as well? |  | ||||||
|                 try: |  | ||||||
|                     ctx: tractor.Context |  | ||||||
|                     ipc: tractor.MsgStream |  | ||||||
|                     async with ( |  | ||||||
| 
 |  | ||||||
|                         # XXX should raise an mte (`MsgTypeError`) |  | ||||||
|                         # when `add_hooks == False`.. |  | ||||||
|                         p.open_context( |  | ||||||
|                             send_back_values, |  | ||||||
|                             # expect_debug=debug_mode, |  | ||||||
|                             rent_pld_spec_type_strs=rent_pld_spec_type_strs, |  | ||||||
|                             add_hooks=add_hooks, |  | ||||||
|                             # expect_ipc_send=expect_ipc_send, |  | ||||||
|                         ) as (ctx, first), |  | ||||||
| 
 |  | ||||||
|                         ctx.open_stream() as ipc, |  | ||||||
|                     ): |  | ||||||
|                         with ( |  | ||||||
|                             limit_plds( |  | ||||||
|                                 pld_spec, |  | ||||||
|                                 dec_hook=dec_nsp if add_hooks else None, |  | ||||||
|                                 ext_types=[NamespacePath]  if add_hooks else None, |  | ||||||
|                             ) as pld_dec, |  | ||||||
|                         ): |  | ||||||
|                             ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec |  | ||||||
|                             assert pld_dec is ctx_pld_dec |  | ||||||
| 
 |  | ||||||
|                             # if ( |  | ||||||
|                             #     not add_hooks |  | ||||||
|                             #     and |  | ||||||
|                             #     NamespacePath in  |  | ||||||
|                             # ): |  | ||||||
|                             #     pytest.fail('ctx should fail to open without custom enc_hook!?') |  | ||||||
| 
 |  | ||||||
|                             await ipc.send(nsp) |  | ||||||
|                             nsp_rt = await ipc.receive() |  | ||||||
| 
 |  | ||||||
|                             assert nsp_rt == nsp |  | ||||||
|                             assert nsp_rt.load_ref() is ex_func |  | ||||||
| 
 |  | ||||||
|                 # this test passes bc we can go no further! |  | ||||||
|                 except MsgTypeError as mte: |  | ||||||
|                     # if not add_hooks: |  | ||||||
|                     #     # teardown nursery |  | ||||||
|                     #     await p.cancel_actor() |  | ||||||
|                         # return |  | ||||||
| 
 |  | ||||||
|                     raise mte |  | ||||||
| 
 |  | ||||||
|             await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
|     if ( |  | ||||||
|         NamespacePath in pld_types |  | ||||||
|         and |  | ||||||
|         add_hooks |  | ||||||
|     ): |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         with pytest.raises( |  | ||||||
|             expected_exception=tractor.RemoteActorError, |  | ||||||
|         ) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|         exc = excinfo.value |  | ||||||
|         # bc `.started(nsp: NamespacePath)` will raise |  | ||||||
|         assert exc.boxed_type is TypeError |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # def chk_pld_type( |  | ||||||
| #     payload_spec: Type[Struct]|Any, |  | ||||||
| #     pld: Any, |  | ||||||
| 
 |  | ||||||
| #     expect_roundtrip: bool|None = None, |  | ||||||
| 
 |  | ||||||
| # ) -> bool: |  | ||||||
| 
 |  | ||||||
| #     pld_val_type: Type = type(pld) |  | ||||||
| 
 |  | ||||||
| #     # TODO: verify that the overridden subtypes |  | ||||||
| #     # DO NOT have modified type-annots from original! |  | ||||||
| #     # 'Start',  .pld: FuncSpec |  | ||||||
| #     # 'StartAck',  .pld: IpcCtxSpec |  | ||||||
| #     # 'Stop',  .pld: UNSEt |  | ||||||
| #     # 'Error',  .pld: ErrorData |  | ||||||
| 
 |  | ||||||
| #     codec: MsgCodec = mk_codec( |  | ||||||
| #         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified |  | ||||||
| #         # type union. |  | ||||||
| #         ipc_pld_spec=payload_spec, |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     # make a one-off dec to compare with our `MsgCodec` instance |  | ||||||
| #     # which does the below `mk_msg_spec()` call internally |  | ||||||
| #     ipc_msg_spec: Union[Type[Struct]] |  | ||||||
| #     msg_types: list[PayloadMsg[payload_spec]] |  | ||||||
| #     ( |  | ||||||
| #         ipc_msg_spec, |  | ||||||
| #         msg_types, |  | ||||||
| #     ) = mk_msg_spec( |  | ||||||
| #         payload_type_union=payload_spec, |  | ||||||
| #     ) |  | ||||||
| #     _enc = msgpack.Encoder() |  | ||||||
| #     _dec = msgpack.Decoder( |  | ||||||
| #         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     assert ( |  | ||||||
| #         payload_spec |  | ||||||
| #         == |  | ||||||
| #         codec.pld_spec |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     # assert codec.dec == dec |  | ||||||
| #     # |  | ||||||
| #     # ^-XXX-^ not sure why these aren't "equal" but when cast |  | ||||||
| #     # to `str` they seem to match ?? .. kk |  | ||||||
| 
 |  | ||||||
| #     assert ( |  | ||||||
| #         str(ipc_msg_spec) |  | ||||||
| #         == |  | ||||||
| #         str(codec.msg_spec) |  | ||||||
| #         == |  | ||||||
| #         str(_dec.type) |  | ||||||
| #         == |  | ||||||
| #         str(codec.dec.type) |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     # verify the boxed-type for all variable payload-type msgs. |  | ||||||
| #     if not msg_types: |  | ||||||
| #         breakpoint() |  | ||||||
| 
 |  | ||||||
| #     roundtrip: bool|None = None |  | ||||||
| #     pld_spec_msg_names: list[str] = [ |  | ||||||
| #         td.__name__ for td in _payload_msgs |  | ||||||
| #     ] |  | ||||||
| #     for typedef in msg_types: |  | ||||||
| 
 |  | ||||||
| #         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names |  | ||||||
| #         if skip_runtime_msg: |  | ||||||
| #             continue |  | ||||||
| 
 |  | ||||||
| #         pld_field = structs.fields(typedef)[1] |  | ||||||
| #         assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere? |  | ||||||
| 
 |  | ||||||
| #         kwargs: dict[str, Any] = { |  | ||||||
| #             'cid': '666', |  | ||||||
| #             'pld': pld, |  | ||||||
| #         } |  | ||||||
| #         enc_msg: PayloadMsg = typedef(**kwargs) |  | ||||||
| 
 |  | ||||||
| #         _wire_bytes: bytes = _enc.encode(enc_msg) |  | ||||||
| #         wire_bytes: bytes = codec.enc.encode(enc_msg) |  | ||||||
| #         assert _wire_bytes == wire_bytes |  | ||||||
| 
 |  | ||||||
| #         ve: ValidationError|None = None |  | ||||||
| #         try: |  | ||||||
| #             dec_msg = codec.dec.decode(wire_bytes) |  | ||||||
| #             _dec_msg = _dec.decode(wire_bytes) |  | ||||||
| 
 |  | ||||||
| #             # decoded msg and thus payload should be exactly same! |  | ||||||
| #             assert (roundtrip := ( |  | ||||||
| #                 _dec_msg |  | ||||||
| #                 == |  | ||||||
| #                 dec_msg |  | ||||||
| #                 == |  | ||||||
| #                 enc_msg |  | ||||||
| #             )) |  | ||||||
| 
 |  | ||||||
| #             if ( |  | ||||||
| #                 expect_roundtrip is not None |  | ||||||
| #                 and expect_roundtrip != roundtrip |  | ||||||
| #             ): |  | ||||||
| #                 breakpoint() |  | ||||||
| 
 |  | ||||||
| #             assert ( |  | ||||||
| #                 pld |  | ||||||
| #                 == |  | ||||||
| #                 dec_msg.pld |  | ||||||
| #                 == |  | ||||||
| #                 enc_msg.pld |  | ||||||
| #             ) |  | ||||||
| #             # assert (roundtrip := (_dec_msg == enc_msg)) |  | ||||||
| 
 |  | ||||||
| #         except ValidationError as _ve: |  | ||||||
| #             ve = _ve |  | ||||||
| #             roundtrip: bool = False |  | ||||||
| #             if pld_val_type is payload_spec: |  | ||||||
| #                 raise ValueError( |  | ||||||
| #                    'Got `ValidationError` despite type-var match!?\n' |  | ||||||
| #                     f'pld_val_type: {pld_val_type}\n' |  | ||||||
| #                     f'payload_type: {payload_spec}\n' |  | ||||||
| #                 ) from ve |  | ||||||
| 
 |  | ||||||
| #             else: |  | ||||||
| #                 # ow we good cuz the pld spec mismatched. |  | ||||||
| #                 print( |  | ||||||
| #                     'Got expected `ValidationError` since,\n' |  | ||||||
| #                     f'{pld_val_type} is not {payload_spec}\n' |  | ||||||
| #                 ) |  | ||||||
| #         else: |  | ||||||
| #             if ( |  | ||||||
| #                 payload_spec is not Any |  | ||||||
| #                 and |  | ||||||
| #                 pld_val_type is not payload_spec |  | ||||||
| #             ): |  | ||||||
| #                 raise ValueError( |  | ||||||
| #                    'DID NOT `ValidationError` despite expected type match!?\n' |  | ||||||
| #                     f'pld_val_type: {pld_val_type}\n' |  | ||||||
| #                     f'payload_type: {payload_spec}\n' |  | ||||||
| #                 ) |  | ||||||
| 
 |  | ||||||
| #     # full code decode should always be attempted! |  | ||||||
| #     if roundtrip is None: |  | ||||||
| #         breakpoint() |  | ||||||
| 
 |  | ||||||
| #     return roundtrip |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # ?TODO? maybe remove since covered in the newer `test_pldrx_limiting` |  | ||||||
| # via end-2-end testing of all this? |  | ||||||
| # -[ ] IOW do we really NEED this lowlevel unit testing? |  | ||||||
| # |  | ||||||
| # def test_limit_msgspec( |  | ||||||
| #     debug_mode: bool, |  | ||||||
| # ): |  | ||||||
| #     ''' |  | ||||||
| #     Internals unit testing to verify that type-limiting an IPC ctx's |  | ||||||
| #     msg spec with `Pldrx.limit_plds()` results in various |  | ||||||
| #     encapsulated `msgspec` object settings and state. |  | ||||||
| 
 |  | ||||||
| #     ''' |  | ||||||
| #     async def main(): |  | ||||||
| #         async with tractor.open_root_actor( |  | ||||||
| #             debug_mode=debug_mode, |  | ||||||
| #         ): |  | ||||||
| #             # ensure we can round-trip a boxing `PayloadMsg` |  | ||||||
| #             assert chk_pld_type( |  | ||||||
| #                 payload_spec=Any, |  | ||||||
| #                 pld=None, |  | ||||||
| #                 expect_roundtrip=True, |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             # verify that a mis-typed payload value won't decode |  | ||||||
| #             assert not chk_pld_type( |  | ||||||
| #                 payload_spec=int, |  | ||||||
| #                 pld='doggy', |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             # parametrize the boxed `.pld` type as a custom-struct |  | ||||||
| #             # and ensure that parametrization propagates |  | ||||||
| #             # to all payload-msg-spec-able subtypes! |  | ||||||
| #             class CustomPayload(Struct): |  | ||||||
| #                 name: str |  | ||||||
| #                 value: Any |  | ||||||
| 
 |  | ||||||
| #             assert not chk_pld_type( |  | ||||||
| #                 payload_spec=CustomPayload, |  | ||||||
| #                 pld='doggy', |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             assert chk_pld_type( |  | ||||||
| #                 payload_spec=CustomPayload, |  | ||||||
| #                 pld=CustomPayload(name='doggy', value='urmom') |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             # yah, we can `.pause_from_sync()` now! |  | ||||||
| #             # breakpoint() |  | ||||||
| 
 |  | ||||||
| #     trio.run(main) |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -16,18 +16,17 @@ from tractor import (  # typing | ||||||
|     Portal, |     Portal, | ||||||
|     Context, |     Context, | ||||||
|     ContextCancelled, |     ContextCancelled, | ||||||
|     RemoteActorError, |  | ||||||
| ) |  | ||||||
| from tractor._testing import ( |  | ||||||
|     # tractor_test, |  | ||||||
|     expect_ctxc, |  | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| # XXX TODO cases: | # XXX TODO cases: | ||||||
|  | # - [ ] peer cancelled itself - so other peers should | ||||||
|  | #   get errors reflecting that the peer was itself the .canceller? | ||||||
|  | 
 | ||||||
| # - [x] WE cancelled the peer and thus should not see any raised | # - [x] WE cancelled the peer and thus should not see any raised | ||||||
| #   `ContextCancelled` as it should be reaped silently? | #   `ContextCancelled` as it should be reaped silently? | ||||||
| #   => pretty sure `test_context_stream_semantics::test_caller_cancels()` | #   => pretty sure `test_context_stream_semantics::test_caller_cancels()` | ||||||
| #      already covers this case? | #      already covers this case? | ||||||
|  | 
 | ||||||
| # - [x] INTER-PEER: some arbitrary remote peer cancels via | # - [x] INTER-PEER: some arbitrary remote peer cancels via | ||||||
| #   Portal.cancel_actor(). | #   Portal.cancel_actor(). | ||||||
| #   => all other connected peers should get that cancel requesting peer's | #   => all other connected peers should get that cancel requesting peer's | ||||||
|  | @ -40,11 +39,20 @@ from tractor._testing import ( | ||||||
| #   that also spawned a remote task task in that same peer-parent. | #   that also spawned a remote task task in that same peer-parent. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # def test_self_cancel(): | ||||||
|  | #     ''' | ||||||
|  | #     2 cases: | ||||||
|  | #     - calls `Actor.cancel()` locally in some task | ||||||
|  | #     - calls LocalPortal.cancel_actor()` ? | ||||||
|  | 
 | ||||||
|  | #     ''' | ||||||
|  | #     ... | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def open_stream_then_sleep_forever( | async def sleep_forever( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|     expect_ctxc: bool = False, |     expect_ctxc: bool = False, | ||||||
| 
 |  | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     Sync the context, open a stream then just sleep. |     Sync the context, open a stream then just sleep. | ||||||
|  | @ -54,10 +62,6 @@ async def open_stream_then_sleep_forever( | ||||||
|     ''' |     ''' | ||||||
|     try: |     try: | ||||||
|         await ctx.started() |         await ctx.started() | ||||||
| 
 |  | ||||||
|         # NOTE: the below means this child will send a `Stop` |  | ||||||
|         # to it's parent-side task despite that side never |  | ||||||
|         # opening a stream itself. |  | ||||||
|         async with ctx.open_stream(): |         async with ctx.open_stream(): | ||||||
|             await trio.sleep_forever() |             await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|  | @ -91,7 +95,7 @@ async def error_before_started( | ||||||
|     ''' |     ''' | ||||||
|     async with tractor.wait_for_actor('sleeper') as p2: |     async with tractor.wait_for_actor('sleeper') as p2: | ||||||
|         async with ( |         async with ( | ||||||
|             p2.open_context(open_stream_then_sleep_forever) as (peer_ctx, first), |             p2.open_context(sleep_forever) as (peer_ctx, first), | ||||||
|             peer_ctx.open_stream(), |             peer_ctx.open_stream(), | ||||||
|         ): |         ): | ||||||
|             # NOTE: this WAS inside an @acm body but i factored it |             # NOTE: this WAS inside an @acm body but i factored it | ||||||
|  | @ -152,11 +156,10 @@ def test_do_not_swallow_error_before_started_by_remote_contextcancelled( | ||||||
|             ): |             ): | ||||||
|                 await trio.sleep_forever() |                 await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(RemoteActorError) as excinfo: |     with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 | 
 | ||||||
|     rae = excinfo.value |     assert excinfo.value.type == TypeError | ||||||
|     assert rae.boxed_type is TypeError |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -176,10 +179,6 @@ async def sleep_a_bit_then_cancel_peer( | ||||||
|         await trio.sleep(cancel_after) |         await trio.sleep(cancel_after) | ||||||
|         await peer.cancel_actor() |         await peer.cancel_actor() | ||||||
| 
 | 
 | ||||||
|         # such that we're cancelled by our rent ctx-task |  | ||||||
|         await trio.sleep(3) |  | ||||||
|         print('CANCELLER RETURNING!') |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def stream_ints( | async def stream_ints( | ||||||
|  | @ -195,13 +194,9 @@ async def stream_ints( | ||||||
| @tractor.context | @tractor.context | ||||||
| async def stream_from_peer( | async def stream_from_peer( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|     debug_mode: bool, |  | ||||||
|     peer_name: str = 'sleeper', |     peer_name: str = 'sleeper', | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     # sanity |  | ||||||
|     assert tractor._state.debug_mode() == debug_mode |  | ||||||
| 
 |  | ||||||
|     peer: Portal |     peer: Portal | ||||||
|     try: |     try: | ||||||
|         async with ( |         async with ( | ||||||
|  | @ -235,54 +230,20 @@ async def stream_from_peer( | ||||||
|                 assert msg is not None |                 assert msg is not None | ||||||
|                 print(msg) |                 print(msg) | ||||||
| 
 | 
 | ||||||
|     # NOTE: cancellation of the (sleeper) peer should always cause |     # NOTE: cancellation of the (sleeper) peer should always | ||||||
|     # a `ContextCancelled` raise in this streaming actor. |     # cause a `ContextCancelled` raise in this streaming | ||||||
|     except ContextCancelled as _ctxc: |     # actor. | ||||||
|         ctxc = _ctxc |     except ContextCancelled as ctxc: | ||||||
|  |         ctxerr = ctxc | ||||||
| 
 | 
 | ||||||
|         # print("TRYING TO ENTER PAUSSE!!!") |         assert peer_ctx._remote_error is ctxerr | ||||||
|         # await tractor.pause(shield=True) |         assert peer_ctx._remote_error.msgdata == ctxerr.msgdata | ||||||
|         re: ContextCancelled = peer_ctx._remote_error |  | ||||||
| 
 |  | ||||||
|         # XXX YES XXX, remote error should be unpacked only once! |  | ||||||
|         assert ( |  | ||||||
|             re |  | ||||||
|             is |  | ||||||
|             peer_ctx.maybe_error |  | ||||||
|             is |  | ||||||
|             ctxc |  | ||||||
|             is |  | ||||||
|             peer_ctx._local_error |  | ||||||
|         ) |  | ||||||
|         # NOTE: these errors should all match! |  | ||||||
|         #   ------ - ------ |  | ||||||
|         # XXX [2024-05-03] XXX |  | ||||||
|         #   ------ - ------ |  | ||||||
|         # broke this due to a re-raise inside `.msg._ops.drain_to_final_msg()` |  | ||||||
|         # where the `Error()` msg was directly raising the ctxc |  | ||||||
|         # instead of just returning up to the caller inside |  | ||||||
|         # `Context.return()` which would results in a diff instance of |  | ||||||
|         # the same remote error bubbling out above vs what was |  | ||||||
|         # already unpacked and set inside `Context. |  | ||||||
|         assert ( |  | ||||||
|             peer_ctx._remote_error.msgdata |  | ||||||
|             == |  | ||||||
|             ctxc.msgdata |  | ||||||
|         ) |  | ||||||
|         # ^-XXX-^ notice the data is of course the exact same.. so |  | ||||||
|         # the above larger assert makes sense to also always be true! |  | ||||||
| 
 |  | ||||||
|         # XXX YES XXX, bc should be exact same msg instances |  | ||||||
|         assert peer_ctx._remote_error._ipc_msg is ctxc._ipc_msg |  | ||||||
| 
 |  | ||||||
|         # XXX NO XXX, bc new one always created for property accesss |  | ||||||
|         assert peer_ctx._remote_error.ipc_msg != ctxc.ipc_msg |  | ||||||
| 
 | 
 | ||||||
|         # the peer ctx is the canceller even though it's canceller |         # the peer ctx is the canceller even though it's canceller | ||||||
|         # is the "canceller" XD |         # is the "canceller" XD | ||||||
|         assert peer_name in peer_ctx.canceller |         assert peer_name in peer_ctx.canceller | ||||||
| 
 | 
 | ||||||
|         assert "canceller" in ctxc.canceller |         assert "canceller" in ctxerr.canceller | ||||||
| 
 | 
 | ||||||
|         # caller peer should not be the cancel requester |         # caller peer should not be the cancel requester | ||||||
|         assert not ctx.cancel_called |         assert not ctx.cancel_called | ||||||
|  | @ -306,13 +267,12 @@ async def stream_from_peer( | ||||||
| 
 | 
 | ||||||
|         # TODO / NOTE `.canceller` won't have been set yet |         # TODO / NOTE `.canceller` won't have been set yet | ||||||
|         # here because that machinery is inside |         # here because that machinery is inside | ||||||
|         # `Portal.open_context().__aexit__()` BUT, if we had |         # `.open_context().__aexit__()` BUT, if we had | ||||||
|         # a way to know immediately (from the last |         # a way to know immediately (from the last | ||||||
|         # checkpoint) that cancellation was due to |         # checkpoint) that cancellation was due to | ||||||
|         # a remote, we COULD assert this here..see, |         # a remote, we COULD assert this here..see, | ||||||
|         # https://github.com/goodboy/tractor/issues/368 |         # https://github.com/goodboy/tractor/issues/368 | ||||||
|         # |         # | ||||||
|         # await tractor.pause() |  | ||||||
|         # assert 'canceller' in ctx.canceller |         # assert 'canceller' in ctx.canceller | ||||||
| 
 | 
 | ||||||
|         # root/parent actor task should NEVER HAVE cancelled us! |         # root/parent actor task should NEVER HAVE cancelled us! | ||||||
|  | @ -396,6 +356,7 @@ def test_peer_canceller( | ||||||
|     ''' |     ''' | ||||||
|     async def main(): |     async def main(): | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|  |             # NOTE: to halt the peer tasks on ctxc, uncomment this. | ||||||
|             debug_mode=debug_mode, |             debug_mode=debug_mode, | ||||||
|         ) as an: |         ) as an: | ||||||
|             canceller: Portal = await an.start_actor( |             canceller: Portal = await an.start_actor( | ||||||
|  | @ -415,13 +376,12 @@ def test_peer_canceller( | ||||||
|             try: |             try: | ||||||
|                 async with ( |                 async with ( | ||||||
|                     sleeper.open_context( |                     sleeper.open_context( | ||||||
|                         open_stream_then_sleep_forever, |                         sleep_forever, | ||||||
|                         expect_ctxc=True, |                         expect_ctxc=True, | ||||||
|                     ) as (sleeper_ctx, sent), |                     ) as (sleeper_ctx, sent), | ||||||
| 
 | 
 | ||||||
|                     just_caller.open_context( |                     just_caller.open_context( | ||||||
|                         stream_from_peer, |                         stream_from_peer, | ||||||
|                         debug_mode=debug_mode, |  | ||||||
|                     ) as (caller_ctx, sent), |                     ) as (caller_ctx, sent), | ||||||
| 
 | 
 | ||||||
|                     canceller.open_context( |                     canceller.open_context( | ||||||
|  | @ -447,11 +407,10 @@ def test_peer_canceller( | ||||||
| 
 | 
 | ||||||
|                     # should always raise since this root task does |                     # should always raise since this root task does | ||||||
|                     # not request the sleeper cancellation ;) |                     # not request the sleeper cancellation ;) | ||||||
|                     except ContextCancelled as _ctxc: |                     except ContextCancelled as ctxerr: | ||||||
|                         ctxc = _ctxc |  | ||||||
|                         print( |                         print( | ||||||
|                             'CAUGHT REMOTE CONTEXT CANCEL\n\n' |                             'CAUGHT REMOTE CONTEXT CANCEL\n\n' | ||||||
|                             f'{ctxc}\n' |                             f'{ctxerr}\n' | ||||||
|                         ) |                         ) | ||||||
| 
 | 
 | ||||||
|                         # canceller and caller peers should not |                         # canceller and caller peers should not | ||||||
|  | @ -462,7 +421,7 @@ def test_peer_canceller( | ||||||
|                         # we were not the actor, our peer was |                         # we were not the actor, our peer was | ||||||
|                         assert not sleeper_ctx.cancel_acked |                         assert not sleeper_ctx.cancel_acked | ||||||
| 
 | 
 | ||||||
|                         assert ctxc.canceller[0] == 'canceller' |                         assert ctxerr.canceller[0] == 'canceller' | ||||||
| 
 | 
 | ||||||
|                         # XXX NOTE XXX: since THIS `ContextCancelled` |                         # XXX NOTE XXX: since THIS `ContextCancelled` | ||||||
|                         # HAS NOT YET bubbled up to the |                         # HAS NOT YET bubbled up to the | ||||||
|  | @ -473,7 +432,7 @@ def test_peer_canceller( | ||||||
| 
 | 
 | ||||||
|                         # CASE_1: error-during-ctxc-handling, |                         # CASE_1: error-during-ctxc-handling, | ||||||
|                         if error_during_ctxerr_handling: |                         if error_during_ctxerr_handling: | ||||||
|                             raise RuntimeError('Simulated RTE re-raise during ctxc handling') |                             raise RuntimeError('Simulated error during teardown') | ||||||
| 
 | 
 | ||||||
|                         # CASE_2: standard teardown inside in `.open_context()` block |                         # CASE_2: standard teardown inside in `.open_context()` block | ||||||
|                         raise |                         raise | ||||||
|  | @ -538,9 +497,6 @@ def test_peer_canceller( | ||||||
|                 #   should be cancelled by US. |                 #   should be cancelled by US. | ||||||
|                 # |                 # | ||||||
|                 if error_during_ctxerr_handling: |                 if error_during_ctxerr_handling: | ||||||
|                     print(f'loc_err: {_loc_err}\n') |  | ||||||
|                     assert isinstance(loc_err, RuntimeError) |  | ||||||
| 
 |  | ||||||
|                     # since we do a rte reraise above, the |                     # since we do a rte reraise above, the | ||||||
|                     # `.open_context()` error handling should have |                     # `.open_context()` error handling should have | ||||||
|                     # raised a local rte, thus the internal |                     # raised a local rte, thus the internal | ||||||
|  | @ -549,6 +505,9 @@ def test_peer_canceller( | ||||||
|                     # a `trio.Cancelled` due to a local |                     # a `trio.Cancelled` due to a local | ||||||
|                     # `._scope.cancel()` call. |                     # `._scope.cancel()` call. | ||||||
|                     assert not sleeper_ctx._scope.cancelled_caught |                     assert not sleeper_ctx._scope.cancelled_caught | ||||||
|  | 
 | ||||||
|  |                     assert isinstance(loc_err, RuntimeError) | ||||||
|  |                     print(f'_loc_err: {_loc_err}\n') | ||||||
|                     # assert sleeper_ctx._local_error is _loc_err |                     # assert sleeper_ctx._local_error is _loc_err | ||||||
|                     # assert sleeper_ctx._local_error is _loc_err |                     # assert sleeper_ctx._local_error is _loc_err | ||||||
|                     assert not ( |                     assert not ( | ||||||
|  | @ -585,12 +544,9 @@ def test_peer_canceller( | ||||||
| 
 | 
 | ||||||
|                         else:  # the other 2 ctxs |                         else:  # the other 2 ctxs | ||||||
|                             assert ( |                             assert ( | ||||||
|                                 isinstance(re, ContextCancelled) |                                 re.canceller | ||||||
|                                 and ( |                                 == | ||||||
|                                     re.canceller |                                 canceller.channel.uid | ||||||
|                                     == |  | ||||||
|                                     canceller.channel.uid |  | ||||||
|                                 ) |  | ||||||
|                             ) |                             ) | ||||||
| 
 | 
 | ||||||
|                     # since the sleeper errors while handling a |                     # since the sleeper errors while handling a | ||||||
|  | @ -783,16 +739,14 @@ def test_peer_canceller( | ||||||
|         with pytest.raises(ContextCancelled) as excinfo: |         with pytest.raises(ContextCancelled) as excinfo: | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
| 
 | 
 | ||||||
|         assert excinfo.value.boxed_type == ContextCancelled |         assert excinfo.value.type == ContextCancelled | ||||||
|         assert excinfo.value.canceller[0] == 'canceller' |         assert excinfo.value.canceller[0] == 'canceller' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def basic_echo_server( | async def basic_echo_server( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|     peer_name: str = 'wittle_bruv', |     peer_name: str = 'stepbro', | ||||||
| 
 |  | ||||||
|     err_after_imsg: int|None = None, |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|  | @ -820,31 +774,17 @@ async def basic_echo_server( | ||||||
|             # assert 0 |             # assert 0 | ||||||
|             await ipc.send(resp) |             await ipc.send(resp) | ||||||
| 
 | 
 | ||||||
|             if ( |  | ||||||
|                 err_after_imsg |  | ||||||
|                 and |  | ||||||
|                 i > err_after_imsg |  | ||||||
|             ): |  | ||||||
|                 raise RuntimeError( |  | ||||||
|                     f'Simulated error in `{peer_name}`' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def serve_subactors( | async def serve_subactors( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|     peer_name: str, |     peer_name: str, | ||||||
|     debug_mode: bool, |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     async with open_nursery() as an: |     async with open_nursery() as an: | ||||||
| 
 |  | ||||||
|         # sanity |  | ||||||
|         assert tractor._state.debug_mode() == debug_mode |  | ||||||
| 
 |  | ||||||
|         await ctx.started(peer_name) |         await ctx.started(peer_name) | ||||||
|         async with ctx.open_stream() as ipc: |         async with ctx.open_stream() as reqs: | ||||||
|             async for msg in ipc: |             async for msg in reqs: | ||||||
|                 peer_name: str = msg |                 peer_name: str = msg | ||||||
|                 peer: Portal = await an.start_actor( |                 peer: Portal = await an.start_actor( | ||||||
|                     name=peer_name, |                     name=peer_name, | ||||||
|  | @ -855,9 +795,9 @@ async def serve_subactors( | ||||||
|                     f'{peer_name}\n' |                     f'{peer_name}\n' | ||||||
|                     f'|_{peer}\n' |                     f'|_{peer}\n' | ||||||
|                 ) |                 ) | ||||||
|                 await ipc.send(( |                 await reqs.send(( | ||||||
|                     peer.chan.uid, |                     peer.chan.uid, | ||||||
|                     peer.chan.raddr.unwrap(), |                     peer.chan.raddr, | ||||||
|                 )) |                 )) | ||||||
| 
 | 
 | ||||||
|         print('Spawner exiting spawn serve loop!') |         print('Spawner exiting spawn serve loop!') | ||||||
|  | @ -867,20 +807,14 @@ async def serve_subactors( | ||||||
| async def client_req_subactor( | async def client_req_subactor( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|     peer_name: str, |     peer_name: str, | ||||||
|     debug_mode: bool, |  | ||||||
| 
 | 
 | ||||||
|     # used to simulate a user causing an error to be raised |     # used to simulate a user causing an error to be raised | ||||||
|     # directly in thread (like a KBI) to better replicate the |     # directly in thread (like a KBI) to better replicate the | ||||||
|     # case where a `modden` CLI client would hang afer requesting |     # case where a `modden` CLI client would hang afer requesting | ||||||
|     # a `Context.cancel()` to `bigd`'s wks spawner. |     # a `Context.cancel()` to `bigd`'s wks spawner. | ||||||
|     reraise_on_cancel: str|None = None, |     reraise_on_cancel: str|None = None, | ||||||
|     sub_err_after: int|None = None, |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     # sanity |  | ||||||
|     if debug_mode: |  | ||||||
|         assert tractor._state.debug_mode() |  | ||||||
| 
 |  | ||||||
|     # TODO: other cases to do with sub lifetimes: |     # TODO: other cases to do with sub lifetimes: | ||||||
|     # -[ ] test that we can have the server spawn a sub |     # -[ ] test that we can have the server spawn a sub | ||||||
|     #   that lives longer then ctx with this client. |     #   that lives longer then ctx with this client. | ||||||
|  | @ -902,7 +836,6 @@ async def client_req_subactor( | ||||||
|         spawner.open_context( |         spawner.open_context( | ||||||
|             serve_subactors, |             serve_subactors, | ||||||
|             peer_name=peer_name, |             peer_name=peer_name, | ||||||
|             debug_mode=debug_mode, |  | ||||||
|         ) as (spawner_ctx, first), |         ) as (spawner_ctx, first), | ||||||
|     ): |     ): | ||||||
|         assert first == peer_name |         assert first == peer_name | ||||||
|  | @ -924,7 +857,6 @@ async def client_req_subactor( | ||||||
|             await tell_little_bro( |             await tell_little_bro( | ||||||
|                 actor_name=sub_uid[0], |                 actor_name=sub_uid[0], | ||||||
|                 caller='client', |                 caller='client', | ||||||
|                 err_after=sub_err_after, |  | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # TODO: test different scope-layers of |             # TODO: test different scope-layers of | ||||||
|  | @ -936,7 +868,9 @@ async def client_req_subactor( | ||||||
|             # TODO: would be super nice to have a special injected |             # TODO: would be super nice to have a special injected | ||||||
|             # cancel type here (maybe just our ctxc) but using |             # cancel type here (maybe just our ctxc) but using | ||||||
|             # some native mechanism in `trio` :p |             # some native mechanism in `trio` :p | ||||||
|             except trio.Cancelled as err: |             except ( | ||||||
|  |                 trio.Cancelled | ||||||
|  |             ) as err: | ||||||
|                 _err = err |                 _err = err | ||||||
|                 if reraise_on_cancel: |                 if reraise_on_cancel: | ||||||
|                     errtype = globals()['__builtins__'][reraise_on_cancel] |                     errtype = globals()['__builtins__'][reraise_on_cancel] | ||||||
|  | @ -963,10 +897,7 @@ async def client_req_subactor( | ||||||
| 
 | 
 | ||||||
| async def tell_little_bro( | async def tell_little_bro( | ||||||
|     actor_name: str, |     actor_name: str, | ||||||
| 
 |     caller: str = '' | ||||||
|     caller: str = '', |  | ||||||
|     err_after: float|None = None, |  | ||||||
|     rng_seed: int = 50, |  | ||||||
| ): | ): | ||||||
|     # contact target actor, do a stream dialog. |     # contact target actor, do a stream dialog. | ||||||
|     async with ( |     async with ( | ||||||
|  | @ -975,20 +906,14 @@ async def tell_little_bro( | ||||||
|         ) as lb, |         ) as lb, | ||||||
|         lb.open_context( |         lb.open_context( | ||||||
|             basic_echo_server, |             basic_echo_server, | ||||||
| 
 |  | ||||||
|             # XXX proxy any delayed err condition |  | ||||||
|             err_after_imsg=( |  | ||||||
|                 err_after * rng_seed |  | ||||||
|                 if err_after is not None |  | ||||||
|                 else None |  | ||||||
|             ), |  | ||||||
|         ) as (sub_ctx, first), |         ) as (sub_ctx, first), | ||||||
| 
 |         sub_ctx.open_stream( | ||||||
|         sub_ctx.open_stream() as echo_ipc, |             basic_echo_server, | ||||||
|  |         ) as echo_ipc, | ||||||
|     ): |     ): | ||||||
|         actor: Actor = current_actor() |         actor: Actor = current_actor() | ||||||
|         uid: tuple = actor.uid |         uid: tuple = actor.uid | ||||||
|         for i in range(rng_seed): |         for i in range(100): | ||||||
|             msg: tuple = ( |             msg: tuple = ( | ||||||
|                 uid, |                 uid, | ||||||
|                 i, |                 i, | ||||||
|  | @ -1011,15 +936,9 @@ async def tell_little_bro( | ||||||
|     'raise_client_error', |     'raise_client_error', | ||||||
|     [None, 'KeyboardInterrupt'], |     [None, 'KeyboardInterrupt'], | ||||||
| ) | ) | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'raise_sub_spawn_error_after', |  | ||||||
|     [None, 0.5], |  | ||||||
| ) |  | ||||||
| def test_peer_spawns_and_cancels_service_subactor( | def test_peer_spawns_and_cancels_service_subactor( | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
|     raise_client_error: str, |     raise_client_error: str, | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
|     raise_sub_spawn_error_after: float|None, |  | ||||||
| ): | ): | ||||||
|     # NOTE: this tests for the modden `mod wks open piker` bug |     # NOTE: this tests for the modden `mod wks open piker` bug | ||||||
|     # discovered as part of implementing workspace ctx |     # discovered as part of implementing workspace ctx | ||||||
|  | @ -1033,22 +952,10 @@ def test_peer_spawns_and_cancels_service_subactor( | ||||||
|     #   and the server's spawned child should cancel and terminate! |     #   and the server's spawned child should cancel and terminate! | ||||||
|     peer_name: str = 'little_bro' |     peer_name: str = 'little_bro' | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|     def check_inner_rte(rae: RemoteActorError): |  | ||||||
|         ''' |  | ||||||
|         Validate the little_bro's relayed inception! |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         assert rae.boxed_type is RemoteActorError |  | ||||||
|         assert rae.src_type is RuntimeError |  | ||||||
|         assert 'client' in rae.relay_uid |  | ||||||
|         assert peer_name in rae.src_uid |  | ||||||
| 
 |  | ||||||
|     async def main(): |     async def main(): | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             # NOTE: to halt the peer tasks on ctxc, uncomment this. |             # NOTE: to halt the peer tasks on ctxc, uncomment this. | ||||||
|             debug_mode=debug_mode, |             debug_mode=debug_mode, | ||||||
|             registry_addrs=[reg_addr], |  | ||||||
|         ) as an: |         ) as an: | ||||||
|             server: Portal = await an.start_actor( |             server: Portal = await an.start_actor( | ||||||
|                 (server_name := 'spawn_server'), |                 (server_name := 'spawn_server'), | ||||||
|  | @ -1067,24 +974,14 @@ def test_peer_spawns_and_cancels_service_subactor( | ||||||
|                     server.open_context( |                     server.open_context( | ||||||
|                         serve_subactors, |                         serve_subactors, | ||||||
|                         peer_name=peer_name, |                         peer_name=peer_name, | ||||||
|                         debug_mode=debug_mode, |  | ||||||
| 
 |  | ||||||
|                     ) as (spawn_ctx, first), |                     ) as (spawn_ctx, first), | ||||||
| 
 | 
 | ||||||
|                     client.open_context( |                     client.open_context( | ||||||
|                         client_req_subactor, |                         client_req_subactor, | ||||||
|                         peer_name=peer_name, |                         peer_name=peer_name, | ||||||
|                         debug_mode=debug_mode, |  | ||||||
|                         reraise_on_cancel=raise_client_error, |                         reraise_on_cancel=raise_client_error, | ||||||
| 
 |  | ||||||
|                         # trigger for error condition in sub |  | ||||||
|                         # during streaming. |  | ||||||
|                         sub_err_after=raise_sub_spawn_error_after, |  | ||||||
| 
 |  | ||||||
|                     ) as (client_ctx, client_says), |                     ) as (client_ctx, client_says), | ||||||
|                 ): |                 ): | ||||||
|                     root: Actor = current_actor() |  | ||||||
|                     spawner_uid: tuple = spawn_ctx.chan.uid |  | ||||||
|                     print( |                     print( | ||||||
|                         f'Server says: {first}\n' |                         f'Server says: {first}\n' | ||||||
|                         f'Client says: {client_says}\n' |                         f'Client says: {client_says}\n' | ||||||
|  | @ -1094,7 +991,6 @@ def test_peer_spawns_and_cancels_service_subactor( | ||||||
|                     # (grandchild of this root actor) "little_bro" |                     # (grandchild of this root actor) "little_bro" | ||||||
|                     # and ensure we can also use it as an echo |                     # and ensure we can also use it as an echo | ||||||
|                     # server. |                     # server. | ||||||
|                     sub: Portal |  | ||||||
|                     async with tractor.wait_for_actor( |                     async with tractor.wait_for_actor( | ||||||
|                         name=peer_name, |                         name=peer_name, | ||||||
|                     ) as sub: |                     ) as sub: | ||||||
|  | @ -1106,150 +1002,56 @@ def test_peer_spawns_and_cancels_service_subactor( | ||||||
|                         f'.uid: {sub.actor.uid}\n' |                         f'.uid: {sub.actor.uid}\n' | ||||||
|                         f'chan.raddr: {sub.chan.raddr}\n' |                         f'chan.raddr: {sub.chan.raddr}\n' | ||||||
|                     ) |                     ) | ||||||
|  |                     await tell_little_bro( | ||||||
|  |                         actor_name=peer_name, | ||||||
|  |                         caller='root', | ||||||
|  |                     ) | ||||||
| 
 | 
 | ||||||
|                     async with expect_ctxc( |                     # signal client to raise a KBI | ||||||
|                         yay=raise_sub_spawn_error_after, |                     await client_ctx.cancel() | ||||||
|                         reraise=False, |                     print('root cancelled client, checking that sub-spawn is down') | ||||||
|                     ): |  | ||||||
|                         await tell_little_bro( |  | ||||||
|                             actor_name=peer_name, |  | ||||||
|                             caller='root', |  | ||||||
|                         ) |  | ||||||
| 
 | 
 | ||||||
|                     if not raise_sub_spawn_error_after: |                     async with tractor.find_actor( | ||||||
| 
 |                         name=peer_name, | ||||||
|                         # signal client to cancel and maybe raise a KBI |                     ) as sub: | ||||||
|                         await client_ctx.cancel() |                         assert not sub | ||||||
|                         print( |  | ||||||
|                             '-> root cancelling client,\n' |  | ||||||
|                             '-> root checking `client_ctx.result()`,\n' |  | ||||||
|                             f'-> checking that sub-spawn {peer_name} is down\n' |  | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                     try: |  | ||||||
|                         res = await client_ctx.wait_for_result(hide_tb=False) |  | ||||||
|                         # in remote (relayed inception) error |  | ||||||
|                         # case, we should error on the line above! |  | ||||||
|                         if raise_sub_spawn_error_after: |  | ||||||
|                             pytest.fail( |  | ||||||
|                                 'Never rxed proxied `RemoteActorError[RuntimeError]` !?' |  | ||||||
|                             ) |  | ||||||
| 
 |  | ||||||
|                         assert isinstance(res, ContextCancelled) |  | ||||||
|                         assert client_ctx.cancel_acked |  | ||||||
|                         assert res.canceller == root.uid |  | ||||||
|                         assert not raise_sub_spawn_error_after |  | ||||||
| 
 |  | ||||||
|                         # cancelling the spawner sub should |  | ||||||
|                         # transitively cancel it's sub, the little |  | ||||||
|                         # bruv. |  | ||||||
|                         print('root cancelling server/client sub-actors') |  | ||||||
|                         await spawn_ctx.cancel() |  | ||||||
|                         async with tractor.find_actor( |  | ||||||
|                             name=peer_name, |  | ||||||
|                         ) as sub: |  | ||||||
|                             assert not sub |  | ||||||
| 
 |  | ||||||
|                     # XXX, only for tracing |  | ||||||
|                     # except BaseException as _berr: |  | ||||||
|                     #     berr = _berr |  | ||||||
|                     #     await tractor.pause(shield=True) |  | ||||||
|                     #     raise berr |  | ||||||
| 
 |  | ||||||
|                     except RemoteActorError as rae: |  | ||||||
|                         _err = rae |  | ||||||
|                         assert raise_sub_spawn_error_after |  | ||||||
| 
 |  | ||||||
|                         # since this is a "relayed error" via the client |  | ||||||
|                         # sub-actor, it is expected to be |  | ||||||
|                         # a `RemoteActorError` boxing another |  | ||||||
|                         # `RemoteActorError` otherwise known as |  | ||||||
|                         #  an "inception" (from `trio`'s parlance) |  | ||||||
|                         # ((or maybe a "Matryoshka" and/or "matron" |  | ||||||
|                         # in our own working parlance)) which |  | ||||||
|                         # contains the source error from the |  | ||||||
|                         # little_bro: a `RuntimeError`. |  | ||||||
|                         # |  | ||||||
|                         check_inner_rte(rae) |  | ||||||
|                         assert rae.relay_uid == client.chan.uid |  | ||||||
|                         assert rae.src_uid == sub.chan.uid |  | ||||||
| 
 |  | ||||||
|                         assert not client_ctx.cancel_acked |  | ||||||
|                         assert ( |  | ||||||
|                             client_ctx.maybe_error |  | ||||||
|                             is client_ctx.outcome |  | ||||||
|                             is rae |  | ||||||
|                         ) |  | ||||||
|                         raise |  | ||||||
|                         # await tractor.pause() |  | ||||||
| 
 | 
 | ||||||
|  |                     print('root cancelling server/client sub-actors') | ||||||
| 
 | 
 | ||||||
|                     # await tractor.pause() |                     # await tractor.pause() | ||||||
|                     # await server.cancel_actor() |                     res = await client_ctx.result(hide_tb=False) | ||||||
|  |                     assert isinstance(res, ContextCancelled) | ||||||
|  |                     assert client_ctx.cancel_acked | ||||||
|  |                     assert res.canceller == current_actor().uid | ||||||
| 
 | 
 | ||||||
|             except RemoteActorError as rae: |                     await spawn_ctx.cancel() | ||||||
|                 # XXX more-or-less same as above handler |                     # await server.cancel_actor() | ||||||
|                 # this is just making sure the error bubbles out |  | ||||||
|                 # of the  |  | ||||||
|                 _err = rae |  | ||||||
|                 assert raise_sub_spawn_error_after |  | ||||||
|                 raise |  | ||||||
| 
 | 
 | ||||||
|             # since we called `.cancel_actor()`, `.cancel_ack` |             # since we called `.cancel_actor()`, `.cancel_ack` | ||||||
|             # will not be set on the ctx bc `ctx.cancel()` was not |             # will not be set on the ctx bc `ctx.cancel()` was not | ||||||
|             # called directly for this confext. |             # called directly fot this confext. | ||||||
|             except ContextCancelled as ctxc: |             except ContextCancelled as ctxc: | ||||||
|                 _ctxc = ctxc |                 print('caught ctxc from contexts!') | ||||||
|                 print( |                 assert ctxc.canceller == current_actor().uid | ||||||
|                     f'{root.uid} caught ctxc from ctx with {client_ctx.chan.uid}\n' |  | ||||||
|                     f'{repr(ctxc)}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 if not raise_sub_spawn_error_after: |  | ||||||
|                     assert ctxc.canceller == root.uid |  | ||||||
|                 else: |  | ||||||
|                     assert ctxc.canceller == spawner_uid |  | ||||||
| 
 |  | ||||||
|                 assert ctxc is spawn_ctx.outcome |                 assert ctxc is spawn_ctx.outcome | ||||||
|                 assert ctxc is spawn_ctx.maybe_error |                 assert ctxc is spawn_ctx.maybe_error | ||||||
|                 raise |                 raise | ||||||
| 
 | 
 | ||||||
|             if raise_sub_spawn_error_after: |             # assert spawn_ctx.cancel_acked | ||||||
|                 pytest.fail( |             assert spawn_ctx.cancel_acked | ||||||
|                     'context block(s) in PARENT never raised?!?' |             assert client_ctx.cancel_acked | ||||||
|                 ) |  | ||||||
| 
 | 
 | ||||||
|             if not raise_sub_spawn_error_after: |             await client.cancel_actor() | ||||||
|                 # assert spawn_ctx.cancel_acked |             await server.cancel_actor() | ||||||
|                 assert spawn_ctx.cancel_acked |  | ||||||
|                 assert client_ctx.cancel_acked |  | ||||||
| 
 | 
 | ||||||
|                 await client.cancel_actor() |             # WOA WOA WOA! we need this to close..!!!?? | ||||||
|                 await server.cancel_actor() |             # that's super bad XD | ||||||
| 
 | 
 | ||||||
|                 # WOA WOA WOA! we need this to close..!!!?? |             # TODO: why isn't this working!?!? | ||||||
|                 # that's super bad XD |             # we're now outside the `.open_context()` block so | ||||||
|  |             # the internal `Context._scope: CancelScope` should be | ||||||
|  |             # gracefully "closed" ;) | ||||||
| 
 | 
 | ||||||
|                 # TODO: why isn't this working!?!? |             # assert spawn_ctx.cancelled_caught | ||||||
|                 # we're now outside the `.open_context()` block so |  | ||||||
|                 # the internal `Context._scope: CancelScope` should be |  | ||||||
|                 # gracefully "closed" ;) |  | ||||||
| 
 | 
 | ||||||
|                 # assert spawn_ctx.cancelled_caught |     trio.run(main) | ||||||
| 
 |  | ||||||
|     async def _main(): |  | ||||||
|         with trio.fail_after( |  | ||||||
|             3 if not debug_mode |  | ||||||
|             else 999 |  | ||||||
|         ): |  | ||||||
|             await main() |  | ||||||
| 
 |  | ||||||
|     if raise_sub_spawn_error_after: |  | ||||||
|         with pytest.raises(RemoteActorError) as excinfo: |  | ||||||
|             trio.run(_main) |  | ||||||
| 
 |  | ||||||
|         rae: RemoteActorError = excinfo.value |  | ||||||
|         check_inner_rte(rae) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         trio.run(_main) |  | ||||||
|  |  | ||||||
|  | @ -38,13 +38,10 @@ async def async_gen_stream(sequence): | ||||||
|     assert cs.cancelled_caught |     assert cs.cancelled_caught | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: deprecated either remove entirely |  | ||||||
| # or re-impl in terms of `MsgStream` one-sides |  | ||||||
| # wrapper, but at least remove `Portal.open_stream_from()` |  | ||||||
| @tractor.stream | @tractor.stream | ||||||
| async def context_stream( | async def context_stream( | ||||||
|     ctx: tractor.Context, |     ctx: tractor.Context, | ||||||
|     sequence: list[int], |     sequence | ||||||
| ): | ): | ||||||
|     for i in sequence: |     for i in sequence: | ||||||
|         await ctx.send_yield(i) |         await ctx.send_yield(i) | ||||||
|  | @ -235,16 +232,10 @@ async def cancel_after(wait, reg_addr): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.fixture(scope='module') | @pytest.fixture(scope='module') | ||||||
| def time_quad_ex( | def time_quad_ex(reg_addr, ci_env, spawn_backend): | ||||||
|     reg_addr: tuple, |  | ||||||
|     ci_env: bool, |  | ||||||
|     spawn_backend: str, |  | ||||||
| ): |  | ||||||
|     if spawn_backend == 'mp': |     if spawn_backend == 'mp': | ||||||
|         ''' |         """no idea but the  mp *nix runs are flaking out here often... | ||||||
|         no idea but the  mp *nix runs are flaking out here often... |         """ | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         pytest.skip("Test is too flaky on mp in CI") |         pytest.skip("Test is too flaky on mp in CI") | ||||||
| 
 | 
 | ||||||
|     timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4 |     timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4 | ||||||
|  | @ -255,24 +246,12 @@ def time_quad_ex( | ||||||
|     return results, diff |     return results, diff | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_a_quadruple_example( | def test_a_quadruple_example(time_quad_ex, ci_env, spawn_backend): | ||||||
|     time_quad_ex: tuple, |     """This also serves as a kind of "we'd like to be this fast test".""" | ||||||
|     ci_env: bool, |  | ||||||
|     spawn_backend: str, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     This also serves as a kind of "we'd like to be this fast test". |  | ||||||
| 
 | 
 | ||||||
|     ''' |  | ||||||
|     results, diff = time_quad_ex |     results, diff = time_quad_ex | ||||||
|     assert results |     assert results | ||||||
|     this_fast = ( |     this_fast = 6 if platform.system() in ('Windows', 'Darwin') else 3 | ||||||
|         6 if platform.system() in ( |  | ||||||
|             'Windows', |  | ||||||
|             'Darwin', |  | ||||||
|         ) |  | ||||||
|         else 3 |  | ||||||
|     ) |  | ||||||
|     assert diff < this_fast |     assert diff < this_fast | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -38,7 +38,7 @@ async def test_self_is_registered_localportal(reg_addr): | ||||||
|     "Verify waiting on the arbiter to register itself using a local portal." |     "Verify waiting on the arbiter to register itself using a local portal." | ||||||
|     actor = tractor.current_actor() |     actor = tractor.current_actor() | ||||||
|     assert actor.is_arbiter |     assert actor.is_arbiter | ||||||
|     async with tractor.get_registry(reg_addr) as portal: |     async with tractor.get_arbiter(*reg_addr) as portal: | ||||||
|         assert isinstance(portal, tractor._portal.LocalPortal) |         assert isinstance(portal, tractor._portal.LocalPortal) | ||||||
| 
 | 
 | ||||||
|         with trio.fail_after(0.2): |         with trio.fail_after(0.2): | ||||||
|  |  | ||||||
|  | @ -10,7 +10,7 @@ import tractor | ||||||
| from tractor._testing import ( | from tractor._testing import ( | ||||||
|     tractor_test, |     tractor_test, | ||||||
| ) | ) | ||||||
| from .conftest import ( | from conftest import ( | ||||||
|     sig_prog, |     sig_prog, | ||||||
|     _INT_SIGNAL, |     _INT_SIGNAL, | ||||||
|     _INT_RETURN_CODE, |     _INT_RETURN_CODE, | ||||||
|  | @ -32,7 +32,7 @@ def test_abort_on_sigint(daemon): | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_cancel_remote_arbiter(daemon, reg_addr): | async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||||
|     assert not tractor.current_actor().is_arbiter |     assert not tractor.current_actor().is_arbiter | ||||||
|     async with tractor.get_registry(reg_addr) as portal: |     async with tractor.get_arbiter(*reg_addr) as portal: | ||||||
|         await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
| 
 | 
 | ||||||
|     time.sleep(0.1) |     time.sleep(0.1) | ||||||
|  | @ -41,7 +41,7 @@ async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||||
| 
 | 
 | ||||||
|     # no arbiter socket should exist |     # no arbiter socket should exist | ||||||
|     with pytest.raises(OSError): |     with pytest.raises(OSError): | ||||||
|         async with tractor.get_registry(reg_addr) as portal: |         async with tractor.get_arbiter(*reg_addr) as portal: | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -1,239 +0,0 @@ | ||||||
| ''' |  | ||||||
| Define the details of inter-actor "out-of-band" (OoB) cancel |  | ||||||
| semantics, that is how cancellation works when a cancel request comes |  | ||||||
| from the different concurrency (primitive's) "layer" then where the |  | ||||||
| eventual `trio.Task` actually raises a signal. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from functools import partial |  | ||||||
| # from contextlib import asynccontextmanager as acm |  | ||||||
| # import itertools |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import (  # typing |  | ||||||
|     ActorNursery, |  | ||||||
|     Portal, |  | ||||||
|     Context, |  | ||||||
|     # ContextCancelled, |  | ||||||
|     # RemoteActorError, |  | ||||||
| ) |  | ||||||
| # from tractor._testing import ( |  | ||||||
| #     tractor_test, |  | ||||||
| #     expect_ctxc, |  | ||||||
| # ) |  | ||||||
| 
 |  | ||||||
| # XXX TODO cases: |  | ||||||
| # - [ ] peer cancelled itself - so other peers should |  | ||||||
| #   get errors reflecting that the peer was itself the .canceller? |  | ||||||
| 
 |  | ||||||
| # def test_self_cancel(): |  | ||||||
| #     ''' |  | ||||||
| #     2 cases: |  | ||||||
| #     - calls `Actor.cancel()` locally in some task |  | ||||||
| #     - calls LocalPortal.cancel_actor()` ? |  | ||||||
| # |  | ||||||
| # things to ensure! |  | ||||||
| # -[ ] the ctxc raised in a child should ideally show the tb of the |  | ||||||
| #     underlying `Cancelled` checkpoint, i.e. |  | ||||||
| #     `raise scope_error from ctxc`? |  | ||||||
| # |  | ||||||
| # -[ ] a self-cancelled context, if not allowed to block on |  | ||||||
| #     `ctx.result()` at some point will hang since the `ctx._scope` |  | ||||||
| #     is never `.cancel_called`; cases for this include, |  | ||||||
| #     - an `open_ctx()` which never starteds before being OoB actor |  | ||||||
| #       cancelled. |  | ||||||
| #       |_ parent task will be blocked in `.open_context()` for the |  | ||||||
| #         `Started` msg, and when the OoB ctxc arrives `ctx._scope` |  | ||||||
| #         will never have been signalled.. |  | ||||||
| 
 |  | ||||||
| #     ''' |  | ||||||
| #     ... |  | ||||||
| 
 |  | ||||||
| # TODO, sanity test against the case in `/examples/trio/lockacquire_not_unmasked.py` |  | ||||||
| # but with the `Lock.acquire()` from a `@context` to ensure the |  | ||||||
| # implicit ignore-case-non-unmasking. |  | ||||||
| # |  | ||||||
| # @tractor.context |  | ||||||
| # async def acquire_actor_global_lock( |  | ||||||
| #     ctx: tractor.Context, |  | ||||||
| #     ignore_special_cases: bool, |  | ||||||
| # ): |  | ||||||
| 
 |  | ||||||
| #     async with maybe_unmask_excs( |  | ||||||
| #         ignore_special_cases=ignore_special_cases, |  | ||||||
| #     ): |  | ||||||
| #         await ctx.started('locked') |  | ||||||
| 
 |  | ||||||
| #     # block til cancelled |  | ||||||
| #     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def sleep_forever( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     # ignore_special_cases: bool, |  | ||||||
|     do_started: bool, |  | ||||||
| ): |  | ||||||
| 
 |  | ||||||
|     # async with maybe_unmask_excs( |  | ||||||
|     #     ignore_special_cases=ignore_special_cases, |  | ||||||
|     # ): |  | ||||||
|     #     await ctx.started('locked') |  | ||||||
|     if do_started: |  | ||||||
|         await ctx.started() |  | ||||||
| 
 |  | ||||||
|     # block til cancelled |  | ||||||
|     print('sleepin on child-side..') |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'cancel_ctx', |  | ||||||
|     [True, False], |  | ||||||
| ) |  | ||||||
| def test_cancel_ctx_with_parent_side_entered_in_bg_task( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |  | ||||||
|     cancel_ctx: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     The most "basic" out-of-band-task self-cancellation case where |  | ||||||
|     `Portal.open_context()` is entered in a bg task and the |  | ||||||
|     parent-task (of the containing nursery) calls `Context.cancel()` |  | ||||||
|     without the child knowing; the `Context._scope` should be |  | ||||||
|     `.cancel_called` when the IPC ctx's child-side relays |  | ||||||
|     a `ContextCancelled` with a `.canceller` set to the parent |  | ||||||
|     actor('s task). |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async def main(): |  | ||||||
|         with trio.fail_after( |  | ||||||
|             2 if not debug_mode else 999, |  | ||||||
|         ): |  | ||||||
|             an: ActorNursery |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_nursery( |  | ||||||
|                     debug_mode=debug_mode, |  | ||||||
|                     loglevel='devx', |  | ||||||
|                     enable_stack_on_sig=True, |  | ||||||
|                 ) as an, |  | ||||||
|                 trio.open_nursery() as tn, |  | ||||||
|             ): |  | ||||||
|                 ptl: Portal = await an.start_actor( |  | ||||||
|                     'sub', |  | ||||||
|                     enable_modules=[__name__], |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 async def _open_ctx_async( |  | ||||||
|                     do_started: bool = True, |  | ||||||
|                     task_status=trio.TASK_STATUS_IGNORED, |  | ||||||
|                 ): |  | ||||||
|                     # do we expect to never enter the |  | ||||||
|                     # `.open_context()` below. |  | ||||||
|                     if not do_started: |  | ||||||
|                         task_status.started() |  | ||||||
| 
 |  | ||||||
|                     async with ptl.open_context( |  | ||||||
|                         sleep_forever, |  | ||||||
|                         do_started=do_started, |  | ||||||
|                     ) as (ctx, first): |  | ||||||
|                         task_status.started(ctx) |  | ||||||
|                         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|                 # XXX, this is the key OoB part! |  | ||||||
|                 # |  | ||||||
|                 # - start the `.open_context()` in a bg task which |  | ||||||
|                 #   blocks inside the embedded scope-body, |  | ||||||
|                 # |  | ||||||
|                 # -  when we call `Context.cancel()` it **is |  | ||||||
|                 #   not** from the same task which eventually runs |  | ||||||
|                 #   `.__aexit__()`, |  | ||||||
|                 # |  | ||||||
|                 # - since the bg "opener" task will be in |  | ||||||
|                 #   a `trio.sleep_forever()`, it must be interrupted |  | ||||||
|                 #   by the `ContextCancelled` delivered from the |  | ||||||
|                 #   child-side; `Context._scope: CancelScope` MUST |  | ||||||
|                 #   be `.cancel_called`! |  | ||||||
|                 # |  | ||||||
|                 print('ASYNC opening IPC context in subtask..') |  | ||||||
|                 maybe_ctx: Context|None = await tn.start(partial( |  | ||||||
|                     _open_ctx_async, |  | ||||||
|                 )) |  | ||||||
| 
 |  | ||||||
|                 if ( |  | ||||||
|                     maybe_ctx |  | ||||||
|                     and |  | ||||||
|                     cancel_ctx |  | ||||||
|                 ): |  | ||||||
|                     print('cancelling first IPC ctx!') |  | ||||||
|                     await maybe_ctx.cancel() |  | ||||||
| 
 |  | ||||||
|                 # XXX, note that despite `maybe_context.cancel()` |  | ||||||
|                 # being called above, it's the parent (bg) task |  | ||||||
|                 # which was originally never interrupted in |  | ||||||
|                 # the `ctx._scope` body due to missing case logic in |  | ||||||
|                 # `ctx._maybe_cancel_and_set_remote_error()`. |  | ||||||
|                 # |  | ||||||
|                 # It didn't matter that the subactor process was |  | ||||||
|                 # already terminated and reaped, nothing was |  | ||||||
|                 # cancelling the ctx-parent task's scope! |  | ||||||
|                 # |  | ||||||
|                 print('cancelling subactor!') |  | ||||||
|                 await ptl.cancel_actor() |  | ||||||
| 
 |  | ||||||
|                 if maybe_ctx: |  | ||||||
|                     try: |  | ||||||
|                         await maybe_ctx.wait_for_result() |  | ||||||
|                     except tractor.ContextCancelled as ctxc: |  | ||||||
|                         assert not cancel_ctx |  | ||||||
|                         assert ( |  | ||||||
|                             ctxc.canceller |  | ||||||
|                             == |  | ||||||
|                             tractor.current_actor().aid.uid |  | ||||||
|                         ) |  | ||||||
|                         # don't re-raise since it'll trigger |  | ||||||
|                         # an EG from the above tn. |  | ||||||
| 
 |  | ||||||
|     if cancel_ctx: |  | ||||||
|         # graceful self-cancel |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         # ctx parent task should see OoB ctxc due to |  | ||||||
|         # `ptl.cancel_actor()`. |  | ||||||
|         with pytest.raises(tractor.ContextCancelled) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|         assert 'root' in excinfo.value.canceller[0] |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # def test_parent_actor_cancels_subactor_with_gt1_ctxs_open_to_it( |  | ||||||
| #     debug_mode: bool, |  | ||||||
| #     loglevel: str, |  | ||||||
| # ): |  | ||||||
| #     ''' |  | ||||||
| #     Demos OoB cancellation from the perspective of a ctx opened with |  | ||||||
| #     a child subactor where the parent cancels the child at the "actor |  | ||||||
| #     layer" using `Portal.cancel_actor()` and thus the |  | ||||||
| #     `ContextCancelled.canceller` received by the ctx's parent-side |  | ||||||
| #     task will appear to be a "self cancellation" even though that |  | ||||||
| #     specific task itself was not cancelled and thus |  | ||||||
| #     `Context.cancel_called ==False`. |  | ||||||
| #     ''' |  | ||||||
|                 # TODO, do we have an existing implied ctx |  | ||||||
|                 # cancel test like this? |  | ||||||
|                 # with trio.move_on_after(0.5):# as cs: |  | ||||||
|                 #     await _open_ctx_async( |  | ||||||
|                 #         do_started=False, |  | ||||||
|                 #     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|                 # in-line ctx scope should definitely raise |  | ||||||
|                 # a ctxc with `.canceller = 'root'` |  | ||||||
|                 # async with ptl.open_context( |  | ||||||
|                 #     sleep_forever, |  | ||||||
|                 #     do_started=True, |  | ||||||
|                 # ) as pair: |  | ||||||
| 
 |  | ||||||
|  | @ -1,364 +0,0 @@ | ||||||
| ''' |  | ||||||
| Audit sub-sys APIs from `.msg._ops` |  | ||||||
| mostly for ensuring correct `contextvars` |  | ||||||
| related settings around IPC contexts. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import ( |  | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from msgspec import ( |  | ||||||
|     Struct, |  | ||||||
| ) |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Context, |  | ||||||
|     MsgTypeError, |  | ||||||
|     current_ipc_ctx, |  | ||||||
|     Portal, |  | ||||||
| ) |  | ||||||
| from tractor.msg import ( |  | ||||||
|     _ops as msgops, |  | ||||||
|     Return, |  | ||||||
| ) |  | ||||||
| from tractor.msg import ( |  | ||||||
|     _codec, |  | ||||||
| ) |  | ||||||
| from tractor.msg.types import ( |  | ||||||
|     log, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| class PldMsg( |  | ||||||
|     Struct, |  | ||||||
| 
 |  | ||||||
|     # TODO: with multiple structs in-spec we need to tag them! |  | ||||||
|     # -[ ] offer a built-in `PldMsg` type to inherit from which takes |  | ||||||
|     #      case of these details? |  | ||||||
|     # |  | ||||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions |  | ||||||
|     # tag=True, |  | ||||||
|     # tag_field='msg_type', |  | ||||||
| ): |  | ||||||
|     field: str |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| maybe_msg_spec = PldMsg|None |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def maybe_expect_raises( |  | ||||||
|     raises: BaseException|None = None, |  | ||||||
|     ensure_in_message: list[str]|None = None, |  | ||||||
|     post_mortem: bool = False, |  | ||||||
|     timeout: int = 3, |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Async wrapper for ensuring errors propagate from the inner scope. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if tractor._state.debug_mode(): |  | ||||||
|         timeout += 999 |  | ||||||
| 
 |  | ||||||
|     with trio.fail_after(timeout): |  | ||||||
|         try: |  | ||||||
|             yield |  | ||||||
|         except BaseException as _inner_err: |  | ||||||
|             inner_err = _inner_err |  | ||||||
|             # wasn't-expected to error.. |  | ||||||
|             if raises is None: |  | ||||||
|                 raise |  | ||||||
| 
 |  | ||||||
|             else: |  | ||||||
|                 assert type(inner_err) is raises |  | ||||||
| 
 |  | ||||||
|                 # maybe check for error txt content |  | ||||||
|                 if ensure_in_message: |  | ||||||
|                     part: str |  | ||||||
|                     err_repr: str = repr(inner_err) |  | ||||||
|                     for part in ensure_in_message: |  | ||||||
|                         for i, arg in enumerate(inner_err.args): |  | ||||||
|                             if part in err_repr: |  | ||||||
|                                 break |  | ||||||
|                         # if part never matches an arg, then we're |  | ||||||
|                         # missing a match. |  | ||||||
|                         else: |  | ||||||
|                             raise ValueError( |  | ||||||
|                                 'Failed to find error message content?\n\n' |  | ||||||
|                                 f'expected: {ensure_in_message!r}\n' |  | ||||||
|                                 f'part: {part!r}\n\n' |  | ||||||
|                                 f'{inner_err.args}' |  | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                 if post_mortem: |  | ||||||
|                     await tractor.post_mortem() |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             if raises: |  | ||||||
|                 raise RuntimeError( |  | ||||||
|                     f'Expected a {raises.__name__!r} to be raised?' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context( |  | ||||||
|     pld_spec=maybe_msg_spec, |  | ||||||
| ) |  | ||||||
| async def child( |  | ||||||
|     ctx: Context, |  | ||||||
|     started_value: int|PldMsg|None, |  | ||||||
|     return_value: str|None, |  | ||||||
|     validate_pld_spec: bool, |  | ||||||
|     raise_on_started_mte: bool = True, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Call ``Context.started()`` more then once (an error). |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     expect_started_mte: bool = started_value == 10 |  | ||||||
| 
 |  | ||||||
|     # sanaity check that child RPC context is the current one |  | ||||||
|     curr_ctx: Context = current_ipc_ctx() |  | ||||||
|     assert ctx is curr_ctx |  | ||||||
| 
 |  | ||||||
|     rx: msgops.PldRx = ctx._pld_rx |  | ||||||
|     curr_pldec: _codec.MsgDec = rx.pld_dec |  | ||||||
| 
 |  | ||||||
|     ctx_meta: dict = getattr( |  | ||||||
|         child, |  | ||||||
|         '_tractor_context_meta', |  | ||||||
|         None, |  | ||||||
|     ) |  | ||||||
|     if ctx_meta: |  | ||||||
|         assert ( |  | ||||||
|             ctx_meta['pld_spec'] |  | ||||||
|             is curr_pldec.spec |  | ||||||
|             is curr_pldec.pld_spec |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # 2 cases: hdndle send-side and recv-only validation |  | ||||||
|     # - when `raise_on_started_mte == True`, send validate |  | ||||||
|     # - else, parent-recv-side only validation |  | ||||||
|     mte: MsgTypeError|None = None |  | ||||||
|     try: |  | ||||||
|         await ctx.started( |  | ||||||
|             value=started_value, |  | ||||||
|             validate_pld_spec=validate_pld_spec, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     except MsgTypeError as _mte: |  | ||||||
|         mte = _mte |  | ||||||
|         log.exception('started()` raised an MTE!\n') |  | ||||||
|         if not expect_started_mte: |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 'Child-ctx-task SHOULD NOT HAVE raised an MTE for\n\n' |  | ||||||
|                 f'{started_value!r}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         boxed_div: str = '------ - ------' |  | ||||||
|         assert boxed_div not in mte._message |  | ||||||
|         assert boxed_div not in mte.tb_str |  | ||||||
|         assert boxed_div not in repr(mte) |  | ||||||
|         assert boxed_div not in str(mte) |  | ||||||
|         mte_repr: str = repr(mte) |  | ||||||
|         for line in mte.message.splitlines(): |  | ||||||
|             assert line in mte_repr |  | ||||||
| 
 |  | ||||||
|         # since this is a *local error* there should be no |  | ||||||
|         # boxed traceback content! |  | ||||||
|         assert not mte.tb_str |  | ||||||
| 
 |  | ||||||
|         # propagate to parent? |  | ||||||
|         if raise_on_started_mte: |  | ||||||
|             raise |  | ||||||
| 
 |  | ||||||
|     # no-send-side-error fallthrough |  | ||||||
|     if ( |  | ||||||
|         validate_pld_spec |  | ||||||
|         and |  | ||||||
|         expect_started_mte |  | ||||||
|     ): |  | ||||||
|         raise RuntimeError( |  | ||||||
|             'Child-ctx-task SHOULD HAVE raised an MTE for\n\n' |  | ||||||
|             f'{started_value!r}\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     assert ( |  | ||||||
|         not expect_started_mte |  | ||||||
|         or |  | ||||||
|         not validate_pld_spec |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # if wait_for_parent_to_cancel: |  | ||||||
|     #     ... |  | ||||||
|     # |  | ||||||
|     # ^-TODO-^ logic for diff validation policies on each side: |  | ||||||
|     # |  | ||||||
|     # -[ ] ensure that if we don't validate on the send |  | ||||||
|     #   side, that we are eventually error-cancelled by our |  | ||||||
|     #   parent due to the bad `Started` payload! |  | ||||||
|     # -[ ] the boxed error should be srced from the parent's |  | ||||||
|     #   runtime NOT ours! |  | ||||||
|     # -[ ] we should still error on bad `return_value`s |  | ||||||
|     #   despite the parent not yet error-cancelling us? |  | ||||||
|     #   |_ how do we want the parent side to look in that |  | ||||||
|     #     case? |  | ||||||
|     #     -[ ] maybe the equiv of "during handling of the |  | ||||||
|     #       above error another occurred" for the case where |  | ||||||
|     #       the parent sends a MTE to this child and while |  | ||||||
|     #       waiting for the child to terminate it gets back |  | ||||||
|     #       the MTE for this case? |  | ||||||
|     # |  | ||||||
| 
 |  | ||||||
|     # XXX should always fail on recv side since we can't |  | ||||||
|     # really do much else beside terminate and relay the |  | ||||||
|     # msg-type-error from this RPC task ;) |  | ||||||
|     return return_value |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'return_value', |  | ||||||
|     [ |  | ||||||
|         'yo', |  | ||||||
|         None, |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'return[invalid-"yo"]', |  | ||||||
|         'return[valid-None]', |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'started_value', |  | ||||||
|     [ |  | ||||||
|         10, |  | ||||||
|         PldMsg(field='yo'), |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'Started[invalid-10]', |  | ||||||
|         'Started[valid-PldMsg]', |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'pld_check_started_value', |  | ||||||
|     [ |  | ||||||
|         True, |  | ||||||
|         False, |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'check-started-pld', |  | ||||||
|         'no-started-pld-validate', |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| def test_basic_payload_spec( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |  | ||||||
|     return_value: str|None, |  | ||||||
|     started_value: int|PldMsg, |  | ||||||
|     pld_check_started_value: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Validate the most basic `PldRx` msg-type-spec semantics around |  | ||||||
|     a IPC `Context` endpoint start, started-sync, and final return |  | ||||||
|     value depending on set payload types and the currently applied |  | ||||||
|     pld-spec. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     invalid_return: bool = return_value == 'yo' |  | ||||||
|     invalid_started: bool = started_value == 10 |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|             loglevel=loglevel, |  | ||||||
|         ) as an: |  | ||||||
|             p: Portal = await an.start_actor( |  | ||||||
|                 'child', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # since not opened yet. |  | ||||||
|             assert current_ipc_ctx() is None |  | ||||||
| 
 |  | ||||||
|             if invalid_started: |  | ||||||
|                 msg_type_str: str = 'Started' |  | ||||||
|                 bad_value: int = 10 |  | ||||||
|             elif invalid_return: |  | ||||||
|                 msg_type_str: str = 'Return' |  | ||||||
|                 bad_value: str = 'yo' |  | ||||||
|             else: |  | ||||||
|                 # XXX but should never be used below then.. |  | ||||||
|                 msg_type_str: str = '' |  | ||||||
|                 bad_value: str = '' |  | ||||||
| 
 |  | ||||||
|             maybe_mte: MsgTypeError|None = None |  | ||||||
|             should_raise: Exception|None = ( |  | ||||||
|                 MsgTypeError if ( |  | ||||||
|                     invalid_return |  | ||||||
|                     or |  | ||||||
|                     invalid_started |  | ||||||
|                 ) else None |  | ||||||
|             ) |  | ||||||
|             async with ( |  | ||||||
|                 maybe_expect_raises( |  | ||||||
|                     raises=should_raise, |  | ||||||
|                     ensure_in_message=[ |  | ||||||
|                         f"invalid `{msg_type_str}` msg payload", |  | ||||||
|                         f'{bad_value}', |  | ||||||
|                         f'has type {type(bad_value)!r}', |  | ||||||
|                         'not match type-spec', |  | ||||||
|                         f'`{msg_type_str}.pld: PldMsg|NoneType`', |  | ||||||
|                     ], |  | ||||||
|                     # only for debug |  | ||||||
|                     # post_mortem=True, |  | ||||||
|                 ), |  | ||||||
|                 p.open_context( |  | ||||||
|                     child, |  | ||||||
|                     return_value=return_value, |  | ||||||
|                     started_value=started_value, |  | ||||||
|                     validate_pld_spec=pld_check_started_value, |  | ||||||
|                 ) as (ctx, first), |  | ||||||
|             ): |  | ||||||
|                 # now opened with 'child' sub |  | ||||||
|                 assert current_ipc_ctx() is ctx |  | ||||||
| 
 |  | ||||||
|                 assert type(first) is PldMsg |  | ||||||
|                 assert first.field == 'yo' |  | ||||||
| 
 |  | ||||||
|                 try: |  | ||||||
|                     res: None|PldMsg = await ctx.result(hide_tb=False) |  | ||||||
|                     assert res is None |  | ||||||
|                 except MsgTypeError as mte: |  | ||||||
|                     maybe_mte = mte |  | ||||||
|                     if not invalid_return: |  | ||||||
|                         raise |  | ||||||
| 
 |  | ||||||
|                     # expected this invalid `Return.pld` so audit |  | ||||||
|                     # the error state + meta-data |  | ||||||
|                     assert mte.expected_msg_type is Return |  | ||||||
|                     assert mte.cid == ctx.cid |  | ||||||
|                     mte_repr: str = repr(mte) |  | ||||||
|                     for line in mte.message.splitlines(): |  | ||||||
|                         assert line in mte_repr |  | ||||||
| 
 |  | ||||||
|                     assert mte.tb_str |  | ||||||
|                     # await tractor.pause(shield=True) |  | ||||||
| 
 |  | ||||||
|                     # verify expected remote mte deats |  | ||||||
|                     assert ctx._local_error is None |  | ||||||
|                     assert ( |  | ||||||
|                         mte is |  | ||||||
|                         ctx._remote_error is |  | ||||||
|                         ctx.maybe_error is |  | ||||||
|                         ctx.outcome |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|             if should_raise is None: |  | ||||||
|                 assert maybe_mte is None |  | ||||||
| 
 |  | ||||||
|             await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,237 +0,0 @@ | ||||||
| ''' |  | ||||||
| Special case testing for issues not (dis)covered in the primary |  | ||||||
| `Context` related functional/scenario suites. |  | ||||||
| 
 |  | ||||||
| **NOTE: this mod is a WIP** space for handling |  | ||||||
| odd/rare/undiscovered/not-yet-revealed faults which either |  | ||||||
| loudly (ideal case) breakl our supervision protocol |  | ||||||
| or (worst case) result in distributed sys hangs. |  | ||||||
| 
 |  | ||||||
| Suites here further try to clarify (if [partially] ill-defined) and |  | ||||||
| verify our edge case semantics for inter-actor-relayed-exceptions |  | ||||||
| including, |  | ||||||
| 
 |  | ||||||
| - lowlevel: what remote obj-data is interchanged for IPC and what is |  | ||||||
|   native-obj form is expected from unpacking in the the new |  | ||||||
|   mem-domain. |  | ||||||
| 
 |  | ||||||
| - which kinds of `RemoteActorError` (and its derivs) are expected by which |  | ||||||
|   (types of) peers (parent, child, sibling, etc) with what |  | ||||||
|   particular meta-data set such as, |  | ||||||
| 
 |  | ||||||
|   - `.src_uid`: the original (maybe) peer who raised. |  | ||||||
|   - `.relay_uid`: the next-hop-peer who sent it. |  | ||||||
|   - `.relay_path`: the sequence of peer actor hops. |  | ||||||
|   - `.is_inception`: a predicate that denotes multi-hop remote errors. |  | ||||||
| 
 |  | ||||||
| - when should `ExceptionGroup`s be relayed from a particular |  | ||||||
|   remote endpoint, they should never be caused by implicit `._rpc` |  | ||||||
|   nursery machinery! |  | ||||||
| 
 |  | ||||||
| - various special `trio` edge cases around its cancellation semantics |  | ||||||
|   and how we (currently) leverage `trio.Cancelled` as a signal for |  | ||||||
|   whether a `Context` task should raise `ContextCancelled` (ctx). |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import (  # typing |  | ||||||
|     ActorNursery, |  | ||||||
|     Portal, |  | ||||||
|     Context, |  | ||||||
|     ContextCancelled, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def sleep_n_chkpt_in_finally( |  | ||||||
|     ctx: Context, |  | ||||||
|     sleep_n_raise: bool, |  | ||||||
| 
 |  | ||||||
|     chld_raise_delay: float, |  | ||||||
|     chld_finally_delay: float, |  | ||||||
| 
 |  | ||||||
|     rent_cancels: bool, |  | ||||||
|     rent_ctxc_delay: float, |  | ||||||
| 
 |  | ||||||
|     expect_exc: str|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Sync, open a tn, then wait for cancel, run a chkpt inside |  | ||||||
|     the user's `finally:` teardown. |  | ||||||
| 
 |  | ||||||
|     This covers a footgun case that `trio` core doesn't seem to care about |  | ||||||
|     wherein an exc can be masked by a `trio.Cancelled` raised inside a tn emedded |  | ||||||
|     `finally:`. |  | ||||||
| 
 |  | ||||||
|     Also see `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` |  | ||||||
|     for the down and gritty details. |  | ||||||
| 
 |  | ||||||
|     Since a `@context` endpoint fn can also contain code like this, |  | ||||||
|     **and** bc we currently have no easy way other then |  | ||||||
|     `trio.Cancelled` to signal cancellation on each side of an IPC `Context`, |  | ||||||
|     the footgun issue can compound itself as demonstrated in this suite.. |  | ||||||
| 
 |  | ||||||
|     Here are some edge cases codified with our WIP "sclang" syntax |  | ||||||
|     (note the parent(rent)/child(chld) naming here is just |  | ||||||
|     pragmatism, generally these most of these cases can occurr |  | ||||||
|     regardless of the distributed-task's supervision hiearchy), |  | ||||||
| 
 |  | ||||||
|     - rent c)=> chld.raises-then-taskc-in-finally |  | ||||||
|      |_ chld's body raises an `exc: BaseException`. |  | ||||||
|       _ in its `finally:` block it runs a chkpoint |  | ||||||
|         which raises a taskc (`trio.Cancelled`) which |  | ||||||
|         masks `exc` instead raising taskc up to the first tn. |  | ||||||
|       _ the embedded/chld tn captures the masking taskc and then |  | ||||||
|         raises it up to the ._rpc-ep-tn instead of `exc`. |  | ||||||
|       _ the rent thinks the child ctxc-ed instead of errored.. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     if expect_exc: |  | ||||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( |  | ||||||
|             type_name=expect_exc, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     berr: BaseException|None = None |  | ||||||
|     try: |  | ||||||
|         if not sleep_n_raise: |  | ||||||
|             await trio.sleep_forever() |  | ||||||
|         elif sleep_n_raise: |  | ||||||
| 
 |  | ||||||
|             # XXX this sleep is less then the sleep the parent |  | ||||||
|             # does before calling `ctx.cancel()` |  | ||||||
|             await trio.sleep(chld_raise_delay) |  | ||||||
| 
 |  | ||||||
|             # XXX this will be masked by a taskc raised in |  | ||||||
|             # the `finally:` if this fn doesn't terminate |  | ||||||
|             # before any ctxc-req arrives AND a checkpoint is hit |  | ||||||
|             # in that `finally:`. |  | ||||||
|             raise RuntimeError('my app krurshed..') |  | ||||||
| 
 |  | ||||||
|     except BaseException as _berr: |  | ||||||
|         berr = _berr |  | ||||||
| 
 |  | ||||||
|         # TODO: it'd sure be nice to be able to inject our own |  | ||||||
|         # `ContextCancelled` here instead of of `trio.Cancelled` |  | ||||||
|         # so that our runtime can expect it and this "user code" |  | ||||||
|         # would be able to tell the diff between a generic trio |  | ||||||
|         # cancel and a tractor runtime-IPC cancel. |  | ||||||
|         if expect_exc: |  | ||||||
|             if not isinstance( |  | ||||||
|                 berr, |  | ||||||
|                 expect_exc, |  | ||||||
|             ): |  | ||||||
|                 raise ValueError( |  | ||||||
|                     f'Unexpected exc type ??\n' |  | ||||||
|                     f'{berr!r}\n' |  | ||||||
|                     f'\n' |  | ||||||
|                     f'Expected a {expect_exc!r}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|         raise berr |  | ||||||
| 
 |  | ||||||
|     # simulate what user code might try even though |  | ||||||
|     # it's a known boo-boo.. |  | ||||||
|     finally: |  | ||||||
|         # maybe wait for rent ctxc to arrive |  | ||||||
|         with trio.CancelScope(shield=True): |  | ||||||
|             await trio.sleep(chld_finally_delay) |  | ||||||
| 
 |  | ||||||
|         # !!XXX this will raise `trio.Cancelled` which |  | ||||||
|         # will mask the RTE from above!!! |  | ||||||
|         # |  | ||||||
|         # YES, it's the same case as our extant |  | ||||||
|         # `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` |  | ||||||
|         try: |  | ||||||
|             await trio.lowlevel.checkpoint() |  | ||||||
|         except trio.Cancelled as taskc: |  | ||||||
|             if (scope_err := taskc.__context__): |  | ||||||
|                 print( |  | ||||||
|                     f'XXX MASKED REMOTE ERROR XXX\n' |  | ||||||
|                     f'ENDPOINT exception -> {scope_err!r}\n' |  | ||||||
|                     f'will be masked by -> {taskc!r}\n' |  | ||||||
|                 ) |  | ||||||
|                 # await tractor.pause(shield=True) |  | ||||||
| 
 |  | ||||||
|             raise taskc |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'chld_callspec', |  | ||||||
|     [ |  | ||||||
|         dict( |  | ||||||
|             sleep_n_raise=None, |  | ||||||
|             chld_raise_delay=0.1, |  | ||||||
|             chld_finally_delay=0.1, |  | ||||||
|             expect_exc='Cancelled', |  | ||||||
|             rent_cancels=True, |  | ||||||
|             rent_ctxc_delay=0.1, |  | ||||||
|         ), |  | ||||||
|         dict( |  | ||||||
|             sleep_n_raise='RuntimeError', |  | ||||||
|             chld_raise_delay=0.1, |  | ||||||
|             chld_finally_delay=1, |  | ||||||
|             expect_exc='RuntimeError', |  | ||||||
|             rent_cancels=False, |  | ||||||
|             rent_ctxc_delay=0.1, |  | ||||||
|         ), |  | ||||||
|     ], |  | ||||||
|     ids=lambda item: f'chld_callspec={item!r}' |  | ||||||
| ) |  | ||||||
| def test_unmasked_remote_exc( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     chld_callspec: dict, |  | ||||||
|     tpt_proto: str, |  | ||||||
| ): |  | ||||||
|     expect_exc_str: str|None = chld_callspec['sleep_n_raise'] |  | ||||||
|     rent_ctxc_delay: float|None = chld_callspec['rent_ctxc_delay'] |  | ||||||
|     async def main(): |  | ||||||
|         an: ActorNursery |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|             enable_transports=[tpt_proto], |  | ||||||
|         ) as an: |  | ||||||
|             ptl: Portal = await an.start_actor( |  | ||||||
|                 'cancellee', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             ctx: Context |  | ||||||
|             async with ( |  | ||||||
|                 ptl.open_context( |  | ||||||
|                     sleep_n_chkpt_in_finally, |  | ||||||
|                     **chld_callspec, |  | ||||||
|                 ) as (ctx, sent), |  | ||||||
|             ): |  | ||||||
|                 assert not sent |  | ||||||
|                 await trio.sleep(rent_ctxc_delay) |  | ||||||
|                 await ctx.cancel() |  | ||||||
| 
 |  | ||||||
|                 # recv error or result from chld |  | ||||||
|                 ctxc: ContextCancelled = await ctx.wait_for_result() |  | ||||||
|                 assert ( |  | ||||||
|                     ctxc is ctx.outcome |  | ||||||
|                     and |  | ||||||
|                     isinstance(ctxc, ContextCancelled) |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             # always graceful terminate the sub in non-error cases |  | ||||||
|             await an.cancel() |  | ||||||
| 
 |  | ||||||
|     if expect_exc_str: |  | ||||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( |  | ||||||
|             type_name=expect_exc_str, |  | ||||||
|         ) |  | ||||||
|         with pytest.raises( |  | ||||||
|             expected_exception=tractor.RemoteActorError, |  | ||||||
|         ) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|         rae = excinfo.value |  | ||||||
|         assert expect_exc == rae.boxed_type |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         trio.run(main) |  | ||||||
|  | @ -1,6 +1,5 @@ | ||||||
| ''' | ''' | ||||||
| Suites for our `.trionics.maybe_open_context()` multi-task | Async context manager cache api testing: ``trionics.maybe_open_context():`` | ||||||
| shared-cached `@acm` API. |  | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import asynccontextmanager as acm | ||||||
|  | @ -10,15 +9,6 @@ from typing import Awaitable | ||||||
| import pytest | import pytest | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor.trionics import ( |  | ||||||
|     maybe_open_context, |  | ||||||
| ) |  | ||||||
| from tractor.log import ( |  | ||||||
|     get_console_log, |  | ||||||
|     get_logger, |  | ||||||
| ) |  | ||||||
| log = get_logger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _resource: int = 0 | _resource: int = 0 | ||||||
|  | @ -62,7 +52,7 @@ def test_resource_only_entered_once(key_on): | ||||||
|                 # different task names per task will be used |                 # different task names per task will be used | ||||||
|                 kwargs = {'task_name': name} |                 kwargs = {'task_name': name} | ||||||
| 
 | 
 | ||||||
|             async with maybe_open_context( |             async with tractor.trionics.maybe_open_context( | ||||||
|                 maybe_increment_counter, |                 maybe_increment_counter, | ||||||
|                 kwargs=kwargs, |                 kwargs=kwargs, | ||||||
|                 key=key, |                 key=key, | ||||||
|  | @ -82,13 +72,11 @@ def test_resource_only_entered_once(key_on): | ||||||
|         with trio.move_on_after(0.5): |         with trio.move_on_after(0.5): | ||||||
|             async with ( |             async with ( | ||||||
|                 tractor.open_root_actor(), |                 tractor.open_root_actor(), | ||||||
|                 trio.open_nursery() as tn, |                 trio.open_nursery() as n, | ||||||
|             ): |             ): | ||||||
|  | 
 | ||||||
|                 for i in range(10): |                 for i in range(10): | ||||||
|                     tn.start_soon( |                     n.start_soon(enter_cached_mngr, f'task_{i}') | ||||||
|                         enter_cached_mngr, |  | ||||||
|                         f'task_{i}', |  | ||||||
|                     ) |  | ||||||
|                     await trio.sleep(0.001) |                     await trio.sleep(0.001) | ||||||
| 
 | 
 | ||||||
|     trio.run(main) |     trio.run(main) | ||||||
|  | @ -110,55 +98,27 @@ async def streamer( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def open_stream() -> Awaitable[ | async def open_stream() -> Awaitable[tractor.MsgStream]: | ||||||
|     tuple[ |  | ||||||
|         tractor.ActorNursery, |  | ||||||
|         tractor.MsgStream, |  | ||||||
|     ] |  | ||||||
| ]: |  | ||||||
|     try: |  | ||||||
|         async with tractor.open_nursery() as an: |  | ||||||
|             portal = await an.start_actor( |  | ||||||
|                 'streamer', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             try: |  | ||||||
|                 async with ( |  | ||||||
|                     portal.open_context(streamer) as (ctx, first), |  | ||||||
|                     ctx.open_stream() as stream, |  | ||||||
|                 ): |  | ||||||
|                     print('Entered open_stream() caller') |  | ||||||
|                     yield an, stream |  | ||||||
|                     print('Exited open_stream() caller') |  | ||||||
| 
 | 
 | ||||||
|             finally: |     async with tractor.open_nursery() as tn: | ||||||
|                 print( |         portal = await tn.start_actor('streamer', enable_modules=[__name__]) | ||||||
|                     'Cancelling streamer with,\n' |         async with ( | ||||||
|                     '=> `Portal.cancel_actor()`' |             portal.open_context(streamer) as (ctx, first), | ||||||
|                 ) |             ctx.open_stream() as stream, | ||||||
|                 await portal.cancel_actor() |         ): | ||||||
|                 print('Cancelled streamer') |             yield stream | ||||||
| 
 | 
 | ||||||
|     except Exception as err: |         await portal.cancel_actor() | ||||||
|         print( |     print('CANCELLED STREAMER') | ||||||
|             f'`open_stream()` errored?\n' |  | ||||||
|             f'{err!r}\n' |  | ||||||
|         ) |  | ||||||
|         await tractor.pause(shield=True) |  | ||||||
|         raise err |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def maybe_open_stream(taskname: str): | async def maybe_open_stream(taskname: str): | ||||||
|     async with maybe_open_context( |     async with tractor.trionics.maybe_open_context( | ||||||
|         # NOTE: all secondary tasks should cache hit on the same key |         # NOTE: all secondary tasks should cache hit on the same key | ||||||
|         acm_func=open_stream, |         acm_func=open_stream, | ||||||
|     ) as ( |     ) as (cache_hit, stream): | ||||||
|         cache_hit, | 
 | ||||||
|         (an, stream) |  | ||||||
|     ): |  | ||||||
|         # when the actor + portal + ctx + stream has already been |  | ||||||
|         # allocated we want to just bcast to this task. |  | ||||||
|         if cache_hit: |         if cache_hit: | ||||||
|             print(f'{taskname} loaded from cache') |             print(f'{taskname} loaded from cache') | ||||||
| 
 | 
 | ||||||
|  | @ -166,77 +126,27 @@ async def maybe_open_stream(taskname: str): | ||||||
|             # if this feed is already allocated by the first |             # if this feed is already allocated by the first | ||||||
|             # task that entereed |             # task that entereed | ||||||
|             async with stream.subscribe() as bstream: |             async with stream.subscribe() as bstream: | ||||||
|                 yield an, bstream |                 yield bstream | ||||||
|                 print( |  | ||||||
|                     f'cached task exited\n' |  | ||||||
|                     f')>\n' |  | ||||||
|                     f' |_{taskname}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             # we should always unreg the "cloned" bcrc for this |  | ||||||
|             # consumer-task |  | ||||||
|             assert id(bstream) not in bstream._state.subs |  | ||||||
| 
 |  | ||||||
|         else: |         else: | ||||||
|             # yield the actual stream |             # yield the actual stream | ||||||
|             try: |             yield stream | ||||||
|                 yield an, stream |  | ||||||
|             finally: |  | ||||||
|                 print( |  | ||||||
|                     f'NON-cached task exited\n' |  | ||||||
|                     f')>\n' |  | ||||||
|                     f' |_{taskname}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|         first_bstream = stream._broadcaster |  | ||||||
|         bcrx_state = first_bstream._state |  | ||||||
|         subs: dict[int, int] = bcrx_state.subs |  | ||||||
|         if len(subs) == 1: |  | ||||||
|             assert id(first_bstream) in subs |  | ||||||
|             # ^^TODO! the bcrx should always de-allocate all subs, |  | ||||||
|             # including the implicit first one allocated on entry |  | ||||||
|             # by the first subscribing peer task, no? |  | ||||||
|             # |  | ||||||
|             # -[ ] adjust `MsgStream.subscribe()` to do this mgmt! |  | ||||||
|             #  |_ allows reverting `MsgStream.receive()` to the |  | ||||||
|             #    non-bcaster method. |  | ||||||
|             #  |_ we can decide whether to reset `._broadcaster`? |  | ||||||
|             # |  | ||||||
|             # await tractor.pause(shield=True) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_open_local_sub_to_stream( | def test_open_local_sub_to_stream(): | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     ''' |     ''' | ||||||
|     Verify a single inter-actor stream can can be fanned-out shared to |     Verify a single inter-actor stream can can be fanned-out shared to | ||||||
|     N local tasks using `trionics.maybe_open_context()`. |     N local tasks using ``trionics.maybe_open_context():``. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     timeout: float = 3.6 |     timeout: float = 3.6 if platform.system() != "Windows" else 10 | ||||||
|     if platform.system() == "Windows": |  | ||||||
|         timeout: float = 10 |  | ||||||
| 
 |  | ||||||
|     if debug_mode: |  | ||||||
|         timeout = 999 |  | ||||||
|         print(f'IN debug_mode, setting large timeout={timeout!r}..') |  | ||||||
| 
 | 
 | ||||||
|     async def main(): |     async def main(): | ||||||
| 
 | 
 | ||||||
|         full = list(range(1000)) |         full = list(range(1000)) | ||||||
|         an: tractor.ActorNursery|None = None |  | ||||||
|         num_tasks: int = 10 |  | ||||||
| 
 | 
 | ||||||
|         async def get_sub_and_pull(taskname: str): |         async def get_sub_and_pull(taskname: str): | ||||||
| 
 |  | ||||||
|             nonlocal an |  | ||||||
| 
 |  | ||||||
|             stream: tractor.MsgStream |  | ||||||
|             async with ( |             async with ( | ||||||
|                 maybe_open_stream(taskname) as ( |                 maybe_open_stream(taskname) as stream, | ||||||
|                     an, |  | ||||||
|                     stream, |  | ||||||
|                 ), |  | ||||||
|             ): |             ): | ||||||
|                 if '0' in taskname: |                 if '0' in taskname: | ||||||
|                     assert isinstance(stream, tractor.MsgStream) |                     assert isinstance(stream, tractor.MsgStream) | ||||||
|  | @ -248,159 +158,24 @@ def test_open_local_sub_to_stream( | ||||||
| 
 | 
 | ||||||
|                 first = await stream.receive() |                 first = await stream.receive() | ||||||
|                 print(f'{taskname} started with value {first}') |                 print(f'{taskname} started with value {first}') | ||||||
|                 seq: list[int] = [] |                 seq = [] | ||||||
|                 async for msg in stream: |                 async for msg in stream: | ||||||
|                     seq.append(msg) |                     seq.append(msg) | ||||||
| 
 | 
 | ||||||
|                 assert set(seq).issubset(set(full)) |                 assert set(seq).issubset(set(full)) | ||||||
| 
 |  | ||||||
|             # end of @acm block |  | ||||||
|             print(f'{taskname} finished') |             print(f'{taskname} finished') | ||||||
| 
 | 
 | ||||||
|         root: tractor.Actor |         with trio.fail_after(timeout): | ||||||
|         with trio.fail_after(timeout) as cs: |  | ||||||
|             # TODO: turns out this isn't multi-task entrant XD |             # TODO: turns out this isn't multi-task entrant XD | ||||||
|             # We probably need an indepotent entry semantic? |             # We probably need an indepotent entry semantic? | ||||||
|             async with tractor.open_root_actor( |             async with tractor.open_root_actor(): | ||||||
|                 debug_mode=debug_mode, |  | ||||||
|                 # maybe_enable_greenback=True, |  | ||||||
|                 # |  | ||||||
|                 # ^TODO? doesn't seem to mk breakpoint() usage work |  | ||||||
|                 # bc each bg task needs to open a portal?? |  | ||||||
|                 # - [ ] we should consider making this part of |  | ||||||
|                 #      our taskman defaults? |  | ||||||
|                 #   |_see https://github.com/goodboy/tractor/pull/363 |  | ||||||
|                 # |  | ||||||
|             ) as root: |  | ||||||
|                 assert root.is_registrar |  | ||||||
| 
 |  | ||||||
|                 async with ( |                 async with ( | ||||||
|                     trio.open_nursery() as tn, |                     trio.open_nursery() as nurse, | ||||||
|                 ): |                 ): | ||||||
|                     for i in range(num_tasks): |                     for i in range(10): | ||||||
|                         tn.start_soon( |                         nurse.start_soon(get_sub_and_pull, f'task_{i}') | ||||||
|                             get_sub_and_pull, |  | ||||||
|                             f'task_{i}', |  | ||||||
|                         ) |  | ||||||
|                         await trio.sleep(0.001) |                         await trio.sleep(0.001) | ||||||
| 
 | 
 | ||||||
|                 print('all consumer tasks finished!') |                 print('all consumer tasks finished') | ||||||
| 
 |  | ||||||
|                 # ?XXX, ensure actor-nursery is shutdown or we might |  | ||||||
|                 # hang here due to a minor task deadlock/race-condition? |  | ||||||
|                 # |  | ||||||
|                 # - seems that all we need is a checkpoint to ensure |  | ||||||
|                 #   the last suspended task, which is inside |  | ||||||
|                 #   `.maybe_open_context()`, can do the |  | ||||||
|                 #   `Portal.cancel_actor()` call? |  | ||||||
|                 # |  | ||||||
|                 # - if that bg task isn't resumed, then this blocks |  | ||||||
|                 #   timeout might hit before that? |  | ||||||
|                 # |  | ||||||
|                 if root.ipc_server.has_peers(): |  | ||||||
|                     await trio.lowlevel.checkpoint() |  | ||||||
| 
 |  | ||||||
|                     # alt approach, cancel the entire `an` |  | ||||||
|                     # await tractor.pause() |  | ||||||
|                     # await an.cancel() |  | ||||||
| 
 |  | ||||||
|             # end of runtime scope |  | ||||||
|             print('root actor terminated.') |  | ||||||
| 
 |  | ||||||
|         if cs.cancelled_caught: |  | ||||||
|             pytest.fail( |  | ||||||
|                 'Should NOT time out in `open_root_actor()` ?' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         print('exiting main.') |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def cancel_outer_cs( |  | ||||||
|     cs: trio.CancelScope|None = None, |  | ||||||
|     delay: float = 0, |  | ||||||
| ): |  | ||||||
|     # on first task delay this enough to block |  | ||||||
|     # the 2nd task but then cancel it mid sleep |  | ||||||
|     # so that the tn.start() inside the key-err handler block |  | ||||||
|     # is cancelled and would previously corrupt the |  | ||||||
|     # mutext state. |  | ||||||
|     log.info(f'task entering sleep({delay})') |  | ||||||
|     await trio.sleep(delay) |  | ||||||
|     if cs: |  | ||||||
|         log.info('task calling cs.cancel()') |  | ||||||
|         cs.cancel() |  | ||||||
|     trio.lowlevel.checkpoint() |  | ||||||
|     yield |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_lock_not_corrupted_on_fast_cancel( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that if the caching-task (the first to enter |  | ||||||
|     `maybe_open_context()`) is cancelled mid-cache-miss, the embedded |  | ||||||
|     mutex can never be left in a corrupted state. |  | ||||||
| 
 |  | ||||||
|     That is, the lock is always eventually released ensuring a peer |  | ||||||
|     (cache-hitting) task will never, |  | ||||||
| 
 |  | ||||||
|     - be left to inf-block/hang on the `lock.acquire()`. |  | ||||||
|     - try to release the lock when still owned by the caching-task |  | ||||||
|       due to it having erronously exited without calling |  | ||||||
|       `lock.release()`. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     delay: float = 1. |  | ||||||
| 
 |  | ||||||
|     async def use_moc( |  | ||||||
|         cs: trio.CancelScope|None, |  | ||||||
|         delay: float, |  | ||||||
|     ): |  | ||||||
|         log.info('task entering moc') |  | ||||||
|         async with maybe_open_context( |  | ||||||
|             cancel_outer_cs, |  | ||||||
|             kwargs={ |  | ||||||
|                 'cs': cs, |  | ||||||
|                 'delay': delay, |  | ||||||
|             }, |  | ||||||
|         ) as (cache_hit, _null): |  | ||||||
|             if cache_hit: |  | ||||||
|                 log.info('2nd task entered') |  | ||||||
|             else: |  | ||||||
|                 log.info('1st task entered') |  | ||||||
| 
 |  | ||||||
|             await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         with trio.fail_after(delay + 2): |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_root_actor( |  | ||||||
|                     debug_mode=debug_mode, |  | ||||||
|                     loglevel=loglevel, |  | ||||||
|                 ), |  | ||||||
|                 trio.open_nursery() as tn, |  | ||||||
|             ): |  | ||||||
|                 get_console_log('info') |  | ||||||
|                 log.info('yo starting') |  | ||||||
|                 cs = tn.cancel_scope |  | ||||||
|                 tn.start_soon( |  | ||||||
|                     use_moc, |  | ||||||
|                     cs, |  | ||||||
|                     delay, |  | ||||||
|                     name='child', |  | ||||||
|                 ) |  | ||||||
|                 with trio.CancelScope() as rent_cs: |  | ||||||
|                     await use_moc( |  | ||||||
|                         cs=rent_cs, |  | ||||||
|                         delay=delay, |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
|     trio.run(main) |     trio.run(main) | ||||||
|  |  | ||||||
|  | @ -1,211 +0,0 @@ | ||||||
| import time |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import pytest |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor.ipc._ringbuf import ( |  | ||||||
|     open_ringbuf, |  | ||||||
|     RBToken, |  | ||||||
|     RingBuffSender, |  | ||||||
|     RingBuffReceiver |  | ||||||
| ) |  | ||||||
| from tractor._testing.samples import ( |  | ||||||
|     generate_sample_messages, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| # in case you don't want to melt your cores, uncomment dis! |  | ||||||
| pytestmark = pytest.mark.skip |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_read_shm( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     msg_amount: int, |  | ||||||
|     token: RBToken, |  | ||||||
|     total_bytes: int, |  | ||||||
| ) -> None: |  | ||||||
|     recvd_bytes = 0 |  | ||||||
|     await ctx.started() |  | ||||||
|     start_ts = time.time() |  | ||||||
|     async with RingBuffReceiver(token) as receiver: |  | ||||||
|         while recvd_bytes < total_bytes: |  | ||||||
|             msg = await receiver.receive_some() |  | ||||||
|             recvd_bytes += len(msg) |  | ||||||
| 
 |  | ||||||
|         # make sure we dont hold any memoryviews |  | ||||||
|         # before the ctx manager aclose() |  | ||||||
|         msg = None |  | ||||||
| 
 |  | ||||||
|     end_ts = time.time() |  | ||||||
|     elapsed = end_ts - start_ts |  | ||||||
|     elapsed_ms = int(elapsed * 1000) |  | ||||||
| 
 |  | ||||||
|     print(f'\n\telapsed ms: {elapsed_ms}') |  | ||||||
|     print(f'\tmsg/sec: {int(msg_amount / elapsed):,}') |  | ||||||
|     print(f'\tbytes/sec: {int(recvd_bytes / elapsed):,}') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_write_shm( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     msg_amount: int, |  | ||||||
|     rand_min: int, |  | ||||||
|     rand_max: int, |  | ||||||
|     token: RBToken, |  | ||||||
| ) -> None: |  | ||||||
|     msgs, total_bytes = generate_sample_messages( |  | ||||||
|         msg_amount, |  | ||||||
|         rand_min=rand_min, |  | ||||||
|         rand_max=rand_max, |  | ||||||
|     ) |  | ||||||
|     await ctx.started(total_bytes) |  | ||||||
|     async with RingBuffSender(token) as sender: |  | ||||||
|         for msg in msgs: |  | ||||||
|             await sender.send_all(msg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'msg_amount,rand_min,rand_max,buf_size', |  | ||||||
|     [ |  | ||||||
|         # simple case, fixed payloads, large buffer |  | ||||||
|         (100_000, 0, 0, 10 * 1024), |  | ||||||
| 
 |  | ||||||
|         # guaranteed wrap around on every write |  | ||||||
|         (100, 10 * 1024, 20 * 1024, 10 * 1024), |  | ||||||
| 
 |  | ||||||
|         # large payload size, but large buffer |  | ||||||
|         (10_000, 256 * 1024, 512 * 1024, 10 * 1024 * 1024) |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'fixed_payloads_large_buffer', |  | ||||||
|         'wrap_around_every_write', |  | ||||||
|         'large_payloads_large_buffer', |  | ||||||
|     ] |  | ||||||
| ) |  | ||||||
| def test_ringbuf( |  | ||||||
|     msg_amount: int, |  | ||||||
|     rand_min: int, |  | ||||||
|     rand_max: int, |  | ||||||
|     buf_size: int |  | ||||||
| ): |  | ||||||
|     async def main(): |  | ||||||
|         with open_ringbuf( |  | ||||||
|             'test_ringbuf', |  | ||||||
|             buf_size=buf_size |  | ||||||
|         ) as token: |  | ||||||
|             proc_kwargs = { |  | ||||||
|                 'pass_fds': (token.write_eventfd, token.wrap_eventfd) |  | ||||||
|             } |  | ||||||
| 
 |  | ||||||
|             common_kwargs = { |  | ||||||
|                 'msg_amount': msg_amount, |  | ||||||
|                 'token': token, |  | ||||||
|             } |  | ||||||
|             async with tractor.open_nursery() as an: |  | ||||||
|                 send_p = await an.start_actor( |  | ||||||
|                     'ring_sender', |  | ||||||
|                     enable_modules=[__name__], |  | ||||||
|                     proc_kwargs=proc_kwargs |  | ||||||
|                 ) |  | ||||||
|                 recv_p = await an.start_actor( |  | ||||||
|                     'ring_receiver', |  | ||||||
|                     enable_modules=[__name__], |  | ||||||
|                     proc_kwargs=proc_kwargs |  | ||||||
|                 ) |  | ||||||
|                 async with ( |  | ||||||
|                     send_p.open_context( |  | ||||||
|                         child_write_shm, |  | ||||||
|                         rand_min=rand_min, |  | ||||||
|                         rand_max=rand_max, |  | ||||||
|                         **common_kwargs |  | ||||||
|                     ) as (sctx, total_bytes), |  | ||||||
|                     recv_p.open_context( |  | ||||||
|                         child_read_shm, |  | ||||||
|                         **common_kwargs, |  | ||||||
|                         total_bytes=total_bytes, |  | ||||||
|                     ) as (sctx, _sent), |  | ||||||
|                 ): |  | ||||||
|                     await recv_p.result() |  | ||||||
| 
 |  | ||||||
|                 await send_p.cancel_actor() |  | ||||||
|                 await recv_p.cancel_actor() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_blocked_receiver( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     token: RBToken |  | ||||||
| ): |  | ||||||
|     async with RingBuffReceiver(token) as receiver: |  | ||||||
|         await ctx.started() |  | ||||||
|         await receiver.receive_some() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_ring_reader_cancel(): |  | ||||||
|     async def main(): |  | ||||||
|         with open_ringbuf('test_ring_cancel_reader') as token: |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_nursery() as an, |  | ||||||
|                 RingBuffSender(token) as _sender, |  | ||||||
|             ): |  | ||||||
|                 recv_p = await an.start_actor( |  | ||||||
|                     'ring_blocked_receiver', |  | ||||||
|                     enable_modules=[__name__], |  | ||||||
|                     proc_kwargs={ |  | ||||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) |  | ||||||
|                     } |  | ||||||
|                 ) |  | ||||||
|                 async with ( |  | ||||||
|                     recv_p.open_context( |  | ||||||
|                         child_blocked_receiver, |  | ||||||
|                         token=token |  | ||||||
|                     ) as (sctx, _sent), |  | ||||||
|                 ): |  | ||||||
|                     await trio.sleep(1) |  | ||||||
|                     await an.cancel() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_blocked_sender( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     token: RBToken |  | ||||||
| ): |  | ||||||
|     async with RingBuffSender(token) as sender: |  | ||||||
|         await ctx.started() |  | ||||||
|         await sender.send_all(b'this will wrap') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_ring_sender_cancel(): |  | ||||||
|     async def main(): |  | ||||||
|         with open_ringbuf( |  | ||||||
|             'test_ring_cancel_sender', |  | ||||||
|             buf_size=1 |  | ||||||
|         ) as token: |  | ||||||
|             async with tractor.open_nursery() as an: |  | ||||||
|                 recv_p = await an.start_actor( |  | ||||||
|                     'ring_blocked_sender', |  | ||||||
|                     enable_modules=[__name__], |  | ||||||
|                     proc_kwargs={ |  | ||||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) |  | ||||||
|                     } |  | ||||||
|                 ) |  | ||||||
|                 async with ( |  | ||||||
|                     recv_p.open_context( |  | ||||||
|                         child_blocked_sender, |  | ||||||
|                         token=token |  | ||||||
|                     ) as (sctx, _sent), |  | ||||||
|                 ): |  | ||||||
|                     await trio.sleep(1) |  | ||||||
|                     await an.cancel() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): |  | ||||||
|         trio.run(main) |  | ||||||
|  | @ -1,240 +0,0 @@ | ||||||
| ''' |  | ||||||
| Special attention cases for using "infect `asyncio`" mode from a root |  | ||||||
| actor; i.e. not using a std `trio.run()` bootstrap. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import asyncio |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     to_asyncio, |  | ||||||
| ) |  | ||||||
| from tests.test_infected_asyncio import ( |  | ||||||
|     aio_echo_server, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'raise_error_mid_stream', |  | ||||||
|     [ |  | ||||||
|         False, |  | ||||||
|         Exception, |  | ||||||
|         KeyboardInterrupt, |  | ||||||
|     ], |  | ||||||
|     ids='raise_error={}'.format, |  | ||||||
| ) |  | ||||||
| def test_infected_root_actor( |  | ||||||
|     raise_error_mid_stream: bool|Exception, |  | ||||||
| 
 |  | ||||||
|     # conftest wide |  | ||||||
|     loglevel: str, |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify you can run the `tractor` runtime with `Actor.is_infected_aio() == True` |  | ||||||
|     in the root actor. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async def _trio_main(): |  | ||||||
|         with trio.fail_after(2 if not debug_mode else 999): |  | ||||||
|             first: str |  | ||||||
|             chan: to_asyncio.LinkedTaskChannel |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_root_actor( |  | ||||||
|                     debug_mode=debug_mode, |  | ||||||
|                     loglevel=loglevel, |  | ||||||
|                 ), |  | ||||||
|                 to_asyncio.open_channel_from( |  | ||||||
|                     aio_echo_server, |  | ||||||
|                 ) as (first, chan), |  | ||||||
|             ): |  | ||||||
|                 assert first == 'start' |  | ||||||
| 
 |  | ||||||
|                 for i in range(1000): |  | ||||||
|                     await chan.send(i) |  | ||||||
|                     out = await chan.receive() |  | ||||||
|                     assert out == i |  | ||||||
|                     print(f'asyncio echoing {i}') |  | ||||||
| 
 |  | ||||||
|                     if ( |  | ||||||
|                         raise_error_mid_stream |  | ||||||
|                         and |  | ||||||
|                         i == 500 |  | ||||||
|                     ): |  | ||||||
|                         raise raise_error_mid_stream |  | ||||||
| 
 |  | ||||||
|                     if out is None: |  | ||||||
|                         try: |  | ||||||
|                             out = await chan.receive() |  | ||||||
|                         except trio.EndOfChannel: |  | ||||||
|                             break |  | ||||||
|                         else: |  | ||||||
|                             raise RuntimeError( |  | ||||||
|                                 'aio channel never stopped?' |  | ||||||
|                             ) |  | ||||||
| 
 |  | ||||||
|     if raise_error_mid_stream: |  | ||||||
|         with pytest.raises(raise_error_mid_stream): |  | ||||||
|             tractor.to_asyncio.run_as_asyncio_guest( |  | ||||||
|                 trio_main=_trio_main, |  | ||||||
|             ) |  | ||||||
|     else: |  | ||||||
|         tractor.to_asyncio.run_as_asyncio_guest( |  | ||||||
|             trio_main=_trio_main, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def sync_and_err( |  | ||||||
|     # just signature placeholders for compat with |  | ||||||
|     # ``to_asyncio.open_channel_from()`` |  | ||||||
|     to_trio: trio.MemorySendChannel, |  | ||||||
|     from_trio: asyncio.Queue, |  | ||||||
|     ev: asyncio.Event, |  | ||||||
| 
 |  | ||||||
| ): |  | ||||||
|     if to_trio: |  | ||||||
|         to_trio.send_nowait('start') |  | ||||||
| 
 |  | ||||||
|     await ev.wait() |  | ||||||
|     raise RuntimeError('asyncio-side') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'aio_err_trigger', |  | ||||||
|     [ |  | ||||||
|         'before_start_point', |  | ||||||
|         'after_trio_task_starts', |  | ||||||
|         'after_start_point', |  | ||||||
|     ], |  | ||||||
|     ids='aio_err_triggered={}'.format |  | ||||||
| ) |  | ||||||
| def test_trio_prestarted_task_bubbles( |  | ||||||
|     aio_err_trigger: str, |  | ||||||
| 
 |  | ||||||
|     # conftest wide |  | ||||||
|     loglevel: str, |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     async def pre_started_err( |  | ||||||
|         raise_err: bool = False, |  | ||||||
|         pre_sleep: float|None = None, |  | ||||||
|         aio_trigger: asyncio.Event|None = None, |  | ||||||
|         task_status=trio.TASK_STATUS_IGNORED, |  | ||||||
|     ): |  | ||||||
|         ''' |  | ||||||
|         Maybe pre-started error then sleep. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         if pre_sleep is not None: |  | ||||||
|             print(f'Sleeping from trio for {pre_sleep!r}s !') |  | ||||||
|             await trio.sleep(pre_sleep) |  | ||||||
| 
 |  | ||||||
|         # signal aio-task to raise JUST AFTER this task |  | ||||||
|         # starts but has not yet `.started()` |  | ||||||
|         if aio_trigger: |  | ||||||
|             print('Signalling aio-task to raise from `trio`!!') |  | ||||||
|             aio_trigger.set() |  | ||||||
| 
 |  | ||||||
|         if raise_err: |  | ||||||
|             print('Raising from trio!') |  | ||||||
|             raise TypeError('trio-side') |  | ||||||
| 
 |  | ||||||
|         task_status.started() |  | ||||||
|         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|     async def _trio_main(): |  | ||||||
|         with trio.fail_after(2 if not debug_mode else 999): |  | ||||||
|             first: str |  | ||||||
|             chan: to_asyncio.LinkedTaskChannel |  | ||||||
|             aio_ev = asyncio.Event() |  | ||||||
| 
 |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_root_actor( |  | ||||||
|                     debug_mode=False, |  | ||||||
|                     loglevel=loglevel, |  | ||||||
|                 ), |  | ||||||
|             ): |  | ||||||
|                 # TODO, tests for this with 3.13 egs? |  | ||||||
|                 # from tractor.devx import open_crash_handler |  | ||||||
|                 # with open_crash_handler(): |  | ||||||
|                 async with ( |  | ||||||
|                     # where we'll start a sub-task that errors BEFORE |  | ||||||
|                     # calling `.started()` such that the error should |  | ||||||
|                     # bubble before the guest run terminates! |  | ||||||
|                     trio.open_nursery() as tn, |  | ||||||
| 
 |  | ||||||
|                     # THEN start an infect task which should error just |  | ||||||
|                     # after the trio-side's task does. |  | ||||||
|                     to_asyncio.open_channel_from( |  | ||||||
|                         partial( |  | ||||||
|                             sync_and_err, |  | ||||||
|                             ev=aio_ev, |  | ||||||
|                         ) |  | ||||||
|                     ) as (first, chan), |  | ||||||
|                 ): |  | ||||||
| 
 |  | ||||||
|                     for i in range(5): |  | ||||||
|                         pre_sleep: float|None = None |  | ||||||
|                         last_iter: bool = (i == 4) |  | ||||||
| 
 |  | ||||||
|                         # TODO, missing cases? |  | ||||||
|                         # -[ ] error as well on |  | ||||||
|                         #    'after_start_point' case as well for |  | ||||||
|                         #    another case? |  | ||||||
|                         raise_err: bool = False |  | ||||||
| 
 |  | ||||||
|                         if last_iter: |  | ||||||
|                             raise_err: bool = True |  | ||||||
| 
 |  | ||||||
|                             # trigger aio task to error on next loop |  | ||||||
|                             # tick/checkpoint |  | ||||||
|                             if aio_err_trigger == 'before_start_point': |  | ||||||
|                                 aio_ev.set() |  | ||||||
| 
 |  | ||||||
|                             pre_sleep: float = 0 |  | ||||||
| 
 |  | ||||||
|                         await tn.start( |  | ||||||
|                             pre_started_err, |  | ||||||
|                             raise_err, |  | ||||||
|                             pre_sleep, |  | ||||||
|                             (aio_ev if ( |  | ||||||
|                                     aio_err_trigger == 'after_trio_task_starts' |  | ||||||
|                                     and |  | ||||||
|                                     last_iter |  | ||||||
|                                 ) else None |  | ||||||
|                             ), |  | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                         if ( |  | ||||||
|                             aio_err_trigger == 'after_start_point' |  | ||||||
|                             and |  | ||||||
|                             last_iter |  | ||||||
|                         ): |  | ||||||
|                             aio_ev.set() |  | ||||||
| 
 |  | ||||||
|     # ensure the trio-task's error bubbled despite the aio-side |  | ||||||
|     # having (maybe) errored first. |  | ||||||
|     if aio_err_trigger in ( |  | ||||||
|         'after_trio_task_starts', |  | ||||||
|         'after_start_point', |  | ||||||
|     ): |  | ||||||
|         patt: str = 'trio-side' |  | ||||||
|         expect_exc = TypeError |  | ||||||
| 
 |  | ||||||
|     # when aio errors BEFORE (last) trio task is scheduled, we should |  | ||||||
|     # never see anythinb but the aio-side. |  | ||||||
|     else: |  | ||||||
|         patt: str = 'asyncio-side' |  | ||||||
|         expect_exc = RuntimeError |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(expect_exc) as excinfo: |  | ||||||
|         tractor.to_asyncio.run_as_asyncio_guest( |  | ||||||
|             trio_main=_trio_main, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     caught_exc = excinfo.value |  | ||||||
|     assert patt in caught_exc.args |  | ||||||
|  | @ -1,108 +0,0 @@ | ||||||
| ''' |  | ||||||
| Runtime boot/init sanity. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor._exceptions import RuntimeFailure |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def open_new_root_in_sub( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_root_actor(): |  | ||||||
|         pass |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'open_root_in', |  | ||||||
|     ['root', 'sub'], |  | ||||||
|     ids='open_2nd_root_in={}'.format, |  | ||||||
| ) |  | ||||||
| def test_only_one_root_actor( |  | ||||||
|     open_root_in: str, |  | ||||||
|     reg_addr: tuple, |  | ||||||
|     debug_mode: bool |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify we specially fail whenever more then one root actor |  | ||||||
|     is attempted to be opened within an already opened tree. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery() as an: |  | ||||||
| 
 |  | ||||||
|             if open_root_in == 'root': |  | ||||||
|                 async with tractor.open_root_actor( |  | ||||||
|                     registry_addrs=[reg_addr], |  | ||||||
|                 ): |  | ||||||
|                     pass |  | ||||||
| 
 |  | ||||||
|             ptl: tractor.Portal = await an.start_actor( |  | ||||||
|                 name='bad_rooty_boi', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             async with ptl.open_context( |  | ||||||
|                 open_new_root_in_sub, |  | ||||||
|             ) as (ctx, first): |  | ||||||
|                 pass |  | ||||||
| 
 |  | ||||||
|     if open_root_in == 'root': |  | ||||||
|         with pytest.raises( |  | ||||||
|             RuntimeFailure |  | ||||||
|         ) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         with pytest.raises( |  | ||||||
|             tractor.RemoteActorError, |  | ||||||
|         ) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|         assert excinfo.value.boxed_type is RuntimeFailure |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_implicit_root_via_first_nursery( |  | ||||||
|     reg_addr: tuple, |  | ||||||
|     debug_mode: bool |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     The first `ActorNursery` open should implicitly call |  | ||||||
|     `_root.open_root_actor()`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery() as an: |  | ||||||
|             assert an._implicit_runtime_started |  | ||||||
|             assert tractor.current_actor().aid.name == 'root' |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_runtime_vars_unset( |  | ||||||
|     reg_addr: tuple, |  | ||||||
|     debug_mode: bool |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Ensure any `._state._runtime_vars` are restored to default values |  | ||||||
|     after the root actor-runtime exits! |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     assert not tractor._state._runtime_vars['_debug_mode'] |  | ||||||
|     async def main(): |  | ||||||
|         assert not tractor._state._runtime_vars['_debug_mode'] |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             debug_mode=True, |  | ||||||
|         ): |  | ||||||
|             assert tractor._state._runtime_vars['_debug_mode'] |  | ||||||
| 
 |  | ||||||
|         # after runtime closure, should be reverted! |  | ||||||
|         assert not tractor._state._runtime_vars['_debug_mode'] |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -15,19 +15,9 @@ async def sleep_back_actor( | ||||||
|     func_name, |     func_name, | ||||||
|     func_defined, |     func_defined, | ||||||
|     exposed_mods, |     exposed_mods, | ||||||
|     *, |  | ||||||
|     reg_addr: tuple, |  | ||||||
| ): | ): | ||||||
|     if actor_name: |     if actor_name: | ||||||
|         async with tractor.find_actor( |         async with tractor.find_actor(actor_name) as portal: | ||||||
|             actor_name, |  | ||||||
|             # NOTE: must be set manually since |  | ||||||
|             # the subactor doesn't have the reg_addr |  | ||||||
|             # fixture code run in it! |  | ||||||
|             # TODO: maybe we should just set this once in the |  | ||||||
|             # _state mod and derive to all children? |  | ||||||
|             registry_addrs=[reg_addr], |  | ||||||
|         ) as portal: |  | ||||||
|             try: |             try: | ||||||
|                 await portal.run(__name__, func_name) |                 await portal.run(__name__, func_name) | ||||||
|             except tractor.RemoteActorError as err: |             except tractor.RemoteActorError as err: | ||||||
|  | @ -36,7 +26,7 @@ async def sleep_back_actor( | ||||||
|                 if not exposed_mods: |                 if not exposed_mods: | ||||||
|                     expect = tractor.ModuleNotExposed |                     expect = tractor.ModuleNotExposed | ||||||
| 
 | 
 | ||||||
|                 assert err.boxed_type is expect |                 assert err.type is expect | ||||||
|                 raise |                 raise | ||||||
|     else: |     else: | ||||||
|         await trio.sleep(float('inf')) |         await trio.sleep(float('inf')) | ||||||
|  | @ -62,17 +52,11 @@ async def short_sleep(): | ||||||
|         'fail_on_syntax', |         'fail_on_syntax', | ||||||
|     ], |     ], | ||||||
| ) | ) | ||||||
| def test_rpc_errors( | def test_rpc_errors(reg_addr, to_call, testdir): | ||||||
|     reg_addr, |     """Test errors when making various RPC requests to an actor | ||||||
|     to_call, |  | ||||||
|     testdir, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Test errors when making various RPC requests to an actor |  | ||||||
|     that either doesn't have the requested module exposed or doesn't define |     that either doesn't have the requested module exposed or doesn't define | ||||||
|     the named function. |     the named function. | ||||||
| 
 |     """ | ||||||
|     ''' |  | ||||||
|     exposed_mods, funcname, inside_err = to_call |     exposed_mods, funcname, inside_err = to_call | ||||||
|     subactor_exposed_mods = [] |     subactor_exposed_mods = [] | ||||||
|     func_defined = globals().get(funcname, False) |     func_defined = globals().get(funcname, False) | ||||||
|  | @ -100,13 +84,8 @@ def test_rpc_errors( | ||||||
| 
 | 
 | ||||||
|         # spawn a subactor which calls us back |         # spawn a subactor which calls us back | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=reg_addr, | ||||||
|             enable_modules=exposed_mods.copy(), |             enable_modules=exposed_mods.copy(), | ||||||
| 
 |  | ||||||
|             # NOTE: will halt test in REPL if uncommented, so only |  | ||||||
|             # do that if actually debugging subactor but keep it |  | ||||||
|             # disabled for the test. |  | ||||||
|             # debug_mode=True, |  | ||||||
|         ) as n: |         ) as n: | ||||||
| 
 | 
 | ||||||
|             actor = tractor.current_actor() |             actor = tractor.current_actor() | ||||||
|  | @ -123,7 +102,6 @@ def test_rpc_errors( | ||||||
|                 exposed_mods=exposed_mods, |                 exposed_mods=exposed_mods, | ||||||
|                 func_defined=True if func_defined else False, |                 func_defined=True if func_defined else False, | ||||||
|                 enable_modules=subactor_exposed_mods, |                 enable_modules=subactor_exposed_mods, | ||||||
|                 reg_addr=reg_addr, |  | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|     def run(): |     def run(): | ||||||
|  | @ -150,4 +128,4 @@ def test_rpc_errors( | ||||||
|             )) |             )) | ||||||
| 
 | 
 | ||||||
|         if getattr(value, 'type', None): |         if getattr(value, 'type', None): | ||||||
|             assert value.boxed_type is inside_err |             assert value.type is inside_err | ||||||
|  |  | ||||||
|  | @ -1,167 +0,0 @@ | ||||||
| """ |  | ||||||
| Shared mem primitives and APIs. |  | ||||||
| 
 |  | ||||||
| """ |  | ||||||
| import uuid |  | ||||||
| 
 |  | ||||||
| # import numpy |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor.ipc._shm import ( |  | ||||||
|     open_shm_list, |  | ||||||
|     attach_shm_list, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_attach_shml_alot( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     shm_key: str, |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     await ctx.started(shm_key) |  | ||||||
| 
 |  | ||||||
|     # now try to attach a boatload of times in a loop.. |  | ||||||
|     for _ in range(1000): |  | ||||||
|         shml = attach_shm_list( |  | ||||||
|             key=shm_key, |  | ||||||
|             readonly=False, |  | ||||||
|         ) |  | ||||||
|         assert shml.shm.name == shm_key |  | ||||||
|         await trio.sleep(0.001) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_child_attaches_alot(): |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery() as an: |  | ||||||
| 
 |  | ||||||
|             # allocate writeable list in parent |  | ||||||
|             key = f'shml_{uuid.uuid4()}' |  | ||||||
|             shml = open_shm_list( |  | ||||||
|                 key=key, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             portal = await an.start_actor( |  | ||||||
|                 'shm_attacher', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             async with ( |  | ||||||
|                 portal.open_context( |  | ||||||
|                     child_attach_shml_alot, |  | ||||||
|                     shm_key=shml.key, |  | ||||||
|                 ) as (ctx, start_val), |  | ||||||
|             ): |  | ||||||
|                 assert start_val == key |  | ||||||
|                 await ctx.result() |  | ||||||
| 
 |  | ||||||
|             await portal.cancel_actor() |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_read_shm_list( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     shm_key: str, |  | ||||||
|     use_str: bool, |  | ||||||
|     frame_size: int, |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     # attach in child |  | ||||||
|     shml = attach_shm_list( |  | ||||||
|         key=shm_key, |  | ||||||
|         # dtype=str if use_str else float, |  | ||||||
|     ) |  | ||||||
|     await ctx.started(shml.key) |  | ||||||
| 
 |  | ||||||
|     async with ctx.open_stream() as stream: |  | ||||||
|         async for i in stream: |  | ||||||
|             print(f'(child): reading shm list index: {i}') |  | ||||||
| 
 |  | ||||||
|             if use_str: |  | ||||||
|                 expect = str(float(i)) |  | ||||||
|             else: |  | ||||||
|                 expect = float(i) |  | ||||||
| 
 |  | ||||||
|             if frame_size == 1: |  | ||||||
|                 val = shml[i] |  | ||||||
|                 assert expect == val |  | ||||||
|                 print(f'(child): reading value: {val}') |  | ||||||
|             else: |  | ||||||
|                 frame = shml[i - frame_size:i] |  | ||||||
|                 print(f'(child): reading frame: {frame}') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'use_str', |  | ||||||
|     [False, True], |  | ||||||
|     ids=lambda i: f'use_str_values={i}', |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'frame_size', |  | ||||||
|     [1, 2**6, 2**10], |  | ||||||
|     ids=lambda i: f'frame_size={i}', |  | ||||||
| ) |  | ||||||
| def test_parent_writer_child_reader( |  | ||||||
|     use_str: bool, |  | ||||||
|     frame_size: int, |  | ||||||
| ): |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             # debug_mode=True, |  | ||||||
|         ) as an: |  | ||||||
| 
 |  | ||||||
|             portal = await an.start_actor( |  | ||||||
|                 'shm_reader', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|                 debug_mode=True, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # allocate writeable list in parent |  | ||||||
|             key = 'shm_list' |  | ||||||
|             seq_size = int(2 * 2 ** 10) |  | ||||||
|             shml = open_shm_list( |  | ||||||
|                 key=key, |  | ||||||
|                 size=seq_size, |  | ||||||
|                 dtype=str if use_str else float, |  | ||||||
|                 readonly=False, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             async with ( |  | ||||||
|                 portal.open_context( |  | ||||||
|                     child_read_shm_list, |  | ||||||
|                     shm_key=key, |  | ||||||
|                     use_str=use_str, |  | ||||||
|                     frame_size=frame_size, |  | ||||||
|                 ) as (ctx, sent), |  | ||||||
| 
 |  | ||||||
|                 ctx.open_stream() as stream, |  | ||||||
|             ): |  | ||||||
| 
 |  | ||||||
|                 assert sent == key |  | ||||||
| 
 |  | ||||||
|                 for i in range(seq_size): |  | ||||||
| 
 |  | ||||||
|                     val = float(i) |  | ||||||
|                     if use_str: |  | ||||||
|                         val = str(val) |  | ||||||
| 
 |  | ||||||
|                     # print(f'(parent): writing {val}') |  | ||||||
|                     shml[i] = val |  | ||||||
| 
 |  | ||||||
|                     # only on frame fills do we |  | ||||||
|                     # signal to the child that a frame's |  | ||||||
|                     # worth is ready. |  | ||||||
|                     if (i % frame_size) == 0: |  | ||||||
|                         print(f'(parent): signalling frame full on {val}') |  | ||||||
|                         await stream.send(i) |  | ||||||
|                 else: |  | ||||||
|                     print(f'(parent): signalling final frame on {val}') |  | ||||||
|                     await stream.send(i) |  | ||||||
| 
 |  | ||||||
|             await portal.cancel_actor() |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -2,10 +2,7 @@ | ||||||
| Spawning basics | Spawning basics | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from functools import partial | from typing import Optional | ||||||
| from typing import ( |  | ||||||
|     Any, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| import pytest | import pytest | ||||||
| import trio | import trio | ||||||
|  | @ -13,99 +10,75 @@ import tractor | ||||||
| 
 | 
 | ||||||
| from tractor._testing import tractor_test | from tractor._testing import tractor_test | ||||||
| 
 | 
 | ||||||
| data_to_pass_down = { | data_to_pass_down = {'doggy': 10, 'kitty': 4} | ||||||
|     'doggy': 10, |  | ||||||
|     'kitty': 4, |  | ||||||
| } |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def spawn( | async def spawn( | ||||||
|     should_be_root: bool, |     is_arbiter: bool, | ||||||
|     data: dict, |     data: dict, | ||||||
|     reg_addr: tuple[str, int], |     reg_addr: tuple[str, int], | ||||||
| 
 |  | ||||||
|     debug_mode: bool = False, |  | ||||||
| ): | ): | ||||||
|  |     namespaces = [__name__] | ||||||
|  | 
 | ||||||
|     await trio.sleep(0.1) |     await trio.sleep(0.1) | ||||||
|     actor = tractor.current_actor(err_on_no_runtime=False) |  | ||||||
| 
 | 
 | ||||||
|     if should_be_root: |     async with tractor.open_root_actor( | ||||||
|         assert actor is None  # no runtime yet |         arbiter_addr=reg_addr, | ||||||
|         async with ( |     ): | ||||||
|             tractor.open_root_actor( |  | ||||||
|                 arbiter_addr=reg_addr, |  | ||||||
|             ), |  | ||||||
|             tractor.open_nursery() as an, |  | ||||||
|         ): |  | ||||||
|             # now runtime exists |  | ||||||
|             actor: tractor.Actor = tractor.current_actor() |  | ||||||
|             assert actor.is_arbiter == should_be_root |  | ||||||
| 
 | 
 | ||||||
|             # spawns subproc here |         actor = tractor.current_actor() | ||||||
|             portal: tractor.Portal = await an.run_in_actor( |         assert actor.is_arbiter == is_arbiter | ||||||
|                 fn=spawn, |         data = data_to_pass_down | ||||||
| 
 | 
 | ||||||
|                 # spawning args |         if actor.is_arbiter: | ||||||
|                 name='sub-actor', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
| 
 | 
 | ||||||
|                 # passed to a subactor-recursive RPC invoke |             async with tractor.open_nursery( | ||||||
|                 # of this same `spawn()` fn. |             ) as nursery: | ||||||
|                 should_be_root=False, |  | ||||||
|                 data=data_to_pass_down, |  | ||||||
|                 reg_addr=reg_addr, |  | ||||||
|             ) |  | ||||||
| 
 | 
 | ||||||
|             assert len(an._children) == 1 |                 # forks here | ||||||
|             assert ( |                 portal = await nursery.run_in_actor( | ||||||
|                 portal.channel.uid |                     spawn, | ||||||
|                 in |                     is_arbiter=False, | ||||||
|                 tractor.current_actor().ipc_server._peers |                     name='sub-actor', | ||||||
|             ) |                     data=data, | ||||||
|  |                     reg_addr=reg_addr, | ||||||
|  |                     enable_modules=namespaces, | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
|             # get result from child subactor |                 assert len(nursery._children) == 1 | ||||||
|             result = await portal.result() |                 assert portal.channel.uid in tractor.current_actor()._peers | ||||||
|             assert result == 10 |                 # be sure we can still get the result | ||||||
|             return result |                 result = await portal.result() | ||||||
|     else: |                 assert result == 10 | ||||||
|         assert actor.is_arbiter == should_be_root |                 return result | ||||||
|         return 10 |         else: | ||||||
|  |             return 10 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_run_in_actor_same_func_in_child( | def test_local_arbiter_subactor_global_state(reg_addr): | ||||||
|     reg_addr: tuple, |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     result = trio.run( |     result = trio.run( | ||||||
|         partial( |         spawn, | ||||||
|             spawn, |         True, | ||||||
|             should_be_root=True, |         data_to_pass_down, | ||||||
|             data=data_to_pass_down, |         reg_addr, | ||||||
|             reg_addr=reg_addr, |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|         ) |  | ||||||
|     ) |     ) | ||||||
|     assert result == 10 |     assert result == 10 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def movie_theatre_question(): | async def movie_theatre_question(): | ||||||
|     ''' |     """A question asked in a dark theatre, in a tangent | ||||||
|     A question asked in a dark theatre, in a tangent |  | ||||||
|     (errr, I mean different) process. |     (errr, I mean different) process. | ||||||
| 
 |     """ | ||||||
|     ''' |  | ||||||
|     return 'have you ever seen a portal?' |     return 'have you ever seen a portal?' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_movie_theatre_convo(start_method): | async def test_movie_theatre_convo(start_method): | ||||||
|     ''' |     """The main ``tractor`` routine. | ||||||
|     The main ``tractor`` routine. |     """ | ||||||
|  |     async with tractor.open_nursery() as n: | ||||||
| 
 | 
 | ||||||
|     ''' |         portal = await n.start_actor( | ||||||
|     async with tractor.open_nursery(debug_mode=True) as an: |  | ||||||
| 
 |  | ||||||
|         portal = await an.start_actor( |  | ||||||
|             'frank', |             'frank', | ||||||
|             # enable the actor to run funcs from this current module |             # enable the actor to run funcs from this current module | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|  | @ -121,9 +94,7 @@ async def test_movie_theatre_convo(start_method): | ||||||
|         await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def cellar_door( | async def cellar_door(return_value: Optional[str]): | ||||||
|     return_value: str|None, |  | ||||||
| ): |  | ||||||
|     return return_value |     return return_value | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -133,19 +104,17 @@ async def cellar_door( | ||||||
| ) | ) | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_most_beautiful_word( | async def test_most_beautiful_word( | ||||||
|     start_method: str, |     start_method, | ||||||
|     return_value: Any, |     return_value | ||||||
|     debug_mode: bool, |  | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     The main ``tractor`` routine. |     The main ``tractor`` routine. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     with trio.fail_after(1): |     with trio.fail_after(1): | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery() as n: | ||||||
|             debug_mode=debug_mode, | 
 | ||||||
|         ) as an: |             portal = await n.run_in_actor( | ||||||
|             portal = await an.run_in_actor( |  | ||||||
|                 cellar_door, |                 cellar_door, | ||||||
|                 return_value=return_value, |                 return_value=return_value, | ||||||
|                 name='some_linguist', |                 name='some_linguist', | ||||||
|  |  | ||||||
|  | @ -2,9 +2,7 @@ | ||||||
| Broadcast channels for fan-out to local tasks. | Broadcast channels for fan-out to local tasks. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from contextlib import ( | from contextlib import asynccontextmanager | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| from functools import partial | from functools import partial | ||||||
| from itertools import cycle | from itertools import cycle | ||||||
| import time | import time | ||||||
|  | @ -17,7 +15,6 @@ import tractor | ||||||
| from tractor.trionics import ( | from tractor.trionics import ( | ||||||
|     broadcast_receiver, |     broadcast_receiver, | ||||||
|     Lagged, |     Lagged, | ||||||
|     collapse_eg, |  | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -65,7 +62,7 @@ async def ensure_sequence( | ||||||
|                 break |                 break | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @asynccontextmanager | ||||||
| async def open_sequence_streamer( | async def open_sequence_streamer( | ||||||
| 
 | 
 | ||||||
|     sequence: list[int], |     sequence: list[int], | ||||||
|  | @ -77,9 +74,9 @@ async def open_sequence_streamer( | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         arbiter_addr=reg_addr, |         arbiter_addr=reg_addr, | ||||||
|         start_method=start_method, |         start_method=start_method, | ||||||
|     ) as an: |     ) as tn: | ||||||
| 
 | 
 | ||||||
|         portal = await an.start_actor( |         portal = await tn.start_actor( | ||||||
|             'sequence_echoer', |             'sequence_echoer', | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) |         ) | ||||||
|  | @ -158,12 +155,9 @@ def test_consumer_and_parent_maybe_lag( | ||||||
|         ) as stream: |         ) as stream: | ||||||
| 
 | 
 | ||||||
|             try: |             try: | ||||||
|                 async with ( |                 async with trio.open_nursery() as n: | ||||||
|                     collapse_eg(), |  | ||||||
|                     trio.open_nursery() as tn, |  | ||||||
|                 ): |  | ||||||
| 
 | 
 | ||||||
|                     tn.start_soon( |                     n.start_soon( | ||||||
|                         ensure_sequence, |                         ensure_sequence, | ||||||
|                         stream, |                         stream, | ||||||
|                         sequence.copy(), |                         sequence.copy(), | ||||||
|  | @ -236,8 +230,8 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | ||||||
| 
 | 
 | ||||||
|         ) as stream: |         ) as stream: | ||||||
| 
 | 
 | ||||||
|             async with trio.open_nursery() as tn: |             async with trio.open_nursery() as n: | ||||||
|                 tn.start_soon( |                 n.start_soon( | ||||||
|                     ensure_sequence, |                     ensure_sequence, | ||||||
|                     stream, |                     stream, | ||||||
|                     sequence.copy(), |                     sequence.copy(), | ||||||
|  | @ -259,7 +253,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | ||||||
|                         continue |                         continue | ||||||
| 
 | 
 | ||||||
|                 print('cancelling faster subtask') |                 print('cancelling faster subtask') | ||||||
|                 tn.cancel_scope.cancel() |                 n.cancel_scope.cancel() | ||||||
| 
 | 
 | ||||||
|             try: |             try: | ||||||
|                 value = await stream.receive() |                 value = await stream.receive() | ||||||
|  | @ -277,7 +271,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | ||||||
|                         # the faster subtask was cancelled |                         # the faster subtask was cancelled | ||||||
|                         break |                         break | ||||||
| 
 | 
 | ||||||
|                 # await tractor.pause() |                 # await tractor.breakpoint() | ||||||
|                 # await stream.receive() |                 # await stream.receive() | ||||||
|                 print(f'final value: {value}') |                 print(f'final value: {value}') | ||||||
| 
 | 
 | ||||||
|  | @ -377,13 +371,13 @@ def test_ensure_slow_consumers_lag_out( | ||||||
|                                     f'on {lags}:{value}') |                                     f'on {lags}:{value}') | ||||||
|                                 return |                                 return | ||||||
| 
 | 
 | ||||||
|             async with trio.open_nursery() as tn: |             async with trio.open_nursery() as nursery: | ||||||
| 
 | 
 | ||||||
|                 for i in range(1, num_laggers): |                 for i in range(1, num_laggers): | ||||||
| 
 | 
 | ||||||
|                     task_name = f'sub_{i}' |                     task_name = f'sub_{i}' | ||||||
|                     laggers[task_name] = 0 |                     laggers[task_name] = 0 | ||||||
|                     tn.start_soon( |                     nursery.start_soon( | ||||||
|                         partial( |                         partial( | ||||||
|                             sub_and_print, |                             sub_and_print, | ||||||
|                             delay=i*0.001, |                             delay=i*0.001, | ||||||
|  | @ -503,7 +497,6 @@ def test_no_raise_on_lag(): | ||||||
|                 # internals when the no raise flag is set. |                 # internals when the no raise flag is set. | ||||||
|                 loglevel='warning', |                 loglevel='warning', | ||||||
|             ), |             ), | ||||||
|             collapse_eg(), |  | ||||||
|             trio.open_nursery() as n, |             trio.open_nursery() as n, | ||||||
|         ): |         ): | ||||||
|             n.start_soon(slow) |             n.start_soon(slow) | ||||||
|  |  | ||||||
|  | @ -3,21 +3,9 @@ Reminders for oddities in `trio` that we need to stay aware of and/or | ||||||
| want to see changed. | want to see changed. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from contextlib import ( |  | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| from types import ModuleType |  | ||||||
| 
 |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| import pytest | import pytest | ||||||
| from _pytest import pathlib |  | ||||||
| from tractor.trionics import collapse_eg |  | ||||||
| import trio | import trio | ||||||
| from trio import TaskStatus | from trio import TaskStatus | ||||||
| from tractor._testing import ( |  | ||||||
|     examples_dir, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|  | @ -72,7 +60,6 @@ def test_stashed_child_nursery(use_start_soon): | ||||||
|     async def main(): |     async def main(): | ||||||
| 
 | 
 | ||||||
|         async with ( |         async with ( | ||||||
|             collapse_eg(), |  | ||||||
|             trio.open_nursery() as pn, |             trio.open_nursery() as pn, | ||||||
|         ): |         ): | ||||||
|             cn = await pn.start(mk_child_nursery) |             cn = await pn.start(mk_child_nursery) | ||||||
|  | @ -93,210 +80,3 @@ def test_stashed_child_nursery(use_start_soon): | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(NameError): |     with pytest.raises(NameError): | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     ('unmask_from_canc', 'canc_from_finally'), |  | ||||||
|     [ |  | ||||||
|         (True, False), |  | ||||||
|         (True, True), |  | ||||||
|         pytest.param(False, True, |  | ||||||
|                      marks=pytest.mark.xfail(reason="never raises!") |  | ||||||
|         ), |  | ||||||
|     ], |  | ||||||
|     # TODO, ask ronny how to impl this .. XD |  | ||||||
|     # ids='unmask_from_canc={0}, canc_from_finally={1}',#.format, |  | ||||||
| ) |  | ||||||
| def test_acm_embedded_nursery_propagates_enter_err( |  | ||||||
|     canc_from_finally: bool, |  | ||||||
|     unmask_from_canc: bool, |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Demo how a masking `trio.Cancelled` could be handled by unmasking |  | ||||||
|     from the `.__context__` field when a user (by accident) re-raises |  | ||||||
|     from a `finally:`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     import tractor |  | ||||||
| 
 |  | ||||||
|     @acm |  | ||||||
|     async def wraps_tn_that_always_cancels(): |  | ||||||
|         async with ( |  | ||||||
|             trio.open_nursery() as tn, |  | ||||||
|             tractor.trionics.maybe_raise_from_masking_exc( |  | ||||||
|                 unmask_from=( |  | ||||||
|                     (trio.Cancelled,) if unmask_from_canc |  | ||||||
|                     else () |  | ||||||
|                 ), |  | ||||||
|             ) |  | ||||||
|         ): |  | ||||||
|             try: |  | ||||||
|                 yield tn |  | ||||||
|             finally: |  | ||||||
|                 if canc_from_finally: |  | ||||||
|                     tn.cancel_scope.cancel() |  | ||||||
|                     await trio.lowlevel.checkpoint() |  | ||||||
| 
 |  | ||||||
|     async def _main(): |  | ||||||
|         with tractor.devx.maybe_open_crash_handler( |  | ||||||
|             pdb=debug_mode, |  | ||||||
|         ) as bxerr: |  | ||||||
|             assert not bxerr.value |  | ||||||
| 
 |  | ||||||
|             async with ( |  | ||||||
|                 wraps_tn_that_always_cancels() as tn, |  | ||||||
|             ): |  | ||||||
|                 assert not tn.cancel_scope.cancel_called |  | ||||||
|                 assert 0 |  | ||||||
| 
 |  | ||||||
|         if debug_mode: |  | ||||||
|             assert ( |  | ||||||
|                 (err := bxerr.value) |  | ||||||
|                 and |  | ||||||
|                 type(err) is AssertionError |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(ExceptionGroup) as excinfo: |  | ||||||
|         trio.run(_main) |  | ||||||
| 
 |  | ||||||
|     eg: ExceptionGroup = excinfo.value |  | ||||||
|     assert_eg, rest_eg = eg.split(AssertionError) |  | ||||||
| 
 |  | ||||||
|     assert len(assert_eg.exceptions) == 1 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_gatherctxs_with_memchan_breaks_multicancelled( |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Demo how a using an `async with sndchan` inside |  | ||||||
|     a `.trionics.gather_contexts()` task will break a strict-eg-tn's |  | ||||||
|     multi-cancelled absorption.. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     from tractor import ( |  | ||||||
|         trionics, |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     @acm |  | ||||||
|     async def open_memchan() -> trio.abc.ReceiveChannel: |  | ||||||
| 
 |  | ||||||
|         task: trio.Task = trio.lowlevel.current_task() |  | ||||||
|         print( |  | ||||||
|             f'Opening {task!r}\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # 1 to force eager sending |  | ||||||
|         send, recv = trio.open_memory_channel(16) |  | ||||||
| 
 |  | ||||||
|         try: |  | ||||||
|             async with send: |  | ||||||
|                 yield recv |  | ||||||
|         finally: |  | ||||||
|             print( |  | ||||||
|                 f'Closed {task!r}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         async with ( |  | ||||||
|             # XXX should ensure ONLY the KBI |  | ||||||
|             # is relayed upward |  | ||||||
|             collapse_eg(), |  | ||||||
|             trio.open_nursery(), # as tn, |  | ||||||
| 
 |  | ||||||
|             trionics.gather_contexts([ |  | ||||||
|                 open_memchan(), |  | ||||||
|                 open_memchan(), |  | ||||||
|             ]) as recv_chans, |  | ||||||
|         ): |  | ||||||
|             assert len(recv_chans) == 2 |  | ||||||
| 
 |  | ||||||
|             await trio.sleep(1) |  | ||||||
|             raise KeyboardInterrupt |  | ||||||
|             # tn.cancel_scope.cancel() |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(KeyboardInterrupt): |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'raise_unmasked', [ |  | ||||||
|         True, |  | ||||||
|         pytest.param( |  | ||||||
|             False, |  | ||||||
|             marks=pytest.mark.xfail( |  | ||||||
|                 reason="see examples/trio/send_chan_aclose_masks.py" |  | ||||||
|             ) |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'child_errors_mid_stream', |  | ||||||
|     [True, False], |  | ||||||
| ) |  | ||||||
| def test_unmask_aclose_as_checkpoint_on_aexit( |  | ||||||
|     raise_unmasked: bool, |  | ||||||
|     child_errors_mid_stream: bool, |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that our unmasker util works over the common case where |  | ||||||
|     a mem-chan's `.aclose()` is included in an `@acm` stack |  | ||||||
|     and it being currently a checkpoint, can `trio.Cancelled`-mask an embedded |  | ||||||
|     exception from user code resulting in a silent failure which |  | ||||||
|     appears like graceful cancellation. |  | ||||||
| 
 |  | ||||||
|     This test suite is mostly implemented as an example script so it |  | ||||||
|     could more easily be shared with `trio`-core peeps as `tractor`-less |  | ||||||
|     minimum reproducing example. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     mod: ModuleType = pathlib.import_path( |  | ||||||
|         examples_dir() |  | ||||||
|         / 'trio' |  | ||||||
|         / 'send_chan_aclose_masks_beg.py', |  | ||||||
|         root=examples_dir(), |  | ||||||
|         consider_namespace_packages=False, |  | ||||||
|     ) |  | ||||||
|     with pytest.raises(RuntimeError): |  | ||||||
|         trio.run(partial( |  | ||||||
|             mod.main, |  | ||||||
|             raise_unmasked=raise_unmasked, |  | ||||||
|             child_errors_mid_stream=child_errors_mid_stream, |  | ||||||
|         )) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'ignore_special_cases', [ |  | ||||||
|         True, |  | ||||||
|         pytest.param( |  | ||||||
|             False, |  | ||||||
|             marks=pytest.mark.xfail( |  | ||||||
|                 reason="see examples/trio/lockacquire_not_umasked.py" |  | ||||||
|             ) |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| ) |  | ||||||
| def test_cancelled_lockacquire_in_ipctx_not_unmasked( |  | ||||||
|     ignore_special_cases: bool, |  | ||||||
|     loglevel: str, |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     mod: ModuleType = pathlib.import_path( |  | ||||||
|         examples_dir() |  | ||||||
|         / 'trio' |  | ||||||
|         / 'lockacquire_not_unmasked.py', |  | ||||||
|         root=examples_dir(), |  | ||||||
|         consider_namespace_packages=False, |  | ||||||
|     ) |  | ||||||
|     async def _main(): |  | ||||||
|         with trio.fail_after(2): |  | ||||||
|             await mod.main( |  | ||||||
|                 ignore_special_cases=ignore_special_cases, |  | ||||||
|                 loglevel=loglevel, |  | ||||||
|                 debug_mode=debug_mode, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     trio.run(_main) |  | ||||||
|  |  | ||||||
|  | @ -18,53 +18,71 @@ | ||||||
| tractor: structured concurrent ``trio``-"actors". | tractor: structured concurrent ``trio``-"actors". | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
|  | from exceptiongroup import BaseExceptionGroup | ||||||
| 
 | 
 | ||||||
| from ._clustering import ( | from ._clustering import open_actor_cluster | ||||||
|     open_actor_cluster as open_actor_cluster, | from ._ipc import Channel | ||||||
| ) |  | ||||||
| from ._context import ( | from ._context import ( | ||||||
|     Context as Context,  # the type |     Context,  # the type | ||||||
|     context as context,  # a func-decorator |     context,  # a func-decorator | ||||||
| ) | ) | ||||||
| from ._streaming import ( | from ._streaming import ( | ||||||
|     MsgStream as MsgStream, |     MsgStream, | ||||||
|     stream as stream, |     stream, | ||||||
| ) | ) | ||||||
| from ._discovery import ( | from ._discovery import ( | ||||||
|     get_registry as get_registry, |     get_arbiter, | ||||||
|     find_actor as find_actor, |     find_actor, | ||||||
|     wait_for_actor as wait_for_actor, |     wait_for_actor, | ||||||
|     query_actor as query_actor, |     query_actor, | ||||||
| ) |  | ||||||
| from ._supervise import ( |  | ||||||
|     open_nursery as open_nursery, |  | ||||||
|     ActorNursery as ActorNursery, |  | ||||||
| ) | ) | ||||||
|  | from ._supervise import open_nursery | ||||||
| from ._state import ( | from ._state import ( | ||||||
|     current_actor as current_actor, |     current_actor, | ||||||
|     is_root_process as is_root_process, |     is_root_process, | ||||||
|     current_ipc_ctx as current_ipc_ctx, |  | ||||||
|     debug_mode as debug_mode |  | ||||||
| ) | ) | ||||||
| from ._exceptions import ( | from ._exceptions import ( | ||||||
|     ContextCancelled as ContextCancelled, |     RemoteActorError, | ||||||
|     ModuleNotExposed as ModuleNotExposed, |     ModuleNotExposed, | ||||||
|     MsgTypeError as MsgTypeError, |     ContextCancelled, | ||||||
|     RemoteActorError as RemoteActorError, |  | ||||||
|     TransportClosed as TransportClosed, |  | ||||||
| ) | ) | ||||||
| from .devx import ( | from ._debug import ( | ||||||
|     breakpoint as breakpoint, |     breakpoint, | ||||||
|     pause as pause, |     post_mortem, | ||||||
|     pause_from_sync as pause_from_sync, |  | ||||||
|     post_mortem as post_mortem, |  | ||||||
| ) | ) | ||||||
| from . import msg as msg | from . import msg | ||||||
| from ._root import ( | from ._root import ( | ||||||
|     run_daemon as run_daemon, |     run_daemon, | ||||||
|     open_root_actor as open_root_actor, |     open_root_actor, | ||||||
| ) | ) | ||||||
| from .ipc import Channel as Channel | from ._portal import Portal | ||||||
| from ._portal import Portal as Portal | from ._runtime import Actor | ||||||
| from ._runtime import Actor as Actor | 
 | ||||||
| # from . import hilevel as hilevel | 
 | ||||||
|  | __all__ = [ | ||||||
|  |     'Actor', | ||||||
|  |     'Channel', | ||||||
|  |     'Context', | ||||||
|  |     'ContextCancelled', | ||||||
|  |     'ModuleNotExposed', | ||||||
|  |     'MsgStream', | ||||||
|  |     'BaseExceptionGroup', | ||||||
|  |     'Portal', | ||||||
|  |     'RemoteActorError', | ||||||
|  |     'breakpoint', | ||||||
|  |     'context', | ||||||
|  |     'current_actor', | ||||||
|  |     'find_actor', | ||||||
|  |     'get_arbiter', | ||||||
|  |     'is_root_process', | ||||||
|  |     'msg', | ||||||
|  |     'open_actor_cluster', | ||||||
|  |     'open_nursery', | ||||||
|  |     'open_root_actor', | ||||||
|  |     'post_mortem', | ||||||
|  |     'query_actor', | ||||||
|  |     'run_daemon', | ||||||
|  |     'stream', | ||||||
|  |     'to_asyncio', | ||||||
|  |     'wait_for_actor', | ||||||
|  | ] | ||||||
|  |  | ||||||
							
								
								
									
										282
									
								
								tractor/_addr.py
								
								
								
								
							
							
						
						
									
										282
									
								
								tractor/_addr.py
								
								
								
								
							|  | @ -1,282 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright 2018-eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| from __future__ import annotations |  | ||||||
| from uuid import uuid4 |  | ||||||
| from typing import ( |  | ||||||
|     Protocol, |  | ||||||
|     ClassVar, |  | ||||||
|     Type, |  | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from bidict import bidict |  | ||||||
| from trio import ( |  | ||||||
|     SocketListener, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from .log import get_logger |  | ||||||
| from ._state import ( |  | ||||||
|     _def_tpt_proto, |  | ||||||
| ) |  | ||||||
| from .ipc._tcp import TCPAddress |  | ||||||
| from .ipc._uds import UDSAddress |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from ._runtime import Actor |  | ||||||
| 
 |  | ||||||
| log = get_logger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO, maybe breakout the netns key to a struct? |  | ||||||
| # class NetNs(Struct)[str, int]: |  | ||||||
| #     ... |  | ||||||
| 
 |  | ||||||
| # TODO, can't we just use a type alias |  | ||||||
| # for this? namely just some `tuple[str, int, str, str]`? |  | ||||||
| # |  | ||||||
| # -[ ] would also just be simpler to keep this as SockAddr[tuple] |  | ||||||
| #     or something, implying it's just a simple pair of values which can |  | ||||||
| #     presumably be mapped to all transports? |  | ||||||
| # -[ ] `pydoc socket.socket.getsockname()` delivers a 4-tuple for |  | ||||||
| #     ipv6 `(hostaddr, port, flowinfo, scope_id)`.. so how should we |  | ||||||
| #     handle that? |  | ||||||
| # -[ ] as a further alternative to this wrap()/unwrap() approach we |  | ||||||
| #     could just implement `enc/dec_hook()`s for the `Address`-types |  | ||||||
| #     and just deal with our internal objs directly and always and |  | ||||||
| #     leave it to the codec layer to figure out marshalling? |  | ||||||
| #    |_ would mean only one spot to do the `.unwrap()` (which we may |  | ||||||
| #       end up needing to call from the hook()s anyway?) |  | ||||||
| # -[x] rename to `UnwrappedAddress[Descriptor]` ?? |  | ||||||
| #    seems like the right name as per, |  | ||||||
| #    https://www.geeksforgeeks.org/introduction-to-address-descriptor/ |  | ||||||
| # |  | ||||||
| UnwrappedAddress = ( |  | ||||||
|     # tcp/udp/uds |  | ||||||
|     tuple[ |  | ||||||
|         str,  # host/domain(tcp), filesys-dir(uds) |  | ||||||
|         int|str,  # port/path(uds) |  | ||||||
|     ] |  | ||||||
|     # ?TODO? should we also include another 2 fields from |  | ||||||
|     # our `Aid` msg such that we include the runtime `Actor.uid` |  | ||||||
|     # of `.name` and `.uuid`? |  | ||||||
|     # - would ensure uniqueness across entire net? |  | ||||||
|     # - allows for easier runtime-level filtering of "actors by |  | ||||||
|     #   service name" |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO, maybe rename to `SocketAddress`? |  | ||||||
| class Address(Protocol): |  | ||||||
|     proto_key: ClassVar[str] |  | ||||||
|     unwrapped_type: ClassVar[UnwrappedAddress] |  | ||||||
| 
 |  | ||||||
|     # TODO, i feel like an `.is_bound()` is a better thing to |  | ||||||
|     # support? |  | ||||||
|     # Lke, what use does this have besides a noop and if it's not |  | ||||||
|     # valid why aren't we erroring on creation/use? |  | ||||||
|     @property |  | ||||||
|     def is_valid(self) -> bool: |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     # TODO, maybe `.netns` is a better name? |  | ||||||
|     @property |  | ||||||
|     def namespace(self) -> tuple[str, int]|None: |  | ||||||
|         ''' |  | ||||||
|         The if-available, OS-specific "network namespace" key. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     @property |  | ||||||
|     def bindspace(self) -> str: |  | ||||||
|         ''' |  | ||||||
|         Deliver the socket address' "bindable space" from |  | ||||||
|         a `socket.socket.bind()` and thus from the perspective of |  | ||||||
|         specific transport protocol domain. |  | ||||||
| 
 |  | ||||||
|         I.e. for most (layer-4) network-socket protocols this is |  | ||||||
|         normally the ipv4/6 address, for UDS this is normally |  | ||||||
|         a filesystem (sub-directory). |  | ||||||
| 
 |  | ||||||
|         For (distributed) network protocols this is normally the routing |  | ||||||
|         layer's domain/(ip-)address, though it might also include a "network namespace" |  | ||||||
|         key different then the default. |  | ||||||
| 
 |  | ||||||
|         For local-host-only transports this is either an explicit |  | ||||||
|         namespace (with types defined by the OS: netns, Cgroup, IPC, |  | ||||||
|         pid, etc. on linux) or failing that the sub-directory in the |  | ||||||
|         filesys in which socket/shm files are located *under*. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     @classmethod |  | ||||||
|     def from_addr(cls, addr: UnwrappedAddress) -> Address: |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     def unwrap(self) -> UnwrappedAddress: |  | ||||||
|         ''' |  | ||||||
|         Deliver the underying minimum field set in |  | ||||||
|         a primitive python data type-structure. |  | ||||||
|         ''' |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     @classmethod |  | ||||||
|     def get_random( |  | ||||||
|         cls, |  | ||||||
|         current_actor: Actor, |  | ||||||
|         bindspace: str|None = None, |  | ||||||
|     ) -> Address: |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     # TODO, this should be something like a `.get_def_registar_addr()` |  | ||||||
|     # or similar since, |  | ||||||
|     # - it should be a **host singleton** (not root/tree singleton) |  | ||||||
|     # - we **only need this value** when one isn't provided to the |  | ||||||
|     #   runtime at boot and we want to implicitly provide a host-wide |  | ||||||
|     #   registrar. |  | ||||||
|     # - each rooted-actor-tree should likely have its own |  | ||||||
|     #   micro-registry (likely the root being it), also see |  | ||||||
|     @classmethod |  | ||||||
|     def get_root(cls) -> Address: |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     def __repr__(self) -> str: |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     def __eq__(self, other) -> bool: |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     async def open_listener( |  | ||||||
|         self, |  | ||||||
|         **kwargs, |  | ||||||
|     ) -> SocketListener: |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
|     async def close_listener(self): |  | ||||||
|         ... |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _address_types: bidict[str, Type[Address]] = { |  | ||||||
|     'tcp': TCPAddress, |  | ||||||
|     'uds': UDSAddress |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO! really these are discovery sys default addrs ONLY useful for |  | ||||||
| # when none is provided to a root actor on first boot. |  | ||||||
| _default_lo_addrs: dict[ |  | ||||||
|     str, |  | ||||||
|     UnwrappedAddress |  | ||||||
| ] = { |  | ||||||
|     'tcp': TCPAddress.get_root().unwrap(), |  | ||||||
|     'uds': UDSAddress.get_root().unwrap(), |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def get_address_cls(name: str) -> Type[Address]: |  | ||||||
|     return _address_types[name] |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def is_wrapped_addr(addr: any) -> bool: |  | ||||||
|     return type(addr) in _address_types.values() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def mk_uuid() -> str: |  | ||||||
|     ''' |  | ||||||
|     Encapsulate creation of a uuid4 as `str` as used |  | ||||||
|     for creating `Actor.uid: tuple[str, str]` and/or |  | ||||||
|     `.msg.types.Aid`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     return str(uuid4()) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def wrap_address( |  | ||||||
|     addr: UnwrappedAddress |  | ||||||
| ) -> Address: |  | ||||||
|     ''' |  | ||||||
|     Wrap an `UnwrappedAddress` as an `Address`-type based |  | ||||||
|     on matching builtin python data-structures which we adhoc |  | ||||||
|     use for each. |  | ||||||
| 
 |  | ||||||
|     XXX NOTE, careful care must be placed to ensure |  | ||||||
|     `UnwrappedAddress` cases are **definitely unique** otherwise the |  | ||||||
|     wrong transport backend may be loaded and will break many |  | ||||||
|     low-level things in our runtime in a not-fun-to-debug way! |  | ||||||
| 
 |  | ||||||
|     XD |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if is_wrapped_addr(addr): |  | ||||||
|         return addr |  | ||||||
| 
 |  | ||||||
|     cls: Type|None = None |  | ||||||
|     # if 'sock' in addr[0]: |  | ||||||
|     #     import pdbp; pdbp.set_trace() |  | ||||||
|     match addr: |  | ||||||
| 
 |  | ||||||
|         # classic network socket-address as tuple/list |  | ||||||
|         case ( |  | ||||||
|             (str(), int()) |  | ||||||
|             | |  | ||||||
|             [str(), int()] |  | ||||||
|         ): |  | ||||||
|             cls = TCPAddress |  | ||||||
| 
 |  | ||||||
|         case ( |  | ||||||
|             # (str()|Path(), str()|Path()), |  | ||||||
|             # ^TODO? uhh why doesn't this work!? |  | ||||||
| 
 |  | ||||||
|             (_, filename) |  | ||||||
|         ) if type(filename) is str: |  | ||||||
|             cls = UDSAddress |  | ||||||
| 
 |  | ||||||
|         # likely an unset UDS or TCP reg address as defaulted in |  | ||||||
|         # `_state._runtime_vars['_root_mailbox']` |  | ||||||
|         # |  | ||||||
|         # TODO? figure out when/if we even need this? |  | ||||||
|         case ( |  | ||||||
|             None |  | ||||||
|             | |  | ||||||
|             [None, None] |  | ||||||
|         ): |  | ||||||
|             cls: Type[Address] = get_address_cls(_def_tpt_proto) |  | ||||||
|             addr: UnwrappedAddress = cls.get_root().unwrap() |  | ||||||
| 
 |  | ||||||
|         case _: |  | ||||||
|             # import pdbp; pdbp.set_trace() |  | ||||||
|             raise TypeError( |  | ||||||
|                 f'Can not wrap unwrapped-address ??\n' |  | ||||||
|                 f'type(addr): {type(addr)!r}\n' |  | ||||||
|                 f'addr: {addr!r}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     return cls.from_addr(addr) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def default_lo_addrs( |  | ||||||
|     transports: list[str], |  | ||||||
| ) -> list[Type[Address]]: |  | ||||||
|     ''' |  | ||||||
|     Return the default, host-singleton, registry address |  | ||||||
|     for an input transport key set. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     return [ |  | ||||||
|         _default_lo_addrs[transport] |  | ||||||
|         for transport in transports |  | ||||||
|     ] |  | ||||||
|  | @ -31,16 +31,11 @@ def parse_uid(arg): | ||||||
|     return str(name), str(uuid)  # ensures str encoding |     return str(name), str(uuid)  # ensures str encoding | ||||||
| 
 | 
 | ||||||
| def parse_ipaddr(arg): | def parse_ipaddr(arg): | ||||||
|     try: |     host, port = literal_eval(arg) | ||||||
|         return literal_eval(arg) |     return (str(host), int(port)) | ||||||
| 
 |  | ||||||
|     except (ValueError, SyntaxError): |  | ||||||
|         # UDS: try to interpret as a straight up str |  | ||||||
|         return arg |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == "__main__": | if __name__ == "__main__": | ||||||
|     __tracebackhide__: bool = True |  | ||||||
| 
 | 
 | ||||||
|     parser = argparse.ArgumentParser() |     parser = argparse.ArgumentParser() | ||||||
|     parser.add_argument("--uid", type=parse_uid) |     parser.add_argument("--uid", type=parse_uid) | ||||||
|  | @ -50,8 +45,8 @@ if __name__ == "__main__": | ||||||
|     args = parser.parse_args() |     args = parser.parse_args() | ||||||
| 
 | 
 | ||||||
|     subactor = Actor( |     subactor = Actor( | ||||||
|         name=args.uid[0], |         args.uid[0], | ||||||
|         uuid=args.uid[1], |         uid=args.uid[1], | ||||||
|         loglevel=args.loglevel, |         loglevel=args.loglevel, | ||||||
|         spawn_method="trio" |         spawn_method="trio" | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  | @ -19,13 +19,10 @@ Actor cluster helpers. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| from contextlib import ( | 
 | ||||||
|     asynccontextmanager as acm, | from contextlib import asynccontextmanager as acm | ||||||
| ) |  | ||||||
| from multiprocessing import cpu_count | from multiprocessing import cpu_count | ||||||
| from typing import ( | from typing import AsyncGenerator, Optional | ||||||
|     AsyncGenerator, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
|  | @ -55,17 +52,10 @@ async def open_actor_cluster( | ||||||
|         raise ValueError( |         raise ValueError( | ||||||
|             'Number of names is {len(names)} but count it {count}') |             'Number of names is {len(names)} but count it {count}') | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with tractor.open_nursery( | ||||||
|         # tractor.trionics.collapse_eg(), |         **runtime_kwargs, | ||||||
|         tractor.open_nursery( |     ) as an: | ||||||
|             **runtime_kwargs, |         async with trio.open_nursery() as n: | ||||||
|         ) as an |  | ||||||
|     ): |  | ||||||
|         async with ( |  | ||||||
|             # tractor.trionics.collapse_eg(), |  | ||||||
|             trio.open_nursery() as tn, |  | ||||||
|             tractor.trionics.maybe_raise_from_masking_exc() |  | ||||||
|         ): |  | ||||||
|             uid = tractor.current_actor().uid |             uid = tractor.current_actor().uid | ||||||
| 
 | 
 | ||||||
|             async def _start(name: str) -> None: |             async def _start(name: str) -> None: | ||||||
|  | @ -76,8 +66,9 @@ async def open_actor_cluster( | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|             for name in names: |             for name in names: | ||||||
|                 tn.start_soon(_start, name) |                 n.start_soon(_start, name) | ||||||
| 
 | 
 | ||||||
|         assert len(portals) == count |         assert len(portals) == count | ||||||
|         yield portals |         yield portals | ||||||
|  | 
 | ||||||
|         await an.cancel(hard_kill=hard_kill) |         await an.cancel(hard_kill=hard_kill) | ||||||
|  |  | ||||||
							
								
								
									
										2131
									
								
								tractor/_context.py
								
								
								
								
							
							
						
						
									
										2131
									
								
								tractor/_context.py
								
								
								
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -0,0 +1,922 @@ | ||||||
|  | # tractor: structured concurrent "actors". | ||||||
|  | # Copyright 2018-eternity Tyler Goodlet. | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | Multi-core debugging for da peeps! | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | from __future__ import annotations | ||||||
|  | import bdb | ||||||
|  | import os | ||||||
|  | import sys | ||||||
|  | import signal | ||||||
|  | from functools import ( | ||||||
|  |     partial, | ||||||
|  |     cached_property, | ||||||
|  | ) | ||||||
|  | from contextlib import asynccontextmanager as acm | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Optional, | ||||||
|  |     Callable, | ||||||
|  |     AsyncIterator, | ||||||
|  |     AsyncGenerator, | ||||||
|  | ) | ||||||
|  | from types import FrameType | ||||||
|  | 
 | ||||||
|  | import pdbp | ||||||
|  | import tractor | ||||||
|  | import trio | ||||||
|  | from trio_typing import TaskStatus | ||||||
|  | 
 | ||||||
|  | from .log import get_logger | ||||||
|  | from ._discovery import get_root | ||||||
|  | from ._state import ( | ||||||
|  |     is_root_process, | ||||||
|  |     debug_mode, | ||||||
|  | ) | ||||||
|  | from ._exceptions import ( | ||||||
|  |     is_multi_cancelled, | ||||||
|  |     ContextCancelled, | ||||||
|  | ) | ||||||
|  | from ._ipc import Channel | ||||||
|  | 
 | ||||||
|  | log = get_logger(__name__) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | __all__ = ['breakpoint', 'post_mortem'] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Lock: | ||||||
|  |     ''' | ||||||
|  |     Actor global debug lock state. | ||||||
|  | 
 | ||||||
|  |     Mostly to avoid a lot of ``global`` declarations for now XD. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     repl: MultiActorPdb | None = None | ||||||
|  |     # placeholder for function to set a ``trio.Event`` on debugger exit | ||||||
|  |     # pdb_release_hook: Optional[Callable] = None | ||||||
|  | 
 | ||||||
|  |     _trio_handler: Callable[ | ||||||
|  |         [int, Optional[FrameType]], Any | ||||||
|  |     ] | int | None = None | ||||||
|  | 
 | ||||||
|  |     # actor-wide variable pointing to current task name using debugger | ||||||
|  |     local_task_in_debug: str | None = None | ||||||
|  | 
 | ||||||
|  |     # NOTE: set by the current task waiting on the root tty lock from | ||||||
|  |     # the CALLER side of the `lock_tty_for_child()` context entry-call | ||||||
|  |     # and must be cancelled if this actor is cancelled via IPC | ||||||
|  |     # request-message otherwise deadlocks with the parent actor may | ||||||
|  |     # ensure | ||||||
|  |     _debugger_request_cs: Optional[trio.CancelScope] = None | ||||||
|  | 
 | ||||||
|  |     # NOTE: set only in the root actor for the **local** root spawned task | ||||||
|  |     # which has acquired the lock (i.e. this is on the callee side of | ||||||
|  |     # the `lock_tty_for_child()` context entry). | ||||||
|  |     _root_local_task_cs_in_debug: Optional[trio.CancelScope] = None | ||||||
|  | 
 | ||||||
|  |     # actor tree-wide actor uid that supposedly has the tty lock | ||||||
|  |     global_actor_in_debug: Optional[tuple[str, str]] = None | ||||||
|  | 
 | ||||||
|  |     local_pdb_complete: Optional[trio.Event] = None | ||||||
|  |     no_remote_has_tty: Optional[trio.Event] = None | ||||||
|  | 
 | ||||||
|  |     # lock in root actor preventing multi-access to local tty | ||||||
|  |     _debug_lock: trio.StrictFIFOLock = trio.StrictFIFOLock() | ||||||
|  | 
 | ||||||
|  |     _orig_sigint_handler: Optional[Callable] = None | ||||||
|  |     _blocked: set[tuple[str, str]] = set() | ||||||
|  | 
 | ||||||
|  |     @classmethod | ||||||
|  |     def shield_sigint(cls): | ||||||
|  |         cls._orig_sigint_handler = signal.signal( | ||||||
|  |             signal.SIGINT, | ||||||
|  |             shield_sigint_handler, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     @classmethod | ||||||
|  |     def unshield_sigint(cls): | ||||||
|  |         # always restore ``trio``'s sigint handler. see notes below in | ||||||
|  |         # the pdb factory about the nightmare that is that code swapping | ||||||
|  |         # out the handler when the repl activates... | ||||||
|  |         signal.signal(signal.SIGINT, cls._trio_handler) | ||||||
|  |         cls._orig_sigint_handler = None | ||||||
|  | 
 | ||||||
|  |     @classmethod | ||||||
|  |     def release(cls): | ||||||
|  |         try: | ||||||
|  |             cls._debug_lock.release() | ||||||
|  |         except RuntimeError: | ||||||
|  |             # uhhh makes no sense but been seeing the non-owner | ||||||
|  |             # release error even though this is definitely the task | ||||||
|  |             # that locked? | ||||||
|  |             owner = cls._debug_lock.statistics().owner | ||||||
|  |             if owner: | ||||||
|  |                 raise | ||||||
|  | 
 | ||||||
|  |         # actor-local state, irrelevant for non-root. | ||||||
|  |         cls.global_actor_in_debug = None | ||||||
|  |         cls.local_task_in_debug = None | ||||||
|  | 
 | ||||||
|  |         try: | ||||||
|  |             # sometimes the ``trio`` might already be terminated in | ||||||
|  |             # which case this call will raise. | ||||||
|  |             if cls.local_pdb_complete is not None: | ||||||
|  |                 cls.local_pdb_complete.set() | ||||||
|  |         finally: | ||||||
|  |             # restore original sigint handler | ||||||
|  |             cls.unshield_sigint() | ||||||
|  |             cls.repl = None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class TractorConfig(pdbp.DefaultConfig): | ||||||
|  |     ''' | ||||||
|  |     Custom ``pdbp`` goodness :surfer: | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     use_pygments: bool = True | ||||||
|  |     sticky_by_default: bool = False | ||||||
|  |     enable_hidden_frames: bool = False | ||||||
|  | 
 | ||||||
|  |     # much thanks @mdmintz for the hot tip! | ||||||
|  |     # fixes line spacing issue when resizing terminal B) | ||||||
|  |     truncate_long_lines: bool = False | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class MultiActorPdb(pdbp.Pdb): | ||||||
|  |     ''' | ||||||
|  |     Add teardown hooks to the regular ``pdbp.Pdb``. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # override the pdbp config with our coolio one | ||||||
|  |     DefaultConfig = TractorConfig | ||||||
|  | 
 | ||||||
|  |     # def preloop(self): | ||||||
|  |     #     print('IN PRELOOP') | ||||||
|  |     #     super().preloop() | ||||||
|  | 
 | ||||||
|  |     # TODO: figure out how to disallow recursive .set_trace() entry | ||||||
|  |     # since that'll cause deadlock for us. | ||||||
|  |     def set_continue(self): | ||||||
|  |         try: | ||||||
|  |             super().set_continue() | ||||||
|  |         finally: | ||||||
|  |             Lock.release() | ||||||
|  | 
 | ||||||
|  |     def set_quit(self): | ||||||
|  |         try: | ||||||
|  |             super().set_quit() | ||||||
|  |         finally: | ||||||
|  |             Lock.release() | ||||||
|  | 
 | ||||||
|  |     # XXX NOTE: we only override this because apparently the stdlib pdb | ||||||
|  |     # bois likes to touch the SIGINT handler as much as i like to touch | ||||||
|  |     # my d$%&. | ||||||
|  |     def _cmdloop(self): | ||||||
|  |         self.cmdloop() | ||||||
|  | 
 | ||||||
|  |     @cached_property | ||||||
|  |     def shname(self) -> str | None: | ||||||
|  |         ''' | ||||||
|  |         Attempt to return the login shell name with a special check for | ||||||
|  |         the infamous `xonsh` since it seems to have some issues much | ||||||
|  |         different from std shells when it comes to flushing the prompt? | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         # SUPER HACKY and only really works if `xonsh` is not used | ||||||
|  |         # before spawning further sub-shells.. | ||||||
|  |         shpath = os.getenv('SHELL', None) | ||||||
|  | 
 | ||||||
|  |         if shpath: | ||||||
|  |             if ( | ||||||
|  |                 os.getenv('XONSH_LOGIN', default=False) | ||||||
|  |                 or 'xonsh' in shpath | ||||||
|  |             ): | ||||||
|  |                 return 'xonsh' | ||||||
|  | 
 | ||||||
|  |             return os.path.basename(shpath) | ||||||
|  | 
 | ||||||
|  |         return None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @acm | ||||||
|  | async def _acquire_debug_lock_from_root_task( | ||||||
|  |     uid: tuple[str, str] | ||||||
|  | 
 | ||||||
|  | ) -> AsyncIterator[trio.StrictFIFOLock]: | ||||||
|  |     ''' | ||||||
|  |     Acquire a root-actor local FIFO lock which tracks mutex access of | ||||||
|  |     the process tree's global debugger breakpoint. | ||||||
|  | 
 | ||||||
|  |     This lock avoids tty clobbering (by preventing multiple processes | ||||||
|  |     reading from stdstreams) and ensures multi-actor, sequential access | ||||||
|  |     to the ``pdb`` repl. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     task_name = trio.lowlevel.current_task().name | ||||||
|  | 
 | ||||||
|  |     log.runtime( | ||||||
|  |         f"Attempting to acquire TTY lock, remote task: {task_name}:{uid}" | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     we_acquired = False | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         log.runtime( | ||||||
|  |             f"entering lock checkpoint, remote task: {task_name}:{uid}" | ||||||
|  |         ) | ||||||
|  |         we_acquired = True | ||||||
|  | 
 | ||||||
|  |         # NOTE: if the surrounding cancel scope from the | ||||||
|  |         # `lock_tty_for_child()` caller is cancelled, this line should | ||||||
|  |         # unblock and NOT leave us in some kind of | ||||||
|  |         # a "child-locked-TTY-but-child-is-uncontactable-over-IPC" | ||||||
|  |         # condition. | ||||||
|  |         await Lock._debug_lock.acquire() | ||||||
|  | 
 | ||||||
|  |         if Lock.no_remote_has_tty is None: | ||||||
|  |             # mark the tty lock as being in use so that the runtime | ||||||
|  |             # can try to avoid clobbering any connection from a child | ||||||
|  |             # that's currently relying on it. | ||||||
|  |             Lock.no_remote_has_tty = trio.Event() | ||||||
|  | 
 | ||||||
|  |         Lock.global_actor_in_debug = uid | ||||||
|  |         log.runtime(f"TTY lock acquired, remote task: {task_name}:{uid}") | ||||||
|  | 
 | ||||||
|  |         # NOTE: critical section: this yield is unshielded! | ||||||
|  | 
 | ||||||
|  |         # IF we received a cancel during the shielded lock entry of some | ||||||
|  |         # next-in-queue requesting task, then the resumption here will | ||||||
|  |         # result in that ``trio.Cancelled`` being raised to our caller | ||||||
|  |         # (likely from ``lock_tty_for_child()`` below)!  In | ||||||
|  |         # this case the ``finally:`` below should trigger and the | ||||||
|  |         # surrounding caller side context should cancel normally | ||||||
|  |         # relaying back to the caller. | ||||||
|  | 
 | ||||||
|  |         yield Lock._debug_lock | ||||||
|  | 
 | ||||||
|  |     finally: | ||||||
|  |         if ( | ||||||
|  |             we_acquired | ||||||
|  |             and Lock._debug_lock.locked() | ||||||
|  |         ): | ||||||
|  |             Lock._debug_lock.release() | ||||||
|  | 
 | ||||||
|  |         # IFF there are no more requesting tasks queued up fire, the | ||||||
|  |         # "tty-unlocked" event thereby alerting any monitors of the lock that | ||||||
|  |         # we are now back in the "tty unlocked" state. This is basically | ||||||
|  |         # and edge triggered signal around an empty queue of sub-actor | ||||||
|  |         # tasks that may have tried to acquire the lock. | ||||||
|  |         stats = Lock._debug_lock.statistics() | ||||||
|  |         if ( | ||||||
|  |             not stats.owner | ||||||
|  |         ): | ||||||
|  |             log.runtime(f"No more tasks waiting on tty lock! says {uid}") | ||||||
|  |             if Lock.no_remote_has_tty is not None: | ||||||
|  |                 Lock.no_remote_has_tty.set() | ||||||
|  |                 Lock.no_remote_has_tty = None | ||||||
|  | 
 | ||||||
|  |         Lock.global_actor_in_debug = None | ||||||
|  | 
 | ||||||
|  |         log.runtime( | ||||||
|  |             f"TTY lock released, remote task: {task_name}:{uid}" | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @tractor.context | ||||||
|  | async def lock_tty_for_child( | ||||||
|  | 
 | ||||||
|  |     ctx: tractor.Context, | ||||||
|  |     subactor_uid: tuple[str, str] | ||||||
|  | 
 | ||||||
|  | ) -> str: | ||||||
|  |     ''' | ||||||
|  |     Lock the TTY in the root process of an actor tree in a new | ||||||
|  |     inter-actor-context-task such that the ``pdbp`` debugger console | ||||||
|  |     can be mutex-allocated to the calling sub-actor for REPL control | ||||||
|  |     without interference by other processes / threads. | ||||||
|  | 
 | ||||||
|  |     NOTE: this task must be invoked in the root process of the actor | ||||||
|  |     tree. It is meant to be invoked as an rpc-task and should be | ||||||
|  |     highly reliable at releasing the mutex complete! | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     task_name = trio.lowlevel.current_task().name | ||||||
|  | 
 | ||||||
|  |     if tuple(subactor_uid) in Lock._blocked: | ||||||
|  |         log.warning( | ||||||
|  |             f'Actor {subactor_uid} is blocked from acquiring debug lock\n' | ||||||
|  |             f"remote task: {task_name}:{subactor_uid}" | ||||||
|  |         ) | ||||||
|  |         ctx._enter_debugger_on_cancel = False | ||||||
|  |         await ctx.cancel(f'Debug lock blocked for {subactor_uid}') | ||||||
|  |         return 'pdb_lock_blocked' | ||||||
|  | 
 | ||||||
|  |     # TODO: when we get to true remote debugging | ||||||
|  |     # this will deliver stdin data? | ||||||
|  | 
 | ||||||
|  |     log.debug( | ||||||
|  |         "Attempting to acquire TTY lock\n" | ||||||
|  |         f"remote task: {task_name}:{subactor_uid}" | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     log.debug(f"Actor {subactor_uid} is WAITING on stdin hijack lock") | ||||||
|  |     Lock.shield_sigint() | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         with ( | ||||||
|  |             trio.CancelScope(shield=True) as debug_lock_cs, | ||||||
|  |         ): | ||||||
|  |             Lock._root_local_task_cs_in_debug = debug_lock_cs | ||||||
|  |             async with _acquire_debug_lock_from_root_task(subactor_uid): | ||||||
|  | 
 | ||||||
|  |                 # indicate to child that we've locked stdio | ||||||
|  |                 await ctx.started('Locked') | ||||||
|  |                 log.debug( | ||||||
|  |                     f"Actor {subactor_uid} acquired stdin hijack lock" | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |                 # wait for unlock pdb by child | ||||||
|  |                 async with ctx.open_stream() as stream: | ||||||
|  |                     assert await stream.receive() == 'pdb_unlock' | ||||||
|  | 
 | ||||||
|  |         return "pdb_unlock_complete" | ||||||
|  | 
 | ||||||
|  |     finally: | ||||||
|  |         Lock._root_local_task_cs_in_debug = None | ||||||
|  |         Lock.unshield_sigint() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def wait_for_parent_stdin_hijack( | ||||||
|  |     actor_uid: tuple[str, str], | ||||||
|  |     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED | ||||||
|  | ): | ||||||
|  |     ''' | ||||||
|  |     Connect to the root actor via a ``Context`` and invoke a task which | ||||||
|  |     locks a root-local TTY lock: ``lock_tty_for_child()``; this func | ||||||
|  |     should be called in a new task from a child actor **and never the | ||||||
|  |     root*. | ||||||
|  | 
 | ||||||
|  |     This function is used by any sub-actor to acquire mutex access to | ||||||
|  |     the ``pdb`` REPL and thus the root's TTY for interactive debugging | ||||||
|  |     (see below inside ``_breakpoint()``). It can be used to ensure that | ||||||
|  |     an intermediate nursery-owning actor does not clobber its children | ||||||
|  |     if they are in debug (see below inside | ||||||
|  |     ``maybe_wait_for_debugger()``). | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     with trio.CancelScope(shield=True) as cs: | ||||||
|  |         Lock._debugger_request_cs = cs | ||||||
|  | 
 | ||||||
|  |         try: | ||||||
|  |             async with get_root() as portal: | ||||||
|  | 
 | ||||||
|  |                 # this syncs to child's ``Context.started()`` call. | ||||||
|  |                 async with portal.open_context( | ||||||
|  | 
 | ||||||
|  |                     tractor._debug.lock_tty_for_child, | ||||||
|  |                     subactor_uid=actor_uid, | ||||||
|  | 
 | ||||||
|  |                 ) as (ctx, val): | ||||||
|  | 
 | ||||||
|  |                     log.debug('locked context') | ||||||
|  |                     assert val == 'Locked' | ||||||
|  | 
 | ||||||
|  |                     async with ctx.open_stream() as stream: | ||||||
|  |                         # unblock local caller | ||||||
|  | 
 | ||||||
|  |                         try: | ||||||
|  |                             assert Lock.local_pdb_complete | ||||||
|  |                             task_status.started(cs) | ||||||
|  |                             await Lock.local_pdb_complete.wait() | ||||||
|  | 
 | ||||||
|  |                         finally: | ||||||
|  |                             # TODO: shielding currently can cause hangs... | ||||||
|  |                             # with trio.CancelScope(shield=True): | ||||||
|  |                             await stream.send('pdb_unlock') | ||||||
|  | 
 | ||||||
|  |                         # sync with callee termination | ||||||
|  |                         assert await ctx.result() == "pdb_unlock_complete" | ||||||
|  | 
 | ||||||
|  |                 log.debug('exitting child side locking task context') | ||||||
|  | 
 | ||||||
|  |         except ContextCancelled: | ||||||
|  |             log.warning('Root actor cancelled debug lock') | ||||||
|  |             raise | ||||||
|  | 
 | ||||||
|  |         finally: | ||||||
|  |             Lock.local_task_in_debug = None | ||||||
|  |             log.debug('Exiting debugger from child') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def mk_mpdb() -> tuple[MultiActorPdb, Callable]: | ||||||
|  | 
 | ||||||
|  |     pdb = MultiActorPdb() | ||||||
|  |     # signal.signal = pdbp.hideframe(signal.signal) | ||||||
|  | 
 | ||||||
|  |     Lock.shield_sigint() | ||||||
|  | 
 | ||||||
|  |     # XXX: These are the important flags mentioned in | ||||||
|  |     # https://github.com/python-trio/trio/issues/1155 | ||||||
|  |     # which resolve the traceback spews to console. | ||||||
|  |     pdb.allow_kbdint = True | ||||||
|  |     pdb.nosigint = True | ||||||
|  | 
 | ||||||
|  |     return pdb, Lock.unshield_sigint | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def _breakpoint( | ||||||
|  | 
 | ||||||
|  |     debug_func, | ||||||
|  | 
 | ||||||
|  |     # TODO: | ||||||
|  |     # shield: bool = False | ||||||
|  | 
 | ||||||
|  | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     Breakpoint entry for engaging debugger instance sync-interaction, | ||||||
|  |     from async code, executing in actor runtime (task). | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     __tracebackhide__ = True | ||||||
|  |     actor = tractor.current_actor() | ||||||
|  |     pdb, undo_sigint = mk_mpdb() | ||||||
|  |     task_name = trio.lowlevel.current_task().name | ||||||
|  | 
 | ||||||
|  |     # TODO: is it possible to debug a trio.Cancelled except block? | ||||||
|  |     # right now it seems like we can kinda do with by shielding | ||||||
|  |     # around ``tractor.breakpoint()`` but not if we move the shielded | ||||||
|  |     # scope here??? | ||||||
|  |     # with trio.CancelScope(shield=shield): | ||||||
|  |     #     await trio.lowlevel.checkpoint() | ||||||
|  | 
 | ||||||
|  |     if ( | ||||||
|  |         not Lock.local_pdb_complete | ||||||
|  |         or Lock.local_pdb_complete.is_set() | ||||||
|  |     ): | ||||||
|  |         Lock.local_pdb_complete = trio.Event() | ||||||
|  | 
 | ||||||
|  |     # TODO: need a more robust check for the "root" actor | ||||||
|  |     if ( | ||||||
|  |         not is_root_process() | ||||||
|  |         and actor._parent_chan  # a connected child | ||||||
|  |     ): | ||||||
|  | 
 | ||||||
|  |         if Lock.local_task_in_debug: | ||||||
|  | 
 | ||||||
|  |             # Recurrence entry case: this task already has the lock and | ||||||
|  |             # is likely recurrently entering a breakpoint | ||||||
|  |             if Lock.local_task_in_debug == task_name: | ||||||
|  |                 # noop on recurrent entry case but we want to trigger | ||||||
|  |                 # a checkpoint to allow other actors error-propagate and | ||||||
|  |                 # potetially avoid infinite re-entries in some subactor. | ||||||
|  |                 await trio.lowlevel.checkpoint() | ||||||
|  |                 return | ||||||
|  | 
 | ||||||
|  |             # if **this** actor is already in debug mode block here | ||||||
|  |             # waiting for the control to be released - this allows | ||||||
|  |             # support for recursive entries to `tractor.breakpoint()` | ||||||
|  |             log.warning(f"{actor.uid} already has a debug lock, waiting...") | ||||||
|  | 
 | ||||||
|  |             await Lock.local_pdb_complete.wait() | ||||||
|  |             await trio.sleep(0.1) | ||||||
|  | 
 | ||||||
|  |         # mark local actor as "in debug mode" to avoid recurrent | ||||||
|  |         # entries/requests to the root process | ||||||
|  |         Lock.local_task_in_debug = task_name | ||||||
|  | 
 | ||||||
|  |         # this **must** be awaited by the caller and is done using the | ||||||
|  |         # root nursery so that the debugger can continue to run without | ||||||
|  |         # being restricted by the scope of a new task nursery. | ||||||
|  | 
 | ||||||
|  |         # TODO: if we want to debug a trio.Cancelled triggered exception | ||||||
|  |         # we have to figure out how to avoid having the service nursery | ||||||
|  |         # cancel on this task start? I *think* this works below: | ||||||
|  |         # ```python | ||||||
|  |         #   actor._service_n.cancel_scope.shield = shield | ||||||
|  |         # ``` | ||||||
|  |         # but not entirely sure if that's a sane way to implement it? | ||||||
|  |         try: | ||||||
|  |             with trio.CancelScope(shield=True): | ||||||
|  |                 await actor._service_n.start( | ||||||
|  |                     wait_for_parent_stdin_hijack, | ||||||
|  |                     actor.uid, | ||||||
|  |                 ) | ||||||
|  |                 Lock.repl = pdb | ||||||
|  |         except RuntimeError: | ||||||
|  |             Lock.release() | ||||||
|  | 
 | ||||||
|  |             if actor._cancel_called: | ||||||
|  |                 # service nursery won't be usable and we | ||||||
|  |                 # don't want to lock up the root either way since | ||||||
|  |                 # we're in (the midst of) cancellation. | ||||||
|  |                 return | ||||||
|  | 
 | ||||||
|  |             raise | ||||||
|  | 
 | ||||||
|  |     elif is_root_process(): | ||||||
|  | 
 | ||||||
|  |         # we also wait in the root-parent for any child that | ||||||
|  |         # may have the tty locked prior | ||||||
|  |         # TODO: wait, what about multiple root tasks acquiring it though? | ||||||
|  |         if Lock.global_actor_in_debug == actor.uid: | ||||||
|  |             # re-entrant root process already has it: noop. | ||||||
|  |             return | ||||||
|  | 
 | ||||||
|  |         # XXX: since we need to enter pdb synchronously below, | ||||||
|  |         # we have to release the lock manually from pdb completion | ||||||
|  |         # callbacks. Can't think of a nicer way then this atm. | ||||||
|  |         if Lock._debug_lock.locked(): | ||||||
|  |             log.warning( | ||||||
|  |                 'Root actor attempting to shield-acquire active tty lock' | ||||||
|  |                 f' owned by {Lock.global_actor_in_debug}') | ||||||
|  | 
 | ||||||
|  |             # must shield here to avoid hitting a ``Cancelled`` and | ||||||
|  |             # a child getting stuck bc we clobbered the tty | ||||||
|  |             with trio.CancelScope(shield=True): | ||||||
|  |                 await Lock._debug_lock.acquire() | ||||||
|  |         else: | ||||||
|  |             # may be cancelled | ||||||
|  |             await Lock._debug_lock.acquire() | ||||||
|  | 
 | ||||||
|  |         Lock.global_actor_in_debug = actor.uid | ||||||
|  |         Lock.local_task_in_debug = task_name | ||||||
|  |         Lock.repl = pdb | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         # block here one (at the appropriate frame *up*) where | ||||||
|  |         # ``breakpoint()`` was awaited and begin handling stdio. | ||||||
|  |         log.debug("Entering the synchronous world of pdb") | ||||||
|  |         debug_func(actor, pdb) | ||||||
|  | 
 | ||||||
|  |     except bdb.BdbQuit: | ||||||
|  |         Lock.release() | ||||||
|  |         raise | ||||||
|  | 
 | ||||||
|  |     # XXX: apparently we can't do this without showing this frame | ||||||
|  |     # in the backtrace on first entry to the REPL? Seems like an odd | ||||||
|  |     # behaviour that should have been fixed by now. This is also why | ||||||
|  |     # we scrapped all the @cm approaches that were tried previously. | ||||||
|  |     # finally: | ||||||
|  |     #     __tracebackhide__ = True | ||||||
|  |     #     # frame = sys._getframe() | ||||||
|  |     #     # last_f = frame.f_back | ||||||
|  |     #     # last_f.f_globals['__tracebackhide__'] = True | ||||||
|  |     #     # signal.signal = pdbp.hideframe(signal.signal) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def shield_sigint_handler( | ||||||
|  |     signum: int, | ||||||
|  |     frame: 'frame',  # type: ignore # noqa | ||||||
|  |     # pdb_obj: Optional[MultiActorPdb] = None, | ||||||
|  |     *args, | ||||||
|  | 
 | ||||||
|  | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     Specialized, debugger-aware SIGINT handler. | ||||||
|  | 
 | ||||||
|  |     In childred we always ignore to avoid deadlocks since cancellation | ||||||
|  |     should always be managed by the parent supervising actor. The root | ||||||
|  |     is always cancelled on ctrl-c. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     __tracebackhide__ = True | ||||||
|  | 
 | ||||||
|  |     uid_in_debug = Lock.global_actor_in_debug | ||||||
|  | 
 | ||||||
|  |     actor = tractor.current_actor() | ||||||
|  |     # print(f'{actor.uid} in HANDLER with ') | ||||||
|  | 
 | ||||||
|  |     def do_cancel(): | ||||||
|  |         # If we haven't tried to cancel the runtime then do that instead | ||||||
|  |         # of raising a KBI (which may non-gracefully destroy | ||||||
|  |         # a ``trio.run()``). | ||||||
|  |         if not actor._cancel_called: | ||||||
|  |             actor.cancel_soon() | ||||||
|  | 
 | ||||||
|  |         # If the runtime is already cancelled it likely means the user | ||||||
|  |         # hit ctrl-c again because teardown didn't full take place in | ||||||
|  |         # which case we do the "hard" raising of a local KBI. | ||||||
|  |         else: | ||||||
|  |             raise KeyboardInterrupt | ||||||
|  | 
 | ||||||
|  |     any_connected = False | ||||||
|  | 
 | ||||||
|  |     if uid_in_debug is not None: | ||||||
|  |         # try to see if the supposed (sub)actor in debug still | ||||||
|  |         # has an active connection to *this* actor, and if not | ||||||
|  |         # it's likely they aren't using the TTY lock / debugger | ||||||
|  |         # and we should propagate SIGINT normally. | ||||||
|  |         chans = actor._peers.get(tuple(uid_in_debug)) | ||||||
|  |         if chans: | ||||||
|  |             any_connected = any(chan.connected() for chan in chans) | ||||||
|  |             if not any_connected: | ||||||
|  |                 log.warning( | ||||||
|  |                     'A global actor reported to be in debug ' | ||||||
|  |                     'but no connection exists for this child:\n' | ||||||
|  |                     f'{uid_in_debug}\n' | ||||||
|  |                     'Allowing SIGINT propagation..' | ||||||
|  |                 ) | ||||||
|  |                 return do_cancel() | ||||||
|  | 
 | ||||||
|  |     # only set in the actor actually running the REPL | ||||||
|  |     pdb_obj = Lock.repl | ||||||
|  | 
 | ||||||
|  |     # root actor branch that reports whether or not a child | ||||||
|  |     # has locked debugger. | ||||||
|  |     if ( | ||||||
|  |         is_root_process() | ||||||
|  |         and uid_in_debug is not None | ||||||
|  | 
 | ||||||
|  |         # XXX: only if there is an existing connection to the | ||||||
|  |         # (sub-)actor in debug do we ignore SIGINT in this | ||||||
|  |         # parent! Otherwise we may hang waiting for an actor | ||||||
|  |         # which has already terminated to unlock. | ||||||
|  |         and any_connected | ||||||
|  |     ): | ||||||
|  |         # we are root and some actor is in debug mode | ||||||
|  |         # if uid_in_debug is not None: | ||||||
|  | 
 | ||||||
|  |         if pdb_obj: | ||||||
|  |             name = uid_in_debug[0] | ||||||
|  |             if name != 'root': | ||||||
|  |                 log.pdb( | ||||||
|  |                     f"Ignoring SIGINT, child in debug mode: `{uid_in_debug}`" | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 log.pdb( | ||||||
|  |                     "Ignoring SIGINT while in debug mode" | ||||||
|  |                 ) | ||||||
|  |     elif ( | ||||||
|  |         is_root_process() | ||||||
|  |     ): | ||||||
|  |         if pdb_obj: | ||||||
|  |             log.pdb( | ||||||
|  |                 "Ignoring SIGINT since debug mode is enabled" | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |         if ( | ||||||
|  |             Lock._root_local_task_cs_in_debug | ||||||
|  |             and not Lock._root_local_task_cs_in_debug.cancel_called | ||||||
|  |         ): | ||||||
|  |             Lock._root_local_task_cs_in_debug.cancel() | ||||||
|  | 
 | ||||||
|  |             # revert back to ``trio`` handler asap! | ||||||
|  |             Lock.unshield_sigint() | ||||||
|  | 
 | ||||||
|  |     # child actor that has locked the debugger | ||||||
|  |     elif not is_root_process(): | ||||||
|  | 
 | ||||||
|  |         chan: Channel = actor._parent_chan | ||||||
|  |         if not chan or not chan.connected(): | ||||||
|  |             log.warning( | ||||||
|  |                 'A global actor reported to be in debug ' | ||||||
|  |                 'but no connection exists for its parent:\n' | ||||||
|  |                 f'{uid_in_debug}\n' | ||||||
|  |                 'Allowing SIGINT propagation..' | ||||||
|  |             ) | ||||||
|  |             return do_cancel() | ||||||
|  | 
 | ||||||
|  |         task = Lock.local_task_in_debug | ||||||
|  |         if ( | ||||||
|  |             task | ||||||
|  |             and pdb_obj | ||||||
|  |         ): | ||||||
|  |             log.pdb( | ||||||
|  |                 f"Ignoring SIGINT while task in debug mode: `{task}`" | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |         # TODO: how to handle the case of an intermediary-child actor | ||||||
|  |         # that **is not** marked in debug mode? See oustanding issue: | ||||||
|  |         # https://github.com/goodboy/tractor/issues/320 | ||||||
|  |         # elif debug_mode(): | ||||||
|  | 
 | ||||||
|  |     else:  # XXX: shouldn't ever get here? | ||||||
|  |         print("WTFWTFWTF") | ||||||
|  |         raise KeyboardInterrupt | ||||||
|  | 
 | ||||||
|  |     # NOTE: currently (at least on ``fancycompleter`` 0.9.2) | ||||||
|  |     # it looks to be that the last command that was run (eg. ll) | ||||||
|  |     # will be repeated by default. | ||||||
|  | 
 | ||||||
|  |     # maybe redraw/print last REPL output to console since | ||||||
|  |     # we want to alert the user that more input is expect since | ||||||
|  |     # nothing has been done dur to ignoring sigint. | ||||||
|  |     if ( | ||||||
|  |         pdb_obj  # only when this actor has a REPL engaged | ||||||
|  |     ): | ||||||
|  |         # XXX: yah, mega hack, but how else do we catch this madness XD | ||||||
|  |         if pdb_obj.shname == 'xonsh': | ||||||
|  |             pdb_obj.stdout.write(pdb_obj.prompt) | ||||||
|  | 
 | ||||||
|  |         pdb_obj.stdout.flush() | ||||||
|  | 
 | ||||||
|  |         # TODO: make this work like sticky mode where if there is output | ||||||
|  |         # detected as written to the tty we redraw this part underneath | ||||||
|  |         # and erase the past draw of this same bit above? | ||||||
|  |         # pdb_obj.sticky = True | ||||||
|  |         # pdb_obj._print_if_sticky() | ||||||
|  | 
 | ||||||
|  |         # also see these links for an approach from ``ptk``: | ||||||
|  |         # https://github.com/goodboy/tractor/issues/130#issuecomment-663752040 | ||||||
|  |         # https://github.com/prompt-toolkit/python-prompt-toolkit/blob/c2c6af8a0308f9e5d7c0e28cb8a02963fe0ce07a/prompt_toolkit/patch_stdout.py | ||||||
|  | 
 | ||||||
|  |         # XXX LEGACY: lol, see ``pdbpp`` issue: | ||||||
|  |         # https://github.com/pdbpp/pdbpp/issues/496 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _set_trace( | ||||||
|  |     actor: tractor.Actor | None = None, | ||||||
|  |     pdb: MultiActorPdb | None = None, | ||||||
|  | ): | ||||||
|  |     __tracebackhide__ = True | ||||||
|  |     actor = actor or tractor.current_actor() | ||||||
|  | 
 | ||||||
|  |     # start 2 levels up in user code | ||||||
|  |     frame: Optional[FrameType] = sys._getframe() | ||||||
|  |     if frame: | ||||||
|  |         frame = frame.f_back  # type: ignore | ||||||
|  | 
 | ||||||
|  |     if ( | ||||||
|  |         frame | ||||||
|  |         and pdb | ||||||
|  |         and actor is not None | ||||||
|  |     ): | ||||||
|  |         log.pdb(f"\nAttaching pdb to actor: {actor.uid}\n") | ||||||
|  |         # no f!#$&* idea, but when we're in async land | ||||||
|  |         # we need 2x frames up? | ||||||
|  |         frame = frame.f_back | ||||||
|  | 
 | ||||||
|  |     else: | ||||||
|  |         pdb, undo_sigint = mk_mpdb() | ||||||
|  | 
 | ||||||
|  |         # we entered the global ``breakpoint()`` built-in from sync | ||||||
|  |         # code? | ||||||
|  |         Lock.local_task_in_debug = 'sync' | ||||||
|  | 
 | ||||||
|  |     pdb.set_trace(frame=frame) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | breakpoint = partial( | ||||||
|  |     _breakpoint, | ||||||
|  |     _set_trace, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _post_mortem( | ||||||
|  |     actor: tractor.Actor, | ||||||
|  |     pdb: MultiActorPdb, | ||||||
|  | 
 | ||||||
|  | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     Enter the ``pdbpp`` port mortem entrypoint using our custom | ||||||
|  |     debugger instance. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     log.pdb(f"\nAttaching to pdb in crashed actor: {actor.uid}\n") | ||||||
|  | 
 | ||||||
|  |     # TODO: you need ``pdbpp`` master (at least this commit | ||||||
|  |     # https://github.com/pdbpp/pdbpp/commit/b757794857f98d53e3ebbe70879663d7d843a6c2) | ||||||
|  |     # to fix this and avoid the hang it causes. See issue: | ||||||
|  |     # https://github.com/pdbpp/pdbpp/issues/480 | ||||||
|  |     # TODO: help with a 3.10+ major release if/when it arrives. | ||||||
|  | 
 | ||||||
|  |     pdbp.xpm(Pdb=lambda: pdb) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | post_mortem = partial( | ||||||
|  |     _breakpoint, | ||||||
|  |     _post_mortem, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def _maybe_enter_pm(err): | ||||||
|  |     if ( | ||||||
|  |         debug_mode() | ||||||
|  | 
 | ||||||
|  |         # NOTE: don't enter debug mode recursively after quitting pdb | ||||||
|  |         # Iow, don't re-enter the repl if the `quit` command was issued | ||||||
|  |         # by the user. | ||||||
|  |         and not isinstance(err, bdb.BdbQuit) | ||||||
|  | 
 | ||||||
|  |         # XXX: if the error is the likely result of runtime-wide | ||||||
|  |         # cancellation, we don't want to enter the debugger since | ||||||
|  |         # there's races between when the parent actor has killed all | ||||||
|  |         # comms and when the child tries to contact said parent to | ||||||
|  |         # acquire the tty lock. | ||||||
|  | 
 | ||||||
|  |         # Really we just want to mostly avoid catching KBIs here so there | ||||||
|  |         # might be a simpler check we can do? | ||||||
|  |         and not is_multi_cancelled(err) | ||||||
|  |     ): | ||||||
|  |         log.debug("Actor crashed, entering debug mode") | ||||||
|  |         try: | ||||||
|  |             await post_mortem() | ||||||
|  |         finally: | ||||||
|  |             Lock.release() | ||||||
|  |             return True | ||||||
|  | 
 | ||||||
|  |     else: | ||||||
|  |         return False | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @acm | ||||||
|  | async def acquire_debug_lock( | ||||||
|  |     subactor_uid: tuple[str, str], | ||||||
|  | ) -> AsyncGenerator[None, tuple]: | ||||||
|  |     ''' | ||||||
|  |     Grab root's debug lock on entry, release on exit. | ||||||
|  | 
 | ||||||
|  |     This helper is for actor's who don't actually need | ||||||
|  |     to acquired the debugger but want to wait until the | ||||||
|  |     lock is free in the process-tree root. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     if not debug_mode(): | ||||||
|  |         yield None | ||||||
|  |         return | ||||||
|  | 
 | ||||||
|  |     async with trio.open_nursery() as n: | ||||||
|  |         cs = await n.start( | ||||||
|  |             wait_for_parent_stdin_hijack, | ||||||
|  |             subactor_uid, | ||||||
|  |         ) | ||||||
|  |         yield None | ||||||
|  |         cs.cancel() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def maybe_wait_for_debugger( | ||||||
|  |     poll_steps: int = 2, | ||||||
|  |     poll_delay: float = 0.1, | ||||||
|  |     child_in_debug: bool = False, | ||||||
|  | 
 | ||||||
|  | ) -> None: | ||||||
|  | 
 | ||||||
|  |     if ( | ||||||
|  |         not debug_mode() | ||||||
|  |         and not child_in_debug | ||||||
|  |     ): | ||||||
|  |         return | ||||||
|  | 
 | ||||||
|  |     if ( | ||||||
|  |         is_root_process() | ||||||
|  |     ): | ||||||
|  |         # If we error in the root but the debugger is | ||||||
|  |         # engaged we don't want to prematurely kill (and | ||||||
|  |         # thus clobber access to) the local tty since it | ||||||
|  |         # will make the pdb repl unusable. | ||||||
|  |         # Instead try to wait for pdb to be released before | ||||||
|  |         # tearing down. | ||||||
|  | 
 | ||||||
|  |         sub_in_debug = None | ||||||
|  | 
 | ||||||
|  |         for _ in range(poll_steps): | ||||||
|  | 
 | ||||||
|  |             if Lock.global_actor_in_debug: | ||||||
|  |                 sub_in_debug = tuple(Lock.global_actor_in_debug) | ||||||
|  | 
 | ||||||
|  |             log.debug('Root polling for debug') | ||||||
|  | 
 | ||||||
|  |             with trio.CancelScope(shield=True): | ||||||
|  |                 await trio.sleep(poll_delay) | ||||||
|  | 
 | ||||||
|  |                 # TODO: could this make things more deterministic?  wait | ||||||
|  |                 # to see if a sub-actor task will be scheduled and grab | ||||||
|  |                 # the tty lock on the next tick? | ||||||
|  |                 # XXX: doesn't seem to work | ||||||
|  |                 # await trio.testing.wait_all_tasks_blocked(cushion=0) | ||||||
|  | 
 | ||||||
|  |                 debug_complete = Lock.no_remote_has_tty | ||||||
|  |                 if ( | ||||||
|  |                     (debug_complete and | ||||||
|  |                      not debug_complete.is_set()) | ||||||
|  |                 ): | ||||||
|  |                     log.debug( | ||||||
|  |                         'Root has errored but pdb is in use by ' | ||||||
|  |                         f'child {sub_in_debug}\n' | ||||||
|  |                         'Waiting on tty lock to release..') | ||||||
|  | 
 | ||||||
|  |                     await debug_complete.wait() | ||||||
|  | 
 | ||||||
|  |                 await trio.sleep(poll_delay) | ||||||
|  |                 continue | ||||||
|  |         else: | ||||||
|  |             log.debug( | ||||||
|  |                     'Root acquired TTY LOCK' | ||||||
|  |             ) | ||||||
|  | @ -15,79 +15,52 @@ | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| Discovery (protocols) API for automatic addressing and location | Actor discovery API. | ||||||
| management of (service) actors. |  | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from __future__ import annotations |  | ||||||
| from typing import ( | from typing import ( | ||||||
|  |     Optional, | ||||||
|  |     Union, | ||||||
|     AsyncGenerator, |     AsyncGenerator, | ||||||
|     AsyncContextManager, |  | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) | ) | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import asynccontextmanager as acm | ||||||
| 
 | 
 | ||||||
| from tractor.log import get_logger | from ._ipc import _connect_chan, Channel | ||||||
| from .trionics import ( |  | ||||||
|     gather_contexts, |  | ||||||
|     collapse_eg, |  | ||||||
| ) |  | ||||||
| from .ipc import _connect_chan, Channel |  | ||||||
| from ._addr import ( |  | ||||||
|     UnwrappedAddress, |  | ||||||
|     Address, |  | ||||||
|     wrap_address |  | ||||||
| ) |  | ||||||
| from ._portal import ( | from ._portal import ( | ||||||
|     Portal, |     Portal, | ||||||
|     open_portal, |     open_portal, | ||||||
|     LocalPortal, |     LocalPortal, | ||||||
| ) | ) | ||||||
| from ._state import ( | from ._state import current_actor, _runtime_vars | ||||||
|     current_actor, |  | ||||||
|     _runtime_vars, |  | ||||||
|     _def_tpt_proto, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from ._runtime import Actor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| log = get_logger(__name__) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def get_registry( | async def get_arbiter( | ||||||
|     addr: UnwrappedAddress|None = None, | 
 | ||||||
| ) -> AsyncGenerator[ |     host: str, | ||||||
|     Portal | LocalPortal | None, |     port: int, | ||||||
|     None, | 
 | ||||||
| ]: | ) -> AsyncGenerator[Union[Portal, LocalPortal], None]: | ||||||
|     ''' |     ''' | ||||||
|     Return a portal instance connected to a local or remote |     Return a portal instance connected to a local or remote | ||||||
|     registry-service actor; if a connection already exists re-use it |     arbiter. | ||||||
|     (presumably to call a `.register_actor()` registry runtime RPC |  | ||||||
|     ep). |  | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     actor: Actor = current_actor() |     actor = current_actor() | ||||||
|     if actor.is_registrar: | 
 | ||||||
|  |     if not actor: | ||||||
|  |         raise RuntimeError("No actor instance has been defined yet?") | ||||||
|  | 
 | ||||||
|  |     if actor.is_arbiter: | ||||||
|         # we're already the arbiter |         # we're already the arbiter | ||||||
|         # (likely a re-entrant call from the arbiter actor) |         # (likely a re-entrant call from the arbiter actor) | ||||||
|         yield LocalPortal( |         yield LocalPortal(actor, Channel((host, port))) | ||||||
|             actor, |  | ||||||
|             Channel(transport=None) |  | ||||||
|             # ^XXX, we DO NOT actually provide nor connect an |  | ||||||
|             # underlying transport since this is merely an API shim. |  | ||||||
|         ) |  | ||||||
|     else: |     else: | ||||||
|         # TODO: try to look pre-existing connection from |         async with _connect_chan(host, port) as chan: | ||||||
|         # `Server._peers` and use it instead? | 
 | ||||||
|         async with ( |             async with open_portal(chan) as arb_portal: | ||||||
|             _connect_chan(addr) as chan, | 
 | ||||||
|             open_portal(chan) as regstr_ptl, |                 yield arb_portal | ||||||
|         ): |  | ||||||
|             yield regstr_ptl |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
|  | @ -95,131 +68,51 @@ async def get_root( | ||||||
|     **kwargs, |     **kwargs, | ||||||
| ) -> AsyncGenerator[Portal, None]: | ) -> AsyncGenerator[Portal, None]: | ||||||
| 
 | 
 | ||||||
|     # TODO: rename mailbox to `_root_maddr` when we finally |     host, port = _runtime_vars['_root_mailbox'] | ||||||
|     # add and impl libp2p multi-addrs? |     assert host is not None | ||||||
|     addr = _runtime_vars['_root_mailbox'] |  | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with _connect_chan(host, port) as chan: | ||||||
|         _connect_chan(addr) as chan, |         async with open_portal(chan, **kwargs) as portal: | ||||||
|         open_portal(chan, **kwargs) as portal, |             yield portal | ||||||
|     ): |  | ||||||
|         yield portal |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def get_peer_by_name( |  | ||||||
|     name: str, |  | ||||||
|     # uuid: str|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> list[Channel]|None:  # at least 1 |  | ||||||
|     ''' |  | ||||||
|     Scan for an existing connection (set) to a named actor |  | ||||||
|     and return any channels from `Server._peers: dict`. |  | ||||||
| 
 |  | ||||||
|     This is an optimization method over querying the registrar for |  | ||||||
|     the same info. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     actor: Actor = current_actor() |  | ||||||
|     to_scan: dict[tuple, list[Channel]] = actor.ipc_server._peers.copy() |  | ||||||
| 
 |  | ||||||
|     # TODO: is this ever needed? creates a duplicate channel on actor._peers |  | ||||||
|     # when multiple find_actor calls are made to same actor from a single ctx |  | ||||||
|     # which causes actor exit to hang waiting forever on |  | ||||||
|     # `actor._no_more_peers.wait()` in `_runtime.async_main` |  | ||||||
| 
 |  | ||||||
|     # pchan: Channel|None = actor._parent_chan |  | ||||||
|     # if pchan and pchan.uid not in to_scan: |  | ||||||
|     #     to_scan[pchan.uid].append(pchan) |  | ||||||
| 
 |  | ||||||
|     for aid, chans in to_scan.items(): |  | ||||||
|         _, peer_name = aid |  | ||||||
|         if name == peer_name: |  | ||||||
|             if not chans: |  | ||||||
|                 log.warning( |  | ||||||
|                     'No IPC chans for matching peer {peer_name}\n' |  | ||||||
|                 ) |  | ||||||
|                 continue |  | ||||||
|             return chans |  | ||||||
| 
 |  | ||||||
|     return None |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def query_actor( | async def query_actor( | ||||||
|     name: str, |     name: str, | ||||||
|     regaddr: UnwrappedAddress|None = None, |     arbiter_sockaddr: Optional[tuple[str, int]] = None, | ||||||
| 
 | 
 | ||||||
| ) -> AsyncGenerator[ | ) -> AsyncGenerator[tuple[str, int], None]: | ||||||
|     UnwrappedAddress|None, |  | ||||||
|     None, |  | ||||||
| ]: |  | ||||||
|     ''' |     ''' | ||||||
|     Lookup a transport address (by actor name) via querying a registrar |     Simple address lookup for a given actor name. | ||||||
|     listening @ `regaddr`. |  | ||||||
| 
 | 
 | ||||||
|     Returns the transport protocol (socket) address or `None` if no |     Returns the (socket) address or ``None``. | ||||||
|     entry under that name exists. |  | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     actor: Actor = current_actor() |     actor = current_actor() | ||||||
|     if ( |     async with get_arbiter( | ||||||
|         name == 'registrar' |         *arbiter_sockaddr or actor._arb_addr | ||||||
|         and actor.is_registrar |     ) as arb_portal: | ||||||
|     ): |  | ||||||
|         raise RuntimeError( |  | ||||||
|             'The current actor IS the registry!?' |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) |         sockaddr = await arb_portal.run_from_ns( | ||||||
|     if maybe_peers: |  | ||||||
|         yield maybe_peers[0].raddr |  | ||||||
|         return |  | ||||||
| 
 |  | ||||||
|     reg_portal: Portal |  | ||||||
|     regaddr: Address = wrap_address(regaddr) or actor.reg_addrs[0] |  | ||||||
|     async with get_registry(regaddr) as reg_portal: |  | ||||||
|         # TODO: return portals to all available actors - for now |  | ||||||
|         # just the last one that registered |  | ||||||
|         addr: UnwrappedAddress = await reg_portal.run_from_ns( |  | ||||||
|             'self', |             'self', | ||||||
|             'find_actor', |             'find_actor', | ||||||
|             name=name, |             name=name, | ||||||
|         ) |         ) | ||||||
|         yield addr |  | ||||||
| 
 | 
 | ||||||
|  |         # TODO: return portals to all available actors - for now just | ||||||
|  |         # the last one that registered | ||||||
|  |         if name == 'arbiter' and actor.is_arbiter: | ||||||
|  |             raise RuntimeError("The current actor is the arbiter") | ||||||
| 
 | 
 | ||||||
| @acm |         yield sockaddr if sockaddr else None | ||||||
| async def maybe_open_portal( |  | ||||||
|     addr: UnwrappedAddress, |  | ||||||
|     name: str, |  | ||||||
| ): |  | ||||||
|     async with query_actor( |  | ||||||
|         name=name, |  | ||||||
|         regaddr=addr, |  | ||||||
|     ) as addr: |  | ||||||
|         pass |  | ||||||
| 
 |  | ||||||
|     if addr: |  | ||||||
|         async with _connect_chan(addr) as chan: |  | ||||||
|             async with open_portal(chan) as portal: |  | ||||||
|                 yield portal |  | ||||||
|     else: |  | ||||||
|         yield None |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def find_actor( | async def find_actor( | ||||||
|     name: str, |     name: str, | ||||||
|     registry_addrs: list[UnwrappedAddress]|None = None, |     arbiter_sockaddr: tuple[str, int] | None = None | ||||||
|     enable_transports: list[str] = [_def_tpt_proto], |  | ||||||
| 
 | 
 | ||||||
|     only_first: bool = True, | ) -> AsyncGenerator[Optional[Portal], None]: | ||||||
|     raise_on_none: bool = False, |  | ||||||
| 
 |  | ||||||
| ) -> AsyncGenerator[ |  | ||||||
|     Portal | list[Portal] | None, |  | ||||||
|     None, |  | ||||||
| ]: |  | ||||||
|     ''' |     ''' | ||||||
|     Ask the arbiter to find actor(s) by name. |     Ask the arbiter to find actor(s) by name. | ||||||
| 
 | 
 | ||||||
|  | @ -227,106 +120,44 @@ async def find_actor( | ||||||
|     known to the arbiter. |     known to the arbiter. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # optimization path, use any pre-existing peer channel |     async with query_actor( | ||||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) |         name=name, | ||||||
|     if maybe_peers and only_first: |         arbiter_sockaddr=arbiter_sockaddr, | ||||||
|         async with open_portal(maybe_peers[0]) as peer_portal: |     ) as sockaddr: | ||||||
|             yield peer_portal |  | ||||||
|             return |  | ||||||
| 
 |  | ||||||
|     if not registry_addrs: |  | ||||||
|         # XXX NOTE: make sure to dynamically read the value on |  | ||||||
|         # every call since something may change it globally (eg. |  | ||||||
|         # like in our discovery test suite)! |  | ||||||
|         from ._addr import default_lo_addrs |  | ||||||
|         registry_addrs = ( |  | ||||||
|             _runtime_vars['_registry_addrs'] |  | ||||||
|             or |  | ||||||
|             default_lo_addrs(enable_transports) |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     maybe_portals: list[ |  | ||||||
|         AsyncContextManager[UnwrappedAddress] |  | ||||||
|     ] = list( |  | ||||||
|         maybe_open_portal( |  | ||||||
|             addr=addr, |  | ||||||
|             name=name, |  | ||||||
|         ) |  | ||||||
|         for addr in registry_addrs |  | ||||||
|     ) |  | ||||||
|     portals: list[Portal] |  | ||||||
|     async with ( |  | ||||||
|         collapse_eg(), |  | ||||||
|         gather_contexts( |  | ||||||
|             mngrs=maybe_portals, |  | ||||||
|         ) as portals, |  | ||||||
|     ): |  | ||||||
|         # log.runtime( |  | ||||||
|         #     'Gathered portals:\n' |  | ||||||
|         #     f'{portals}' |  | ||||||
|         # ) |  | ||||||
|         # NOTE: `gather_contexts()` will return a |  | ||||||
|         # `tuple[None, None, ..., None]` if no contact |  | ||||||
|         # can be made with any regstrar at any of the |  | ||||||
|         # N provided addrs! |  | ||||||
|         if not any(portals): |  | ||||||
|             if raise_on_none: |  | ||||||
|                 raise RuntimeError( |  | ||||||
|                     f'No actor "{name}" found registered @ {registry_addrs}' |  | ||||||
|                 ) |  | ||||||
|             yield None |  | ||||||
|             return |  | ||||||
| 
 |  | ||||||
|         portals: list[Portal] = list(portals) |  | ||||||
|         if only_first: |  | ||||||
|             yield portals[0] |  | ||||||
| 
 | 
 | ||||||
|  |         if sockaddr: | ||||||
|  |             async with _connect_chan(*sockaddr) as chan: | ||||||
|  |                 async with open_portal(chan) as portal: | ||||||
|  |                     yield portal | ||||||
|         else: |         else: | ||||||
|             # TODO: currently this may return multiple portals |             yield None | ||||||
|             # given there are multi-homed or multiple registrars.. |  | ||||||
|             # SO, we probably need de-duplication logic? |  | ||||||
|             yield portals |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def wait_for_actor( | async def wait_for_actor( | ||||||
|     name: str, |     name: str, | ||||||
|     registry_addr: UnwrappedAddress | None = None, |     arbiter_sockaddr: tuple[str, int] | None = None, | ||||||
|  |     # registry_addr: tuple[str, int] | None = None, | ||||||
| 
 | 
 | ||||||
| ) -> AsyncGenerator[Portal, None]: | ) -> AsyncGenerator[Portal, None]: | ||||||
|     ''' |     ''' | ||||||
|     Wait on at least one peer actor to register `name` with the |     Wait on an actor to register with the arbiter. | ||||||
|     registrar, yield a `Portal to the first registree. | 
 | ||||||
|  |     A portal to the first registered actor is returned. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     actor: Actor = current_actor() |     actor = current_actor() | ||||||
| 
 | 
 | ||||||
|     # optimization path, use any pre-existing peer channel |     async with get_arbiter( | ||||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) |         *arbiter_sockaddr or actor._arb_addr, | ||||||
|     if maybe_peers: |     ) as arb_portal: | ||||||
|         async with open_portal(maybe_peers[0]) as peer_portal: |         sockaddrs = await arb_portal.run_from_ns( | ||||||
|             yield peer_portal |  | ||||||
|             return |  | ||||||
| 
 |  | ||||||
|     regaddr: UnwrappedAddress = ( |  | ||||||
|         registry_addr |  | ||||||
|         or |  | ||||||
|         actor.reg_addrs[0] |  | ||||||
|     ) |  | ||||||
|     # TODO: use `.trionics.gather_contexts()` like |  | ||||||
|     # above in `find_actor()` as well? |  | ||||||
|     reg_portal: Portal |  | ||||||
|     async with get_registry(regaddr) as reg_portal: |  | ||||||
|         addrs = await reg_portal.run_from_ns( |  | ||||||
|             'self', |             'self', | ||||||
|             'wait_for_actor', |             'wait_for_actor', | ||||||
|             name=name, |             name=name, | ||||||
|         ) |         ) | ||||||
|  |         sockaddr = sockaddrs[-1] | ||||||
| 
 | 
 | ||||||
|         # get latest registered addr by default? |         async with _connect_chan(*sockaddr) as chan: | ||||||
|         # TODO: offer multi-portal yields in multi-homed case? |  | ||||||
|         addr: UnwrappedAddress = addrs[-1] |  | ||||||
| 
 |  | ||||||
|         async with _connect_chan(addr) as chan: |  | ||||||
|             async with open_portal(chan) as portal: |             async with open_portal(chan) as portal: | ||||||
|                 yield portal |                 yield portal | ||||||
|  |  | ||||||
|  | @ -20,8 +20,6 @@ Sub-process entry points. | ||||||
| """ | """ | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| from functools import partial | from functools import partial | ||||||
| import multiprocessing as mp |  | ||||||
| # import os |  | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     TYPE_CHECKING, |     TYPE_CHECKING, | ||||||
|  | @ -34,13 +32,7 @@ from .log import ( | ||||||
|     get_logger, |     get_logger, | ||||||
| ) | ) | ||||||
| from . import _state | from . import _state | ||||||
| from .devx import ( |  | ||||||
|     _frame_stack, |  | ||||||
|     pformat, |  | ||||||
| ) |  | ||||||
| # from .msg import pretty_struct |  | ||||||
| from .to_asyncio import run_as_asyncio_guest | from .to_asyncio import run_as_asyncio_guest | ||||||
| from ._addr import UnwrappedAddress |  | ||||||
| from ._runtime import ( | from ._runtime import ( | ||||||
|     async_main, |     async_main, | ||||||
|     Actor, |     Actor, | ||||||
|  | @ -55,40 +47,38 @@ log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| def _mp_main( | def _mp_main( | ||||||
| 
 | 
 | ||||||
|     actor: Actor, |     actor: Actor,  # type: ignore | ||||||
|     accept_addrs: list[UnwrappedAddress], |     accept_addr: tuple[str, int], | ||||||
|     forkserver_info: tuple[Any, Any, Any, Any, Any], |     forkserver_info: tuple[Any, Any, Any, Any, Any], | ||||||
|     start_method: SpawnMethodKey, |     start_method: SpawnMethodKey, | ||||||
|     parent_addr: UnwrappedAddress | None = None, |     parent_addr: tuple[str, int] | None = None, | ||||||
|     infect_asyncio: bool = False, |     infect_asyncio: bool = False, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     The routine called *after fork* which invokes a fresh `trio.run()` |     The routine called *after fork* which invokes a fresh ``trio.run`` | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     actor._forkserver_info = forkserver_info |     actor._forkserver_info = forkserver_info | ||||||
|     from ._spawn import try_set_start_method |     from ._spawn import try_set_start_method | ||||||
|     spawn_ctx: mp.context.BaseContext = try_set_start_method(start_method) |     spawn_ctx = try_set_start_method(start_method) | ||||||
|     assert spawn_ctx |  | ||||||
| 
 | 
 | ||||||
|     if actor.loglevel is not None: |     if actor.loglevel is not None: | ||||||
|         log.info( |         log.info( | ||||||
|             f'Setting loglevel for {actor.uid} to {actor.loglevel}' |             f"Setting loglevel for {actor.uid} to {actor.loglevel}") | ||||||
|         ) |  | ||||||
|         get_console_log(actor.loglevel) |         get_console_log(actor.loglevel) | ||||||
| 
 | 
 | ||||||
|     # TODO: use scops headers like for `trio` below! |     assert spawn_ctx | ||||||
|     # (well after we libify it maybe..) |  | ||||||
|     log.info( |     log.info( | ||||||
|         f'Started new {spawn_ctx.current_process()} for {actor.uid}' |         f"Started new {spawn_ctx.current_process()} for {actor.uid}") | ||||||
|     #     f"parent_addr is {parent_addr}" | 
 | ||||||
|     ) |     _state._current_actor = actor | ||||||
|     _state._current_actor: Actor = actor | 
 | ||||||
|  |     log.debug(f"parent_addr is {parent_addr}") | ||||||
|     trio_main = partial( |     trio_main = partial( | ||||||
|         async_main, |         async_main, | ||||||
|         actor=actor, |         actor, | ||||||
|         accept_addrs=accept_addrs, |         accept_addr, | ||||||
|         parent_addr=parent_addr |         parent_addr=parent_addr | ||||||
|     ) |     ) | ||||||
|     try: |     try: | ||||||
|  | @ -101,15 +91,14 @@ def _mp_main( | ||||||
|         pass  # handle it the same way trio does? |         pass  # handle it the same way trio does? | ||||||
| 
 | 
 | ||||||
|     finally: |     finally: | ||||||
|         log.info( |         log.info(f"Actor {actor.uid} terminated") | ||||||
|             f'`mp`-subactor {actor.uid} exited' |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def _trio_main( | def _trio_main( | ||||||
|     actor: Actor, | 
 | ||||||
|  |     actor: Actor,  # type: ignore | ||||||
|     *, |     *, | ||||||
|     parent_addr: UnwrappedAddress|None = None, |     parent_addr: tuple[str, int] | None = None, | ||||||
|     infect_asyncio: bool = False, |     infect_asyncio: bool = False, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  | @ -117,8 +106,6 @@ def _trio_main( | ||||||
|     Entry point for a `trio_run_in_process` subactor. |     Entry point for a `trio_run_in_process` subactor. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     _frame_stack.hide_runtime_frames() |  | ||||||
| 
 |  | ||||||
|     _state._current_actor = actor |     _state._current_actor = actor | ||||||
|     trio_main = partial( |     trio_main = partial( | ||||||
|         async_main, |         async_main, | ||||||
|  | @ -128,55 +115,36 @@ def _trio_main( | ||||||
| 
 | 
 | ||||||
|     if actor.loglevel is not None: |     if actor.loglevel is not None: | ||||||
|         get_console_log(actor.loglevel) |         get_console_log(actor.loglevel) | ||||||
|  |         import os | ||||||
|  |         actor_info: str = ( | ||||||
|  |             f'|_{actor}\n' | ||||||
|  |             f'  uid: {actor.uid}\n' | ||||||
|  |             f'  pid: {os.getpid()}\n' | ||||||
|  |             f'  parent_addr: {parent_addr}\n' | ||||||
|  |             f'  loglevel: {actor.loglevel}\n' | ||||||
|  |         ) | ||||||
|         log.info( |         log.info( | ||||||
|             f'Starting `trio` subactor from parent @ ' |             'Started new trio process:\n' | ||||||
|             f'{parent_addr}\n' |  | ||||||
|             + |             + | ||||||
|             pformat.nest_from_op( |             actor_info | ||||||
|                 input_op='>(',  # see syntax ideas above |  | ||||||
|                 text=f'{actor}', |  | ||||||
|             ) |  | ||||||
|         ) |         ) | ||||||
|     logmeth = log.info | 
 | ||||||
|     exit_status: str = ( |  | ||||||
|         'Subactor exited\n' |  | ||||||
|         + |  | ||||||
|         pformat.nest_from_op( |  | ||||||
|             input_op=')>',  # like a "closed-to-play"-icon from super perspective |  | ||||||
|             text=f'{actor}', |  | ||||||
|             nest_indent=1, |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|     try: |     try: | ||||||
|         if infect_asyncio: |         if infect_asyncio: | ||||||
|             actor._infected_aio = True |             actor._infected_aio = True | ||||||
|             run_as_asyncio_guest(trio_main) |             run_as_asyncio_guest(trio_main) | ||||||
|         else: |         else: | ||||||
|             trio.run(trio_main) |             trio.run(trio_main) | ||||||
| 
 |  | ||||||
|     except KeyboardInterrupt: |     except KeyboardInterrupt: | ||||||
|         logmeth = log.cancel |         log.cancel( | ||||||
|         exit_status: str = ( |             'Actor received KBI\n' | ||||||
|             'Actor received KBI (aka an OS-cancel)\n' |  | ||||||
|             + |             + | ||||||
|             pformat.nest_from_op( |             actor_info | ||||||
|                 input_op='c)>',  # closed due to cancel (see above) |  | ||||||
|                 text=f'{actor}', |  | ||||||
|             ) |  | ||||||
|         ) |         ) | ||||||
|     except BaseException as err: |  | ||||||
|         logmeth = log.error |  | ||||||
|         exit_status: str = ( |  | ||||||
|             'Main actor task exited due to crash?\n' |  | ||||||
|             + |  | ||||||
|             pformat.nest_from_op( |  | ||||||
|                 input_op='x)>',  # closed by error |  | ||||||
|                 text=f'{actor}', |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         # NOTE since we raise a tb will already be shown on the |  | ||||||
|         # console, thus we do NOT use `.exception()` above. |  | ||||||
|         raise err |  | ||||||
| 
 | 
 | ||||||
|     finally: |     finally: | ||||||
|         logmeth(exit_status) |         log.info( | ||||||
|  |             'Actor terminated\n' | ||||||
|  |             + | ||||||
|  |             actor_info | ||||||
|  |         ) | ||||||
|  |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -0,0 +1,530 @@ | ||||||
|  | # tractor: structured concurrent "actors". | ||||||
|  | # Copyright 2018-eternity Tyler Goodlet. | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | Inter-process comms abstractions | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | from __future__ import annotations | ||||||
|  | from collections.abc import ( | ||||||
|  |     AsyncGenerator, | ||||||
|  |     AsyncIterator, | ||||||
|  | ) | ||||||
|  | from contextlib import asynccontextmanager as acm | ||||||
|  | import platform | ||||||
|  | from pprint import pformat | ||||||
|  | import struct | ||||||
|  | import typing | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     runtime_checkable, | ||||||
|  |     Protocol, | ||||||
|  |     Type, | ||||||
|  |     TypeVar, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | import msgspec | ||||||
|  | from tricycle import BufferedReceiveStream | ||||||
|  | import trio | ||||||
|  | 
 | ||||||
|  | from tractor.log import get_logger | ||||||
|  | from tractor._exceptions import TransportClosed | ||||||
|  | 
 | ||||||
|  | log = get_logger(__name__) | ||||||
|  | 
 | ||||||
|  | _is_windows = platform.system() == 'Windows' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_stream_addrs(stream: trio.SocketStream) -> tuple: | ||||||
|  |     # should both be IP sockets | ||||||
|  |     lsockname = stream.socket.getsockname() | ||||||
|  |     rsockname = stream.socket.getpeername() | ||||||
|  |     return ( | ||||||
|  |         tuple(lsockname[:2]), | ||||||
|  |         tuple(rsockname[:2]), | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | MsgType = TypeVar("MsgType") | ||||||
|  | 
 | ||||||
|  | # TODO: consider using a generic def and indexing with our eventual | ||||||
|  | # msg definition/types? | ||||||
|  | # - https://docs.python.org/3/library/typing.html#typing.Protocol | ||||||
|  | # - https://jcristharif.com/msgspec/usage.html#structs | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @runtime_checkable | ||||||
|  | class MsgTransport(Protocol[MsgType]): | ||||||
|  | 
 | ||||||
|  |     stream: trio.SocketStream | ||||||
|  |     drained: list[MsgType] | ||||||
|  | 
 | ||||||
|  |     def __init__(self, stream: trio.SocketStream) -> None: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  |     # XXX: should this instead be called `.sendall()`? | ||||||
|  |     async def send(self, msg: MsgType) -> None: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  |     async def recv(self) -> MsgType: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  |     def __aiter__(self) -> MsgType: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  |     def connected(self) -> bool: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  |     # defining this sync otherwise it causes a mypy error because it | ||||||
|  |     # can't figure out it's a generator i guess?..? | ||||||
|  |     def drain(self) -> AsyncIterator[dict]: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def laddr(self) -> tuple[str, int]: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def raddr(self) -> tuple[str, int]: | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO: not sure why we have to inherit here, but it seems to be an | ||||||
|  | # issue with ``get_msg_transport()`` returning a ``Type[Protocol]``; | ||||||
|  | # probably should make a `mypy` issue? | ||||||
|  | class MsgpackTCPStream(MsgTransport): | ||||||
|  |     ''' | ||||||
|  |     A ``trio.SocketStream`` delivering ``msgpack`` formatted data | ||||||
|  |     using the ``msgspec`` codec lib. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     layer_key: int = 4 | ||||||
|  |     name_key: str = 'tcp' | ||||||
|  | 
 | ||||||
|  |     # TODO: better naming for this? | ||||||
|  |     # -[ ] check how libp2p does naming for such things? | ||||||
|  |     codec_key: str = 'msgpack' | ||||||
|  | 
 | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         stream: trio.SocketStream, | ||||||
|  |         prefix_size: int = 4, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  | 
 | ||||||
|  |         self.stream = stream | ||||||
|  |         assert self.stream.socket | ||||||
|  | 
 | ||||||
|  |         # should both be IP sockets | ||||||
|  |         self._laddr, self._raddr = get_stream_addrs(stream) | ||||||
|  | 
 | ||||||
|  |         # create read loop instance | ||||||
|  |         self._agen = self._iter_packets() | ||||||
|  |         self._send_lock = trio.StrictFIFOLock() | ||||||
|  | 
 | ||||||
|  |         # public i guess? | ||||||
|  |         self.drained: list[dict] = [] | ||||||
|  | 
 | ||||||
|  |         self.recv_stream = BufferedReceiveStream(transport_stream=stream) | ||||||
|  |         self.prefix_size = prefix_size | ||||||
|  | 
 | ||||||
|  |         # TODO: struct aware messaging coders | ||||||
|  |         self.encode = msgspec.msgpack.Encoder().encode | ||||||
|  |         self.decode = msgspec.msgpack.Decoder().decode  # dict[str, Any]) | ||||||
|  | 
 | ||||||
|  |     async def _iter_packets(self) -> AsyncGenerator[dict, None]: | ||||||
|  |         '''Yield packets from the underlying stream. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         import msgspec  # noqa | ||||||
|  |         decodes_failed: int = 0 | ||||||
|  | 
 | ||||||
|  |         while True: | ||||||
|  |             try: | ||||||
|  |                 header = await self.recv_stream.receive_exactly(4) | ||||||
|  | 
 | ||||||
|  |             except ( | ||||||
|  |                 ValueError, | ||||||
|  |                 ConnectionResetError, | ||||||
|  | 
 | ||||||
|  |                 # not sure entirely why we need this but without it we | ||||||
|  |                 # seem to be getting racy failures here on | ||||||
|  |                 # arbiter/registry name subs.. | ||||||
|  |                 trio.BrokenResourceError, | ||||||
|  |             ): | ||||||
|  |                 raise TransportClosed( | ||||||
|  |                     f'transport {self} was already closed prior ro read' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             if header == b'': | ||||||
|  |                 raise TransportClosed( | ||||||
|  |                     f'transport {self} was already closed prior ro read' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             size, = struct.unpack("<I", header) | ||||||
|  | 
 | ||||||
|  |             log.transport(f'received header {size}')  # type: ignore | ||||||
|  | 
 | ||||||
|  |             msg_bytes = await self.recv_stream.receive_exactly(size) | ||||||
|  | 
 | ||||||
|  |             log.transport(f"received {msg_bytes}")  # type: ignore | ||||||
|  |             try: | ||||||
|  |                 yield self.decode(msg_bytes) | ||||||
|  |             except ( | ||||||
|  |                 msgspec.DecodeError, | ||||||
|  |                 UnicodeDecodeError, | ||||||
|  |             ): | ||||||
|  |                 if decodes_failed < 4: | ||||||
|  |                     # ignore decoding errors for now and assume they have to | ||||||
|  |                     # do with a channel drop - hope that receiving from the | ||||||
|  |                     # channel will raise an expected error and bubble up. | ||||||
|  |                     try: | ||||||
|  |                         msg_str: str | bytes = msg_bytes.decode() | ||||||
|  |                     except UnicodeDecodeError: | ||||||
|  |                         msg_str = msg_bytes | ||||||
|  | 
 | ||||||
|  |                     log.error( | ||||||
|  |                         '`msgspec` failed to decode!?\n' | ||||||
|  |                         'dumping bytes:\n' | ||||||
|  |                         f'{msg_str!r}' | ||||||
|  |                     ) | ||||||
|  |                     decodes_failed += 1 | ||||||
|  |                 else: | ||||||
|  |                     raise | ||||||
|  | 
 | ||||||
|  |     async def send( | ||||||
|  |         self, | ||||||
|  |         msg: Any, | ||||||
|  | 
 | ||||||
|  |         # hide_tb: bool = False, | ||||||
|  |     ) -> None: | ||||||
|  |         ''' | ||||||
|  |         Send a msgpack coded blob-as-msg over TCP. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         # __tracebackhide__: bool = hide_tb | ||||||
|  |         async with self._send_lock: | ||||||
|  | 
 | ||||||
|  |             bytes_data: bytes = self.encode(msg) | ||||||
|  | 
 | ||||||
|  |             # supposedly the fastest says, | ||||||
|  |             # https://stackoverflow.com/a/54027962 | ||||||
|  |             size: bytes = struct.pack("<I", len(bytes_data)) | ||||||
|  | 
 | ||||||
|  |             return await self.stream.send_all(size + bytes_data) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def laddr(self) -> tuple[str, int]: | ||||||
|  |         return self._laddr | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def raddr(self) -> tuple[str, int]: | ||||||
|  |         return self._raddr | ||||||
|  | 
 | ||||||
|  |     async def recv(self) -> Any: | ||||||
|  |         return await self._agen.asend(None) | ||||||
|  | 
 | ||||||
|  |     async def drain(self) -> AsyncIterator[dict]: | ||||||
|  |         ''' | ||||||
|  |         Drain the stream's remaining messages sent from | ||||||
|  |         the far end until the connection is closed by | ||||||
|  |         the peer. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         try: | ||||||
|  |             async for msg in self._iter_packets(): | ||||||
|  |                 self.drained.append(msg) | ||||||
|  |         except TransportClosed: | ||||||
|  |             for msg in self.drained: | ||||||
|  |                 yield msg | ||||||
|  | 
 | ||||||
|  |     def __aiter__(self): | ||||||
|  |         return self._agen | ||||||
|  | 
 | ||||||
|  |     def connected(self) -> bool: | ||||||
|  |         return self.stream.socket.fileno() != -1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_msg_transport( | ||||||
|  | 
 | ||||||
|  |     key: tuple[str, str], | ||||||
|  | 
 | ||||||
|  | ) -> Type[MsgTransport]: | ||||||
|  | 
 | ||||||
|  |     return { | ||||||
|  |         ('msgpack', 'tcp'): MsgpackTCPStream, | ||||||
|  |     }[key] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Channel: | ||||||
|  |     ''' | ||||||
|  |     An inter-process channel for communication between (remote) actors. | ||||||
|  | 
 | ||||||
|  |     Wraps a ``MsgStream``: transport + encoding IPC connection. | ||||||
|  | 
 | ||||||
|  |     Currently we only support ``trio.SocketStream`` for transport | ||||||
|  |     (aka TCP) and the ``msgpack`` interchange format via the ``msgspec`` | ||||||
|  |     codec libary. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     def __init__( | ||||||
|  | 
 | ||||||
|  |         self, | ||||||
|  |         destaddr: tuple[str, int]|None, | ||||||
|  | 
 | ||||||
|  |         msg_transport_type_key: tuple[str, str] = ('msgpack', 'tcp'), | ||||||
|  | 
 | ||||||
|  |         # TODO: optional reconnection support? | ||||||
|  |         # auto_reconnect: bool = False, | ||||||
|  |         # on_reconnect: typing.Callable[..., typing.Awaitable] = None, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  | 
 | ||||||
|  |         # self._recon_seq = on_reconnect | ||||||
|  |         # self._autorecon = auto_reconnect | ||||||
|  | 
 | ||||||
|  |         self._destaddr = destaddr | ||||||
|  |         self._transport_key = msg_transport_type_key | ||||||
|  | 
 | ||||||
|  |         # Either created in ``.connect()`` or passed in by | ||||||
|  |         # user in ``.from_stream()``. | ||||||
|  |         self._stream: trio.SocketStream|None = None | ||||||
|  |         self._transport: MsgTransport|None = None | ||||||
|  | 
 | ||||||
|  |         # set after handshake - always uid of far end | ||||||
|  |         self.uid: tuple[str, str]|None = None | ||||||
|  | 
 | ||||||
|  |         self._agen = self._aiter_recv() | ||||||
|  |         self._exc: Exception|None = None  # set if far end actor errors | ||||||
|  |         self._closed: bool = False | ||||||
|  | 
 | ||||||
|  |         # flag set by ``Portal.cancel_actor()`` indicating remote | ||||||
|  |         # (possibly peer) cancellation of the far end actor | ||||||
|  |         # runtime. | ||||||
|  |         self._cancel_called: bool = False | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def msgstream(self) -> MsgTransport: | ||||||
|  |         log.info('`Channel.msgstream` is an old name, use `._transport`') | ||||||
|  |         return self._transport | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def transport(self) -> MsgTransport: | ||||||
|  |         return self._transport | ||||||
|  | 
 | ||||||
|  |     @classmethod | ||||||
|  |     def from_stream( | ||||||
|  |         cls, | ||||||
|  |         stream: trio.SocketStream, | ||||||
|  |         **kwargs, | ||||||
|  | 
 | ||||||
|  |     ) -> Channel: | ||||||
|  | 
 | ||||||
|  |         src, dst = get_stream_addrs(stream) | ||||||
|  |         chan = Channel( | ||||||
|  |             destaddr=dst, | ||||||
|  |             **kwargs, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         # set immediately here from provided instance | ||||||
|  |         chan._stream: trio.SocketStream = stream | ||||||
|  |         chan.set_msg_transport(stream) | ||||||
|  |         return chan | ||||||
|  | 
 | ||||||
|  |     def set_msg_transport( | ||||||
|  |         self, | ||||||
|  |         stream: trio.SocketStream, | ||||||
|  |         type_key: tuple[str, str]|None = None, | ||||||
|  | 
 | ||||||
|  |     ) -> MsgTransport: | ||||||
|  |         type_key = type_key or self._transport_key | ||||||
|  |         self._transport = get_msg_transport(type_key)(stream) | ||||||
|  |         return self._transport | ||||||
|  | 
 | ||||||
|  |     def __repr__(self) -> str: | ||||||
|  |         if not self._transport: | ||||||
|  |             return '<Channel with inactive transport?>' | ||||||
|  | 
 | ||||||
|  |         return repr( | ||||||
|  |             self._transport.stream.socket._sock | ||||||
|  |         ).replace(  # type: ignore | ||||||
|  |             "socket.socket", | ||||||
|  |             "Channel", | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def laddr(self) -> tuple[str, int]|None: | ||||||
|  |         return self._transport.laddr if self._transport else None | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def raddr(self) -> tuple[str, int]|None: | ||||||
|  |         return self._transport.raddr if self._transport else None | ||||||
|  | 
 | ||||||
|  |     async def connect( | ||||||
|  |         self, | ||||||
|  |         destaddr: tuple[Any, ...] | None = None, | ||||||
|  |         **kwargs | ||||||
|  | 
 | ||||||
|  |     ) -> MsgTransport: | ||||||
|  | 
 | ||||||
|  |         if self.connected(): | ||||||
|  |             raise RuntimeError("channel is already connected?") | ||||||
|  | 
 | ||||||
|  |         destaddr = destaddr or self._destaddr | ||||||
|  |         assert isinstance(destaddr, tuple) | ||||||
|  | 
 | ||||||
|  |         stream = await trio.open_tcp_stream( | ||||||
|  |             *destaddr, | ||||||
|  |             **kwargs | ||||||
|  |         ) | ||||||
|  |         transport = self.set_msg_transport(stream) | ||||||
|  | 
 | ||||||
|  |         log.transport( | ||||||
|  |             f'Opened channel[{type(transport)}]: {self.laddr} -> {self.raddr}' | ||||||
|  |         ) | ||||||
|  |         return transport | ||||||
|  | 
 | ||||||
|  |     async def send( | ||||||
|  |         self, | ||||||
|  |         payload: Any, | ||||||
|  | 
 | ||||||
|  |         # hide_tb: bool = False, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  |         ''' | ||||||
|  |         Send a coded msg-blob over the transport. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         # __tracebackhide__: bool = hide_tb | ||||||
|  |         log.transport( | ||||||
|  |             '=> send IPC msg:\n\n' | ||||||
|  |             f'{pformat(payload)}\n' | ||||||
|  |         )  # type: ignore | ||||||
|  |         assert self._transport | ||||||
|  | 
 | ||||||
|  |         await self._transport.send( | ||||||
|  |             payload, | ||||||
|  |             # hide_tb=hide_tb, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     async def recv(self) -> Any: | ||||||
|  |         assert self._transport | ||||||
|  |         return await self._transport.recv() | ||||||
|  | 
 | ||||||
|  |         # try: | ||||||
|  |         #     return await self._transport.recv() | ||||||
|  |         # except trio.BrokenResourceError: | ||||||
|  |         #     if self._autorecon: | ||||||
|  |         #         await self._reconnect() | ||||||
|  |         #         return await self.recv() | ||||||
|  |         #     raise | ||||||
|  | 
 | ||||||
|  |     async def aclose(self) -> None: | ||||||
|  | 
 | ||||||
|  |         log.transport( | ||||||
|  |             f'Closing channel to {self.uid} ' | ||||||
|  |             f'{self.laddr} -> {self.raddr}' | ||||||
|  |         ) | ||||||
|  |         assert self._transport | ||||||
|  |         await self._transport.stream.aclose() | ||||||
|  |         self._closed = True | ||||||
|  | 
 | ||||||
|  |     async def __aenter__(self): | ||||||
|  |         await self.connect() | ||||||
|  |         return self | ||||||
|  | 
 | ||||||
|  |     async def __aexit__(self, *args): | ||||||
|  |         await self.aclose(*args) | ||||||
|  | 
 | ||||||
|  |     def __aiter__(self): | ||||||
|  |         return self._agen | ||||||
|  | 
 | ||||||
|  |     # async def _reconnect(self) -> None: | ||||||
|  |     #     """Handle connection failures by polling until a reconnect can be | ||||||
|  |     #     established. | ||||||
|  |     #     """ | ||||||
|  |     #     down = False | ||||||
|  |     #     while True: | ||||||
|  |     #         try: | ||||||
|  |     #             with trio.move_on_after(3) as cancel_scope: | ||||||
|  |     #                 await self.connect() | ||||||
|  |     #             cancelled = cancel_scope.cancelled_caught | ||||||
|  |     #             if cancelled: | ||||||
|  |     #                 log.transport( | ||||||
|  |     #                     "Reconnect timed out after 3 seconds, retrying...") | ||||||
|  |     #                 continue | ||||||
|  |     #             else: | ||||||
|  |     #                 log.transport("Stream connection re-established!") | ||||||
|  | 
 | ||||||
|  |     #                 # TODO: run any reconnection sequence | ||||||
|  |     #                 # on_recon = self._recon_seq | ||||||
|  |     #                 # if on_recon: | ||||||
|  |     #                 #     await on_recon(self) | ||||||
|  | 
 | ||||||
|  |     #                 break | ||||||
|  |     #         except (OSError, ConnectionRefusedError): | ||||||
|  |     #             if not down: | ||||||
|  |     #                 down = True | ||||||
|  |     #                 log.transport( | ||||||
|  |     #                     f"Connection to {self.raddr} went down, waiting" | ||||||
|  |     #                     " for re-establishment") | ||||||
|  |     #             await trio.sleep(1) | ||||||
|  | 
 | ||||||
|  |     async def _aiter_recv( | ||||||
|  |         self | ||||||
|  |     ) -> AsyncGenerator[Any, None]: | ||||||
|  |         ''' | ||||||
|  |         Async iterate items from underlying stream. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         assert self._transport | ||||||
|  |         while True: | ||||||
|  |             try: | ||||||
|  |                 async for item in self._transport: | ||||||
|  |                     yield item | ||||||
|  |                     # sent = yield item | ||||||
|  |                     # if sent is not None: | ||||||
|  |                     #     # optimization, passing None through all the | ||||||
|  |                     #     # time is pointless | ||||||
|  |                     #     await self._transport.send(sent) | ||||||
|  |             except trio.BrokenResourceError: | ||||||
|  | 
 | ||||||
|  |                 # if not self._autorecon: | ||||||
|  |                 raise | ||||||
|  | 
 | ||||||
|  |             await self.aclose() | ||||||
|  | 
 | ||||||
|  |             # if self._autorecon:  # attempt reconnect | ||||||
|  |             #     await self._reconnect() | ||||||
|  |             #     continue | ||||||
|  | 
 | ||||||
|  |     def connected(self) -> bool: | ||||||
|  |         return self._transport.connected() if self._transport else False | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @acm | ||||||
|  | async def _connect_chan( | ||||||
|  |     host: str, port: int | ||||||
|  | ) -> typing.AsyncGenerator[Channel, None]: | ||||||
|  |     ''' | ||||||
|  |     Create and connect a channel with disconnect on context manager | ||||||
|  |     teardown. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     chan = Channel((host, port)) | ||||||
|  |     await chan.connect() | ||||||
|  |     yield chan | ||||||
|  |     await chan.aclose() | ||||||
|  | @ -1,151 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright 2018-eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| Multiaddress parser and utils according the spec(s) defined by |  | ||||||
| `libp2p` and used in dependent project such as `ipfs`: |  | ||||||
| 
 |  | ||||||
| - https://docs.libp2p.io/concepts/fundamentals/addressing/ |  | ||||||
| - https://github.com/libp2p/specs/blob/master/addressing/README.md |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from typing import Iterator |  | ||||||
| 
 |  | ||||||
| from bidict import bidict |  | ||||||
| 
 |  | ||||||
| # TODO: see if we can leverage libp2p ecosys projects instead of |  | ||||||
| # rolling our own (parser) impls of the above addressing specs: |  | ||||||
| # - https://github.com/libp2p/py-libp2p |  | ||||||
| # - https://docs.libp2p.io/concepts/nat/circuit-relay/#relay-addresses |  | ||||||
| # prots: bidict[int, str] = bidict({ |  | ||||||
| prots: bidict[int, str] = { |  | ||||||
|     'ipv4': 3, |  | ||||||
|     'ipv6': 3, |  | ||||||
|     'wg': 3, |  | ||||||
| 
 |  | ||||||
|     'tcp': 4, |  | ||||||
|     'udp': 4, |  | ||||||
| 
 |  | ||||||
|     # TODO: support the next-gen shite Bo |  | ||||||
|     # 'quic': 4, |  | ||||||
|     # 'ssh': 7,  # via rsyscall bootstrapping |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| prot_params: dict[str, tuple[str]] = { |  | ||||||
|     'ipv4': ('addr',), |  | ||||||
|     'ipv6': ('addr',), |  | ||||||
|     'wg': ('addr', 'port', 'pubkey'), |  | ||||||
| 
 |  | ||||||
|     'tcp': ('port',), |  | ||||||
|     'udp': ('port',), |  | ||||||
| 
 |  | ||||||
|     # 'quic': ('port',), |  | ||||||
|     # 'ssh': ('port',), |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def iter_prot_layers( |  | ||||||
|     multiaddr: str, |  | ||||||
| ) -> Iterator[ |  | ||||||
|     tuple[ |  | ||||||
|         int, |  | ||||||
|         list[str] |  | ||||||
|     ] |  | ||||||
| ]: |  | ||||||
|     ''' |  | ||||||
|     Unpack a libp2p style "multiaddress" into multiple "segments" |  | ||||||
|     for each "layer" of the protocoll stack (in OSI terms). |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     tokens: list[str] = multiaddr.split('/') |  | ||||||
|     root, tokens = tokens[0], tokens[1:] |  | ||||||
|     assert not root  # there is a root '/' on LHS |  | ||||||
|     itokens = iter(tokens) |  | ||||||
| 
 |  | ||||||
|     prot: str | None = None |  | ||||||
|     params: list[str] = [] |  | ||||||
|     for token in itokens: |  | ||||||
|         # every prot path should start with a known |  | ||||||
|         # key-str. |  | ||||||
|         if token in prots: |  | ||||||
|             if prot is None: |  | ||||||
|                 prot: str = token |  | ||||||
|             else: |  | ||||||
|                 yield prot, params |  | ||||||
|                 prot = token |  | ||||||
| 
 |  | ||||||
|             params = [] |  | ||||||
| 
 |  | ||||||
|         elif token not in prots: |  | ||||||
|             params.append(token) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         yield prot, params |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def parse_maddr( |  | ||||||
|     multiaddr: str, |  | ||||||
| ) -> dict[str, str | int | dict]: |  | ||||||
|     ''' |  | ||||||
|     Parse a libp2p style "multiaddress" into its distinct protocol |  | ||||||
|     segments where each segment is of the form: |  | ||||||
| 
 |  | ||||||
|         `../<protocol>/<param0>/<param1>/../<paramN>` |  | ||||||
| 
 |  | ||||||
|     and is loaded into a (order preserving) `layers: dict[str, |  | ||||||
|     dict[str, Any]` which holds each protocol-layer-segment of the |  | ||||||
|     original `str` path as a separate entry according to its approx |  | ||||||
|     OSI "layer number". |  | ||||||
| 
 |  | ||||||
|     Any `paramN` in the path must be distinctly defined by a str-token in the |  | ||||||
|     (module global) `prot_params` table. |  | ||||||
| 
 |  | ||||||
|     For eg. for wireguard which requires an address, port number and publickey |  | ||||||
|     the protocol params are specified as the entry: |  | ||||||
| 
 |  | ||||||
|         'wg': ('addr', 'port', 'pubkey'), |  | ||||||
| 
 |  | ||||||
|     and are thus parsed from a maddr in that order: |  | ||||||
|         `'/wg/1.1.1.1/51820/<pubkey>'` |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     layers: dict[str, str | int | dict] = {} |  | ||||||
|     for ( |  | ||||||
|         prot_key, |  | ||||||
|         params, |  | ||||||
|     ) in iter_prot_layers(multiaddr): |  | ||||||
| 
 |  | ||||||
|         layer: int = prots[prot_key]  # OSI layer used for sorting |  | ||||||
|         ep: dict[str, int | str] = {'layer': layer} |  | ||||||
|         layers[prot_key] = ep |  | ||||||
| 
 |  | ||||||
|         # TODO; validation and resolving of names: |  | ||||||
|         # - each param via a validator provided as part of the |  | ||||||
|         #   prot_params def? (also see `"port"` case below..) |  | ||||||
|         # - do a resolv step that will check addrs against |  | ||||||
|         #   any loaded network.resolv: dict[str, str] |  | ||||||
|         rparams: list = list(reversed(params)) |  | ||||||
|         for key in prot_params[prot_key]: |  | ||||||
|             val: str | int = rparams.pop() |  | ||||||
| 
 |  | ||||||
|             # TODO: UGHH, dunno what we should do for validation |  | ||||||
|             # here, put it in the params spec somehow? |  | ||||||
|             if key == 'port': |  | ||||||
|                 val = int(val) |  | ||||||
| 
 |  | ||||||
|             ep[key] = val |  | ||||||
| 
 |  | ||||||
|     return layers |  | ||||||
|  | @ -31,7 +31,7 @@ from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     Callable, |     Callable, | ||||||
|     AsyncGenerator, |     AsyncGenerator, | ||||||
|     TYPE_CHECKING, |     # Type, | ||||||
| ) | ) | ||||||
| from functools import partial | from functools import partial | ||||||
| from dataclasses import dataclass | from dataclasses import dataclass | ||||||
|  | @ -39,24 +39,16 @@ import warnings | ||||||
| 
 | 
 | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
| from .trionics import ( | from .trionics import maybe_open_nursery | ||||||
|     maybe_open_nursery, |  | ||||||
|     collapse_eg, |  | ||||||
| ) |  | ||||||
| from ._state import ( | from ._state import ( | ||||||
|     current_actor, |     current_actor, | ||||||
| ) | ) | ||||||
| from .ipc import Channel | from ._ipc import Channel | ||||||
| from .log import get_logger | from .log import get_logger | ||||||
| from .msg import ( | from .msg import NamespacePath | ||||||
|     # Error, |  | ||||||
|     PayloadMsg, |  | ||||||
|     NamespacePath, |  | ||||||
|     Return, |  | ||||||
| ) |  | ||||||
| from ._exceptions import ( | from ._exceptions import ( | ||||||
|  |     unpack_error, | ||||||
|     NoResult, |     NoResult, | ||||||
|     TransportClosed, |  | ||||||
| ) | ) | ||||||
| from ._context import ( | from ._context import ( | ||||||
|     Context, |     Context, | ||||||
|  | @ -66,12 +58,41 @@ from ._streaming import ( | ||||||
|     MsgStream, |     MsgStream, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from ._runtime import Actor |  | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # TODO: rename to `unwrap_result()` and use | ||||||
|  | # `._raise_from_no_key_in_msg()` (after tweak to | ||||||
|  | # accept a `chan: Channel` arg) in key block! | ||||||
|  | def _unwrap_msg( | ||||||
|  |     msg: dict[str, Any], | ||||||
|  |     channel: Channel, | ||||||
|  | 
 | ||||||
|  |     hide_tb: bool = True, | ||||||
|  | 
 | ||||||
|  | ) -> Any: | ||||||
|  |     ''' | ||||||
|  |     Unwrap a final result from a `{return: <Any>}` IPC msg. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     __tracebackhide__: bool = hide_tb | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         return msg['return'] | ||||||
|  |     except KeyError as ke: | ||||||
|  | 
 | ||||||
|  |         # internal error should never get here | ||||||
|  |         assert msg.get('cid'), ( | ||||||
|  |             "Received internal error at portal?" | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         raise unpack_error( | ||||||
|  |             msg, | ||||||
|  |             channel | ||||||
|  |         ) from ke | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| class Portal: | class Portal: | ||||||
|     ''' |     ''' | ||||||
|     A 'portal' to a memory-domain-separated `Actor`. |     A 'portal' to a memory-domain-separated `Actor`. | ||||||
|  | @ -95,34 +116,17 @@ class Portal: | ||||||
|     # connected (peer) actors. |     # connected (peer) actors. | ||||||
|     cancel_timeout: float = 0.5 |     cancel_timeout: float = 0.5 | ||||||
| 
 | 
 | ||||||
|     def __init__( |     def __init__(self, channel: Channel) -> None: | ||||||
|         self, |         self.chan = channel | ||||||
|         channel: Channel, |  | ||||||
|     ) -> None: |  | ||||||
| 
 |  | ||||||
|         self._chan: Channel = channel |  | ||||||
|         # during the portal's lifetime |         # during the portal's lifetime | ||||||
|         self._final_result_pld: Any|None = None |         self._result_msg: dict|None = None | ||||||
|         self._final_result_msg: PayloadMsg|None = None |  | ||||||
| 
 | 
 | ||||||
|         # When set to a ``Context`` (when _submit_for_result is called) |         # When set to a ``Context`` (when _submit_for_result is called) | ||||||
|         # it is expected that ``result()`` will be awaited at some |         # it is expected that ``result()`` will be awaited at some | ||||||
|         # point. |         # point. | ||||||
|         self._expect_result_ctx: Context|None = None |         self._expect_result: Context | None = None | ||||||
|         self._streams: set[MsgStream] = set() |         self._streams: set[MsgStream] = set() | ||||||
| 
 |         self.actor = current_actor() | ||||||
|         # TODO, this should be PRIVATE (and never used publicly)! since it's just |  | ||||||
|         # a cached ref to the local runtime instead of calling |  | ||||||
|         # `current_actor()` everywhere.. XD |  | ||||||
|         self.actor: Actor = current_actor() |  | ||||||
| 
 |  | ||||||
|     @property |  | ||||||
|     def chan(self) -> Channel: |  | ||||||
|         ''' |  | ||||||
|         Ref to this ctx's underlying `tractor.ipc.Channel`. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         return self._chan |  | ||||||
| 
 | 
 | ||||||
|     @property |     @property | ||||||
|     def channel(self) -> Channel: |     def channel(self) -> Channel: | ||||||
|  | @ -136,8 +140,6 @@ class Portal: | ||||||
|         ) |         ) | ||||||
|         return self.chan |         return self.chan | ||||||
| 
 | 
 | ||||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses |  | ||||||
|     # a single `.open_context()` call underneath. |  | ||||||
|     async def _submit_for_result( |     async def _submit_for_result( | ||||||
|         self, |         self, | ||||||
|         ns: str, |         ns: str, | ||||||
|  | @ -145,34 +147,32 @@ class Portal: | ||||||
|         **kwargs |         **kwargs | ||||||
|     ) -> None: |     ) -> None: | ||||||
| 
 | 
 | ||||||
|         if self._expect_result_ctx is not None: |         assert self._expect_result is None, ( | ||||||
|             raise RuntimeError( |             "A pending main result has already been submitted" | ||||||
|                 'A pending main result has already been submitted' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         self._expect_result_ctx: Context = await self.actor.start_remote_task( |  | ||||||
|             self.channel, |  | ||||||
|             nsf=NamespacePath(f'{ns}:{func}'), |  | ||||||
|             kwargs=kwargs, |  | ||||||
|             portal=self, |  | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     # TODO: we should deprecate this API right? since if we remove |         self._expect_result = await self.actor.start_remote_task( | ||||||
|     # `.run_in_actor()` (and instead move it to a `.highlevel` |             self.channel, | ||||||
|     # wrapper api (around a single `.open_context()` call) we don't |             nsf=NamespacePath(f'{ns}:{func}'), | ||||||
|     # really have any notion of a "main" remote task any more? |             kwargs=kwargs | ||||||
|     # |         ) | ||||||
|     # @api_frame | 
 | ||||||
|     async def wait_for_result( |     async def _return_once( | ||||||
|         self, |         self, | ||||||
|         hide_tb: bool = True, |         ctx: Context, | ||||||
|     ) -> Any: | 
 | ||||||
|  |     ) -> dict[str, Any]: | ||||||
|  | 
 | ||||||
|  |         assert ctx._remote_func_type == 'asyncfunc'  # single response | ||||||
|  |         msg: dict = await ctx._recv_chan.receive() | ||||||
|  |         return msg | ||||||
|  | 
 | ||||||
|  |     async def result(self) -> Any: | ||||||
|         ''' |         ''' | ||||||
|         Return the final result delivered by a `Return`-msg from the |         Return the result(s) from the remote actor's "main" task. | ||||||
|         remote peer actor's "main" task's `return` statement. |  | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __tracebackhide__: bool = hide_tb |         # __tracebackhide__ = True | ||||||
|         # Check for non-rpc errors slapped on the |         # Check for non-rpc errors slapped on the | ||||||
|         # channel for which we always raise |         # channel for which we always raise | ||||||
|         exc = self.channel._exc |         exc = self.channel._exc | ||||||
|  | @ -180,66 +180,32 @@ class Portal: | ||||||
|             raise exc |             raise exc | ||||||
| 
 | 
 | ||||||
|         # not expecting a "main" result |         # not expecting a "main" result | ||||||
|         if self._expect_result_ctx is None: |         if self._expect_result is None: | ||||||
|             peer_id: str = f'{self.channel.aid.reprol()!r}' |  | ||||||
|             log.warning( |             log.warning( | ||||||
|                 f'Portal to peer {peer_id} will not deliver a final result?\n' |                 f"Portal for {self.channel.uid} not expecting a final" | ||||||
|                 f'\n' |                 " result?\nresult() should only be called if subactor" | ||||||
|                 f'Context.result() can only be called by the parent of ' |                 " was spawned with `ActorNursery.run_in_actor()`") | ||||||
|                 f'a sub-actor when it was spawned with ' |  | ||||||
|                 f'`ActorNursery.run_in_actor()`' |  | ||||||
|                 f'\n' |  | ||||||
|                 f'Further this `ActorNursery`-method-API will deprecated in the' |  | ||||||
|                 f'near fututre!\n' |  | ||||||
|             ) |  | ||||||
|             return NoResult |             return NoResult | ||||||
| 
 | 
 | ||||||
|         # expecting a "main" result |         # expecting a "main" result | ||||||
|         assert self._expect_result_ctx |         assert self._expect_result | ||||||
| 
 | 
 | ||||||
|         if self._final_result_msg is None: |         if self._result_msg is None: | ||||||
|             try: |             self._result_msg = await self._return_once( | ||||||
|                 ( |                 self._expect_result | ||||||
|                     self._final_result_msg, |             ) | ||||||
|                     self._final_result_pld, |  | ||||||
|                 ) = await self._expect_result_ctx._pld_rx.recv_msg( |  | ||||||
|                     ipc=self._expect_result_ctx, |  | ||||||
|                     expect_msg=Return, |  | ||||||
|                 ) |  | ||||||
|             except BaseException as err: |  | ||||||
|                 # TODO: wrap this into `@api_frame` optionally with |  | ||||||
|                 # some kinda filtering mechanism like log levels? |  | ||||||
|                 __tracebackhide__: bool = False |  | ||||||
|                 raise err |  | ||||||
| 
 | 
 | ||||||
|         return self._final_result_pld |         return _unwrap_msg( | ||||||
| 
 |             self._result_msg, | ||||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses |             self.channel, | ||||||
|     # a single `.open_context()` call underneath. |  | ||||||
|     async def result( |  | ||||||
|         self, |  | ||||||
|         *args, |  | ||||||
|         **kwargs, |  | ||||||
|     ) -> Any|Exception: |  | ||||||
|         typname: str = type(self).__name__ |  | ||||||
|         log.warning( |  | ||||||
|             f'`{typname}.result()` is DEPRECATED!\n' |  | ||||||
|             f'\n' |  | ||||||
|             f'Use `{typname}.wait_for_result()` instead!\n' |  | ||||||
|         ) |  | ||||||
|         return await self.wait_for_result( |  | ||||||
|             *args, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     async def _cancel_streams(self): |     async def _cancel_streams(self): | ||||||
|         # terminate all locally running async generator |         # terminate all locally running async generator | ||||||
|         # IPC calls |         # IPC calls | ||||||
|         if self._streams: |         if self._streams: | ||||||
|             peer_id: str = f'{self.channel.aid.reprol()!r}' |             log.cancel( | ||||||
|             report: str = ( |                 f"Cancelling all streams with {self.channel.uid}") | ||||||
|                 f'Cancelling all msg-streams with {peer_id}\n' |  | ||||||
|             ) |  | ||||||
|             for stream in self._streams.copy(): |             for stream in self._streams.copy(): | ||||||
|                 try: |                 try: | ||||||
|                     await stream.aclose() |                     await stream.aclose() | ||||||
|  | @ -248,18 +214,10 @@ class Portal: | ||||||
|                     # (unless of course at some point down the road we |                     # (unless of course at some point down the road we | ||||||
|                     # won't expect this to always be the case or need to |                     # won't expect this to always be the case or need to | ||||||
|                     # detect it for respawning purposes?) |                     # detect it for respawning purposes?) | ||||||
|                     report += ( |                     log.debug(f"{stream} was already closed.") | ||||||
|                         f'->) {stream!r} already closed\n' |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|             log.cancel(report) |  | ||||||
| 
 | 
 | ||||||
|     async def aclose(self): |     async def aclose(self): | ||||||
|         log.debug( |         log.debug(f"Closing {self}") | ||||||
|             f'Closing portal\n' |  | ||||||
|             f'>}}\n' |  | ||||||
|             f'|_{self}\n' |  | ||||||
|         ) |  | ||||||
|         # TODO: once we move to implementing our own `ReceiveChannel` |         # TODO: once we move to implementing our own `ReceiveChannel` | ||||||
|         # (including remote task cancellation inside its `.aclose()`) |         # (including remote task cancellation inside its `.aclose()`) | ||||||
|         # we'll need to .aclose all those channels here |         # we'll need to .aclose all those channels here | ||||||
|  | @ -282,25 +240,23 @@ class Portal: | ||||||
|         purpose. |         purpose. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __runtimeframe__: int = 1  # noqa |  | ||||||
| 
 |  | ||||||
|         chan: Channel = self.channel |         chan: Channel = self.channel | ||||||
|         peer_id: str = f'{self.channel.aid.reprol()!r}' |  | ||||||
|         if not chan.connected(): |         if not chan.connected(): | ||||||
|             log.runtime( |             log.runtime( | ||||||
|                 'Peer {peer_id} is already disconnected\n' |                 'This channel is already closed, skipping cancel request..' | ||||||
|                 '-> skipping cancel request..\n' |  | ||||||
|             ) |             ) | ||||||
|             return False |             return False | ||||||
| 
 | 
 | ||||||
|  |         reminfo: str = ( | ||||||
|  |             f'`Portal.cancel_actor()` => {self.channel.uid}\n' | ||||||
|  |             f' |_{chan}\n' | ||||||
|  |         ) | ||||||
|         log.cancel( |         log.cancel( | ||||||
|             f'Sending actor-runtime-cancel-req to peer\n' |             f'Sending runtime `.cancel()` request to peer\n\n' | ||||||
|             f'\n' |             f'{reminfo}' | ||||||
|             f'c)=> {peer_id}\n' |  | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # XXX the one spot we set it? |         self.channel._cancel_called: bool = True | ||||||
|         chan._cancel_called: bool = True |  | ||||||
|         try: |         try: | ||||||
|             # send cancel cmd - might not get response |             # send cancel cmd - might not get response | ||||||
|             # XXX: sure would be nice to make this work with |             # XXX: sure would be nice to make this work with | ||||||
|  | @ -321,47 +277,24 @@ class Portal: | ||||||
|                 # may timeout and we never get an ack (obvi racy) |                 # may timeout and we never get an ack (obvi racy) | ||||||
|                 # but that doesn't mean it wasn't cancelled. |                 # but that doesn't mean it wasn't cancelled. | ||||||
|                 log.debug( |                 log.debug( | ||||||
|                     f'May have failed to cancel peer?\n' |                     'May have failed to cancel peer?\n' | ||||||
|                     f'\n' |                     f'{reminfo}' | ||||||
|                     f'c)=?> {peer_id}\n' |  | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|             # if we get here some weird cancellation case happened |             # if we get here some weird cancellation case happened | ||||||
|             return False |             return False | ||||||
| 
 | 
 | ||||||
|         except ( |         except ( | ||||||
|             # XXX, should never really get raised unless we aren't |  | ||||||
|             # wrapping them in the below type by mistake? |  | ||||||
|             # |  | ||||||
|             # Leaving the catch here for now until we're very sure |  | ||||||
|             # all the cases (for various tpt protos) have indeed been |  | ||||||
|             # re-wrapped ;p |  | ||||||
|             trio.ClosedResourceError, |             trio.ClosedResourceError, | ||||||
|             trio.BrokenResourceError, |             trio.BrokenResourceError, | ||||||
| 
 |         ): | ||||||
|             TransportClosed, |             log.debug( | ||||||
|         ) as tpt_err: |                 'IPC chan for actor already closed or broken?\n\n' | ||||||
|             ipc_borked_report: str = ( |                 f'{self.channel.uid}\n' | ||||||
|                 f'IPC for actor already closed/broken?\n\n' |                 f' |_{self.channel}\n' | ||||||
|                 f'\n' |  | ||||||
|                 f'c)=x> {peer_id}\n' |  | ||||||
|             ) |             ) | ||||||
|             match tpt_err: |  | ||||||
|                 case TransportClosed(): |  | ||||||
|                     log.debug(ipc_borked_report) |  | ||||||
|                 case _: |  | ||||||
|                     ipc_borked_report += ( |  | ||||||
|                         f'\n' |  | ||||||
|                         f'Unhandled low-level transport-closed/error during\n' |  | ||||||
|                         f'Portal.cancel_actor()` request?\n' |  | ||||||
|                         f'<{type(tpt_err).__name__}( {tpt_err} )>\n' |  | ||||||
|                     ) |  | ||||||
|                     log.warning(ipc_borked_report) |  | ||||||
| 
 |  | ||||||
|             return False |             return False | ||||||
| 
 | 
 | ||||||
|     # TODO: do we still need this for low level `Actor`-runtime |  | ||||||
|     # method calls or can we also remove it? |  | ||||||
|     async def run_from_ns( |     async def run_from_ns( | ||||||
|         self, |         self, | ||||||
|         namespace_path: str, |         namespace_path: str, | ||||||
|  | @ -384,23 +317,21 @@ class Portal: | ||||||
|           internals! |           internals! | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __runtimeframe__: int = 1  # noqa |  | ||||||
|         nsf = NamespacePath( |         nsf = NamespacePath( | ||||||
|             f'{namespace_path}:{function_name}' |             f'{namespace_path}:{function_name}' | ||||||
|         ) |         ) | ||||||
|         ctx: Context = await self.actor.start_remote_task( |         ctx = await self.actor.start_remote_task( | ||||||
|             chan=self.channel, |             chan=self.channel, | ||||||
|             nsf=nsf, |             nsf=nsf, | ||||||
|             kwargs=kwargs, |             kwargs=kwargs, | ||||||
|             portal=self, |  | ||||||
|         ) |         ) | ||||||
|         return await ctx._pld_rx.recv_pld( |         ctx._portal = self | ||||||
|             ipc=ctx, |         msg = await self._return_once(ctx) | ||||||
|             expect_msg=Return, |         return _unwrap_msg( | ||||||
|  |             msg, | ||||||
|  |             self.channel, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses |  | ||||||
|     # a single `.open_context()` call underneath. |  | ||||||
|     async def run( |     async def run( | ||||||
|         self, |         self, | ||||||
|         func: str, |         func: str, | ||||||
|  | @ -416,8 +347,6 @@ class Portal: | ||||||
|         remote rpc task or a local async generator instance. |         remote rpc task or a local async generator instance. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __runtimeframe__: int = 1  # noqa |  | ||||||
| 
 |  | ||||||
|         if isinstance(func, str): |         if isinstance(func, str): | ||||||
|             warnings.warn( |             warnings.warn( | ||||||
|                 "`Portal.run(namespace: str, funcname: str)` is now" |                 "`Portal.run(namespace: str, funcname: str)` is now" | ||||||
|  | @ -448,15 +377,13 @@ class Portal: | ||||||
|             self.channel, |             self.channel, | ||||||
|             nsf=nsf, |             nsf=nsf, | ||||||
|             kwargs=kwargs, |             kwargs=kwargs, | ||||||
|             portal=self, |  | ||||||
|         ) |         ) | ||||||
|         return await ctx._pld_rx.recv_pld( |         ctx._portal = self | ||||||
|             ipc=ctx, |         return _unwrap_msg( | ||||||
|             expect_msg=Return, |             await self._return_once(ctx), | ||||||
|  |             self.channel, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses |  | ||||||
|     # a single `.open_context()` call underneath. |  | ||||||
|     @acm |     @acm | ||||||
|     async def open_stream_from( |     async def open_stream_from( | ||||||
|         self, |         self, | ||||||
|  | @ -464,14 +391,6 @@ class Portal: | ||||||
|         **kwargs, |         **kwargs, | ||||||
| 
 | 
 | ||||||
|     ) -> AsyncGenerator[MsgStream, None]: |     ) -> AsyncGenerator[MsgStream, None]: | ||||||
|         ''' |  | ||||||
|         Legacy one-way streaming API. |  | ||||||
| 
 |  | ||||||
|         TODO: re-impl on top `Portal.open_context()` + an async gen |  | ||||||
|         around `Context.open_stream()`. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         __runtimeframe__: int = 1  # noqa |  | ||||||
| 
 | 
 | ||||||
|         if not inspect.isasyncgenfunction(async_gen_func): |         if not inspect.isasyncgenfunction(async_gen_func): | ||||||
|             if not ( |             if not ( | ||||||
|  | @ -485,8 +404,8 @@ class Portal: | ||||||
|             self.channel, |             self.channel, | ||||||
|             nsf=NamespacePath.from_ref(async_gen_func), |             nsf=NamespacePath.from_ref(async_gen_func), | ||||||
|             kwargs=kwargs, |             kwargs=kwargs, | ||||||
|             portal=self, |  | ||||||
|         ) |         ) | ||||||
|  |         ctx._portal = self | ||||||
| 
 | 
 | ||||||
|         # ensure receive-only stream entrypoint |         # ensure receive-only stream entrypoint | ||||||
|         assert ctx._remote_func_type == 'asyncgen' |         assert ctx._remote_func_type == 'asyncgen' | ||||||
|  | @ -495,13 +414,13 @@ class Portal: | ||||||
|             # deliver receive only stream |             # deliver receive only stream | ||||||
|             async with MsgStream( |             async with MsgStream( | ||||||
|                 ctx=ctx, |                 ctx=ctx, | ||||||
|                 rx_chan=ctx._rx_chan, |                 rx_chan=ctx._recv_chan, | ||||||
|             ) as stream: |             ) as rchan: | ||||||
|                 self._streams.add(stream) |                 self._streams.add(rchan) | ||||||
|                 ctx._stream = stream |                 yield rchan | ||||||
|                 yield stream |  | ||||||
| 
 | 
 | ||||||
|         finally: |         finally: | ||||||
|  | 
 | ||||||
|             # cancel the far end task on consumer close |             # cancel the far end task on consumer close | ||||||
|             # NOTE: this is a special case since we assume that if using |             # NOTE: this is a special case since we assume that if using | ||||||
|             # this ``.open_fream_from()`` api, the stream is one a one |             # this ``.open_fream_from()`` api, the stream is one a one | ||||||
|  | @ -513,17 +432,14 @@ class Portal: | ||||||
|                 with trio.CancelScope(shield=True): |                 with trio.CancelScope(shield=True): | ||||||
|                     await ctx.cancel() |                     await ctx.cancel() | ||||||
| 
 | 
 | ||||||
|             except trio.ClosedResourceError as cre: |             except trio.ClosedResourceError: | ||||||
|                 # if the far end terminates before we send a cancel the |                 # if the far end terminates before we send a cancel the | ||||||
|                 # underlying transport-channel may already be closed. |                 # underlying transport-channel may already be closed. | ||||||
|                 log.cancel( |                 log.cancel(f'Context {ctx} was already closed?') | ||||||
|                     f'Context.cancel() -> {cre!r}\n' |  | ||||||
|                     f'cid: {ctx.cid!r} already closed?\n' |  | ||||||
|                 ) |  | ||||||
| 
 | 
 | ||||||
|             # XXX: should this always be done? |             # XXX: should this always be done? | ||||||
|             # await recv_chan.aclose() |             # await recv_chan.aclose() | ||||||
|             self._streams.remove(stream) |             self._streams.remove(rchan) | ||||||
| 
 | 
 | ||||||
|     # NOTE: impl is found in `._context`` mod to make |     # NOTE: impl is found in `._context`` mod to make | ||||||
|     # reading/groking the details simpler code-org-wise. This |     # reading/groking the details simpler code-org-wise. This | ||||||
|  | @ -545,23 +461,14 @@ class LocalPortal: | ||||||
|     actor: 'Actor'  # type: ignore # noqa |     actor: 'Actor'  # type: ignore # noqa | ||||||
|     channel: Channel |     channel: Channel | ||||||
| 
 | 
 | ||||||
|     async def run_from_ns( |     async def run_from_ns(self, ns: str, func_name: str, **kwargs) -> Any: | ||||||
|         self, |  | ||||||
|         ns: str, |  | ||||||
|         func_name: str, |  | ||||||
|         **kwargs, |  | ||||||
|     ) -> Any: |  | ||||||
|         ''' |         ''' | ||||||
|         Run a requested local function from a namespace path and |         Run a requested local function from a namespace path and | ||||||
|         return it's result. |         return it's result. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         obj = ( |         obj = self.actor if ns == 'self' else importlib.import_module(ns) | ||||||
|             self.actor |         func = getattr(obj, func_name) | ||||||
|             if ns == 'self' |  | ||||||
|             else importlib.import_module(ns) |  | ||||||
|         ) |  | ||||||
|         func: Callable = getattr(obj, func_name) |  | ||||||
|         return await func(**kwargs) |         return await func(**kwargs) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -569,7 +476,7 @@ class LocalPortal: | ||||||
| async def open_portal( | async def open_portal( | ||||||
| 
 | 
 | ||||||
|     channel: Channel, |     channel: Channel, | ||||||
|     tn: trio.Nursery|None = None, |     nursery: trio.Nursery|None = None, | ||||||
|     start_msg_loop: bool = True, |     start_msg_loop: bool = True, | ||||||
|     shield: bool = False, |     shield: bool = False, | ||||||
| 
 | 
 | ||||||
|  | @ -577,39 +484,31 @@ async def open_portal( | ||||||
|     ''' |     ''' | ||||||
|     Open a ``Portal`` through the provided ``channel``. |     Open a ``Portal`` through the provided ``channel``. | ||||||
| 
 | 
 | ||||||
|     Spawns a background task to handle RPC processing, normally |     Spawns a background task to handle message processing (normally | ||||||
|     done by the actor-runtime implicitly via a call to |     done by the actor-runtime implicitly). | ||||||
|     `._rpc.process_messages()`. just after connection establishment. |  | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     actor = current_actor() |     actor = current_actor() | ||||||
|     assert actor |     assert actor | ||||||
|     was_connected: bool = False |     was_connected = False | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with maybe_open_nursery(nursery, shield=shield) as nursery: | ||||||
|         collapse_eg(), |  | ||||||
|         maybe_open_nursery( |  | ||||||
|             tn, |  | ||||||
|             shield=shield, |  | ||||||
|         ) as tn, |  | ||||||
|     ): |  | ||||||
| 
 | 
 | ||||||
|         if not channel.connected(): |         if not channel.connected(): | ||||||
|             await channel.connect() |             await channel.connect() | ||||||
|             was_connected = True |             was_connected = True | ||||||
| 
 | 
 | ||||||
|         if channel.aid is None: |         if channel.uid is None: | ||||||
|             await channel._do_handshake( |             await actor._do_handshake(channel) | ||||||
|                 aid=actor.aid, |  | ||||||
|             ) |  | ||||||
| 
 | 
 | ||||||
|         msg_loop_cs: trio.CancelScope|None = None |         msg_loop_cs: trio.CancelScope|None = None | ||||||
|         if start_msg_loop: |         if start_msg_loop: | ||||||
|             from . import _rpc |             from ._runtime import process_messages | ||||||
|             msg_loop_cs = await tn.start( |             msg_loop_cs = await nursery.start( | ||||||
|                 partial( |                 partial( | ||||||
|                     _rpc.process_messages, |                     process_messages, | ||||||
|                     chan=channel, |                     actor, | ||||||
|  |                     channel, | ||||||
|                     # if the local task is cancelled we want to keep |                     # if the local task is cancelled we want to keep | ||||||
|                     # the msg loop running until our block ends |                     # the msg loop running until our block ends | ||||||
|                     shield=True, |                     shield=True, | ||||||
|  | @ -622,10 +521,12 @@ async def open_portal( | ||||||
|             await portal.aclose() |             await portal.aclose() | ||||||
| 
 | 
 | ||||||
|             if was_connected: |             if was_connected: | ||||||
|                 await channel.aclose() |                 # gracefully signal remote channel-msg loop | ||||||
|  |                 await channel.send(None) | ||||||
|  |                 # await channel.aclose() | ||||||
| 
 | 
 | ||||||
|             # cancel background msg loop task |             # cancel background msg loop task | ||||||
|             if msg_loop_cs is not None: |             if msg_loop_cs: | ||||||
|                 msg_loop_cs.cancel() |                 msg_loop_cs.cancel() | ||||||
| 
 | 
 | ||||||
|             tn.cancel_scope.cancel() |             nursery.cancel_scope.cancel() | ||||||
|  |  | ||||||
							
								
								
									
										742
									
								
								tractor/_root.py
								
								
								
								
							
							
						
						
									
										742
									
								
								tractor/_root.py
								
								
								
								
							|  | @ -18,585 +18,262 @@ | ||||||
| Root actor runtime ignition(s). | Root actor runtime ignition(s). | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from contextlib import ( | from contextlib import asynccontextmanager | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| from functools import partial | from functools import partial | ||||||
| import importlib | import importlib | ||||||
| import inspect |  | ||||||
| import logging | import logging | ||||||
| import os |  | ||||||
| import signal | import signal | ||||||
| import sys | import sys | ||||||
| from typing import ( | import os | ||||||
|     Any, | import typing | ||||||
|     Callable, |  | ||||||
| ) |  | ||||||
| import warnings | import warnings | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
| from . import _runtime | from ._runtime import ( | ||||||
| from .devx import ( |     Actor, | ||||||
|     debug, |     Arbiter, | ||||||
|     _frame_stack, |     # TODO: rename and make a non-actor subtype? | ||||||
|     pformat as _pformat, |     # Arbiter as Registry, | ||||||
|  |     async_main, | ||||||
| ) | ) | ||||||
|  | from . import _debug | ||||||
| from . import _spawn | from . import _spawn | ||||||
| from . import _state | from . import _state | ||||||
| from . import log | from . import log | ||||||
| from .ipc import ( | from ._ipc import _connect_chan | ||||||
|     _connect_chan, | from ._exceptions import is_multi_cancelled | ||||||
| ) | 
 | ||||||
| from ._addr import ( | 
 | ||||||
|     Address, | # set at startup and after forks | ||||||
|     UnwrappedAddress, | _default_arbiter_host: str = '127.0.0.1' | ||||||
|     default_lo_addrs, | _default_arbiter_port: int = 1616 | ||||||
|     mk_uuid, |  | ||||||
|     wrap_address, |  | ||||||
| ) |  | ||||||
| from .trionics import ( |  | ||||||
|     is_multi_cancelled, |  | ||||||
|     collapse_eg, |  | ||||||
| ) |  | ||||||
| from ._exceptions import ( |  | ||||||
|     RuntimeFailure, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| logger = log.get_logger('tractor') | logger = log.get_logger('tractor') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: stick this in a `@acm` defined in `devx.debug`? | @asynccontextmanager | ||||||
| # -[ ] also maybe consider making this a `wrapt`-deco to |  | ||||||
| #     save an indent level? |  | ||||||
| # |  | ||||||
| @acm |  | ||||||
| async def maybe_block_bp( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     maybe_enable_greenback: bool, |  | ||||||
| ) -> bool: |  | ||||||
|     # Override the global debugger hook to make it play nice with |  | ||||||
|     # ``trio``, see much discussion in: |  | ||||||
|     # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018 |  | ||||||
|     builtin_bp_handler: Callable = sys.breakpointhook |  | ||||||
|     orig_bp_path: str|None = os.environ.get( |  | ||||||
|         'PYTHONBREAKPOINT', |  | ||||||
|         None, |  | ||||||
|     ) |  | ||||||
|     bp_blocked: bool |  | ||||||
|     if ( |  | ||||||
|         debug_mode |  | ||||||
|         and maybe_enable_greenback |  | ||||||
|         and ( |  | ||||||
|             maybe_mod := await debug.maybe_init_greenback( |  | ||||||
|                 raise_not_found=False, |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|     ): |  | ||||||
|         logger.info( |  | ||||||
|             f'Found `greenback` installed @ {maybe_mod}\n' |  | ||||||
|             f'Enabling `tractor.pause_from_sync()` support!\n' |  | ||||||
|         ) |  | ||||||
|         os.environ['PYTHONBREAKPOINT'] = ( |  | ||||||
|             'tractor.devx.debug._sync_pause_from_builtin' |  | ||||||
|         ) |  | ||||||
|         _state._runtime_vars['use_greenback'] = True |  | ||||||
|         bp_blocked = False |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         # TODO: disable `breakpoint()` by default (without |  | ||||||
|         # `greenback`) since it will break any multi-actor |  | ||||||
|         # usage by a clobbered TTY's stdstreams! |  | ||||||
|         def block_bps(*args, **kwargs): |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 'Trying to use `breakpoint()` eh?\n\n' |  | ||||||
|                 'Welp, `tractor` blocks `breakpoint()` built-in calls by default!\n' |  | ||||||
|                 'If you need to use it please install `greenback` and set ' |  | ||||||
|                 '`debug_mode=True` when opening the runtime ' |  | ||||||
|                 '(either via `.open_nursery()` or `open_root_actor()`)\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         sys.breakpointhook = block_bps |  | ||||||
|         # lol ok, |  | ||||||
|         # https://docs.python.org/3/library/sys.html#sys.breakpointhook |  | ||||||
|         os.environ['PYTHONBREAKPOINT'] = "0" |  | ||||||
|         bp_blocked = True |  | ||||||
| 
 |  | ||||||
|     try: |  | ||||||
|         yield bp_blocked |  | ||||||
|     finally: |  | ||||||
|         # restore any prior built-in `breakpoint()` hook state |  | ||||||
|         if builtin_bp_handler is not None: |  | ||||||
|             sys.breakpointhook = builtin_bp_handler |  | ||||||
| 
 |  | ||||||
|         if orig_bp_path is not None: |  | ||||||
|             os.environ['PYTHONBREAKPOINT'] = orig_bp_path |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             # clear env back to having no entry |  | ||||||
|             os.environ.pop('PYTHONBREAKPOINT', None) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def open_root_actor( | async def open_root_actor( | ||||||
|  | 
 | ||||||
|     *, |     *, | ||||||
|     # defaults are above |     # defaults are above | ||||||
|     registry_addrs: list[UnwrappedAddress]|None = None, |     arbiter_addr: tuple[str, int] | None = None, | ||||||
| 
 | 
 | ||||||
|     # defaults are above |     # defaults are above | ||||||
|     arbiter_addr: tuple[UnwrappedAddress]|None = None, |     registry_addr: tuple[str, int] | None = None, | ||||||
| 
 | 
 | ||||||
|     enable_transports: list[ |     name: str | None = 'root', | ||||||
|         # TODO, this should eventually be the pairs as |  | ||||||
|         # defined by (codec, proto) as on `MsgTransport. |  | ||||||
|         _state.TransportProtocolKey, |  | ||||||
|     ]|None = None, |  | ||||||
| 
 |  | ||||||
|     name: str|None = 'root', |  | ||||||
| 
 | 
 | ||||||
|     # either the `multiprocessing` start method: |     # either the `multiprocessing` start method: | ||||||
|     # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods |     # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods | ||||||
|     # OR `trio` (the new default). |     # OR `trio` (the new default). | ||||||
|     start_method: _spawn.SpawnMethodKey|None = None, |     start_method: _spawn.SpawnMethodKey | None = None, | ||||||
| 
 | 
 | ||||||
|     # enables the multi-process debugger support |     # enables the multi-process debugger support | ||||||
|     debug_mode: bool = False, |     debug_mode: bool = False, | ||||||
|     maybe_enable_greenback: bool = False,  # `.pause_from_sync()/breakpoint()` support |  | ||||||
|     # ^XXX NOTE^ the perf implications of use, |  | ||||||
|     # https://greenback.readthedocs.io/en/latest/principle.html#performance |  | ||||||
|     enable_stack_on_sig: bool = False, |  | ||||||
| 
 | 
 | ||||||
|     # internal logging |     # internal logging | ||||||
|     loglevel: str|None = None, |     loglevel: str | None = None, | ||||||
| 
 | 
 | ||||||
|     enable_modules: list|None = None, |     enable_modules: list | None = None, | ||||||
|     rpc_module_paths: list|None = None, |     rpc_module_paths: list | None = None, | ||||||
| 
 | 
 | ||||||
|     # NOTE: allow caller to ensure that only one registry exists | ) -> typing.Any: | ||||||
|     # and that this call creates it. |  | ||||||
|     ensure_registry: bool = False, |  | ||||||
| 
 |  | ||||||
|     hide_tb: bool = True, |  | ||||||
| 
 |  | ||||||
|     # XXX, proxied directly to `.devx.debug._maybe_enter_pm()` |  | ||||||
|     # for REPL-entry logic. |  | ||||||
|     debug_filter: Callable[ |  | ||||||
|         [BaseException|BaseExceptionGroup], |  | ||||||
|         bool, |  | ||||||
|     ] = lambda err: not is_multi_cancelled(err), |  | ||||||
| 
 |  | ||||||
|     # TODO, a way for actors to augment passing derived |  | ||||||
|     # read-only state to sublayers? |  | ||||||
|     # extra_rt_vars: dict|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> _runtime.Actor: |  | ||||||
|     ''' |     ''' | ||||||
|     Initialize the `tractor` runtime by starting a "root actor" in |     Runtime init entry point for ``tractor``. | ||||||
|     a parent-most Python process. |  | ||||||
| 
 |  | ||||||
|     All (disjoint) actor-process-trees-as-programs are created via |  | ||||||
|     this entrypoint. |  | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # XXX NEVER allow nested actor-trees! |     # Override the global debugger hook to make it play nice with | ||||||
|     if already_actor := _state.current_actor( |     # ``trio``, see much discussion in: | ||||||
|         err_on_no_runtime=False, |     # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018 | ||||||
|     ): |     builtin_bp_handler = sys.breakpointhook | ||||||
|         rtvs: dict[str, Any] = _state._runtime_vars |     orig_bp_path: str | None = os.environ.get('PYTHONBREAKPOINT', None) | ||||||
|         root_mailbox: list[str, int] = rtvs['_root_mailbox'] |     os.environ['PYTHONBREAKPOINT'] = 'tractor._debug._set_trace' | ||||||
|         registry_addrs: list[list[str, int]] = rtvs['_registry_addrs'] | 
 | ||||||
|         raise RuntimeFailure( |     # attempt to retreive ``trio``'s sigint handler and stash it | ||||||
|             f'A current actor already exists !?\n' |     # on our debugger lock state. | ||||||
|             f'({already_actor}\n' |     _debug.Lock._trio_handler = signal.getsignal(signal.SIGINT) | ||||||
|             f'\n' | 
 | ||||||
|             f'You can NOT open a second root actor from within ' |     # mark top most level process as root actor | ||||||
|             f'an existing tree and the current root of this ' |     _state._runtime_vars['_is_root'] = True | ||||||
|             f'already exists !!\n' | 
 | ||||||
|             f'\n' |     # caps based rpc list | ||||||
|             f'_root_mailbox: {root_mailbox!r}\n' |     enable_modules = enable_modules or [] | ||||||
|             f'_registry_addrs: {registry_addrs!r}\n' | 
 | ||||||
|  |     if rpc_module_paths: | ||||||
|  |         warnings.warn( | ||||||
|  |             "`rpc_module_paths` is now deprecated, use " | ||||||
|  |             " `enable_modules` instead.", | ||||||
|  |             DeprecationWarning, | ||||||
|  |             stacklevel=2, | ||||||
|  |         ) | ||||||
|  |         enable_modules.extend(rpc_module_paths) | ||||||
|  | 
 | ||||||
|  |     if start_method is not None: | ||||||
|  |         _spawn.try_set_start_method(start_method) | ||||||
|  | 
 | ||||||
|  |     if arbiter_addr is not None: | ||||||
|  |         warnings.warn( | ||||||
|  |             '`arbiter_addr` is now deprecated and has been renamed to' | ||||||
|  |             '`registry_addr`.\nUse that instead..', | ||||||
|  |             DeprecationWarning, | ||||||
|  |             stacklevel=2, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     async with maybe_block_bp( |     registry_addr = (host, port) = ( | ||||||
|         debug_mode=debug_mode, |         registry_addr | ||||||
|         maybe_enable_greenback=maybe_enable_greenback, |         or arbiter_addr | ||||||
|     ): |         or ( | ||||||
|         if enable_transports is None: |             _default_arbiter_host, | ||||||
|             enable_transports: list[str] = _state.current_ipc_protos() |             _default_arbiter_port, | ||||||
|         else: |         ) | ||||||
|             _state._runtime_vars['_enable_tpts'] = enable_transports |     ) | ||||||
| 
 | 
 | ||||||
|         # TODO! support multi-tpts per actor! |     loglevel = (loglevel or log._default_loglevel).upper() | ||||||
|         # Bo |  | ||||||
|         if not len(enable_transports) == 1: |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 f'No multi-tpt support yet!\n' |  | ||||||
|                 f'enable_transports={enable_transports!r}\n' |  | ||||||
|             ) |  | ||||||
| 
 | 
 | ||||||
|         _frame_stack.hide_runtime_frames() |     if debug_mode and _spawn._spawn_method == 'trio': | ||||||
|         __tracebackhide__: bool = hide_tb |         _state._runtime_vars['_debug_mode'] = True | ||||||
| 
 | 
 | ||||||
|         # attempt to retreive ``trio``'s sigint handler and stash it |         # expose internal debug module to every actor allowing | ||||||
|         # on our debugger lock state. |         # for use of ``await tractor.breakpoint()`` | ||||||
|         debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT) |         enable_modules.append('tractor._debug') | ||||||
| 
 | 
 | ||||||
|         # mark top most level process as root actor |         # if debug mode get's enabled *at least* use that level of | ||||||
|         _state._runtime_vars['_is_root'] = True |         # logging for some informative console prompts. | ||||||
|  |         if ( | ||||||
|  |             logging.getLevelName( | ||||||
|  |                 # lul, need the upper case for the -> int map? | ||||||
|  |                 # sweet "dynamic function behaviour" stdlib... | ||||||
|  |                 loglevel, | ||||||
|  |             ) > logging.getLevelName('PDB') | ||||||
|  |         ): | ||||||
|  |             loglevel = 'PDB' | ||||||
| 
 | 
 | ||||||
|         # caps based rpc list |     elif debug_mode: | ||||||
|         enable_modules = ( |         raise RuntimeError( | ||||||
|             enable_modules |             "Debug mode is only supported for the `trio` backend!" | ||||||
|             or |  | ||||||
|             [] |  | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         if rpc_module_paths: |     log.get_console_log(loglevel) | ||||||
|             warnings.warn( | 
 | ||||||
|                 "`rpc_module_paths` is now deprecated, use " |     try: | ||||||
|                 " `enable_modules` instead.", |         # make a temporary connection to see if an arbiter exists, | ||||||
|                 DeprecationWarning, |         # if one can't be made quickly we assume none exists. | ||||||
|                 stacklevel=2, |         arbiter_found = False | ||||||
|  | 
 | ||||||
|  |         # TODO: this connect-and-bail forces us to have to carefully | ||||||
|  |         # rewrap TCP 104-connection-reset errors as EOF so as to avoid | ||||||
|  |         # propagating cancel-causing errors to the channel-msg loop | ||||||
|  |         # machinery.  Likely it would be better to eventually have | ||||||
|  |         # a "discovery" protocol with basic handshake instead. | ||||||
|  |         with trio.move_on_after(1): | ||||||
|  |             async with _connect_chan(host, port): | ||||||
|  |                 arbiter_found = True | ||||||
|  | 
 | ||||||
|  |     except OSError: | ||||||
|  |         # TODO: make this a "discovery" log level? | ||||||
|  |         logger.warning(f"No actor registry found @ {host}:{port}") | ||||||
|  | 
 | ||||||
|  |     # create a local actor and start up its main routine/task | ||||||
|  |     if arbiter_found: | ||||||
|  | 
 | ||||||
|  |         # we were able to connect to an arbiter | ||||||
|  |         logger.info(f"Arbiter seems to exist @ {host}:{port}") | ||||||
|  | 
 | ||||||
|  |         actor = Actor( | ||||||
|  |             name or 'anonymous', | ||||||
|  |             arbiter_addr=registry_addr, | ||||||
|  |             loglevel=loglevel, | ||||||
|  |             enable_modules=enable_modules, | ||||||
|  |         ) | ||||||
|  |         host, port = (host, 0) | ||||||
|  | 
 | ||||||
|  |     else: | ||||||
|  |         # start this local actor as the arbiter (aka a regular actor who | ||||||
|  |         # manages the local registry of "mailboxes") | ||||||
|  | 
 | ||||||
|  |         # Note that if the current actor is the arbiter it is desirable | ||||||
|  |         # for it to stay up indefinitely until a re-election process has | ||||||
|  |         # taken place - which is not implemented yet FYI). | ||||||
|  | 
 | ||||||
|  |         actor = Arbiter( | ||||||
|  |             name or 'arbiter', | ||||||
|  |             arbiter_addr=registry_addr, | ||||||
|  |             loglevel=loglevel, | ||||||
|  |             enable_modules=enable_modules, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         # assign process-local actor | ||||||
|  |         _state._current_actor = actor | ||||||
|  | 
 | ||||||
|  |         # start local channel-server and fake the portal API | ||||||
|  |         # NOTE: this won't block since we provide the nursery | ||||||
|  |         logger.info(f"Starting local {actor} @ {host}:{port}") | ||||||
|  | 
 | ||||||
|  |         # start the actor runtime in a new task | ||||||
|  |         async with trio.open_nursery() as nursery: | ||||||
|  | 
 | ||||||
|  |             # ``_runtime.async_main()`` creates an internal nursery and | ||||||
|  |             # thus blocks here until the entire underlying actor tree has | ||||||
|  |             # terminated thereby conducting structured concurrency. | ||||||
|  | 
 | ||||||
|  |             await nursery.start( | ||||||
|  |                 partial( | ||||||
|  |                     async_main, | ||||||
|  |                     actor, | ||||||
|  |                     accept_addr=(host, port), | ||||||
|  |                     parent_addr=None | ||||||
|  |                 ) | ||||||
|             ) |             ) | ||||||
|             enable_modules.extend(rpc_module_paths) |  | ||||||
| 
 |  | ||||||
|         if start_method is not None: |  | ||||||
|             _spawn.try_set_start_method(start_method) |  | ||||||
| 
 |  | ||||||
|         # TODO! remove this ASAP! |  | ||||||
|         if arbiter_addr is not None: |  | ||||||
|             warnings.warn( |  | ||||||
|                 '`arbiter_addr` is now deprecated\n' |  | ||||||
|                 'Use `registry_addrs: list[tuple]` instead..', |  | ||||||
|                 DeprecationWarning, |  | ||||||
|                 stacklevel=2, |  | ||||||
|             ) |  | ||||||
|             uw_reg_addrs = [arbiter_addr] |  | ||||||
| 
 |  | ||||||
|         uw_reg_addrs = registry_addrs |  | ||||||
|         if not uw_reg_addrs: |  | ||||||
|             uw_reg_addrs: list[UnwrappedAddress] = default_lo_addrs( |  | ||||||
|                 enable_transports |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         # must exist by now since all below code is dependent |  | ||||||
|         assert uw_reg_addrs |  | ||||||
|         registry_addrs: list[Address] = [ |  | ||||||
|             wrap_address(uw_addr) |  | ||||||
|             for uw_addr in uw_reg_addrs |  | ||||||
|         ] |  | ||||||
| 
 |  | ||||||
|         loglevel = ( |  | ||||||
|             loglevel |  | ||||||
|             or log._default_loglevel |  | ||||||
|         ).upper() |  | ||||||
| 
 |  | ||||||
|         if ( |  | ||||||
|             debug_mode |  | ||||||
|             and |  | ||||||
|             _spawn._spawn_method == 'trio' |  | ||||||
|         ): |  | ||||||
|             _state._runtime_vars['_debug_mode'] = True |  | ||||||
| 
 |  | ||||||
|             # expose internal debug module to every actor allowing for |  | ||||||
|             # use of ``await tractor.pause()`` |  | ||||||
|             enable_modules.append('tractor.devx.debug._tty_lock') |  | ||||||
| 
 |  | ||||||
|             # if debug mode get's enabled *at least* use that level of |  | ||||||
|             # logging for some informative console prompts. |  | ||||||
|             if ( |  | ||||||
|                 logging.getLevelName( |  | ||||||
|                     # lul, need the upper case for the -> int map? |  | ||||||
|                     # sweet "dynamic function behaviour" stdlib... |  | ||||||
|                     loglevel, |  | ||||||
|                 ) > logging.getLevelName('PDB') |  | ||||||
|             ): |  | ||||||
|                 loglevel = 'PDB' |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|         elif debug_mode: |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 "Debug mode is only supported for the `trio` backend!" |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         assert loglevel |  | ||||||
|         _log = log.get_console_log(loglevel) |  | ||||||
|         assert _log |  | ||||||
| 
 |  | ||||||
|         # TODO: factor this into `.devx._stackscope`!! |  | ||||||
|         if ( |  | ||||||
|             debug_mode |  | ||||||
|             and |  | ||||||
|             enable_stack_on_sig |  | ||||||
|         ): |  | ||||||
|             from .devx._stackscope import enable_stack_on_sig |  | ||||||
|             enable_stack_on_sig() |  | ||||||
| 
 |  | ||||||
|         # closed into below ping task-func |  | ||||||
|         ponged_addrs: list[Address] = [] |  | ||||||
| 
 |  | ||||||
|         async def ping_tpt_socket( |  | ||||||
|             addr: Address, |  | ||||||
|             timeout: float = 1, |  | ||||||
|         ) -> None: |  | ||||||
|             ''' |  | ||||||
|             Attempt temporary connection to see if a registry is |  | ||||||
|             listening at the requested address by a tranport layer |  | ||||||
|             ping. |  | ||||||
| 
 |  | ||||||
|             If a connection can't be made quickly we assume none no |  | ||||||
|             server is listening at that addr. |  | ||||||
| 
 |  | ||||||
|             ''' |  | ||||||
|             try: |             try: | ||||||
|                 # TODO: this connect-and-bail forces us to have to |                 yield actor | ||||||
|                 # carefully rewrap TCP 104-connection-reset errors as | 
 | ||||||
|                 # EOF so as to avoid propagating cancel-causing errors |             except ( | ||||||
|                 # to the channel-msg loop machinery. Likely it would |                 Exception, | ||||||
|                 # be better to eventually have a "discovery" protocol |                 BaseExceptionGroup, | ||||||
|                 # with basic handshake instead? |             ) as err: | ||||||
|                 with trio.move_on_after(timeout): | 
 | ||||||
|                     async with _connect_chan(addr.unwrap()): |                 entered = await _debug._maybe_enter_pm(err) | ||||||
|                         ponged_addrs.append(addr) | 
 | ||||||
|  |                 if ( | ||||||
|  |                     not entered | ||||||
|  |                     and not is_multi_cancelled(err) | ||||||
|  |                 ): | ||||||
|  |                     logger.exception('Root actor crashed:\n') | ||||||
|  | 
 | ||||||
|  |                 # always re-raise | ||||||
|  |                 raise | ||||||
|  | 
 | ||||||
|  |             finally: | ||||||
|  |                 # NOTE: not sure if we'll ever need this but it's | ||||||
|  |                 # possibly better for even more determinism? | ||||||
|  |                 # logger.cancel( | ||||||
|  |                 #     f'Waiting on {len(nurseries)} nurseries in root..') | ||||||
|  |                 # nurseries = actor._actoruid2nursery.values() | ||||||
|  |                 # async with trio.open_nursery() as tempn: | ||||||
|  |                 #     for an in nurseries: | ||||||
|  |                 #         tempn.start_soon(an.exited.wait) | ||||||
| 
 | 
 | ||||||
|             except OSError: |  | ||||||
|                 # ?TODO, make this a "discovery" log level? |  | ||||||
|                 logger.info( |                 logger.info( | ||||||
|                     f'No root-actor registry found @ {addr!r}\n' |                     'Closing down root actor' | ||||||
|                 ) |                 ) | ||||||
|  |                 await actor.cancel(None)  # self cancel | ||||||
|  |     finally: | ||||||
|  |         _state._current_actor = None | ||||||
|  |         _state._last_actor_terminated = actor | ||||||
| 
 | 
 | ||||||
|         # !TODO, this is basically just another (abstract) |         # restore breakpoint hook state | ||||||
|         # happy-eyeballs, so we should try for formalize it somewhere |         sys.breakpointhook = builtin_bp_handler | ||||||
|         # in a `.[_]discovery` ya? |         if orig_bp_path is not None: | ||||||
|         # |             os.environ['PYTHONBREAKPOINT'] = orig_bp_path | ||||||
|         async with trio.open_nursery() as tn: |  | ||||||
|             for uw_addr in uw_reg_addrs: |  | ||||||
|                 addr: Address = wrap_address(uw_addr) |  | ||||||
|                 tn.start_soon( |  | ||||||
|                     ping_tpt_socket, |  | ||||||
|                     addr, |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|         trans_bind_addrs: list[UnwrappedAddress] = [] |  | ||||||
| 
 |  | ||||||
|         # Create a new local root-actor instance which IS NOT THE |  | ||||||
|         # REGISTRAR |  | ||||||
|         if ponged_addrs: |  | ||||||
|             if ensure_registry: |  | ||||||
|                 raise RuntimeError( |  | ||||||
|                      f'Failed to open `{name}`@{ponged_addrs}: ' |  | ||||||
|                     'registry socket(s) already bound' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             # we were able to connect to an arbiter |  | ||||||
|             logger.info( |  | ||||||
|                 f'Registry(s) seem(s) to exist @ {ponged_addrs}' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             actor = _runtime.Actor( |  | ||||||
|                 name=name or 'anonymous', |  | ||||||
|                 uuid=mk_uuid(), |  | ||||||
|                 registry_addrs=ponged_addrs, |  | ||||||
|                 loglevel=loglevel, |  | ||||||
|                 enable_modules=enable_modules, |  | ||||||
|             ) |  | ||||||
|             # **DO NOT** use the registry_addrs as the |  | ||||||
|             # ipc-transport-server's bind-addrs as this is |  | ||||||
|             # a new NON-registrar, ROOT-actor. |  | ||||||
|             # |  | ||||||
|             # XXX INSTEAD, bind random addrs using the same tpt |  | ||||||
|             # proto. |  | ||||||
|             for addr in ponged_addrs: |  | ||||||
|                 trans_bind_addrs.append( |  | ||||||
|                     addr.get_random( |  | ||||||
|                         bindspace=addr.bindspace, |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|         # Start this local actor as the "registrar", aka a regular |  | ||||||
|         # actor who manages the local registry of "mailboxes" of |  | ||||||
|         # other process-tree-local sub-actors. |  | ||||||
|         else: |         else: | ||||||
|             # NOTE that if the current actor IS THE REGISTAR, the |             # clear env back to having no entry | ||||||
|             # following init steps are taken: |             os.environ.pop('PYTHONBREAKPOINT') | ||||||
|             # - the tranport layer server is bound to each addr |  | ||||||
|             #   pair defined in provided registry_addrs, or the default. |  | ||||||
|             trans_bind_addrs = uw_reg_addrs |  | ||||||
| 
 | 
 | ||||||
|             # - it is normally desirable for any registrar to stay up |         logger.runtime("Root actor terminated") | ||||||
|             #   indefinitely until either all registered (child/sub) |  | ||||||
|             #   actors are terminated (via SC supervision) or, |  | ||||||
|             #   a re-election process has taken place. |  | ||||||
|             # NOTE: all of ^ which is not implemented yet - see: |  | ||||||
|             # https://github.com/goodboy/tractor/issues/216 |  | ||||||
|             # https://github.com/goodboy/tractor/pull/348 |  | ||||||
|             # https://github.com/goodboy/tractor/issues/296 |  | ||||||
| 
 |  | ||||||
|             # TODO: rename as `RootActor` or is that even necessary? |  | ||||||
|             actor = _runtime.Arbiter( |  | ||||||
|                 name=name or 'registrar', |  | ||||||
|                 uuid=mk_uuid(), |  | ||||||
|                 registry_addrs=registry_addrs, |  | ||||||
|                 loglevel=loglevel, |  | ||||||
|                 enable_modules=enable_modules, |  | ||||||
|             ) |  | ||||||
|             # XXX, in case the root actor runtime was actually run from |  | ||||||
|             # `tractor.to_asyncio.run_as_asyncio_guest()` and NOt |  | ||||||
|             # `.trio.run()`. |  | ||||||
|             actor._infected_aio = _state._runtime_vars['_is_infected_aio'] |  | ||||||
| 
 |  | ||||||
|         # NOTE, only set the loopback addr for the |  | ||||||
|         # process-tree-global "root" mailbox since all sub-actors |  | ||||||
|         # should be able to speak to their root actor over that |  | ||||||
|         # channel. |  | ||||||
|         raddrs: list[Address] = _state._runtime_vars['_root_addrs'] |  | ||||||
|         raddrs.extend(trans_bind_addrs) |  | ||||||
|         # TODO, remove once we have also removed all usage; |  | ||||||
|         # eventually all (root-)registry apis should expect > 1 addr. |  | ||||||
|         _state._runtime_vars['_root_mailbox'] = raddrs[0] |  | ||||||
| 
 |  | ||||||
|         # Start up main task set via core actor-runtime nurseries. |  | ||||||
|         try: |  | ||||||
|             # assign process-local actor |  | ||||||
|             _state._current_actor = actor |  | ||||||
| 
 |  | ||||||
|             # start local channel-server and fake the portal API |  | ||||||
|             # NOTE: this won't block since we provide the nursery |  | ||||||
|             report: str = f'Starting actor-runtime for {actor.aid.reprol()!r}\n' |  | ||||||
|             if reg_addrs := actor.registry_addrs: |  | ||||||
|                 report += ( |  | ||||||
|                     '-> Opening new registry @ ' |  | ||||||
|                     + |  | ||||||
|                     '\n'.join( |  | ||||||
|                         f'{addr}' for addr in reg_addrs |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|             logger.info(f'{report}\n') |  | ||||||
| 
 |  | ||||||
|             # start runtime in a bg sub-task, yield to caller. |  | ||||||
|             async with ( |  | ||||||
|                 collapse_eg(), |  | ||||||
|                 trio.open_nursery() as root_tn, |  | ||||||
| 
 |  | ||||||
|                 # ?TODO? finally-footgun below? |  | ||||||
|                 # -> see note on why shielding. |  | ||||||
|                 # maybe_raise_from_masking_exc(), |  | ||||||
|             ): |  | ||||||
|                 actor._root_tn = root_tn |  | ||||||
|                 # `_runtime.async_main()` creates an internal nursery |  | ||||||
|                 # and blocks here until any underlying actor(-process) |  | ||||||
|                 # tree has terminated thereby conducting so called |  | ||||||
|                 # "end-to-end" structured concurrency throughout an |  | ||||||
|                 # entire hierarchical python sub-process set; all |  | ||||||
|                 # "actor runtime" primitives are SC-compat and thus all |  | ||||||
|                 # transitively spawned actors/processes must be as |  | ||||||
|                 # well. |  | ||||||
|                 await root_tn.start( |  | ||||||
|                     partial( |  | ||||||
|                         _runtime.async_main, |  | ||||||
|                         actor, |  | ||||||
|                         accept_addrs=trans_bind_addrs, |  | ||||||
|                         parent_addr=None |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|                 try: |  | ||||||
|                     yield actor |  | ||||||
|                 except ( |  | ||||||
|                     Exception, |  | ||||||
|                     BaseExceptionGroup, |  | ||||||
|                 ) as err: |  | ||||||
| 
 |  | ||||||
|                     # TODO, in beginning to handle the subsubactor with |  | ||||||
|                     # crashed grandparent cases.. |  | ||||||
|                     # |  | ||||||
|                     # was_locked: bool = await debug.maybe_wait_for_debugger( |  | ||||||
|                     #     child_in_debug=True, |  | ||||||
|                     # ) |  | ||||||
|                     # XXX NOTE XXX see equiv note inside |  | ||||||
|                     # `._runtime.Actor._stream_handler()` where in the |  | ||||||
|                     # non-root or root-that-opened-this-mahually case we |  | ||||||
|                     # wait for the local actor-nursery to exit before |  | ||||||
|                     # exiting the transport channel handler. |  | ||||||
|                     entered: bool = await debug._maybe_enter_pm( |  | ||||||
|                         err, |  | ||||||
|                         api_frame=inspect.currentframe(), |  | ||||||
|                         debug_filter=debug_filter, |  | ||||||
| 
 |  | ||||||
|                         # XXX NOTE, required to debug root-actor |  | ||||||
|                         # crashes under cancellation conditions; so |  | ||||||
|                         # most of them! |  | ||||||
|                         shield=root_tn.cancel_scope.cancel_called, |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|                     if ( |  | ||||||
|                         not entered |  | ||||||
|                         and |  | ||||||
|                         not is_multi_cancelled( |  | ||||||
|                             err, |  | ||||||
|                         ) |  | ||||||
|                     ): |  | ||||||
|                         logger.exception( |  | ||||||
|                             'Root actor crashed\n' |  | ||||||
|                             f'>x)\n' |  | ||||||
|                             f' |_{actor}\n' |  | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                     # ALWAYS re-raise any error bubbled up from the |  | ||||||
|                     # runtime! |  | ||||||
|                     raise |  | ||||||
| 
 |  | ||||||
|                 finally: |  | ||||||
|                     # NOTE/TODO?, not sure if we'll ever need this but it's |  | ||||||
|                     # possibly better for even more determinism? |  | ||||||
|                     # logger.cancel( |  | ||||||
|                     #     f'Waiting on {len(nurseries)} nurseries in root..') |  | ||||||
|                     # nurseries = actor._actoruid2nursery.values() |  | ||||||
|                     # async with trio.open_nursery() as tempn: |  | ||||||
|                     #     for an in nurseries: |  | ||||||
|                     #         tempn.start_soon(an.exited.wait) |  | ||||||
| 
 |  | ||||||
|                     op_nested_actor_repr: str = _pformat.nest_from_op( |  | ||||||
|                         input_op='>) ', |  | ||||||
|                         text=actor.pformat(), |  | ||||||
|                         nest_prefix='|_', |  | ||||||
|                     ) |  | ||||||
|                     logger.info( |  | ||||||
|                         f'Closing down root actor\n' |  | ||||||
|                         f'{op_nested_actor_repr}' |  | ||||||
|                     ) |  | ||||||
|                     # XXX, THIS IS A *finally-footgun*! |  | ||||||
|                     # (also mentioned in with-block above) |  | ||||||
|                     # -> though already shields iternally it can |  | ||||||
|                     # taskc here and mask underlying errors raised in |  | ||||||
|                     # the try-block above? |  | ||||||
|                     with trio.CancelScope(shield=True): |  | ||||||
|                         await actor.cancel(None)  # self cancel |  | ||||||
|         finally: |  | ||||||
|             # revert all process-global runtime state |  | ||||||
|             if ( |  | ||||||
|                 debug_mode |  | ||||||
|                 and |  | ||||||
|                 _spawn._spawn_method == 'trio' |  | ||||||
|             ): |  | ||||||
|                 _state._runtime_vars['_debug_mode'] = False |  | ||||||
| 
 |  | ||||||
|             _state._current_actor = None |  | ||||||
|             _state._last_actor_terminated = actor |  | ||||||
| 
 |  | ||||||
|             sclang_repr: str = _pformat.nest_from_op( |  | ||||||
|                 input_op=')>', |  | ||||||
|                 text=actor.pformat(), |  | ||||||
|                 nest_prefix='|_', |  | ||||||
|                 nest_indent=1, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             logger.info( |  | ||||||
|                 f'Root actor terminated\n' |  | ||||||
|                 f'{sclang_repr}' |  | ||||||
|             ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def run_daemon( | def run_daemon( | ||||||
|  | @ -604,23 +281,19 @@ def run_daemon( | ||||||
| 
 | 
 | ||||||
|     # runtime kwargs |     # runtime kwargs | ||||||
|     name: str | None = 'root', |     name: str | None = 'root', | ||||||
|     registry_addrs: list[UnwrappedAddress]|None = None, |     registry_addr: tuple[str, int] = ( | ||||||
|  |         _default_arbiter_host, | ||||||
|  |         _default_arbiter_port, | ||||||
|  |     ), | ||||||
| 
 | 
 | ||||||
|     start_method: str | None = None, |     start_method: str | None = None, | ||||||
|     debug_mode: bool = False, |     debug_mode: bool = False, | ||||||
| 
 |  | ||||||
|     # TODO, support `infected_aio=True` mode by, |  | ||||||
|     # - calling the appropriate entrypoint-func from `.to_asyncio` |  | ||||||
|     # - maybe init-ing `greenback` as done above in |  | ||||||
|     #   `open_root_actor()`. |  | ||||||
| 
 |  | ||||||
|     **kwargs |     **kwargs | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     Spawn a root (daemon) actor which will respond to RPC; the main |     Spawn daemon actor which will respond to RPC; the main task simply | ||||||
|     task simply starts the runtime and then blocks via embedded |     starts the runtime and then sleeps forever. | ||||||
|     `trio.sleep_forever()`. |  | ||||||
| 
 | 
 | ||||||
|     This is a very minimal convenience wrapper around starting |     This is a very minimal convenience wrapper around starting | ||||||
|     a "run-until-cancelled" root actor which can be started with a set |     a "run-until-cancelled" root actor which can be started with a set | ||||||
|  | @ -633,8 +306,9 @@ def run_daemon( | ||||||
|         importlib.import_module(path) |         importlib.import_module(path) | ||||||
| 
 | 
 | ||||||
|     async def _main(): |     async def _main(): | ||||||
|  | 
 | ||||||
|         async with open_root_actor( |         async with open_root_actor( | ||||||
|             registry_addrs=registry_addrs, |             registry_addr=registry_addr, | ||||||
|             name=name, |             name=name, | ||||||
|             start_method=start_method, |             start_method=start_method, | ||||||
|             debug_mode=debug_mode, |             debug_mode=debug_mode, | ||||||
|  |  | ||||||
							
								
								
									
										1168
									
								
								tractor/_rpc.py
								
								
								
								
							
							
						
						
									
										1168
									
								
								tractor/_rpc.py
								
								
								
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										1927
									
								
								tractor/_runtime.py
								
								
								
								
							
							
						
						
									
										1927
									
								
								tractor/_runtime.py
								
								
								
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -34,38 +34,27 @@ from typing import ( | ||||||
| import trio | import trio | ||||||
| from trio import TaskStatus | from trio import TaskStatus | ||||||
| 
 | 
 | ||||||
| from .devx import ( | from ._debug import ( | ||||||
|     debug, |     maybe_wait_for_debugger, | ||||||
|     pformat as _pformat |     acquire_debug_lock, | ||||||
| ) | ) | ||||||
| from tractor._state import ( | from tractor._state import ( | ||||||
|     current_actor, |     current_actor, | ||||||
|     is_main_process, |     is_main_process, | ||||||
|     is_root_process, |     is_root_process, | ||||||
|     debug_mode, |     debug_mode, | ||||||
|     _runtime_vars, |  | ||||||
| ) | ) | ||||||
| from tractor.log import get_logger | from tractor.log import get_logger | ||||||
| from tractor._addr import UnwrappedAddress |  | ||||||
| from tractor._portal import Portal | from tractor._portal import Portal | ||||||
| from tractor._runtime import Actor | from tractor._runtime import Actor | ||||||
| from tractor._entry import _mp_main | from tractor._entry import _mp_main | ||||||
| from tractor._exceptions import ActorFailure | from tractor._exceptions import ActorFailure | ||||||
| from tractor.msg import ( |  | ||||||
|     types as msgtypes, |  | ||||||
|     pretty_struct, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from ipc import ( |  | ||||||
|         _server, |  | ||||||
|         Channel, |  | ||||||
|     ) |  | ||||||
|     from ._supervise import ActorNursery |     from ._supervise import ActorNursery | ||||||
|     ProcessType = TypeVar('ProcessType', mp.Process, trio.Process) |     ProcessType = TypeVar('ProcessType', mp.Process, trio.Process) | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| log = get_logger('tractor') | log = get_logger('tractor') | ||||||
| 
 | 
 | ||||||
| # placeholder for an mp start context if so using that backend | # placeholder for an mp start context if so using that backend | ||||||
|  | @ -150,13 +139,11 @@ async def exhaust_portal( | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__ = True |     __tracebackhide__ = True | ||||||
|     try: |     try: | ||||||
|         log.debug( |         log.debug(f"Waiting on final result from {actor.uid}") | ||||||
|             f'Waiting on final result from {actor.uid}' |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         # XXX: streams should never be reaped here since they should |         # XXX: streams should never be reaped here since they should | ||||||
|         # always be established and shutdown using a context manager api |         # always be established and shutdown using a context manager api | ||||||
|         final: Any = await portal.wait_for_result() |         final: Any = await portal.result() | ||||||
| 
 | 
 | ||||||
|     except ( |     except ( | ||||||
|         Exception, |         Exception, | ||||||
|  | @ -170,7 +157,7 @@ async def exhaust_portal( | ||||||
|         # TODO: merge with above? |         # TODO: merge with above? | ||||||
|         log.warning( |         log.warning( | ||||||
|             'Cancelled portal result waiter task:\n' |             'Cancelled portal result waiter task:\n' | ||||||
|             f'uid: {portal.channel.aid}\n' |             f'uid: {portal.channel.uid}\n' | ||||||
|             f'error: {err}\n' |             f'error: {err}\n' | ||||||
|         ) |         ) | ||||||
|         return err |         return err | ||||||
|  | @ -178,7 +165,7 @@ async def exhaust_portal( | ||||||
|     else: |     else: | ||||||
|         log.debug( |         log.debug( | ||||||
|             f'Returning final result from portal:\n' |             f'Returning final result from portal:\n' | ||||||
|             f'uid: {portal.channel.aid}\n' |             f'uid: {portal.channel.uid}\n' | ||||||
|             f'result: {final}\n' |             f'result: {final}\n' | ||||||
|         ) |         ) | ||||||
|         return final |         return final | ||||||
|  | @ -205,10 +192,7 @@ async def cancel_on_completion( | ||||||
|     # if this call errors we store the exception for later |     # if this call errors we store the exception for later | ||||||
|     # in ``errors`` which will be reraised inside |     # in ``errors`` which will be reraised inside | ||||||
|     # an exception group and we still send out a cancel request |     # an exception group and we still send out a cancel request | ||||||
|     result: Any|Exception = await exhaust_portal( |     result: Any|Exception = await exhaust_portal(portal, actor) | ||||||
|         portal, |  | ||||||
|         actor, |  | ||||||
|     ) |  | ||||||
|     if isinstance(result, Exception): |     if isinstance(result, Exception): | ||||||
|         errors[actor.uid]: Exception = result |         errors[actor.uid]: Exception = result | ||||||
|         log.cancel( |         log.cancel( | ||||||
|  | @ -230,8 +214,8 @@ async def cancel_on_completion( | ||||||
| 
 | 
 | ||||||
| async def hard_kill( | async def hard_kill( | ||||||
|     proc: trio.Process, |     proc: trio.Process, | ||||||
| 
 |  | ||||||
|     terminate_after: int = 1.6, |     terminate_after: int = 1.6, | ||||||
|  | 
 | ||||||
|     # NOTE: for mucking with `.pause()`-ing inside the runtime |     # NOTE: for mucking with `.pause()`-ing inside the runtime | ||||||
|     # whilst also hacking on it XD |     # whilst also hacking on it XD | ||||||
|     # terminate_after: int = 99999, |     # terminate_after: int = 99999, | ||||||
|  | @ -253,9 +237,8 @@ async def hard_kill( | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     log.cancel( |     log.cancel( | ||||||
|         'Terminating sub-proc\n' |         'Terminating sub-proc:\n' | ||||||
|         f'>x)\n' |         f'|_{proc}\n' | ||||||
|         f' |_{proc}\n' |  | ||||||
|     ) |     ) | ||||||
|     # NOTE: this timeout used to do nothing since we were shielding |     # NOTE: this timeout used to do nothing since we were shielding | ||||||
|     # the ``.wait()`` inside ``new_proc()`` which will pretty much |     # the ``.wait()`` inside ``new_proc()`` which will pretty much | ||||||
|  | @ -297,34 +280,18 @@ async def hard_kill( | ||||||
|     # zombies (as a feature) we ask the OS to do send in the |     # zombies (as a feature) we ask the OS to do send in the | ||||||
|     # removal swad as the last resort. |     # removal swad as the last resort. | ||||||
|     if cs.cancelled_caught: |     if cs.cancelled_caught: | ||||||
| 
 |  | ||||||
|         # TODO? attempt at intermediary-rent-sub |  | ||||||
|         # with child in debug lock? |  | ||||||
|         # |_https://github.com/goodboy/tractor/issues/320 |  | ||||||
|         # |  | ||||||
|         # if not is_root_process(): |  | ||||||
|         #     log.warning( |  | ||||||
|         #         'Attempting to acquire debug-REPL-lock before zombie reap!' |  | ||||||
|         #     ) |  | ||||||
|         #     with trio.CancelScope(shield=True): |  | ||||||
|         #         async with debug.acquire_debug_lock( |  | ||||||
|         #             subactor_uid=current_actor().uid, |  | ||||||
|         #         ) as _ctx: |  | ||||||
|         #             log.warning( |  | ||||||
|         #                 'Acquired debug lock, child ready to be killed ??\n' |  | ||||||
|         #             ) |  | ||||||
| 
 |  | ||||||
|         # TODO: toss in the skynet-logo face as ascii art? |         # TODO: toss in the skynet-logo face as ascii art? | ||||||
|         log.critical( |         log.critical( | ||||||
|             # 'Well, the #ZOMBIE_LORD_IS_HERE# to collect\n' |             # 'Well, the #ZOMBIE_LORD_IS_HERE# to collect\n' | ||||||
|             '#T-800 deployed to collect zombie B0\n' |             '#T-800 deployed to collect zombie B0\n' | ||||||
|             f'>x)\n' |             f'|\n' | ||||||
|             f' |_{proc}\n' |             f'|_{proc}\n' | ||||||
|         ) |         ) | ||||||
|         proc.kill() |         proc.kill() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def soft_kill( | async def soft_kill( | ||||||
|  | 
 | ||||||
|     proc: ProcessType, |     proc: ProcessType, | ||||||
|     wait_func: Callable[ |     wait_func: Callable[ | ||||||
|         [ProcessType], |         [ProcessType], | ||||||
|  | @ -344,31 +311,16 @@ async def soft_kill( | ||||||
|     see `.hard_kill()`). |     see `.hard_kill()`). | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     chan: Channel = portal.channel |     uid: tuple[str, str] = portal.channel.uid | ||||||
|     peer_aid: msgtypes.Aid = chan.aid |  | ||||||
|     try: |     try: | ||||||
|         log.cancel( |         log.cancel( | ||||||
|             f'Soft killing sub-actor via portal request\n' |             'Soft killing sub-actor via `Portal.cancel_actor()`\n' | ||||||
|             f'\n' |             f'|_{proc}\n' | ||||||
|             f'c)=> {peer_aid.reprol()}@[{chan.maddr}]\n' |  | ||||||
|             f'   |_{proc}\n' |  | ||||||
|         ) |         ) | ||||||
|         # wait on sub-proc to signal termination |         # wait on sub-proc to signal termination | ||||||
|         await wait_func(proc) |         await wait_func(proc) | ||||||
| 
 | 
 | ||||||
|     except trio.Cancelled: |     except trio.Cancelled: | ||||||
|         with trio.CancelScope(shield=True): |  | ||||||
|             await debug.maybe_wait_for_debugger( |  | ||||||
|                 child_in_debug=_runtime_vars.get( |  | ||||||
|                     '_debug_mode', False |  | ||||||
|                 ), |  | ||||||
|                 header_msg=( |  | ||||||
|                     'Delaying `soft_kill()` subproc reaper while debugger locked..\n' |  | ||||||
|                 ), |  | ||||||
|                 # TODO: need a diff value then default? |  | ||||||
|                 # poll_steps=9999999, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         # if cancelled during a soft wait, cancel the child |         # if cancelled during a soft wait, cancel the child | ||||||
|         # actor before entering the hard reap sequence |         # actor before entering the hard reap sequence | ||||||
|         # below. This means we try to do a graceful teardown |         # below. This means we try to do a graceful teardown | ||||||
|  | @ -399,7 +351,7 @@ async def soft_kill( | ||||||
|             if proc.poll() is None:  # type: ignore |             if proc.poll() is None:  # type: ignore | ||||||
|                 log.warning( |                 log.warning( | ||||||
|                     'Subactor still alive after cancel request?\n\n' |                     'Subactor still alive after cancel request?\n\n' | ||||||
|                     f'uid: {peer_aid}\n' |                     f'uid: {uid}\n' | ||||||
|                     f'|_{proc}\n' |                     f'|_{proc}\n' | ||||||
|                 ) |                 ) | ||||||
|                 n.cancel_scope.cancel() |                 n.cancel_scope.cancel() | ||||||
|  | @ -413,15 +365,14 @@ async def new_proc( | ||||||
|     errors: dict[tuple[str, str], Exception], |     errors: dict[tuple[str, str], Exception], | ||||||
| 
 | 
 | ||||||
|     # passed through to actor main |     # passed through to actor main | ||||||
|     bind_addrs: list[UnwrappedAddress], |     bind_addr: tuple[str, int], | ||||||
|     parent_addr: UnwrappedAddress, |     parent_addr: tuple[str, int], | ||||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child |     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||||
| 
 | 
 | ||||||
|     *, |     *, | ||||||
| 
 | 
 | ||||||
|     infect_asyncio: bool = False, |     infect_asyncio: bool = False, | ||||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, |     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||||
|     proc_kwargs: dict[str, any] = {} |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|  | @ -436,12 +387,11 @@ async def new_proc( | ||||||
|         actor_nursery, |         actor_nursery, | ||||||
|         subactor, |         subactor, | ||||||
|         errors, |         errors, | ||||||
|         bind_addrs, |         bind_addr, | ||||||
|         parent_addr, |         parent_addr, | ||||||
|         _runtime_vars,  # run time vars |         _runtime_vars,  # run time vars | ||||||
|         infect_asyncio=infect_asyncio, |         infect_asyncio=infect_asyncio, | ||||||
|         task_status=task_status, |         task_status=task_status, | ||||||
|         proc_kwargs=proc_kwargs |  | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -452,13 +402,12 @@ async def trio_proc( | ||||||
|     errors: dict[tuple[str, str], Exception], |     errors: dict[tuple[str, str], Exception], | ||||||
| 
 | 
 | ||||||
|     # passed through to actor main |     # passed through to actor main | ||||||
|     bind_addrs: list[UnwrappedAddress], |     bind_addr: tuple[str, int], | ||||||
|     parent_addr: UnwrappedAddress, |     parent_addr: tuple[str, int], | ||||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child |     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||||
|     *, |     *, | ||||||
|     infect_asyncio: bool = False, |     infect_asyncio: bool = False, | ||||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, |     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||||
|     proc_kwargs: dict[str, any] = {} |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|  | @ -480,9 +429,6 @@ async def trio_proc( | ||||||
|         # the OS; it otherwise can be passed via the parent channel if |         # the OS; it otherwise can be passed via the parent channel if | ||||||
|         # we prefer in the future (for privacy). |         # we prefer in the future (for privacy). | ||||||
|         "--uid", |         "--uid", | ||||||
|         # TODO, how to pass this over "wire" encodings like |  | ||||||
|         # cmdline args? |  | ||||||
|         # -[ ] maybe we can add an `msgtypes.Aid.min_tuple()` ? |  | ||||||
|         str(subactor.uid), |         str(subactor.uid), | ||||||
|         # Address the child must connect to on startup |         # Address the child must connect to on startup | ||||||
|         "--parent_addr", |         "--parent_addr", | ||||||
|  | @ -500,20 +446,19 @@ async def trio_proc( | ||||||
| 
 | 
 | ||||||
|     cancelled_during_spawn: bool = False |     cancelled_during_spawn: bool = False | ||||||
|     proc: trio.Process|None = None |     proc: trio.Process|None = None | ||||||
|     ipc_server: _server.Server = actor_nursery._actor.ipc_server |  | ||||||
|     try: |     try: | ||||||
|         try: |         try: | ||||||
|             proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd, **proc_kwargs) |             # TODO: needs ``trio_typing`` patch? | ||||||
|  |             proc = await trio.lowlevel.open_process(spawn_cmd) | ||||||
|             log.runtime( |             log.runtime( | ||||||
|                 f'Started new child subproc\n' |                 'Started new sub-proc\n' | ||||||
|                 f'(>\n' |                 f'|_{proc}\n' | ||||||
|                 f' |_{proc}\n' |  | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # wait for actor to spawn and connect back to us |             # wait for actor to spawn and connect back to us | ||||||
|             # channel should have handshake completed by the |             # channel should have handshake completed by the | ||||||
|             # local actor by the time we get a ref to it |             # local actor by the time we get a ref to it | ||||||
|             event, chan = await ipc_server.wait_for_peer( |             event, chan = await actor_nursery._actor.wait_for_peer( | ||||||
|                 subactor.uid |                 subactor.uid | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  | @ -525,10 +470,10 @@ async def trio_proc( | ||||||
|                 with trio.CancelScope(shield=True): |                 with trio.CancelScope(shield=True): | ||||||
|                     # don't clobber an ongoing pdb |                     # don't clobber an ongoing pdb | ||||||
|                     if is_root_process(): |                     if is_root_process(): | ||||||
|                         await debug.maybe_wait_for_debugger() |                         await maybe_wait_for_debugger() | ||||||
| 
 | 
 | ||||||
|                     elif proc is not None: |                     elif proc is not None: | ||||||
|                         async with debug.acquire_debug_lock(subactor.uid): |                         async with acquire_debug_lock(subactor.uid): | ||||||
|                             # soft wait on the proc to terminate |                             # soft wait on the proc to terminate | ||||||
|                             with trio.move_on_after(0.5): |                             with trio.move_on_after(0.5): | ||||||
|                                 await proc.wait() |                                 await proc.wait() | ||||||
|  | @ -544,25 +489,18 @@ async def trio_proc( | ||||||
|             portal, |             portal, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # send a "spawning specification" which configures the |         # send additional init params | ||||||
|         # initial runtime state of the child. |         await chan.send({ | ||||||
|         sspec = msgtypes.SpawnSpec( |             "_parent_main_data": subactor._parent_main_data, | ||||||
|             _parent_main_data=subactor._parent_main_data, |             "enable_modules": subactor.enable_modules, | ||||||
|             enable_modules=subactor.enable_modules, |             "_arb_addr": subactor._arb_addr, | ||||||
|             reg_addrs=subactor.reg_addrs, |             "bind_host": bind_addr[0], | ||||||
|             bind_addrs=bind_addrs, |             "bind_port": bind_addr[1], | ||||||
|             _runtime_vars=_runtime_vars, |             "_runtime_vars": _runtime_vars, | ||||||
|         ) |         }) | ||||||
|         log.runtime( |  | ||||||
|             f'Sending spawn spec to child\n' |  | ||||||
|             f'{{}}=> {chan.aid.reprol()!r}\n' |  | ||||||
|             f'\n' |  | ||||||
|             f'{pretty_struct.pformat(sspec)}\n' |  | ||||||
|         ) |  | ||||||
|         await chan.send(sspec) |  | ||||||
| 
 | 
 | ||||||
|         # track subactor in current nursery |         # track subactor in current nursery | ||||||
|         curr_actor: Actor = current_actor() |         curr_actor = current_actor() | ||||||
|         curr_actor._actoruid2nursery[subactor.uid] = actor_nursery |         curr_actor._actoruid2nursery[subactor.uid] = actor_nursery | ||||||
| 
 | 
 | ||||||
|         # resume caller at next checkpoint now that child is up |         # resume caller at next checkpoint now that child is up | ||||||
|  | @ -586,15 +524,15 @@ async def trio_proc( | ||||||
|             # condition. |             # condition. | ||||||
|             await soft_kill( |             await soft_kill( | ||||||
|                 proc, |                 proc, | ||||||
|                 trio.Process.wait,  # XXX, uses `pidfd_open()` below. |                 trio.Process.wait, | ||||||
|                 portal |                 portal | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # cancel result waiter that may have been spawned in |             # cancel result waiter that may have been spawned in | ||||||
|             # tandem if not done already |             # tandem if not done already | ||||||
|             log.cancel( |             log.cancel( | ||||||
|                 'Cancelling portal result reaper task\n' |                 'Cancelling existing result waiter task for ' | ||||||
|                 f'c)> {subactor.aid.reprol()!r}\n' |                 f'{subactor.uid}' | ||||||
|             ) |             ) | ||||||
|             nursery.cancel_scope.cancel() |             nursery.cancel_scope.cancel() | ||||||
| 
 | 
 | ||||||
|  | @ -603,31 +541,23 @@ async def trio_proc( | ||||||
|         # allowed! Do this **after** cancellation/teardown to avoid |         # allowed! Do this **after** cancellation/teardown to avoid | ||||||
|         # killing the process too early. |         # killing the process too early. | ||||||
|         if proc: |         if proc: | ||||||
|             reap_repr: str = _pformat.nest_from_op( |             log.cancel(f'Hard reap sequence starting for {subactor.uid}') | ||||||
|                 input_op='>x)', |  | ||||||
|                 text=subactor.pformat(), |  | ||||||
|             ) |  | ||||||
|             log.cancel( |  | ||||||
|                 f'Hard reap sequence starting for subactor\n' |  | ||||||
|                 f'{reap_repr}' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             with trio.CancelScope(shield=True): |             with trio.CancelScope(shield=True): | ||||||
|  | 
 | ||||||
|                 # don't clobber an ongoing pdb |                 # don't clobber an ongoing pdb | ||||||
|                 if cancelled_during_spawn: |                 if cancelled_during_spawn: | ||||||
|                     # Try again to avoid TTY clobbering. |                     # Try again to avoid TTY clobbering. | ||||||
|                     async with debug.acquire_debug_lock(subactor.uid): |                     async with acquire_debug_lock(subactor.uid): | ||||||
|                         with trio.move_on_after(0.5): |                         with trio.move_on_after(0.5): | ||||||
|                             await proc.wait() |                             await proc.wait() | ||||||
| 
 | 
 | ||||||
|                 await debug.maybe_wait_for_debugger( |                 log.pdb( | ||||||
|  |                     'Delaying subproc reaper while debugger locked..' | ||||||
|  |                 ) | ||||||
|  |                 await maybe_wait_for_debugger( | ||||||
|                     child_in_debug=_runtime_vars.get( |                     child_in_debug=_runtime_vars.get( | ||||||
|                         '_debug_mode', False |                         '_debug_mode', False | ||||||
|                     ), |                     ), | ||||||
|                     header_msg=( |  | ||||||
|                         'Delaying subproc reaper while debugger locked..\n' |  | ||||||
|                     ), |  | ||||||
| 
 |  | ||||||
|                     # TODO: need a diff value then default? |                     # TODO: need a diff value then default? | ||||||
|                     # poll_steps=9999999, |                     # poll_steps=9999999, | ||||||
|                 ) |                 ) | ||||||
|  | @ -649,7 +579,7 @@ async def trio_proc( | ||||||
|                 #     acquire the lock and get notified of who has it, |                 #     acquire the lock and get notified of who has it, | ||||||
|                 #     check that uid against our known children? |                 #     check that uid against our known children? | ||||||
|                 # this_uid: tuple[str, str] = current_actor().uid |                 # this_uid: tuple[str, str] = current_actor().uid | ||||||
|                 # await debug.acquire_debug_lock(this_uid) |                 # await acquire_debug_lock(this_uid) | ||||||
| 
 | 
 | ||||||
|                 if proc.poll() is None: |                 if proc.poll() is None: | ||||||
|                     log.cancel(f"Attempting to hard kill {proc}") |                     log.cancel(f"Attempting to hard kill {proc}") | ||||||
|  | @ -671,13 +601,12 @@ async def mp_proc( | ||||||
|     subactor: Actor, |     subactor: Actor, | ||||||
|     errors: dict[tuple[str, str], Exception], |     errors: dict[tuple[str, str], Exception], | ||||||
|     # passed through to actor main |     # passed through to actor main | ||||||
|     bind_addrs: list[UnwrappedAddress], |     bind_addr: tuple[str, int], | ||||||
|     parent_addr: UnwrappedAddress, |     parent_addr: tuple[str, int], | ||||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child |     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||||
|     *, |     *, | ||||||
|     infect_asyncio: bool = False, |     infect_asyncio: bool = False, | ||||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, |     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||||
|     proc_kwargs: dict[str, any] = {} |  | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|  | @ -730,7 +659,7 @@ async def mp_proc( | ||||||
|         target=_mp_main, |         target=_mp_main, | ||||||
|         args=( |         args=( | ||||||
|             subactor, |             subactor, | ||||||
|             bind_addrs, |             bind_addr, | ||||||
|             fs_info, |             fs_info, | ||||||
|             _spawn_method, |             _spawn_method, | ||||||
|             parent_addr, |             parent_addr, | ||||||
|  | @ -752,14 +681,12 @@ async def mp_proc( | ||||||
| 
 | 
 | ||||||
|     log.runtime(f"Started {proc}") |     log.runtime(f"Started {proc}") | ||||||
| 
 | 
 | ||||||
|     ipc_server: _server.Server = actor_nursery._actor.ipc_server |  | ||||||
|     try: |     try: | ||||||
|         # wait for actor to spawn and connect back to us |         # wait for actor to spawn and connect back to us | ||||||
|         # channel should have handshake completed by the |         # channel should have handshake completed by the | ||||||
|         # local actor by the time we get a ref to it |         # local actor by the time we get a ref to it | ||||||
|         event, chan = await ipc_server.wait_for_peer( |         event, chan = await actor_nursery._actor.wait_for_peer( | ||||||
|             subactor.uid, |             subactor.uid) | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         # XXX: monkey patch poll API to match the ``subprocess`` API.. |         # XXX: monkey patch poll API to match the ``subprocess`` API.. | ||||||
|         # not sure why they don't expose this but kk. |         # not sure why they don't expose this but kk. | ||||||
|  |  | ||||||
|  | @ -14,62 +14,26 @@ | ||||||
| # You should have received a copy of the GNU Affero General Public License | # You should have received a copy of the GNU Affero General Public License | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| ''' | """ | ||||||
| Per actor-process runtime state mgmt APIs. | Per process state | ||||||
| 
 | 
 | ||||||
| ''' | """ | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| from contextvars import ( |  | ||||||
|     ContextVar, |  | ||||||
| ) |  | ||||||
| import os |  | ||||||
| from pathlib import Path |  | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     Literal, |  | ||||||
|     TYPE_CHECKING, |     TYPE_CHECKING, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| from trio.lowlevel import current_task |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from ._runtime import Actor |     from ._runtime import Actor | ||||||
|     from ._context import Context |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # default IPC transport protocol settings |  | ||||||
| TransportProtocolKey = Literal[ |  | ||||||
|     'tcp', |  | ||||||
|     'uds', |  | ||||||
| ] |  | ||||||
| _def_tpt_proto: TransportProtocolKey = 'tcp' |  | ||||||
| 
 |  | ||||||
| _current_actor: Actor|None = None  # type: ignore # noqa | _current_actor: Actor|None = None  # type: ignore # noqa | ||||||
| _last_actor_terminated: Actor|None = None | _last_actor_terminated: Actor|None = None | ||||||
| 
 |  | ||||||
| # TODO: mk this a `msgspec.Struct`! |  | ||||||
| # -[ ] type out all fields obvi! |  | ||||||
| # -[ ] (eventually) mk wire-ready for monitoring? |  | ||||||
| _runtime_vars: dict[str, Any] = { | _runtime_vars: dict[str, Any] = { | ||||||
|     # root of actor-process tree info |     '_debug_mode': False, | ||||||
|     '_is_root': False,  # bool |     '_is_root': False, | ||||||
|     '_root_mailbox': (None, None),  # tuple[str|None, str|None] |     '_root_mailbox': (None, None) | ||||||
|     '_root_addrs': [],  # tuple[str|None, str|None] |  | ||||||
| 
 |  | ||||||
|     # parent->chld ipc protocol caps |  | ||||||
|     '_enable_tpts': [_def_tpt_proto], |  | ||||||
| 
 |  | ||||||
|     # registrar info |  | ||||||
|     '_registry_addrs': [], |  | ||||||
| 
 |  | ||||||
|     # `debug_mode: bool` settings |  | ||||||
|     '_debug_mode': False,  # bool |  | ||||||
|     'repl_fixture': False,  # |AbstractContextManager[bool] |  | ||||||
|     # for `tractor.pause_from_sync()` & `breakpoint()` support |  | ||||||
|     'use_greenback': False, |  | ||||||
| 
 |  | ||||||
|     # infected-`asyncio`-mode: `trio` running as guest. |  | ||||||
|     '_is_infected_aio': False, |  | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -94,10 +58,9 @@ def current_actor( | ||||||
|     ''' |     ''' | ||||||
|     if ( |     if ( | ||||||
|         err_on_no_runtime |         err_on_no_runtime | ||||||
|         and |         and _current_actor is None | ||||||
|         _current_actor is None |  | ||||||
|     ): |     ): | ||||||
|         msg: str = 'No local actor has been initialized yet?\n' |         msg: str = 'No local actor has been initialized yet' | ||||||
|         from ._exceptions import NoRuntime |         from ._exceptions import NoRuntime | ||||||
| 
 | 
 | ||||||
|         if last := last_actor(): |         if last := last_actor(): | ||||||
|  | @ -110,8 +73,8 @@ def current_actor( | ||||||
|         # this process. |         # this process. | ||||||
|         else: |         else: | ||||||
|             msg += ( |             msg += ( | ||||||
|                 # 'No last actor found?\n' |                 'No last actor found?\n' | ||||||
|                 '\nDid you forget to call one of,\n' |                 'Did you forget to open one of:\n\n' | ||||||
|                 '- `tractor.open_root_actor()`\n' |                 '- `tractor.open_root_actor()`\n' | ||||||
|                 '- `tractor.open_nursery()`\n' |                 '- `tractor.open_nursery()`\n' | ||||||
|             ) |             ) | ||||||
|  | @ -121,7 +84,7 @@ def current_actor( | ||||||
|     return _current_actor |     return _current_actor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def is_root_process() -> bool: | def is_main_process() -> bool: | ||||||
|     ''' |     ''' | ||||||
|     Bool determining if this actor is running in the top-most process. |     Bool determining if this actor is running in the top-most process. | ||||||
| 
 | 
 | ||||||
|  | @ -130,10 +93,7 @@ def is_root_process() -> bool: | ||||||
|     return mp.current_process().name == 'MainProcess' |     return mp.current_process().name == 'MainProcess' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| is_main_process = is_root_process | def debug_mode() -> bool: | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def is_debug_mode() -> bool: |  | ||||||
|     ''' |     ''' | ||||||
|     Bool determining if "debug mode" is on which enables |     Bool determining if "debug mode" is on which enables | ||||||
|     remote subactor pdb entry on crashes. |     remote subactor pdb entry on crashes. | ||||||
|  | @ -142,62 +102,5 @@ def is_debug_mode() -> bool: | ||||||
|     return bool(_runtime_vars['_debug_mode']) |     return bool(_runtime_vars['_debug_mode']) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| debug_mode = is_debug_mode |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def is_root_process() -> bool: | def is_root_process() -> bool: | ||||||
|     return _runtime_vars['_is_root'] |     return _runtime_vars['_is_root'] | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _ctxvar_Context: ContextVar[Context] = ContextVar( |  | ||||||
|     'ipc_context', |  | ||||||
|     default=None, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def current_ipc_ctx( |  | ||||||
|     error_on_not_set: bool = False, |  | ||||||
| ) -> Context|None: |  | ||||||
|     ctx: Context = _ctxvar_Context.get() |  | ||||||
| 
 |  | ||||||
|     if ( |  | ||||||
|         not ctx |  | ||||||
|         and error_on_not_set |  | ||||||
|     ): |  | ||||||
|         from ._exceptions import InternalError |  | ||||||
|         raise InternalError( |  | ||||||
|             'No IPC context has been allocated for this task yet?\n' |  | ||||||
|             f'|_{current_task()}\n' |  | ||||||
|         ) |  | ||||||
|     return ctx |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # std ODE (mutable) app state location |  | ||||||
| _rtdir: Path = Path(os.environ['XDG_RUNTIME_DIR']) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def get_rt_dir( |  | ||||||
|     subdir: str = 'tractor' |  | ||||||
| ) -> Path: |  | ||||||
|     ''' |  | ||||||
|     Return the user "runtime dir" where most userspace apps stick |  | ||||||
|     their IPC and cache related system util-files; we take hold |  | ||||||
|     of a `'XDG_RUNTIME_DIR'/tractor/` subdir by default. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     rtdir: Path = _rtdir / subdir |  | ||||||
|     if not rtdir.is_dir(): |  | ||||||
|         rtdir.mkdir() |  | ||||||
|     return rtdir |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def current_ipc_protos() -> list[str]: |  | ||||||
|     ''' |  | ||||||
|     Return the list of IPC transport protocol keys currently |  | ||||||
|     in use by this actor. |  | ||||||
| 
 |  | ||||||
|     The keys are as declared by `MsgTransport` and `Address` |  | ||||||
|     concrete-backend sub-types defined throughout `tractor.ipc`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     return _runtime_vars['_enable_tpts'] |  | ||||||
|  |  | ||||||
|  | @ -26,7 +26,6 @@ import inspect | ||||||
| from pprint import pformat | from pprint import pformat | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     AsyncGenerator, |  | ||||||
|     Callable, |     Callable, | ||||||
|     AsyncIterator, |     AsyncIterator, | ||||||
|     TYPE_CHECKING, |     TYPE_CHECKING, | ||||||
|  | @ -36,27 +35,17 @@ import warnings | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
| from ._exceptions import ( | from ._exceptions import ( | ||||||
|  |     _raise_from_no_key_in_msg, | ||||||
|     ContextCancelled, |     ContextCancelled, | ||||||
|     RemoteActorError, |  | ||||||
| ) | ) | ||||||
| from .log import get_logger | from .log import get_logger | ||||||
| from .trionics import ( | from .trionics import ( | ||||||
|     broadcast_receiver, |     broadcast_receiver, | ||||||
|     BroadcastReceiver, |     BroadcastReceiver, | ||||||
| ) | ) | ||||||
| from tractor.msg import ( |  | ||||||
|     Error, |  | ||||||
|     Return, |  | ||||||
|     Stop, |  | ||||||
|     MsgType, |  | ||||||
|     PayloadT, |  | ||||||
|     Yield, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from ._runtime import Actor |  | ||||||
|     from ._context import Context |     from ._context import Context | ||||||
|     from .ipc import Channel |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
|  | @ -70,9 +59,10 @@ log = get_logger(__name__) | ||||||
| class MsgStream(trio.abc.Channel): | class MsgStream(trio.abc.Channel): | ||||||
|     ''' |     ''' | ||||||
|     A bidirectional message stream for receiving logically sequenced |     A bidirectional message stream for receiving logically sequenced | ||||||
|     values over an inter-actor IPC `Channel`. |     values over an inter-actor IPC ``Channel``. | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
|  |     This is the type returned to a local task which entered either | ||||||
|  |     ``Portal.open_stream_from()`` or ``Context.open_stream()``. | ||||||
| 
 | 
 | ||||||
|     Termination rules: |     Termination rules: | ||||||
| 
 | 
 | ||||||
|  | @ -88,109 +78,46 @@ class MsgStream(trio.abc.Channel): | ||||||
|         self, |         self, | ||||||
|         ctx: Context,  # typing: ignore # noqa |         ctx: Context,  # typing: ignore # noqa | ||||||
|         rx_chan: trio.MemoryReceiveChannel, |         rx_chan: trio.MemoryReceiveChannel, | ||||||
|         _broadcaster: BroadcastReceiver|None = None, |         _broadcaster: BroadcastReceiver | None = None, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         self._ctx = ctx |         self._ctx = ctx | ||||||
|         self._rx_chan = rx_chan |         self._rx_chan = rx_chan | ||||||
|         self._broadcaster = _broadcaster |         self._broadcaster = _broadcaster | ||||||
| 
 | 
 | ||||||
|         # any actual IPC msg which is effectively an `EndOfStream` |  | ||||||
|         self._stop_msg: bool|Stop = False |  | ||||||
| 
 |  | ||||||
|         # flag to denote end of stream |         # flag to denote end of stream | ||||||
|         self._eoc: bool|trio.EndOfChannel = False |         self._eoc: bool|trio.EndOfChannel = False | ||||||
|         self._closed: bool|trio.ClosedResourceError = False |         self._closed: bool|trio.ClosedResourceError = False | ||||||
| 
 | 
 | ||||||
|     @property |  | ||||||
|     def ctx(self) -> Context: |  | ||||||
|         ''' |  | ||||||
|         A read-only ref to this stream's inter-actor-task `Context`. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         return self._ctx |  | ||||||
| 
 |  | ||||||
|     @property |  | ||||||
|     def chan(self) -> Channel: |  | ||||||
|         ''' |  | ||||||
|         Ref to the containing `Context`'s transport `Channel`. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         return self._ctx.chan |  | ||||||
| 
 |  | ||||||
|     # TODO: could we make this a direct method bind to `PldRx`? |  | ||||||
|     # -> receive_nowait = PldRx.recv_pld |  | ||||||
|     # |_ means latter would have to accept `MsgStream`-as-`self`? |  | ||||||
|     #  => should be fine as long as, |  | ||||||
|     #  -[ ] both define `._rx_chan` |  | ||||||
|     #  -[ ] .ctx is bound into `PldRx` using a `@cm`? |  | ||||||
|     # |  | ||||||
|     # delegate directly to underlying mem channel |     # delegate directly to underlying mem channel | ||||||
|     def receive_nowait( |     def receive_nowait( | ||||||
|         self, |         self, | ||||||
|         expect_msg: MsgType = Yield, |         allow_msg_keys: list[str] = ['yield'], | ||||||
|     ) -> PayloadT: |     ): | ||||||
|         ctx: Context = self._ctx |         msg: dict = self._rx_chan.receive_nowait() | ||||||
|         ( |         for ( | ||||||
|             msg, |             i, | ||||||
|             pld, |             key, | ||||||
|         ) = ctx._pld_rx.recv_msg_nowait( |         ) in enumerate(allow_msg_keys): | ||||||
|             ipc=self, |             try: | ||||||
|             expect_msg=expect_msg, |                 return msg[key] | ||||||
|         ) |             except KeyError as kerr: | ||||||
|  |                 if i < (len(allow_msg_keys) - 1): | ||||||
|  |                     continue | ||||||
| 
 | 
 | ||||||
|         # ?TODO, maybe factor this into a hyper-common `unwrap_pld()` |                 _raise_from_no_key_in_msg( | ||||||
|         # |                     ctx=self._ctx, | ||||||
|         match msg: |                     msg=msg, | ||||||
| 
 |                     src_err=kerr, | ||||||
|             # XXX, these never seems to ever hit? cool? |                     log=log, | ||||||
|             case Stop(): |                     expect_key=key, | ||||||
|                 log.cancel( |                     stream=self, | ||||||
|                     f'Msg-stream was ended via stop msg\n' |  | ||||||
|                     f'{msg}' |  | ||||||
|                 ) |                 ) | ||||||
|             case Error(): |  | ||||||
|                 log.error( |  | ||||||
|                     f'Msg-stream was ended via error msg\n' |  | ||||||
|                     f'{msg}' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             # XXX NOTE, always set any final result on the ctx to |  | ||||||
|             # avoid teardown race conditions where previously this msg |  | ||||||
|             # would be consumed silently (by `.aclose()` doing its |  | ||||||
|             # own "msg drain loop" but WITHOUT those `drained: lists[MsgType]` |  | ||||||
|             # being post-close-processed! |  | ||||||
|             # |  | ||||||
|             # !!TODO, see the equiv todo-comment in `.receive()` |  | ||||||
|             # around the `if drained:` where we should prolly |  | ||||||
|             # ACTUALLY be doing this post-close processing?? |  | ||||||
|             # |  | ||||||
|             case Return(pld=pld): |  | ||||||
|                 log.warning( |  | ||||||
|                     f'Msg-stream final result msg for IPC ctx?\n' |  | ||||||
|                     f'{msg}' |  | ||||||
|                 ) |  | ||||||
|                 # XXX TODO, this **should be covered** by higher |  | ||||||
|                 # scoped runtime-side method calls such as |  | ||||||
|                 # `Context._deliver_msg()`, so you should never |  | ||||||
|                 # really see the warning above or else something |  | ||||||
|                 # racy/out-of-order is likely going on between |  | ||||||
|                 # actor-runtime-side push tasks and the user-app-side |  | ||||||
|                 # consume tasks! |  | ||||||
|                 # -[ ] figure out that set of race cases and fix! |  | ||||||
|                 # -[ ] possibly return the `msg` given an input |  | ||||||
|                 #     arg-flag is set so we can process the `Return` |  | ||||||
|                 #     from the `.aclose()` caller? |  | ||||||
|                 # |  | ||||||
|                 # breakpoint()  # to debug this RACE CASE! |  | ||||||
|                 ctx._result = pld |  | ||||||
|                 ctx._outcome_msg = msg |  | ||||||
| 
 |  | ||||||
|         return pld |  | ||||||
| 
 | 
 | ||||||
|     async def receive( |     async def receive( | ||||||
|         self, |         self, | ||||||
|         hide_tb: bool = False, | 
 | ||||||
|  |         hide_tb: bool = True, | ||||||
|     ): |     ): | ||||||
|         ''' |         ''' | ||||||
|         Receive a single msg from the IPC transport, the next in |         Receive a single msg from the IPC transport, the next in | ||||||
|  | @ -200,8 +127,9 @@ class MsgStream(trio.abc.Channel): | ||||||
|         ''' |         ''' | ||||||
|         __tracebackhide__: bool = hide_tb |         __tracebackhide__: bool = hide_tb | ||||||
| 
 | 
 | ||||||
|         # NOTE FYI: `trio.ReceiveChannel` implements EOC handling as |         # NOTE: `trio.ReceiveChannel` implements | ||||||
|         # follows (aka uses it to gracefully exit async for loops): |         # EOC handling as follows (aka uses it | ||||||
|  |         # to gracefully exit async for loops): | ||||||
|         # |         # | ||||||
|         # async def __anext__(self) -> ReceiveType: |         # async def __anext__(self) -> ReceiveType: | ||||||
|         #     try: |         #     try: | ||||||
|  | @ -209,7 +137,7 @@ class MsgStream(trio.abc.Channel): | ||||||
|         #     except trio.EndOfChannel: |         #     except trio.EndOfChannel: | ||||||
|         #         raise StopAsyncIteration |         #         raise StopAsyncIteration | ||||||
|         # |         # | ||||||
|         # see `.aclose()` for notes on the old behaviour prior to |         # see ``.aclose()`` for notes on the old behaviour prior to | ||||||
|         # introducing this |         # introducing this | ||||||
|         if self._eoc: |         if self._eoc: | ||||||
|             raise self._eoc |             raise self._eoc | ||||||
|  | @ -219,33 +147,62 @@ class MsgStream(trio.abc.Channel): | ||||||
| 
 | 
 | ||||||
|         src_err: Exception|None = None  # orig tb |         src_err: Exception|None = None  # orig tb | ||||||
|         try: |         try: | ||||||
|             ctx: Context = self._ctx |             try: | ||||||
|             pld = await ctx._pld_rx.recv_pld( |                 msg = await self._rx_chan.receive() | ||||||
|                 ipc=self, |                 return msg['yield'] | ||||||
|                 expect_msg=Yield, | 
 | ||||||
|             ) |             except KeyError as kerr: | ||||||
|             return pld |                 src_err = kerr | ||||||
|  | 
 | ||||||
|  |                 # NOTE: may raise any of the below error types | ||||||
|  |                 # includg EoC when a 'stop' msg is found. | ||||||
|  |                 _raise_from_no_key_in_msg( | ||||||
|  |                     ctx=self._ctx, | ||||||
|  |                     msg=msg, | ||||||
|  |                     src_err=kerr, | ||||||
|  |                     log=log, | ||||||
|  |                     expect_key='yield', | ||||||
|  |                     stream=self, | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
|         # XXX: the stream terminates on either of: |         # XXX: the stream terminates on either of: | ||||||
|         # - `self._rx_chan.receive()` raising  after manual closure |         # - via `self._rx_chan.receive()` raising  after manual closure | ||||||
|         #   by the rpc-runtime, |         #   by the rpc-runtime OR, | ||||||
|         #   OR |         # - via a received `{'stop': ...}` msg from remote side. | ||||||
|         # - via a `Stop`-msg received from remote peer task. |         #   |_ NOTE: previously this was triggered by calling | ||||||
|         #   NOTE |         #   ``._rx_chan.aclose()`` on the send side of the channel inside | ||||||
|         #   |_ previously this was triggered by calling |         #   `Actor._push_result()`, but now the 'stop' message handling | ||||||
|         #   `._rx_chan.aclose()` on the send side of the channel |         #   has been put just above inside `_raise_from_no_key_in_msg()`. | ||||||
|         #   inside `Actor._deliver_ctx_payload()`, but now the 'stop' |         except ( | ||||||
|         #   message handling gets delegated to `PldRFx.recv_pld()` |             trio.EndOfChannel, | ||||||
|         #   internals. |         ) as eoc: | ||||||
|         except trio.EndOfChannel as eoc: |  | ||||||
|             # a graceful stream finished signal |  | ||||||
|             self._eoc = eoc |  | ||||||
|             src_err = eoc |             src_err = eoc | ||||||
|  |             self._eoc = eoc | ||||||
| 
 | 
 | ||||||
|         # a `ClosedResourceError` indicates that the internal feeder |             # TODO: Locally, we want to close this stream gracefully, by | ||||||
|         # memory receive channel was closed likely by the runtime |             # terminating any local consumers tasks deterministically. | ||||||
|         # after the associated transport-channel disconnected or |             # Once we have broadcast support, we **don't** want to be | ||||||
|         # broke. |             # closing this stream and not flushing a final value to | ||||||
|  |             # remaining (clone) consumers who may not have been | ||||||
|  |             # scheduled to receive it yet. | ||||||
|  |             # try: | ||||||
|  |             #     maybe_err_msg_or_res: dict = self._rx_chan.receive_nowait() | ||||||
|  |             #     if maybe_err_msg_or_res: | ||||||
|  |             #         log.warning( | ||||||
|  |             #             'Discarding un-processed msg:\n' | ||||||
|  |             #             f'{maybe_err_msg_or_res}' | ||||||
|  |             #         ) | ||||||
|  |             # except trio.WouldBlock: | ||||||
|  |             #     # no queued msgs that might be another remote | ||||||
|  |             #     # error, so just raise the original EoC | ||||||
|  |             #     pass | ||||||
|  | 
 | ||||||
|  |             # raise eoc | ||||||
|  | 
 | ||||||
|  |         # a ``ClosedResourceError`` indicates that the internal | ||||||
|  |         # feeder memory receive channel was closed likely by the | ||||||
|  |         # runtime after the associated transport-channel | ||||||
|  |         # disconnected or broke. | ||||||
|         except trio.ClosedResourceError as cre:  # by self._rx_chan.receive() |         except trio.ClosedResourceError as cre:  # by self._rx_chan.receive() | ||||||
|             src_err = cre |             src_err = cre | ||||||
|             log.warning( |             log.warning( | ||||||
|  | @ -257,60 +214,47 @@ class MsgStream(trio.abc.Channel): | ||||||
|         # terminated and signal this local iterator to stop |         # terminated and signal this local iterator to stop | ||||||
|         drained: list[Exception|dict] = await self.aclose() |         drained: list[Exception|dict] = await self.aclose() | ||||||
|         if drained: |         if drained: | ||||||
|         #  ^^^^^^^^TODO? pass these to the `._ctx._drained_msgs: |             # from .devx import pause | ||||||
|         #  deque` and then iterate them as part of any |             # await pause() | ||||||
|         #  `.wait_for_result()` call? |  | ||||||
|         # |  | ||||||
|         # -[ ] move the match-case processing from |  | ||||||
|         #     `.receive_nowait()` instead to right here, use it from |  | ||||||
|         #     a for msg in drained:` post-proc loop? |  | ||||||
|         # |  | ||||||
|             log.warning( |             log.warning( | ||||||
|                 'Drained context msgs during closure\n\n' |                 'Drained context msgs during closure:\n' | ||||||
|                 f'{drained}' |                 f'{drained}' | ||||||
|             ) |             ) | ||||||
|  |         # TODO: pass these to the `._ctx._drained_msgs: deque` | ||||||
|  |         # and then iterate them as part of any `.result()` call? | ||||||
| 
 | 
 | ||||||
|         # NOTE XXX: if the context was cancelled or remote-errored |         # NOTE XXX: if the context was cancelled or remote-errored | ||||||
|         # but we received the stream close msg first, we |         # but we received the stream close msg first, we | ||||||
|         # probably want to instead raise the remote error |         # probably want to instead raise the remote error | ||||||
|         # over the end-of-stream connection error since likely |         # over the end-of-stream connection error since likely | ||||||
|         # the remote error was the source cause? |         # the remote error was the source cause? | ||||||
|         # ctx: Context = self._ctx |         ctx: Context = self._ctx | ||||||
|         ctx.maybe_raise( |         ctx.maybe_raise( | ||||||
|             raise_ctxc_from_self_call=True, |             raise_ctxc_from_self_call=True, | ||||||
|             from_src_exc=src_err, |  | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # propagate any error but hide low-level frame details from |         # propagate any error but hide low-level frame details | ||||||
|         # the caller by default for console/debug-REPL noise |         # from the caller by default for debug noise reduction. | ||||||
|         # reduction. |  | ||||||
|         if ( |         if ( | ||||||
|             hide_tb |             hide_tb | ||||||
|             and ( |  | ||||||
| 
 | 
 | ||||||
|                 # XXX NOTE special conditions: don't reraise on |             # XXX NOTE XXX don't reraise on certain | ||||||
|                 # certain stream-specific internal error types like, |             # stream-specific internal error types like, | ||||||
|                 # |             # | ||||||
|                 # - `trio.EoC` since we want to use the exact instance |             # - `trio.EoC` since we want to use the exact instance | ||||||
|                 #   to ensure that it is the error that bubbles upward |             #   to ensure that it is the error that bubbles upward | ||||||
|                 #   for silent absorption by `Context.open_stream()`. |             #   for silent absorption by `Context.open_stream()`. | ||||||
|                 not self._eoc |             and not self._eoc | ||||||
| 
 | 
 | ||||||
|                 # - `RemoteActorError` (or subtypes like ctxc) |             # - `RemoteActorError` (or `ContextCancelled`) if it gets | ||||||
|                 #    since we want to present the error as though it is |             #   raised from `_raise_from_no_key_in_msg()` since we | ||||||
|                 #    "sourced" directly from this `.receive()` call and |             #   want the same (as the above bullet) for any | ||||||
|                 #    generally NOT include the stack frames raised from |             #   `.open_context()` block bubbled error raised by | ||||||
|                 #    inside the `PldRx` and/or the transport stack |             #   any nearby ctx API remote-failures. | ||||||
|                 #    layers. |             # and not isinstance(src_err, RemoteActorError) | ||||||
|                 or isinstance(src_err, RemoteActorError) |  | ||||||
|             ) |  | ||||||
|         ): |         ): | ||||||
|             raise type(src_err)(*src_err.args) from src_err |             raise type(src_err)(*src_err.args) from src_err | ||||||
|         else: |         else: | ||||||
|             # for any non-graceful-EOC we want to NOT hide this frame |  | ||||||
|             if not self._eoc: |  | ||||||
|                 __tracebackhide__: bool = False |  | ||||||
| 
 |  | ||||||
|             raise src_err |             raise src_err | ||||||
| 
 | 
 | ||||||
|     async def aclose(self) -> list[Exception|dict]: |     async def aclose(self) -> list[Exception|dict]: | ||||||
|  | @ -327,6 +271,9 @@ class MsgStream(trio.abc.Channel): | ||||||
|          - more or less we try to maintain adherance to trio's `.aclose()` semantics: |          - more or less we try to maintain adherance to trio's `.aclose()` semantics: | ||||||
|            https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose |            https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||||
|         ''' |         ''' | ||||||
|  | 
 | ||||||
|  |         # rx_chan = self._rx_chan | ||||||
|  | 
 | ||||||
|         # XXX NOTE XXX |         # XXX NOTE XXX | ||||||
|         # it's SUPER IMPORTANT that we ensure we don't DOUBLE |         # it's SUPER IMPORTANT that we ensure we don't DOUBLE | ||||||
|         # DRAIN msgs on closure so avoid getting stuck handing on |         # DRAIN msgs on closure so avoid getting stuck handing on | ||||||
|  | @ -338,16 +285,14 @@ class MsgStream(trio.abc.Channel): | ||||||
|             # this stream has already been closed so silently succeed as |             # this stream has already been closed so silently succeed as | ||||||
|             # per ``trio.AsyncResource`` semantics. |             # per ``trio.AsyncResource`` semantics. | ||||||
|             # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose |             # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||||
|             # import tractor |  | ||||||
|             # await tractor.pause() |  | ||||||
|             return [] |             return [] | ||||||
| 
 | 
 | ||||||
|         ctx: Context = self._ctx |         ctx: Context = self._ctx | ||||||
|         drained: list[Exception|dict] = [] |         drained: list[Exception|dict] = [] | ||||||
|         while not drained: |         while not drained: | ||||||
|             try: |             try: | ||||||
|                 maybe_final_msg: Yield|Return = self.receive_nowait( |                 maybe_final_msg = self.receive_nowait( | ||||||
|                     expect_msg=Yield|Return, |                     allow_msg_keys=['yield', 'return'], | ||||||
|                 ) |                 ) | ||||||
|                 if maybe_final_msg: |                 if maybe_final_msg: | ||||||
|                     log.debug( |                     log.debug( | ||||||
|  | @ -426,37 +371,23 @@ class MsgStream(trio.abc.Channel): | ||||||
|             self._closed = re |             self._closed = re | ||||||
| 
 | 
 | ||||||
|         # if caught_eoc: |         # if caught_eoc: | ||||||
|         #     # from .devx import debug |         #     # from .devx import _debug | ||||||
|         #     # await debug.pause() |         #     # await _debug.pause() | ||||||
|         #     with trio.CancelScope(shield=True): |         #     with trio.CancelScope(shield=True): | ||||||
|         #         await rx_chan.aclose() |         #         await rx_chan.aclose() | ||||||
| 
 | 
 | ||||||
|         if not self._eoc: |         if not self._eoc: | ||||||
|             this_side: str = self._ctx.side |             log.cancel( | ||||||
|             peer_side: str = self._ctx.peer_side |                 'Stream closed before it received an EoC?\n' | ||||||
|             message: str = ( |                 'Setting eoc manually..\n..' | ||||||
|                 f'Stream self-closed by {this_side!r}-side before EoC from {peer_side!r}\n' |             ) | ||||||
|                 # } bc a stream is a "scope"/msging-phase inside an IPC |             self._eoc: bool = trio.EndOfChannel( | ||||||
|                 f'c}}>\n' |                 f'Context stream closed by {self._ctx.side}\n' | ||||||
|                 f'  |_{self}\n' |                 f'|_{self}\n' | ||||||
|             ) |             ) | ||||||
|             if ( |  | ||||||
|                 (rx_chan := self._rx_chan) |  | ||||||
|                 and |  | ||||||
|                 (stats := rx_chan.statistics()).tasks_waiting_receive |  | ||||||
|             ): |  | ||||||
|                 message += ( |  | ||||||
|                     f'AND there is still reader tasks,\n' |  | ||||||
|                     f'\n' |  | ||||||
|                     f'{stats}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             log.cancel(message) |  | ||||||
|             self._eoc = trio.EndOfChannel(message) |  | ||||||
| 
 |  | ||||||
|         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? |         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? | ||||||
|         # => NO, DEFINITELY NOT! <= |         # => NO, DEFINITELY NOT! <= | ||||||
|         # if we're a bi-dir `MsgStream` BECAUSE this same |         # if we're a bi-dir ``MsgStream`` BECAUSE this same | ||||||
|         # core-msg-loop mem recv-chan is used to deliver the |         # core-msg-loop mem recv-chan is used to deliver the | ||||||
|         # potential final result from the surrounding inter-actor |         # potential final result from the surrounding inter-actor | ||||||
|         # `Context` so we don't want to close it until that |         # `Context` so we don't want to close it until that | ||||||
|  | @ -538,9 +469,6 @@ class MsgStream(trio.abc.Channel): | ||||||
|                 self, |                 self, | ||||||
|                 # use memory channel size by default |                 # use memory channel size by default | ||||||
|                 self._rx_chan._state.max_buffer_size,  # type: ignore |                 self._rx_chan._state.max_buffer_size,  # type: ignore | ||||||
| 
 |  | ||||||
|                 # TODO: can remove this kwarg right since |  | ||||||
|                 # by default behaviour is to do this anyway? |  | ||||||
|                 receive_afunc=self.receive, |                 receive_afunc=self.receive, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  | @ -587,249 +515,24 @@ class MsgStream(trio.abc.Channel): | ||||||
| 
 | 
 | ||||||
|         try: |         try: | ||||||
|             await self._ctx.chan.send( |             await self._ctx.chan.send( | ||||||
|                 payload=Yield( |                 payload={ | ||||||
|                     cid=self._ctx.cid, |                     'yield': data, | ||||||
|                     pld=data, |                     'cid': self._ctx.cid, | ||||||
|                 ), |                 }, | ||||||
|  |                 # hide_tb=hide_tb, | ||||||
|             ) |             ) | ||||||
|         except ( |         except ( | ||||||
|             trio.ClosedResourceError, |             trio.ClosedResourceError, | ||||||
|             trio.BrokenResourceError, |             trio.BrokenResourceError, | ||||||
|             BrokenPipeError, |             BrokenPipeError, | ||||||
|         ) as _trans_err: |         ) as trans_err: | ||||||
|             trans_err = _trans_err |             if hide_tb: | ||||||
|             if ( |  | ||||||
|                 hide_tb |  | ||||||
|                 and |  | ||||||
|                 self._ctx.chan._exc is trans_err |  | ||||||
|                 # ^XXX, IOW, only if the channel is marked errored |  | ||||||
|                 # for the same reason as whatever its underlying |  | ||||||
|                 # transport raised, do we keep the full low-level tb |  | ||||||
|                 # suppressed from the user. |  | ||||||
|             ): |  | ||||||
|                 raise type(trans_err)( |                 raise type(trans_err)( | ||||||
|                     *trans_err.args |                     *trans_err.args | ||||||
|                 ) from trans_err |                 ) from trans_err | ||||||
|             else: |             else: | ||||||
|                 raise |                 raise | ||||||
| 
 | 
 | ||||||
|     # TODO: msg capability context api1 |  | ||||||
|     # @acm |  | ||||||
|     # async def enable_msg_caps( |  | ||||||
|     #     self, |  | ||||||
|     #     msg_subtypes: Union[ |  | ||||||
|     #         list[list[Struct]], |  | ||||||
|     #         Protocol,   # hypothetical type that wraps a msg set |  | ||||||
|     #     ], |  | ||||||
|     # ) -> tuple[Callable, Callable]:  # payload enc, dec pair |  | ||||||
|     #     ... |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def open_stream_from_ctx( |  | ||||||
|     ctx: Context, |  | ||||||
|     allow_overruns: bool|None = False, |  | ||||||
|     msg_buffer_size: int|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> AsyncGenerator[MsgStream, None]: |  | ||||||
|     ''' |  | ||||||
|     Open a `MsgStream`, a bi-directional msg transport dialog |  | ||||||
|     connected to the cross-actor peer task for an IPC `Context`. |  | ||||||
| 
 |  | ||||||
|     This context manager must be entered in both the "parent" (task |  | ||||||
|     which entered `Portal.open_context()`) and "child" (RPC task |  | ||||||
|     which is decorated by `@context`) tasks for the stream to |  | ||||||
|     logically be considered "open"; if one side begins sending to an |  | ||||||
|     un-opened peer, depending on policy config, msgs will either be |  | ||||||
|     queued until the other side opens and/or a `StreamOverrun` will |  | ||||||
|     (eventually) be raised. |  | ||||||
| 
 |  | ||||||
|                          ------ - ------ |  | ||||||
| 
 |  | ||||||
|     Runtime semantics design: |  | ||||||
| 
 |  | ||||||
|     A `MsgStream` session adheres to "one-shot use" semantics, |  | ||||||
|     meaning if you close the scope it **can not** be "re-opened". |  | ||||||
| 
 |  | ||||||
|     Instead you must re-establish a new surrounding RPC `Context` |  | ||||||
|     (RTC: remote task context?) using `Portal.open_context()`. |  | ||||||
| 
 |  | ||||||
|     In the future this *design choice* may need to be changed but |  | ||||||
|     currently there seems to be no obvious reason to support such |  | ||||||
|     semantics.. |  | ||||||
| 
 |  | ||||||
|     - "pausing a stream" can be supported with a message implemented |  | ||||||
|       by the `tractor` application dev. |  | ||||||
| 
 |  | ||||||
|     - any remote error will normally require a restart of the entire |  | ||||||
|       `trio.Task`'s scope due to the nature of `trio`'s cancellation |  | ||||||
|       (`CancelScope`) system and semantics (level triggered). |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     actor: Actor = ctx._actor |  | ||||||
| 
 |  | ||||||
|     # If the surrounding context has been cancelled by some |  | ||||||
|     # task with a handle to THIS, we error here immediately |  | ||||||
|     # since it likely means the surrounding lexical-scope has |  | ||||||
|     # errored, been `trio.Cancelled` or at the least |  | ||||||
|     # `Context.cancel()` was called by some task. |  | ||||||
|     if ctx._cancel_called: |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE: ALWAYS RAISE any remote error here even if |  | ||||||
|         # it's an expected `ContextCancelled` due to a local |  | ||||||
|         # task having called `.cancel()`! |  | ||||||
|         # |  | ||||||
|         # WHY: we expect the error to always bubble up to the |  | ||||||
|         # surrounding `Portal.open_context()` call and be |  | ||||||
|         # absorbed there (silently) and we DO NOT want to |  | ||||||
|         # actually try to stream - a cancel msg was already |  | ||||||
|         # sent to the other side! |  | ||||||
|         ctx.maybe_raise( |  | ||||||
|             raise_ctxc_from_self_call=True, |  | ||||||
|         ) |  | ||||||
|         # NOTE: this is diff then calling |  | ||||||
|         # `._maybe_raise_remote_err()` specifically |  | ||||||
|         # because we want to raise a ctxc on any task entering this `.open_stream()` |  | ||||||
|         # AFTER cancellation was already been requested, |  | ||||||
|         # we DO NOT want to absorb any ctxc ACK silently! |  | ||||||
|         # if ctx._remote_error: |  | ||||||
|         #     raise ctx._remote_error |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE: if no `ContextCancelled` has been responded |  | ||||||
|         # back from the other side (yet), we raise a different |  | ||||||
|         # runtime error indicating that this task's usage of |  | ||||||
|         # `Context.cancel()` and then `.open_stream()` is WRONG! |  | ||||||
|         task: str = trio.lowlevel.current_task().name |  | ||||||
|         raise RuntimeError( |  | ||||||
|             'Stream opened after `Context.cancel()` called..?\n' |  | ||||||
|             f'task: {actor.uid[0]}:{task}\n' |  | ||||||
|             f'{ctx}' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     if ( |  | ||||||
|         not ctx._portal |  | ||||||
|         and not ctx._started_called |  | ||||||
|     ): |  | ||||||
|         raise RuntimeError( |  | ||||||
|             'Context.started()` must be called before opening a stream' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # NOTE: in one way streaming this only happens on the |  | ||||||
|     # parent-ctx-task side (on the side that calls |  | ||||||
|     # `Actor.start_remote_task()`) so if you try to send |  | ||||||
|     # a stop from the caller to the callee in the |  | ||||||
|     # single-direction-stream case you'll get a lookup error |  | ||||||
|     # currently. |  | ||||||
|     ctx: Context = actor.get_context( |  | ||||||
|         chan=ctx.chan, |  | ||||||
|         cid=ctx.cid, |  | ||||||
|         nsf=ctx._nsf, |  | ||||||
|         # side=ctx.side, |  | ||||||
| 
 |  | ||||||
|         msg_buffer_size=msg_buffer_size, |  | ||||||
|         allow_overruns=allow_overruns, |  | ||||||
|     ) |  | ||||||
|     ctx._allow_overruns: bool = allow_overruns |  | ||||||
|     assert ctx is ctx |  | ||||||
| 
 |  | ||||||
|     # XXX: If the underlying channel feeder receive mem chan has |  | ||||||
|     # been closed then likely client code has already exited |  | ||||||
|     # a ``.open_stream()`` block prior or there was some other |  | ||||||
|     # unanticipated error or cancellation from ``trio``. |  | ||||||
| 
 |  | ||||||
|     if ctx._rx_chan._closed: |  | ||||||
|         raise trio.ClosedResourceError( |  | ||||||
|             'The underlying channel for this stream was already closed!\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # NOTE: implicitly this will call `MsgStream.aclose()` on |  | ||||||
|     # `.__aexit__()` due to stream's parent `Channel` type! |  | ||||||
|     # |  | ||||||
|     # XXX NOTE XXX: ensures the stream is "one-shot use", |  | ||||||
|     # which specifically means that on exit, |  | ||||||
|     # - signal ``trio.EndOfChannel``/``StopAsyncIteration`` to |  | ||||||
|     #   the far end indicating that the caller exited |  | ||||||
|     #   the streaming context purposefully by letting |  | ||||||
|     #   the exit block exec. |  | ||||||
|     # - this is diff from the cancel/error case where |  | ||||||
|     #   a cancel request from this side or an error |  | ||||||
|     #   should be sent to the far end indicating the |  | ||||||
|     #   stream WAS NOT just closed normally/gracefully. |  | ||||||
|     async with MsgStream( |  | ||||||
|         ctx=ctx, |  | ||||||
|         rx_chan=ctx._rx_chan, |  | ||||||
|     ) as stream: |  | ||||||
| 
 |  | ||||||
|         # NOTE: we track all existing streams per portal for |  | ||||||
|         # the purposes of attempting graceful closes on runtime |  | ||||||
|         # cancel requests. |  | ||||||
|         if ctx._portal: |  | ||||||
|             ctx._portal._streams.add(stream) |  | ||||||
| 
 |  | ||||||
|         try: |  | ||||||
|             ctx._stream_opened: bool = True |  | ||||||
|             ctx._stream = stream |  | ||||||
| 
 |  | ||||||
|             # XXX: do we need this? |  | ||||||
|             # ensure we aren't cancelled before yielding the stream |  | ||||||
|             # await trio.lowlevel.checkpoint() |  | ||||||
|             yield stream |  | ||||||
| 
 |  | ||||||
|             # XXX: (MEGA IMPORTANT) if this is a root opened process we |  | ||||||
|             # wait for any immediate child in debug before popping the |  | ||||||
|             # context from the runtime msg loop otherwise inside |  | ||||||
|             # ``Actor._deliver_ctx_payload()`` the msg will be discarded and in |  | ||||||
|             # the case where that msg is global debugger unlock (via |  | ||||||
|             # a "stop" msg for a stream), this can result in a deadlock |  | ||||||
|             # where the root is waiting on the lock to clear but the |  | ||||||
|             # child has already cleared it and clobbered IPC. |  | ||||||
|             # |  | ||||||
|             # await maybe_wait_for_debugger() |  | ||||||
| 
 |  | ||||||
|             # XXX TODO: pretty sure this isn't needed (see |  | ||||||
|             # note above this block) AND will result in |  | ||||||
|             # a double `.send_stop()` call. The only reason to |  | ||||||
|             # put it here would be to due with "order" in |  | ||||||
|             # terms of raising any remote error (as per |  | ||||||
|             # directly below) or bc the stream's |  | ||||||
|             # `.__aexit__()` block might not get run |  | ||||||
|             # (doubtful)? Either way if we did put this back |  | ||||||
|             # in we also need a state var to avoid the double |  | ||||||
|             # stop-msg send.. |  | ||||||
|             # |  | ||||||
|             # await stream.aclose() |  | ||||||
| 
 |  | ||||||
|         # NOTE: absorb and do not raise any |  | ||||||
|         # EoC received from the other side such that |  | ||||||
|         # it is not raised inside the surrounding |  | ||||||
|         # context block's scope! |  | ||||||
|         except trio.EndOfChannel as eoc: |  | ||||||
|             if ( |  | ||||||
|                 eoc |  | ||||||
|                 and |  | ||||||
|                 stream.closed |  | ||||||
|             ): |  | ||||||
|                 # sanity, can remove? |  | ||||||
|                 assert eoc is stream._eoc |  | ||||||
| 
 |  | ||||||
|                 log.runtime( |  | ||||||
|                     'Stream was terminated by EoC\n\n' |  | ||||||
|                     # NOTE: won't show the error <Type> but |  | ||||||
|                     # does show txt followed by IPC msg. |  | ||||||
|                     f'{str(eoc)}\n' |  | ||||||
|                 ) |  | ||||||
|         finally: |  | ||||||
|             if ctx._portal: |  | ||||||
|                 try: |  | ||||||
|                     ctx._portal._streams.remove(stream) |  | ||||||
|                 except KeyError: |  | ||||||
|                     log.warning( |  | ||||||
|                         f'Stream was already destroyed?\n' |  | ||||||
|                         f'actor: {ctx.chan.uid}\n' |  | ||||||
|                         f'ctx id: {ctx.cid}' |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| def stream(func: Callable) -> Callable: | def stream(func: Callable) -> Callable: | ||||||
|     ''' |     ''' | ||||||
|  | @ -838,7 +541,7 @@ def stream(func: Callable) -> Callable: | ||||||
|     ''' |     ''' | ||||||
|     # TODO: apply whatever solution ``mypy`` ends up picking for this: |     # TODO: apply whatever solution ``mypy`` ends up picking for this: | ||||||
|     # https://github.com/python/mypy/issues/2087#issuecomment-769266912 |     # https://github.com/python/mypy/issues/2087#issuecomment-769266912 | ||||||
|     func._tractor_stream_function: bool = True  # type: ignore |     func._tractor_stream_function = True  # type: ignore | ||||||
| 
 | 
 | ||||||
|     sig = inspect.signature(func) |     sig = inspect.signature(func) | ||||||
|     params = sig.parameters |     params = sig.parameters | ||||||
|  |  | ||||||
|  | @ -21,7 +21,9 @@ | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import asynccontextmanager as acm | ||||||
| from functools import partial | from functools import partial | ||||||
| import inspect | import inspect | ||||||
|  | from pprint import pformat | ||||||
| from typing import ( | from typing import ( | ||||||
|  |     Optional, | ||||||
|     TYPE_CHECKING, |     TYPE_CHECKING, | ||||||
| ) | ) | ||||||
| import typing | import typing | ||||||
|  | @ -29,41 +31,27 @@ import warnings | ||||||
| 
 | 
 | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
| 
 | from ._debug import maybe_wait_for_debugger | ||||||
| from .devx import ( |  | ||||||
|     debug, |  | ||||||
|     pformat as _pformat, |  | ||||||
| ) |  | ||||||
| from ._addr import ( |  | ||||||
|     UnwrappedAddress, |  | ||||||
|     mk_uuid, |  | ||||||
| ) |  | ||||||
| from ._state import current_actor, is_main_process | from ._state import current_actor, is_main_process | ||||||
| from .log import get_logger, get_loglevel | from .log import get_logger, get_loglevel | ||||||
| from ._runtime import Actor | from ._runtime import Actor | ||||||
| from ._portal import Portal | from ._portal import Portal | ||||||
| from .trionics import ( |  | ||||||
|     is_multi_cancelled, |  | ||||||
|     collapse_eg, |  | ||||||
| ) |  | ||||||
| from ._exceptions import ( | from ._exceptions import ( | ||||||
|  |     is_multi_cancelled, | ||||||
|     ContextCancelled, |     ContextCancelled, | ||||||
| ) | ) | ||||||
| from ._root import ( | from ._root import open_root_actor | ||||||
|     open_root_actor, |  | ||||||
| ) |  | ||||||
| from . import _state | from . import _state | ||||||
| from . import _spawn | from . import _spawn | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     import multiprocessing as mp |     import multiprocessing as mp | ||||||
|     # from .ipc._server import IPCServer |  | ||||||
|     from .ipc import IPCServer |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
|  | _default_bind_addr: tuple[str, int] = ('127.0.0.1', 0) | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| class ActorNursery: | class ActorNursery: | ||||||
|     ''' |     ''' | ||||||
|  | @ -95,32 +83,30 @@ class ActorNursery: | ||||||
|     ''' |     ''' | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         # TODO: maybe def these as fields of a struct looking type? |  | ||||||
|         actor: Actor, |         actor: Actor, | ||||||
|         ria_nursery: trio.Nursery, |         ria_nursery: trio.Nursery, | ||||||
|         da_nursery: trio.Nursery, |         da_nursery: trio.Nursery, | ||||||
|         errors: dict[tuple[str, str], BaseException], |         errors: dict[tuple[str, str], BaseException], | ||||||
| 
 |  | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         # self.supervisor = supervisor  # TODO |         # self.supervisor = supervisor  # TODO | ||||||
|         self._actor: Actor = actor |         self._actor: Actor = actor | ||||||
| 
 |         self._ria_nursery = ria_nursery | ||||||
|         # TODO: rename to `._tn` for our conventional "task-nursery" |  | ||||||
|         self._da_nursery = da_nursery |         self._da_nursery = da_nursery | ||||||
| 
 |  | ||||||
|         self._children: dict[ |         self._children: dict[ | ||||||
|             tuple[str, str], |             tuple[str, str], | ||||||
|             tuple[ |             tuple[ | ||||||
|                 Actor, |                 Actor, | ||||||
|                 trio.Process | mp.Process, |                 trio.Process | mp.Process, | ||||||
|                 Portal | None, |                 Optional[Portal], | ||||||
|             ] |             ] | ||||||
|         ] = {} |         ] = {} | ||||||
| 
 |         # portals spawned with ``run_in_actor()`` are | ||||||
|  |         # cancelled when their "main" result arrives | ||||||
|  |         self._cancel_after_result_on_exit: set = set() | ||||||
|  |         self.cancelled: bool = False | ||||||
|         self._join_procs = trio.Event() |         self._join_procs = trio.Event() | ||||||
|         self._at_least_one_child_in_debug: bool = False |         self._at_least_one_child_in_debug: bool = False | ||||||
|         self.errors = errors |         self.errors = errors | ||||||
|         self._scope_error: BaseException|None = None |  | ||||||
|         self.exited = trio.Event() |         self.exited = trio.Event() | ||||||
| 
 | 
 | ||||||
|         # NOTE: when no explicit call is made to |         # NOTE: when no explicit call is made to | ||||||
|  | @ -131,93 +117,28 @@ class ActorNursery: | ||||||
|         # and syncing purposes to any actor opened nurseries. |         # and syncing purposes to any actor opened nurseries. | ||||||
|         self._implicit_runtime_started: bool = False |         self._implicit_runtime_started: bool = False | ||||||
| 
 | 
 | ||||||
|         # TODO: remove the `.run_in_actor()` API and thus this 2ndary |  | ||||||
|         # nursery when that API get's moved outside this primitive! |  | ||||||
|         self._ria_nursery = ria_nursery |  | ||||||
| 
 |  | ||||||
|         # TODO, factor this into a .hilevel api! |  | ||||||
|         # |  | ||||||
|         # portals spawned with ``run_in_actor()`` are |  | ||||||
|         # cancelled when their "main" result arrives |  | ||||||
|         self._cancel_after_result_on_exit: set = set() |  | ||||||
| 
 |  | ||||||
|         # trio.Nursery-like cancel (request) statuses |  | ||||||
|         self._cancelled_caught: bool = False |  | ||||||
|         self._cancel_called: bool = False |  | ||||||
| 
 |  | ||||||
|     @property |  | ||||||
|     def cancel_called(self) -> bool: |  | ||||||
|         ''' |  | ||||||
|         Records whether cancellation has been requested for this |  | ||||||
|         actor-nursery by a call to  `.cancel()` either due to, |  | ||||||
|         - an explicit call by some actor-local-task, |  | ||||||
|         - an implicit call due to an error/cancel emited inside |  | ||||||
|           the `tractor.open_nursery()` block. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         return self._cancel_called |  | ||||||
| 
 |  | ||||||
|     @property |  | ||||||
|     def cancelled_caught(self) -> bool: |  | ||||||
|         ''' |  | ||||||
|         Set when this nursery was able to cance all spawned subactors |  | ||||||
|         gracefully via an (implicit) call to `.cancel()`. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         return self._cancelled_caught |  | ||||||
| 
 |  | ||||||
|     # TODO! remove internal/test-suite usage! |  | ||||||
|     @property |  | ||||||
|     def cancelled(self) -> bool: |  | ||||||
|         warnings.warn( |  | ||||||
|             "`ActorNursery.cancelled` is now deprecated, use " |  | ||||||
|             " `.cancel_called` instead.", |  | ||||||
|             DeprecationWarning, |  | ||||||
|             stacklevel=2, |  | ||||||
|         ) |  | ||||||
|         return ( |  | ||||||
|             self._cancel_called |  | ||||||
|             # and |  | ||||||
|             # self._cancelled_caught |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     async def start_actor( |     async def start_actor( | ||||||
|         self, |         self, | ||||||
|         name: str, |         name: str, | ||||||
| 
 |  | ||||||
|         *, |         *, | ||||||
| 
 |         bind_addr: tuple[str, int] = _default_bind_addr, | ||||||
|         bind_addrs: list[UnwrappedAddress]|None = None, |         rpc_module_paths: list[str] | None = None, | ||||||
|         rpc_module_paths: list[str]|None = None, |         enable_modules: list[str] | None = None, | ||||||
|         enable_transports: list[str] = [_state._def_tpt_proto], |         loglevel: str | None = None,  # set log level per subactor | ||||||
|         enable_modules: list[str]|None = None, |         nursery: trio.Nursery | None = None, | ||||||
|         loglevel: str|None = None,  # set log level per subactor |         debug_mode: Optional[bool] | None = None, | ||||||
|         debug_mode: bool|None = None, |  | ||||||
|         infect_asyncio: bool = False, |         infect_asyncio: bool = False, | ||||||
| 
 |  | ||||||
|         # TODO: ideally we can rm this once we no longer have |  | ||||||
|         # a `._ria_nursery` since the dependent APIs have been |  | ||||||
|         # removed! |  | ||||||
|         nursery: trio.Nursery|None = None, |  | ||||||
|         proc_kwargs: dict[str, any] = {} |  | ||||||
| 
 |  | ||||||
|     ) -> Portal: |     ) -> Portal: | ||||||
|         ''' |         ''' | ||||||
|         Start a (daemon) actor: an process that has no designated |         Start a (daemon) actor: an process that has no designated | ||||||
|         "main task" besides the runtime. |         "main task" besides the runtime. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __runtimeframe__: int = 1  # noqa |         loglevel = loglevel or self._actor.loglevel or get_loglevel() | ||||||
|         loglevel: str = ( |  | ||||||
|             loglevel |  | ||||||
|             or self._actor.loglevel |  | ||||||
|             or get_loglevel() |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         # configure and pass runtime state |         # configure and pass runtime state | ||||||
|         _rtv = _state._runtime_vars.copy() |         _rtv = _state._runtime_vars.copy() | ||||||
|         _rtv['_is_root'] = False |         _rtv['_is_root'] = False | ||||||
|         _rtv['_is_infected_aio'] = infect_asyncio |  | ||||||
| 
 | 
 | ||||||
|         # allow setting debug policy per actor |         # allow setting debug policy per actor | ||||||
|         if debug_mode is not None: |         if debug_mode is not None: | ||||||
|  | @ -236,17 +157,13 @@ class ActorNursery: | ||||||
|             enable_modules.extend(rpc_module_paths) |             enable_modules.extend(rpc_module_paths) | ||||||
| 
 | 
 | ||||||
|         subactor = Actor( |         subactor = Actor( | ||||||
|             name=name, |             name, | ||||||
|             uuid=mk_uuid(), |  | ||||||
| 
 |  | ||||||
|             # modules allowed to invoked funcs from |             # modules allowed to invoked funcs from | ||||||
|             enable_modules=enable_modules, |             enable_modules=enable_modules, | ||||||
|             loglevel=loglevel, |             loglevel=loglevel, | ||||||
| 
 |             arbiter_addr=current_actor()._arb_addr, | ||||||
|             # verbatim relay this actor's registrar addresses |  | ||||||
|             registry_addrs=current_actor().registry_addrs, |  | ||||||
|         ) |         ) | ||||||
|         parent_addr: UnwrappedAddress = self._actor.accept_addr |         parent_addr = self._actor.accept_addr | ||||||
|         assert parent_addr |         assert parent_addr | ||||||
| 
 | 
 | ||||||
|         # start a task to spawn a process |         # start a task to spawn a process | ||||||
|  | @ -261,35 +178,25 @@ class ActorNursery: | ||||||
|                 self, |                 self, | ||||||
|                 subactor, |                 subactor, | ||||||
|                 self.errors, |                 self.errors, | ||||||
|                 bind_addrs, |                 bind_addr, | ||||||
|                 parent_addr, |                 parent_addr, | ||||||
|                 _rtv,  # run time vars |                 _rtv,  # run time vars | ||||||
|                 infect_asyncio=infect_asyncio, |                 infect_asyncio=infect_asyncio, | ||||||
|                 proc_kwargs=proc_kwargs |  | ||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     # TODO: DEPRECATE THIS: |  | ||||||
|     # -[ ] impl instead as a hilevel wrapper on |  | ||||||
|     #   top of a `@context` style invocation. |  | ||||||
|     #  |_ dynamic @context decoration on child side |  | ||||||
|     #  |_ implicit `Portal.open_context() as (ctx, first):` |  | ||||||
|     #    and `return first` on parent side. |  | ||||||
|     #  |_ mention how it's similar to `trio-parallel` API? |  | ||||||
|     # -[ ] use @api_frame on the wrapper |  | ||||||
|     async def run_in_actor( |     async def run_in_actor( | ||||||
|         self, |         self, | ||||||
| 
 | 
 | ||||||
|         fn: typing.Callable, |         fn: typing.Callable, | ||||||
|         *, |         *, | ||||||
| 
 | 
 | ||||||
|         name: str | None = None, |         name: Optional[str] = None, | ||||||
|         bind_addrs: UnwrappedAddress|None = None, |         bind_addr: tuple[str, int] = _default_bind_addr, | ||||||
|         rpc_module_paths: list[str] | None = None, |         rpc_module_paths: list[str] | None = None, | ||||||
|         enable_modules: list[str] | None = None, |         enable_modules: list[str] | None = None, | ||||||
|         loglevel: str | None = None,  # set log level per subactor |         loglevel: str | None = None,  # set log level per subactor | ||||||
|         infect_asyncio: bool = False, |         infect_asyncio: bool = False, | ||||||
|         proc_kwargs: dict[str, any] = {}, |  | ||||||
| 
 | 
 | ||||||
|         **kwargs,  # explicit args to ``fn`` |         **kwargs,  # explicit args to ``fn`` | ||||||
| 
 | 
 | ||||||
|  | @ -303,24 +210,22 @@ class ActorNursery: | ||||||
|         the actor is terminated. |         the actor is terminated. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __runtimeframe__: int = 1  # noqa |  | ||||||
|         mod_path: str = fn.__module__ |         mod_path: str = fn.__module__ | ||||||
| 
 | 
 | ||||||
|         if name is None: |         if name is None: | ||||||
|             # use the explicit function name if not provided |             # use the explicit function name if not provided | ||||||
|             name = fn.__name__ |             name = fn.__name__ | ||||||
| 
 | 
 | ||||||
|         portal: Portal = await self.start_actor( |         portal = await self.start_actor( | ||||||
|             name, |             name, | ||||||
|             enable_modules=[mod_path] + ( |             enable_modules=[mod_path] + ( | ||||||
|                 enable_modules or rpc_module_paths or [] |                 enable_modules or rpc_module_paths or [] | ||||||
|             ), |             ), | ||||||
|             bind_addrs=bind_addrs, |             bind_addr=bind_addr, | ||||||
|             loglevel=loglevel, |             loglevel=loglevel, | ||||||
|             # use the run_in_actor nursery |             # use the run_in_actor nursery | ||||||
|             nursery=self._ria_nursery, |             nursery=self._ria_nursery, | ||||||
|             infect_asyncio=infect_asyncio, |             infect_asyncio=infect_asyncio, | ||||||
|             proc_kwargs=proc_kwargs |  | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # XXX: don't allow stream funcs |         # XXX: don't allow stream funcs | ||||||
|  | @ -340,39 +245,28 @@ class ActorNursery: | ||||||
|         ) |         ) | ||||||
|         return portal |         return portal | ||||||
| 
 | 
 | ||||||
|     # @api_frame |  | ||||||
|     async def cancel( |     async def cancel( | ||||||
|         self, |         self, | ||||||
|         hard_kill: bool = False, |         hard_kill: bool = False, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         ''' |         ''' | ||||||
|         Cancel this actor-nursery by instructing each subactor's |         Cancel this nursery by instructing each subactor to cancel | ||||||
|         runtime to cancel and wait for all underlying sub-processes |         itself and wait for all subactors to terminate. | ||||||
|         to terminate. |  | ||||||
| 
 | 
 | ||||||
|         If `hard_kill` is set then kill the processes directly using |         If ``hard_killl`` is set to ``True`` then kill the processes | ||||||
|         the spawning-backend's API/OS-machinery without any attempt |         directly without any far end graceful ``trio`` cancellation. | ||||||
|         at (graceful) `trio`-style cancellation using our |  | ||||||
|         `Actor.cancel()`. |  | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __runtimeframe__: int = 1  # noqa |         self.cancelled = True | ||||||
|         self._cancel_called = True |  | ||||||
| 
 | 
 | ||||||
|         # TODO: impl a repr for spawn more compact |         # TODO: impl a repr for spawn more compact | ||||||
|         # then `._children`.. |         # then `._children`.. | ||||||
|         children: dict = self._children |         children: dict = self._children | ||||||
|         child_count: int = len(children) |         child_count: int = len(children) | ||||||
|         msg: str = f'Cancelling actor nursery with {child_count} children\n' |         msg: str = f'Cancelling actor nursery with {child_count} children\n' | ||||||
| 
 |  | ||||||
|         server: IPCServer = self._actor.ipc_server |  | ||||||
| 
 |  | ||||||
|         with trio.move_on_after(3) as cs: |         with trio.move_on_after(3) as cs: | ||||||
|             async with ( |             async with trio.open_nursery() as tn: | ||||||
|                 collapse_eg(), |  | ||||||
|                 trio.open_nursery() as tn, |  | ||||||
|             ): |  | ||||||
| 
 | 
 | ||||||
|                 subactor: Actor |                 subactor: Actor | ||||||
|                 proc: trio.Process |                 proc: trio.Process | ||||||
|  | @ -391,7 +285,7 @@ class ActorNursery: | ||||||
| 
 | 
 | ||||||
|                     else: |                     else: | ||||||
|                         if portal is None:  # actor hasn't fully spawned yet |                         if portal is None:  # actor hasn't fully spawned yet | ||||||
|                             event: trio.Event = server._peer_connected[subactor.uid] |                             event = self._actor._peer_connected[subactor.uid] | ||||||
|                             log.warning( |                             log.warning( | ||||||
|                                 f"{subactor.uid} never 't finished spawning?" |                                 f"{subactor.uid} never 't finished spawning?" | ||||||
|                             ) |                             ) | ||||||
|  | @ -407,7 +301,7 @@ class ActorNursery: | ||||||
|                             if portal is None: |                             if portal is None: | ||||||
|                                 # cancelled while waiting on the event |                                 # cancelled while waiting on the event | ||||||
|                                 # to arrive |                                 # to arrive | ||||||
|                                 chan = server._peers[subactor.uid][-1] |                                 chan = self._actor._peers[subactor.uid][-1] | ||||||
|                                 if chan: |                                 if chan: | ||||||
|                                     portal = Portal(chan) |                                     portal = Portal(chan) | ||||||
|                                 else:  # there's no other choice left |                                 else:  # there's no other choice left | ||||||
|  | @ -436,8 +330,6 @@ class ActorNursery: | ||||||
|             ) in children.values(): |             ) in children.values(): | ||||||
|                 log.warning(f"Hard killing process {proc}") |                 log.warning(f"Hard killing process {proc}") | ||||||
|                 proc.terminate() |                 proc.terminate() | ||||||
|         else: |  | ||||||
|             self._cancelled_caught |  | ||||||
| 
 | 
 | ||||||
|         # mark ourselves as having (tried to have) cancelled all subactors |         # mark ourselves as having (tried to have) cancelled all subactors | ||||||
|         self._join_procs.set() |         self._join_procs.set() | ||||||
|  | @ -446,15 +338,11 @@ class ActorNursery: | ||||||
| @acm | @acm | ||||||
| async def _open_and_supervise_one_cancels_all_nursery( | async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|     actor: Actor, |     actor: Actor, | ||||||
|     hide_tb: bool = True, |  | ||||||
| 
 | 
 | ||||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||||
| 
 | 
 | ||||||
|     # normally don't need to show user by default |     # TODO: yay or nay? | ||||||
|     __tracebackhide__: bool = hide_tb |     __tracebackhide__ = True | ||||||
| 
 |  | ||||||
|     outer_err: BaseException|None = None |  | ||||||
|     inner_err: BaseException|None = None |  | ||||||
| 
 | 
 | ||||||
|     # the collection of errors retreived from spawned sub-actors |     # the collection of errors retreived from spawned sub-actors | ||||||
|     errors: dict[tuple[str, str], BaseException] = {} |     errors: dict[tuple[str, str], BaseException] = {} | ||||||
|  | @ -464,26 +352,21 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|     # handling errors that are generated by the inner nursery in |     # handling errors that are generated by the inner nursery in | ||||||
|     # a supervisor strategy **before** blocking indefinitely to wait for |     # a supervisor strategy **before** blocking indefinitely to wait for | ||||||
|     # actors spawned in "daemon mode" (aka started using |     # actors spawned in "daemon mode" (aka started using | ||||||
|     # `ActorNursery.start_actor()`). |     # ``ActorNursery.start_actor()``). | ||||||
| 
 | 
 | ||||||
|     # errors from this daemon actor nursery bubble up to caller |     # errors from this daemon actor nursery bubble up to caller | ||||||
|     async with ( |     async with trio.open_nursery() as da_nursery: | ||||||
|         collapse_eg(), |  | ||||||
|         trio.open_nursery() as da_nursery, |  | ||||||
|     ): |  | ||||||
|         try: |         try: | ||||||
|             # This is the inner level "run in actor" nursery. It is |             # This is the inner level "run in actor" nursery. It is | ||||||
|             # awaited first since actors spawned in this way (using |             # awaited first since actors spawned in this way (using | ||||||
|             # `ActorNusery.run_in_actor()`) are expected to only |             # ``ActorNusery.run_in_actor()``) are expected to only | ||||||
|             # return a single result and then complete (i.e. be canclled |             # return a single result and then complete (i.e. be canclled | ||||||
|             # gracefully). Errors collected from these actors are |             # gracefully). Errors collected from these actors are | ||||||
|             # immediately raised for handling by a supervisor strategy. |             # immediately raised for handling by a supervisor strategy. | ||||||
|             # As such if the strategy propagates any error(s) upwards |             # As such if the strategy propagates any error(s) upwards | ||||||
|             # the above "daemon actor" nursery will be notified. |             # the above "daemon actor" nursery will be notified. | ||||||
|             async with ( |             async with trio.open_nursery() as ria_nursery: | ||||||
|                 collapse_eg(), | 
 | ||||||
|                 trio.open_nursery() as ria_nursery, |  | ||||||
|             ): |  | ||||||
|                 an = ActorNursery( |                 an = ActorNursery( | ||||||
|                     actor, |                     actor, | ||||||
|                     ria_nursery, |                     ria_nursery, | ||||||
|  | @ -500,12 +383,11 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|                     # the "hard join phase". |                     # the "hard join phase". | ||||||
|                     log.runtime( |                     log.runtime( | ||||||
|                         'Waiting on subactors to complete:\n' |                         'Waiting on subactors to complete:\n' | ||||||
|                         f'>}} {len(an._children)}\n' |                         f'{pformat(an._children)}\n' | ||||||
|                     ) |                     ) | ||||||
|                     an._join_procs.set() |                     an._join_procs.set() | ||||||
| 
 | 
 | ||||||
|                 except BaseException as _inner_err: |                 except BaseException as inner_err: | ||||||
|                     inner_err = _inner_err |  | ||||||
|                     errors[actor.uid] = inner_err |                     errors[actor.uid] = inner_err | ||||||
| 
 | 
 | ||||||
|                     # If we error in the root but the debugger is |                     # If we error in the root but the debugger is | ||||||
|  | @ -514,7 +396,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|                     # will make the pdb repl unusable. |                     # will make the pdb repl unusable. | ||||||
|                     # Instead try to wait for pdb to be released before |                     # Instead try to wait for pdb to be released before | ||||||
|                     # tearing down. |                     # tearing down. | ||||||
|                     await debug.maybe_wait_for_debugger( |                     await maybe_wait_for_debugger( | ||||||
|                         child_in_debug=an._at_least_one_child_in_debug |                         child_in_debug=an._at_least_one_child_in_debug | ||||||
|                     ) |                     ) | ||||||
| 
 | 
 | ||||||
|  | @ -549,8 +431,8 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|                             ContextCancelled, |                             ContextCancelled, | ||||||
|                         }: |                         }: | ||||||
|                             log.cancel( |                             log.cancel( | ||||||
|                                 'Actor-nursery caught remote cancellation\n' |                                 'Actor-nursery caught remote cancellation\n\n' | ||||||
|                                 '\n' | 
 | ||||||
|                                 f'{inner_err.tb_str}' |                                 f'{inner_err.tb_str}' | ||||||
|                             ) |                             ) | ||||||
|                         else: |                         else: | ||||||
|  | @ -583,14 +465,12 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|             Exception, |             Exception, | ||||||
|             BaseExceptionGroup, |             BaseExceptionGroup, | ||||||
|             trio.Cancelled |             trio.Cancelled | ||||||
|         ) as _outer_err: |  | ||||||
|             outer_err = _outer_err |  | ||||||
| 
 | 
 | ||||||
|             an._scope_error = outer_err or inner_err |         ) as err: | ||||||
| 
 | 
 | ||||||
|             # XXX: yet another guard before allowing the cancel |             # XXX: yet another guard before allowing the cancel | ||||||
|             # sequence in case a (single) child is in debug. |             # sequence in case a (single) child is in debug. | ||||||
|             await debug.maybe_wait_for_debugger( |             await maybe_wait_for_debugger( | ||||||
|                 child_in_debug=an._at_least_one_child_in_debug |                 child_in_debug=an._at_least_one_child_in_debug | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  | @ -601,7 +481,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|             if an._children: |             if an._children: | ||||||
|                 log.cancel( |                 log.cancel( | ||||||
|                     'Actor-nursery cancelling due error type:\n' |                     'Actor-nursery cancelling due error type:\n' | ||||||
|                     f'{outer_err}\n' |                     f'{err}\n' | ||||||
|                 ) |                 ) | ||||||
|                 with trio.CancelScope(shield=True): |                 with trio.CancelScope(shield=True): | ||||||
|                     await an.cancel() |                     await an.cancel() | ||||||
|  | @ -628,29 +508,13 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|                 else: |                 else: | ||||||
|                     raise list(errors.values())[0] |                     raise list(errors.values())[0] | ||||||
| 
 | 
 | ||||||
|             # show frame on any (likely) internal error |  | ||||||
|             if ( |  | ||||||
|                 not an.cancelled |  | ||||||
|                 and an._scope_error |  | ||||||
|             ): |  | ||||||
|                 __tracebackhide__: bool = False |  | ||||||
| 
 |  | ||||||
|         # da_nursery scope end - nursery checkpoint |         # da_nursery scope end - nursery checkpoint | ||||||
|     # final exit |     # final exit | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _shutdown_msg: str = ( |  | ||||||
|     'Actor-runtime-shutdown' |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm | @acm | ||||||
| # @api_frame |  | ||||||
| async def open_nursery( | async def open_nursery( | ||||||
|     *,  # named params only! |  | ||||||
|     hide_tb: bool = True, |  | ||||||
|     **kwargs, |     **kwargs, | ||||||
|     # ^TODO, paramspec for `open_root_actor()` |  | ||||||
| 
 | 
 | ||||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||||
|     ''' |     ''' | ||||||
|  | @ -668,7 +532,6 @@ async def open_nursery( | ||||||
|     which cancellation scopes correspond to each spawned subactor set. |     which cancellation scopes correspond to each spawned subactor set. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = hide_tb |  | ||||||
|     implicit_runtime: bool = False |     implicit_runtime: bool = False | ||||||
|     actor: Actor = current_actor(err_on_no_runtime=False) |     actor: Actor = current_actor(err_on_no_runtime=False) | ||||||
|     an: ActorNursery|None = None |     an: ActorNursery|None = None | ||||||
|  | @ -684,10 +547,7 @@ async def open_nursery( | ||||||
|             # mark us for teardown on exit |             # mark us for teardown on exit | ||||||
|             implicit_runtime: bool = True |             implicit_runtime: bool = True | ||||||
| 
 | 
 | ||||||
|             async with open_root_actor( |             async with open_root_actor(**kwargs) as actor: | ||||||
|                 hide_tb=hide_tb, |  | ||||||
|                 **kwargs, |  | ||||||
|             ) as actor: |  | ||||||
|                 assert actor is current_actor() |                 assert actor is current_actor() | ||||||
| 
 | 
 | ||||||
|                 try: |                 try: | ||||||
|  | @ -722,36 +582,13 @@ async def open_nursery( | ||||||
|                 an.exited.set() |                 an.exited.set() | ||||||
| 
 | 
 | ||||||
|     finally: |     finally: | ||||||
|         # show frame on any internal runtime-scope error |         msg: str = ( | ||||||
|         if ( |             'Actor-nursery exited\n' | ||||||
|             an |             f'|_{an}\n\n' | ||||||
|             and |  | ||||||
|             not an.cancelled |  | ||||||
|             and |  | ||||||
|             an._scope_error |  | ||||||
|         ): |  | ||||||
|             __tracebackhide__: bool = False |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|         op_nested_an_repr: str = _pformat.nest_from_op( |  | ||||||
|             input_op=')>', |  | ||||||
|             text=f'{an}', |  | ||||||
|             # nest_prefix='|_', |  | ||||||
|             nest_indent=1,  # under > |  | ||||||
|         ) |         ) | ||||||
|         an_msg: str = ( |  | ||||||
|             f'Actor-nursery exited\n' |  | ||||||
|             f'{op_nested_an_repr}\n' |  | ||||||
|         ) |  | ||||||
|         # keep noise low during std operation. |  | ||||||
|         log.runtime(an_msg) |  | ||||||
| 
 | 
 | ||||||
|  |         # shutdown runtime if it was started | ||||||
|         if implicit_runtime: |         if implicit_runtime: | ||||||
|             # shutdown runtime if it was started and report noisly |             msg += '=> Shutting down actor runtime <=\n' | ||||||
|             # that we're did so. | 
 | ||||||
|             msg: str = ( |         log.info(msg) | ||||||
|                 '\n' |  | ||||||
|                 '\n' |  | ||||||
|                 f'{_shutdown_msg} )>\n' |  | ||||||
|             ) |  | ||||||
|             log.info(msg) |  | ||||||
|  |  | ||||||
|  | @ -19,27 +19,15 @@ Various helpers/utils for auditing your `tractor` app and/or the | ||||||
| core runtime. | core runtime. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from contextlib import ( | from contextlib import asynccontextmanager as acm | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| import os |  | ||||||
| import pathlib | import pathlib | ||||||
| 
 | 
 | ||||||
| import tractor | import tractor | ||||||
| from tractor.devx.debug import ( |  | ||||||
|     BoxedMaybeException, |  | ||||||
| ) |  | ||||||
| from .pytest import ( | from .pytest import ( | ||||||
|     tractor_test as tractor_test |     tractor_test as tractor_test | ||||||
| ) | ) | ||||||
| from .fault_simulation import ( |  | ||||||
|     break_ipc as break_ipc, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO, use dulwhich for this instead? |  | ||||||
| # -> we're going to likely need it (or something similar) |  | ||||||
| #   for supporting hot-coad reload feats eventually anyway! |  | ||||||
| def repodir() -> pathlib.Path: | def repodir() -> pathlib.Path: | ||||||
|     ''' |     ''' | ||||||
|     Return the abspath to the repo directory. |     Return the abspath to the repo directory. | ||||||
|  | @ -63,35 +51,6 @@ def examples_dir() -> pathlib.Path: | ||||||
|     return repodir() / 'examples' |     return repodir() / 'examples' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def mk_cmd( |  | ||||||
|     ex_name: str, |  | ||||||
|     exs_subpath: str = 'debugging', |  | ||||||
| ) -> str: |  | ||||||
|     ''' |  | ||||||
|     Generate a shell command suitable to pass to `pexpect.spawn()` |  | ||||||
|     which runs the script as a python program's entrypoint. |  | ||||||
| 
 |  | ||||||
|     In particular ensure we disable the new tb coloring via unsetting |  | ||||||
|     `$PYTHON_COLORS` so that `pexpect` can pattern match without |  | ||||||
|     color-escape-codes. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     script_path: pathlib.Path = ( |  | ||||||
|         examples_dir() |  | ||||||
|         / exs_subpath |  | ||||||
|         / f'{ex_name}.py' |  | ||||||
|     ) |  | ||||||
|     py_cmd: str = ' '.join([ |  | ||||||
|         'python', |  | ||||||
|         str(script_path) |  | ||||||
|     ]) |  | ||||||
|     # XXX, required for py 3.13+ |  | ||||||
|     # https://docs.python.org/3/using/cmdline.html#using-on-controlling-color |  | ||||||
|     # https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS |  | ||||||
|     os.environ['PYTHON_COLORS'] = '0' |  | ||||||
|     return py_cmd |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm | @acm | ||||||
| async def expect_ctxc( | async def expect_ctxc( | ||||||
|     yay: bool, |     yay: bool, | ||||||
|  | @ -104,13 +63,12 @@ async def expect_ctxc( | ||||||
|     ''' |     ''' | ||||||
|     if yay: |     if yay: | ||||||
|         try: |         try: | ||||||
|             yield (maybe_exc := BoxedMaybeException()) |             yield | ||||||
|             raise RuntimeError('Never raised ctxc?') |             raise RuntimeError('Never raised ctxc?') | ||||||
|         except tractor.ContextCancelled as ctxc: |         except tractor.ContextCancelled: | ||||||
|             maybe_exc.value = ctxc |  | ||||||
|             if reraise: |             if reraise: | ||||||
|                 raise |                 raise | ||||||
|             else: |             else: | ||||||
|                 return |                 return | ||||||
|     else: |     else: | ||||||
|         yield (maybe_exc := BoxedMaybeException()) |         yield | ||||||
|  |  | ||||||
|  | @ -1,70 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright 2018-eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| Random IPC addr generation for isolating |  | ||||||
| the discovery space between test sessions. |  | ||||||
| 
 |  | ||||||
| Might be eventually useful to expose as a util set from |  | ||||||
| our `tractor.discovery` subsys? |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import random |  | ||||||
| from typing import ( |  | ||||||
|     Type, |  | ||||||
| ) |  | ||||||
| from tractor import ( |  | ||||||
|     _addr, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def get_rando_addr( |  | ||||||
|     tpt_proto: str, |  | ||||||
|     *, |  | ||||||
| 
 |  | ||||||
|     # choose random port at import time |  | ||||||
|     _rando_port: str = random.randint(1000, 9999) |  | ||||||
| 
 |  | ||||||
| ) -> tuple[str, str|int]: |  | ||||||
|     ''' |  | ||||||
|     Used to globally override the runtime to the |  | ||||||
|     per-test-session-dynamic addr so that all tests never conflict |  | ||||||
|     with any other actor tree using the default. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     addr_type: Type[_addr.Addres] = _addr._address_types[tpt_proto] |  | ||||||
|     def_reg_addr: tuple[str, int] = _addr._default_lo_addrs[tpt_proto] |  | ||||||
| 
 |  | ||||||
|     # this is the "unwrapped" form expected to be passed to |  | ||||||
|     # `.open_root_actor()` by test body. |  | ||||||
|     testrun_reg_addr: tuple[str, int|str] |  | ||||||
|     match tpt_proto: |  | ||||||
|         case 'tcp': |  | ||||||
|             testrun_reg_addr = ( |  | ||||||
|                 addr_type.def_bindspace, |  | ||||||
|                 _rando_port, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         # NOTE, file-name uniqueness (no-collisions) will be based on |  | ||||||
|         # the runtime-directory and root (pytest-proc's) pid. |  | ||||||
|         case 'uds': |  | ||||||
|             testrun_reg_addr = addr_type.get_random().unwrap() |  | ||||||
| 
 |  | ||||||
|     # XXX, as sanity it should never the same as the default for the |  | ||||||
|     # host-singleton registry actor. |  | ||||||
|     assert def_reg_addr != testrun_reg_addr |  | ||||||
| 
 |  | ||||||
|     return testrun_reg_addr |  | ||||||
|  | @ -1,92 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright 2018-eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| `pytest` utils helpers and plugins for testing `tractor`'s runtime |  | ||||||
| and applications. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| 
 |  | ||||||
| from tractor import ( |  | ||||||
|     MsgStream, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| async def break_ipc( |  | ||||||
|     stream: MsgStream, |  | ||||||
|     method: str|None = None, |  | ||||||
|     pre_close: bool = False, |  | ||||||
| 
 |  | ||||||
|     def_method: str = 'socket_close', |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     XXX: close the channel right after an error is raised |  | ||||||
|     purposely breaking the IPC transport to make sure the parent |  | ||||||
|     doesn't get stuck in debug or hang on the connection join. |  | ||||||
|     this more or less simulates an infinite msg-receive hang on |  | ||||||
|     the other end. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # close channel via IPC prot msging before |  | ||||||
|     # any transport breakage |  | ||||||
|     if pre_close: |  | ||||||
|         await stream.aclose() |  | ||||||
| 
 |  | ||||||
|     method: str = method or def_method |  | ||||||
|     print( |  | ||||||
|         '#################################\n' |  | ||||||
|         'Simulating CHILD-side IPC BREAK!\n' |  | ||||||
|         f'method: {method}\n' |  | ||||||
|         f'pre `.aclose()`: {pre_close}\n' |  | ||||||
|         '#################################\n' |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     match method: |  | ||||||
|         case 'socket_close': |  | ||||||
|             await stream._ctx.chan.transport.stream.aclose() |  | ||||||
| 
 |  | ||||||
|         case 'socket_eof': |  | ||||||
|             # NOTE: `trio` does the following underneath this |  | ||||||
|             # call in `src/trio/_highlevel_socket.py`: |  | ||||||
|             # `Stream.socket.shutdown(tsocket.SHUT_WR)` |  | ||||||
|             await stream._ctx.chan.transport.stream.send_eof() |  | ||||||
| 
 |  | ||||||
|         # TODO: remove since now this will be invalid with our |  | ||||||
|         # new typed msg spec? |  | ||||||
|         # case 'msg': |  | ||||||
|         #     await stream._ctx.chan.send(None) |  | ||||||
| 
 |  | ||||||
|         # TODO: the actual real-world simulated cases like |  | ||||||
|         # transport layer hangs and/or lower layer 2-gens type |  | ||||||
|         # scenarios.. |  | ||||||
|         # |  | ||||||
|         # -[ ] already have some issues for this general testing |  | ||||||
|         # area: |  | ||||||
|         #  - https://github.com/goodboy/tractor/issues/97 |  | ||||||
|         #  - https://github.com/goodboy/tractor/issues/124 |  | ||||||
|         #   - PR from @guille: |  | ||||||
|         #     https://github.com/goodboy/tractor/pull/149 |  | ||||||
|         # case 'hang': |  | ||||||
|         # TODO: framework research: |  | ||||||
|         # |  | ||||||
|         # - https://github.com/GuoTengda1993/pynetem |  | ||||||
|         # - https://github.com/shopify/toxiproxy |  | ||||||
|         # - https://manpages.ubuntu.com/manpages/trusty/man1/wirefilter.1.html |  | ||||||
| 
 |  | ||||||
|         case _: |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 f'IPC break method unsupported: {method}' |  | ||||||
|             ) |  | ||||||
|  | @ -26,46 +26,29 @@ from functools import ( | ||||||
| import inspect | import inspect | ||||||
| import platform | import platform | ||||||
| 
 | 
 | ||||||
| import pytest |  | ||||||
| import tractor | import tractor | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def tractor_test(fn): | def tractor_test(fn): | ||||||
|     ''' |     ''' | ||||||
|     Decorator for async test fns to decorator-wrap them as "native" |     Decorator for async test funcs to present them as "native" | ||||||
|     looking sync funcs runnable by `pytest` and auto invoked with |     looking sync funcs runnable by `pytest` using `trio.run()`. | ||||||
|     `trio.run()` (much like the `pytest-trio` plugin's approach). |  | ||||||
| 
 | 
 | ||||||
|     Further the test fn body will be invoked AFTER booting the actor |     Use: | ||||||
|     runtime, i.e. from inside a `tractor.open_root_actor()` block AND |  | ||||||
|     with various runtime and tooling parameters implicitly passed as |  | ||||||
|     requested by by the test session's config; see immediately below. |  | ||||||
| 
 | 
 | ||||||
|     Basic deco use: |     @tractor_test | ||||||
|     --------------- |     async def test_whatever(): | ||||||
|  |         await ... | ||||||
| 
 | 
 | ||||||
|       @tractor_test |     If fixtures: | ||||||
|       async def test_whatever(): |  | ||||||
|           await ... |  | ||||||
| 
 | 
 | ||||||
|  |         - ``reg_addr`` (a socket addr tuple where arbiter is listening) | ||||||
|  |         - ``loglevel`` (logging level passed to tractor internals) | ||||||
|  |         - ``start_method`` (subprocess spawning backend) | ||||||
| 
 | 
 | ||||||
|     Runtime config via special fixtures: |     are defined in the `pytest` fixture space they will be automatically | ||||||
|     ------------------------------------ |     injected to tests declaring these funcargs. | ||||||
|     If any of the following fixture are requested by the wrapped test |  | ||||||
|     fn (via normal func-args declaration), |  | ||||||
| 
 |  | ||||||
|     - `reg_addr` (a socket addr tuple where arbiter is listening) |  | ||||||
|     - `loglevel` (logging level passed to tractor internals) |  | ||||||
|     - `start_method` (subprocess spawning backend) |  | ||||||
| 
 |  | ||||||
|     (TODO support) |  | ||||||
|     - `tpt_proto` (IPC transport protocol key) |  | ||||||
| 
 |  | ||||||
|     they will be automatically injected to each test as normally |  | ||||||
|     expected as well as passed to the initial |  | ||||||
|     `tractor.open_root_actor()` funcargs. |  | ||||||
| 
 |  | ||||||
|     ''' |     ''' | ||||||
|     @wraps(fn) |     @wraps(fn) | ||||||
|     def wrapper( |     def wrapper( | ||||||
|  | @ -128,164 +111,3 @@ def tractor_test(fn): | ||||||
|         return trio.run(main) |         return trio.run(main) | ||||||
| 
 | 
 | ||||||
|     return wrapper |     return wrapper | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def pytest_addoption( |  | ||||||
|     parser: pytest.Parser, |  | ||||||
| ): |  | ||||||
|     # parser.addoption( |  | ||||||
|     #     "--ll", |  | ||||||
|     #     action="store", |  | ||||||
|     #     dest='loglevel', |  | ||||||
|     #     default='ERROR', help="logging level to set when testing" |  | ||||||
|     # ) |  | ||||||
| 
 |  | ||||||
|     parser.addoption( |  | ||||||
|         "--spawn-backend", |  | ||||||
|         action="store", |  | ||||||
|         dest='spawn_backend', |  | ||||||
|         default='trio', |  | ||||||
|         help="Processing spawning backend to use for test run", |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     parser.addoption( |  | ||||||
|         "--tpdb", |  | ||||||
|         "--debug-mode", |  | ||||||
|         action="store_true", |  | ||||||
|         dest='tractor_debug_mode', |  | ||||||
|         # default=False, |  | ||||||
|         help=( |  | ||||||
|             'Enable a flag that can be used by tests to to set the ' |  | ||||||
|             '`debug_mode: bool` for engaging the internal ' |  | ||||||
|             'multi-proc debugger sys.' |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # provide which IPC transport protocols opting-in test suites |  | ||||||
|     # should accumulatively run against. |  | ||||||
|     parser.addoption( |  | ||||||
|         "--tpt-proto", |  | ||||||
|         nargs='+',  # accumulate-multiple-args |  | ||||||
|         action="store", |  | ||||||
|         dest='tpt_protos', |  | ||||||
|         default=['tcp'], |  | ||||||
|         help="Transport protocol to use under the `tractor.ipc.Channel`", |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def pytest_configure(config): |  | ||||||
|     backend = config.option.spawn_backend |  | ||||||
|     tractor._spawn.try_set_start_method(backend) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture(scope='session') |  | ||||||
| def debug_mode(request) -> bool: |  | ||||||
|     ''' |  | ||||||
|     Flag state for whether `--tpdb` (for `tractor`-py-debugger) |  | ||||||
|     was passed to the test run. |  | ||||||
| 
 |  | ||||||
|     Normally tests should pass this directly to `.open_root_actor()` |  | ||||||
|     to allow the user to opt into suite-wide crash handling. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     debug_mode: bool = request.config.option.tractor_debug_mode |  | ||||||
|     return debug_mode |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture(scope='session') |  | ||||||
| def spawn_backend(request) -> str: |  | ||||||
|     return request.config.option.spawn_backend |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture(scope='session') |  | ||||||
| def tpt_protos(request) -> list[str]: |  | ||||||
| 
 |  | ||||||
|     # allow quoting on CLI |  | ||||||
|     proto_keys: list[str] = [ |  | ||||||
|         proto_key.replace('"', '').replace("'", "") |  | ||||||
|         for proto_key in request.config.option.tpt_protos |  | ||||||
|     ] |  | ||||||
| 
 |  | ||||||
|     # ?TODO, eventually support multiple protos per test-sesh? |  | ||||||
|     if len(proto_keys) > 1: |  | ||||||
|         pytest.fail( |  | ||||||
|             'We only support one `--tpt-proto <key>` atm!\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # XXX ensure we support the protocol by name via lookup! |  | ||||||
|     for proto_key in proto_keys: |  | ||||||
|         addr_type = tractor._addr._address_types[proto_key] |  | ||||||
|         assert addr_type.proto_key == proto_key |  | ||||||
| 
 |  | ||||||
|     yield proto_keys |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture( |  | ||||||
|     scope='session', |  | ||||||
|     autouse=True, |  | ||||||
| ) |  | ||||||
| def tpt_proto( |  | ||||||
|     tpt_protos: list[str], |  | ||||||
| ) -> str: |  | ||||||
|     proto_key: str = tpt_protos[0] |  | ||||||
| 
 |  | ||||||
|     from tractor import _state |  | ||||||
|     if _state._def_tpt_proto != proto_key: |  | ||||||
|         _state._def_tpt_proto = proto_key |  | ||||||
| 
 |  | ||||||
|     yield proto_key |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture(scope='session') |  | ||||||
| def reg_addr( |  | ||||||
|     tpt_proto: str, |  | ||||||
| ) -> tuple[str, int|str]: |  | ||||||
|     ''' |  | ||||||
|     Deliver a test-sesh unique registry address such |  | ||||||
|     that each run's (tests which use this fixture) will |  | ||||||
|     have no conflicts/cross-talk when running simultaneously |  | ||||||
|     nor will interfere with other live `tractor` apps active |  | ||||||
|     on the same network-host (namespace). |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     from tractor._testing.addr import get_rando_addr |  | ||||||
|     return get_rando_addr( |  | ||||||
|         tpt_proto=tpt_proto, |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def pytest_generate_tests( |  | ||||||
|     metafunc: pytest.Metafunc, |  | ||||||
| ): |  | ||||||
|     spawn_backend: str = metafunc.config.option.spawn_backend |  | ||||||
| 
 |  | ||||||
|     if not spawn_backend: |  | ||||||
|         # XXX some weird windows bug with `pytest`? |  | ||||||
|         spawn_backend = 'trio' |  | ||||||
| 
 |  | ||||||
|     # TODO: maybe just use the literal `._spawn.SpawnMethodKey`? |  | ||||||
|     assert spawn_backend in ( |  | ||||||
|         'mp_spawn', |  | ||||||
|         'mp_forkserver', |  | ||||||
|         'trio', |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # NOTE: used-to-be-used-to dyanmically parametrize tests for when |  | ||||||
|     # you just passed --spawn-backend=`mp` on the cli, but now we expect |  | ||||||
|     # that cli input to be manually specified, BUT, maybe we'll do |  | ||||||
|     # something like this again in the future? |  | ||||||
|     if 'start_method' in metafunc.fixturenames: |  | ||||||
|         metafunc.parametrize( |  | ||||||
|             "start_method", |  | ||||||
|             [spawn_backend], |  | ||||||
|             scope='module', |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # TODO, parametrize any `tpt_proto: str` declaring tests! |  | ||||||
|     # proto_tpts: list[str] = metafunc.config.option.proto_tpts |  | ||||||
|     # if 'tpt_proto' in metafunc.fixturenames: |  | ||||||
|     #     metafunc.parametrize( |  | ||||||
|     #         'tpt_proto', |  | ||||||
|     #         proto_tpts,  # TODO, double check this list usage! |  | ||||||
|     #         scope='module', |  | ||||||
|     #     ) |  | ||||||
|  |  | ||||||
|  | @ -1,35 +0,0 @@ | ||||||
| import os |  | ||||||
| import random |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def generate_sample_messages( |  | ||||||
|     amount: int, |  | ||||||
|     rand_min: int = 0, |  | ||||||
|     rand_max: int = 0, |  | ||||||
|     silent: bool = False |  | ||||||
| ) -> tuple[list[bytes], int]: |  | ||||||
| 
 |  | ||||||
|     msgs = [] |  | ||||||
|     size = 0 |  | ||||||
| 
 |  | ||||||
|     if not silent: |  | ||||||
|         print(f'\ngenerating {amount} messages...') |  | ||||||
| 
 |  | ||||||
|     for i in range(amount): |  | ||||||
|         msg = f'[{i:08}]'.encode('utf-8') |  | ||||||
| 
 |  | ||||||
|         if rand_max > 0: |  | ||||||
|             msg += os.urandom( |  | ||||||
|                 random.randint(rand_min, rand_max)) |  | ||||||
| 
 |  | ||||||
|         size += len(msg) |  | ||||||
| 
 |  | ||||||
|         msgs.append(msg) |  | ||||||
| 
 |  | ||||||
|         if not silent and i and i % 10_000 == 0: |  | ||||||
|             print(f'{i} generated') |  | ||||||
| 
 |  | ||||||
|     if not silent: |  | ||||||
|         print(f'done, {size:,} bytes in total') |  | ||||||
| 
 |  | ||||||
|     return msgs, size |  | ||||||
|  | @ -1,78 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright 2018-eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| """ |  | ||||||
| Runtime "developer experience" utils and addons to aid our |  | ||||||
| (advanced) users and core devs in building distributed applications |  | ||||||
| and working with/on the actor runtime. |  | ||||||
| 
 |  | ||||||
| """ |  | ||||||
| from .debug import ( |  | ||||||
|     maybe_wait_for_debugger as maybe_wait_for_debugger, |  | ||||||
|     acquire_debug_lock as acquire_debug_lock, |  | ||||||
|     breakpoint as breakpoint, |  | ||||||
|     pause as pause, |  | ||||||
|     pause_from_sync as pause_from_sync, |  | ||||||
|     sigint_shield as sigint_shield, |  | ||||||
|     open_crash_handler as open_crash_handler, |  | ||||||
|     maybe_open_crash_handler as maybe_open_crash_handler, |  | ||||||
|     maybe_init_greenback as maybe_init_greenback, |  | ||||||
|     post_mortem as post_mortem, |  | ||||||
|     mk_pdb as mk_pdb, |  | ||||||
| ) |  | ||||||
| from ._stackscope import ( |  | ||||||
|     enable_stack_on_sig as enable_stack_on_sig, |  | ||||||
| ) |  | ||||||
| from .pformat import ( |  | ||||||
|     add_div as add_div, |  | ||||||
|     pformat_caller_frame as pformat_caller_frame, |  | ||||||
|     pformat_boxed_tb as pformat_boxed_tb, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO, move this to a new `.devx._pdbp` mod? |  | ||||||
| def _enable_readline_feats() -> str: |  | ||||||
|     ''' |  | ||||||
|     Handle `readline` when compiled with `libedit` to avoid breaking |  | ||||||
|     tab completion in `pdbp` (and its dep `tabcompleter`) |  | ||||||
|     particularly since `uv` cpython distis are compiled this way.. |  | ||||||
| 
 |  | ||||||
|     See docs for deats, |  | ||||||
|     https://docs.python.org/3/library/readline.html#module-readline |  | ||||||
| 
 |  | ||||||
|     Originally discovered soln via SO answer, |  | ||||||
|     https://stackoverflow.com/q/49287102 |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     import readline |  | ||||||
|     if ( |  | ||||||
|         # 3.13+ attr |  | ||||||
|         # https://docs.python.org/3/library/readline.html#readline.backend |  | ||||||
|         (getattr(readline, 'backend', False) == 'libedit') |  | ||||||
|         or |  | ||||||
|         'libedit' in readline.__doc__ |  | ||||||
|     ): |  | ||||||
|         readline.parse_and_bind("python:bind -v") |  | ||||||
|         readline.parse_and_bind("python:bind ^I rl_complete") |  | ||||||
|         return 'libedit' |  | ||||||
|     else: |  | ||||||
|         readline.parse_and_bind("tab: complete") |  | ||||||
|         readline.parse_and_bind("set editing-mode vi") |  | ||||||
|         readline.parse_and_bind("set keymap vi") |  | ||||||
|         return 'readline' |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _enable_readline_feats() |  | ||||||
|  | @ -1,380 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright 2018-eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| Tools for code-object annotation, introspection and mutation |  | ||||||
| as it pertains to improving the grok-ability of our runtime! |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| from contextlib import ( |  | ||||||
|     _GeneratorContextManager, |  | ||||||
|     _AsyncGeneratorContextManager, |  | ||||||
| ) |  | ||||||
| from functools import partial |  | ||||||
| import inspect |  | ||||||
| import textwrap |  | ||||||
| from types import ( |  | ||||||
|     FrameType, |  | ||||||
|     FunctionType, |  | ||||||
|     MethodType, |  | ||||||
|     CodeType, |  | ||||||
| ) |  | ||||||
| from typing import ( |  | ||||||
|     Any, |  | ||||||
|     Callable, |  | ||||||
|     Type, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| import pdbp |  | ||||||
| from tractor.log import get_logger |  | ||||||
| import trio |  | ||||||
| from tractor.msg import ( |  | ||||||
|     pretty_struct, |  | ||||||
|     NamespacePath, |  | ||||||
| ) |  | ||||||
| import wrapt |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| log = get_logger(__name__) |  | ||||||
| 
 |  | ||||||
# TODO: yeah, i don't love this and we should prolly just
# write a decorator that actually keeps a stupid ref to the func
# obj..
def get_class_from_frame(fr: FrameType) -> (
    FunctionType
    |MethodType
):
    '''
    Attempt to get the function (or method) reference
    from a given `FrameType`.

    Verbatim from an SO:
    https://stackoverflow.com/a/2220759

    '''
    arg_names, _, _, local_vals = inspect.getargvalues(fr)

    # heuristically detect a method frame: its first positional
    # param is named 'self'.
    # TODO: other cases for `@classmethod` etc..?)
    if arg_names and arg_names[0] == 'self':
        # 'self' will be bound in the frame's locals; report its
        # class when present.
        maybe_inst: object = local_vals.get('self')
        if maybe_inst:
            return getattr(
                maybe_inst,
                '__class__',
                None,
            )

    # not a (detectable) method frame
    return None
| 
 |  | ||||||
| 
 |  | ||||||
| def get_ns_and_func_from_frame( |  | ||||||
|     frame: FrameType, |  | ||||||
| ) -> Callable: |  | ||||||
|     ''' |  | ||||||
|     Return the corresponding function object reference from |  | ||||||
|     a `FrameType`, and return it and it's parent namespace `dict`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     ns: dict[str, Any] |  | ||||||
| 
 |  | ||||||
|     # for a method, go up a frame and lookup the name in locals() |  | ||||||
|     if '.' in (qualname := frame.f_code.co_qualname): |  | ||||||
|         cls_name, _, func_name = qualname.partition('.') |  | ||||||
|         ns = frame.f_back.f_locals[cls_name].__dict__ |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         func_name: str = frame.f_code.co_name |  | ||||||
|         ns = frame.f_globals |  | ||||||
| 
 |  | ||||||
|     return ( |  | ||||||
|         ns, |  | ||||||
|         ns[func_name], |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def func_ref_from_frame(
    frame: FrameType,
) -> Callable:
    '''
    Resolve the function object executing in `frame`: first via the
    frame's module globals and, failing that, via attribute lookup
    on the (heuristically detected) `self`-instance's class.

    '''
    func_name: str = frame.f_code.co_name
    try:
        return frame.f_globals[func_name]
    except KeyError:
        pass

    # not module-level; maybe it's a method frame?
    cls: Type|None = get_class_from_frame(frame)
    if cls:
        return getattr(
            cls,
            func_name,
        )
| 
 |  | ||||||
| 
 |  | ||||||
class CallerInfo(pretty_struct.Struct):
    '''
    Meta-data about a runtime-API call-site: the "API frame" in
    which a `tractor` endpoint was invoked and, lazily resolved,
    the user "caller frame" a configured number of frames above it.

    '''
    # https://docs.python.org/dev/reference/datamodel.html#frame-objects
    # https://docs.python.org/dev/library/inspect.html#the-interpreter-stack
    _api_frame: FrameType

    @property
    def api_frame(self) -> FrameType:
        # proactively try to release the frame's local-var refs;
        # `.clear()` raises `RuntimeError` while still executing.
        try:
            self._api_frame.clear()
        except RuntimeError:
            # log.warning(
            print(
                f'Frame {self._api_frame} for {self.api_func} is still active!'
            )

        return self._api_frame

    _api_func: Callable

    @property
    def api_func(self) -> Callable:
        return self._api_func

    _caller_frames_up: int|None = 1
    _caller_frame: FrameType|None = None  # cached after first stack scan

    @property
    def api_nsp(self) -> NamespacePath|None:
        func: FunctionType = self.api_func
        if func:
            return NamespacePath.from_ref(func)

        return '<unknown>'

    @property
    def caller_frame(self) -> FrameType:

        # if not already cached, scan up stack explicitly by
        # configured count.
        if not self._caller_frame:
            # XXX start from the api-frame and walk up from the
            # PREVIOUSLY visited frame on each iteration; the old
            # code re-read `self.api_frame.f_back` every pass and
            # thus could never ascend more than one frame (and left
            # `caller_frame` unbound when `_caller_frames_up` was
            # falsy).
            caller_frame: FrameType|None = self.api_frame
            if self._caller_frames_up:
                for _ in range(self._caller_frames_up):
                    caller_frame = caller_frame.f_back

                if not caller_frame:
                    raise ValueError(
                        f'No frame exists {self._caller_frames_up} up from\n'
                        f'{self.api_frame} @ {self.api_nsp}\n'
                    )

            self._caller_frame = caller_frame

        return self._caller_frame

    @property
    def caller_nsp(self) -> NamespacePath|None:
        # NOTE(review): this mirrors `api_nsp` exactly and also
        # derives from `self.api_func`; presumably it should resolve
        # the func of `self.caller_frame` instead — confirm intent.
        func: FunctionType = self.api_func
        if func:
            return NamespacePath.from_ref(func)

        return '<unknown>'
| 
 |  | ||||||
| 
 |  | ||||||
def find_caller_info(
    dunder_var: str = '__runtimeframe__',
    iframes:int = 1,
    check_frame_depth: bool = True,

) -> CallerInfo|None:
    '''
    Scan up the callstack for a frame with a `dunder_var: str` variable
    and return the `iframes` frames above it.

    By default we scan for a `__runtimeframe__` scope var which
    denotes a `tractor` API above which (one frame up) is "user
    app code" which "called into" the `tractor` method or func.

    TODO: ex with `Portal.open_context()`

    '''
    # TODO: use this instead?
    # https://docs.python.org/3/library/inspect.html#inspect.getouterframes
    for finfo in inspect.stack():
        # sanity: `FrameInfo.function` should always mirror the
        # underlying code-obj's name.
        assert finfo.function == finfo.frame.f_code.co_name

        marker_val: int|None = finfo.frame.f_locals.get(dunder_var)
        if not marker_val:
            continue

        # marker could be 0 or `True` i guess?
        go_up_iframes: int = marker_val or iframes

        rt_frame: FrameType = finfo.frame
        call_frame = rt_frame
        for _ in range(go_up_iframes):
            call_frame = call_frame.f_back

        return CallerInfo(
            _api_frame=rt_frame,
            _api_func=func_ref_from_frame(rt_frame),
            _caller_frames_up=go_up_iframes,
        )

    return None
| 
 |  | ||||||
| 
 |  | ||||||
# global cache mapping each seen call-site frame -> its `CallerInfo`
_frame2callerinfo_cache: dict[FrameType, CallerInfo] = {}


# TODO: -[x] move all this into new `.devx._frame_stack`!
# -[ ] consider rename to _callstack?
# -[ ] prolly create a `@runtime_api` dec?
#   |_ @api_frame seems better?
# -[ ] ^- make it capture and/or accept buncha optional
#     meta-data like a fancier version of `@pdbp.hideframe`.
#
def api_frame(
    wrapped: Callable|None = None,
    *,
    caller_frames_up: int = 1,

) -> Callable:
    '''
    Decorator marking a func as a runtime "API frame" such that the
    frame above it in the call-stack is considered user code; on
    each call a `CallerInfo` for the call-site frame is cached in
    the module-global `_frame2callerinfo_cache`.

    Supports both bare (`@api_frame`) and parameterized
    (`@api_frame(caller_frames_up=2)`) usage.

    '''
    # handle the decorator called WITHOUT () case,
    # i.e. just @api_frame, NOT @api_frame(extra=<blah>)
    if wrapped is None:
        return partial(
            api_frame,
            caller_frames_up=caller_frames_up,
        )

    @wrapt.decorator
    async def wrapper(
        wrapped: Callable,
        instance: object,
        args: tuple,
        kwargs: dict,
    ):
        # maybe cache the API frame for this call
        global _frame2callerinfo_cache
        this_frame: FrameType = inspect.currentframe()
        api_frame: FrameType = this_frame.f_back

        if not _frame2callerinfo_cache.get(api_frame):
            _frame2callerinfo_cache[api_frame] = CallerInfo(
                _api_frame=api_frame,
                _api_func=wrapped,
                _caller_frames_up=caller_frames_up,
            )

        # NOTE(review): `wrapper` is declared `async` but the wrapped
        # call is returned un-awaited; presumably this dec is only
        # ever applied to async endpoints — confirm, since a sync
        # callee would end up returning a coroutine to its caller.
        return wrapped(*args, **kwargs)

    # annotate the function as a "api function", meaning it is
    # a function for which the function above it in the call stack should be
    # non-`tractor` code aka "user code".
    #
    # in the global frame cache for easy lookup from a given
    # func-instance
    wrapped._call_infos: dict[FrameType, CallerInfo] = _frame2callerinfo_cache
    wrapped.__api_func__: bool = True
    return wrapper(wrapped)
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO: something like this instead of the adhoc frame-unhiding |  | ||||||
| # blocks all over the runtime!! XD |  | ||||||
| # -[ ] ideally we can expect a certain error (set) and if something |  | ||||||
| #     else is raised then all frames below the wrapped one will be |  | ||||||
| #     un-hidden via `__tracebackhide__: bool = False`. |  | ||||||
| # |_ might need to dynamically mutate the code objs like |  | ||||||
| #    `pdbp.hideframe()` does? |  | ||||||
| # -[ ] use this as a `@acm` decorator as introed in 3.10? |  | ||||||
| # @acm |  | ||||||
| # async def unhide_frame_when_not( |  | ||||||
| #     error_set: set[BaseException], |  | ||||||
| # ) -> TracebackType: |  | ||||||
| #     ... |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def hide_runtime_frames() -> dict[FunctionType, CodeType]:
    '''
    Hide call-stack frames for various std-lib and `trio`-API primitives
    such that the tracebacks presented from our runtime are as minimized
    as possible, particularly from inside a `PdbREPL`.

    Returns a map of each patched function to its ORIGINAL
    (pre-`pdbp.hideframe()`) code object, so the hiding can be
    reverted later if needed.

    '''
    # XXX HACKZONE XXX
    #  hide exit stack frames on nurseries and cancel-scopes!
    # |_ so avoid seeing it when the `pdbp` REPL is first engaged from
    #    inside a `trio.open_nursery()` scope (with no line after it
    #    in before the block end??).
    #
    # TODO: FINALLY got this workin originally with
    #  `@pdbp.hideframe` around the `wrapper()` def embedded inside
    #  `_ki_protection_decoratior()`.. which is in the module:
    #  /home/goodboy/.virtualenvs/tractor311/lib/python3.11/site-packages/trio/_core/_ki.py
    #
    # -[ ] make an issue and patch for `trio` core? maybe linked
    #    to the long outstanding `pdb` one below?
    #   |_ it's funny that there's frame hiding throughout `._run.py`
    #      but not where it matters on the below exit funcs..
    #
    # -[ ] provide a patchset for the lonstanding
    #   |_ https://github.com/python-trio/trio/issues/1155
    #
    # -[ ] make a linked issue to ^ and propose allowing all the
    #     `._core._run` code to have their `__tracebackhide__` value
    #     configurable by a `RunVar` to allow getting scheduler frames
    #     if desired through configuration?
    #
    # -[ ] maybe dig into the core `pdb` issue why the extra frame is shown
    #      at all?
    #
    # the scope-exit/teardown machinery whose frames we want elided
    # from user-facing tracebacks by default.
    funcs: list[FunctionType] = [
        trio._core._run.NurseryManager.__aexit__,
        trio._core._run.CancelScope.__exit__,
         _GeneratorContextManager.__exit__,
         _AsyncGeneratorContextManager.__aexit__,
         _AsyncGeneratorContextManager.__aenter__,
         trio.Event.wait,
    ]
    func_list_str: str = textwrap.indent(
        "\n".join(f.__qualname__ for f in funcs),
        prefix=' |_ ',
    )
    log.devx(
        'Hiding the following runtime frames by default:\n'
        f'{func_list_str}\n'
    )

    codes: dict[FunctionType, CodeType] = {}
    for ref in funcs:
        # stash a pre-modified version of each ref's code-obj
        # so it can be reverted later if needed.
        codes[ref] = ref.__code__
        pdbp.hideframe(ref)
    #
    # pdbp.hideframe(trio._core._run.NurseryManager.__aexit__)
    # pdbp.hideframe(trio._core._run.CancelScope.__exit__)
    # pdbp.hideframe(_GeneratorContextManager.__exit__)
    # pdbp.hideframe(_AsyncGeneratorContextManager.__aexit__)
    # pdbp.hideframe(_AsyncGeneratorContextManager.__aenter__)
    # pdbp.hideframe(trio.Event.wait)
    return codes
|  | @ -1,265 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| The fundamental cross process SC abstraction: an inter-actor, |  | ||||||
| cancel-scope linked task "context". |  | ||||||
| 
 |  | ||||||
| A ``Context`` is very similar to the ``trio.Nursery.cancel_scope`` built |  | ||||||
| into each ``trio.Nursery`` except it links the lifetimes of memory space |  | ||||||
| disjoint, parallel executing tasks in separate actors. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| # from functools import partial |  | ||||||
| from threading import ( |  | ||||||
|     current_thread, |  | ||||||
|     Thread, |  | ||||||
|     RLock, |  | ||||||
| ) |  | ||||||
| import multiprocessing as mp |  | ||||||
| from signal import ( |  | ||||||
|     signal, |  | ||||||
|     getsignal, |  | ||||||
|     SIGUSR1, |  | ||||||
|     SIGINT, |  | ||||||
| ) |  | ||||||
| # import traceback |  | ||||||
| from types import ModuleType |  | ||||||
| from typing import ( |  | ||||||
|     Callable, |  | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| from tractor import ( |  | ||||||
|     _state, |  | ||||||
|     log as logmod, |  | ||||||
| ) |  | ||||||
| from tractor.devx import debug |  | ||||||
| 
 |  | ||||||
| log = logmod.get_logger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from tractor._spawn import ProcessType |  | ||||||
|     from tractor import ( |  | ||||||
|         Actor, |  | ||||||
|         ActorNursery, |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@trio.lowlevel.disable_ki_protection
def dump_task_tree() -> None:
    '''
    Do a classic `stackscope.extract()` task-tree dump to console at
    `.devx()` level.

    Extracts the full `trio` task tree (recursing into child tasks)
    from the current root task, then logs it together with a report
    on whether the default `trio` SIGINT handler is still installed.

    '''
    import stackscope
    tree_str: str = str(
        stackscope.extract(
            trio.lowlevel.current_root_task(),
            recurse_child_tasks=True
        )
    )
    actor: Actor = _state.current_actor()
    thr: Thread = current_thread()
    # detect whether something (eg. a debugger REPL) has swapped out
    # the default `trio` SIGINT handler.
    current_sigint_handler: Callable = getsignal(SIGINT)
    if (
        current_sigint_handler
        is not
        debug.DebugStatus._trio_handler
    ):
        sigint_handler_report: str = (
            'The default `trio` SIGINT handler was replaced?!'
        )
    else:
        sigint_handler_report: str = (
            'The default `trio` SIGINT handler is in use?!'
        )

    # sclang symbology
    # |_<object>
    # |_(Task/Thread/Process/Actor
    # |_{Supervisor/Scope
    # |_[Storage/Memory/IPC-Stream/Data-Struct

    # NOTE(review): `.devx()` is presumably a project-custom log
    # level — confirm against `tractor.log`.
    log.devx(
        f'Dumping `stackscope` tree for actor\n'
        f'(>: {actor.uid!r}\n'
        f' |_{mp.current_process()}\n'
        f'   |_{thr}\n'
        f'     |_{actor}\n'
        f'\n'
        f'{sigint_handler_report}\n'
        f'signal.getsignal(SIGINT) -> {current_sigint_handler!r}\n'
        # f'\n'
        # start-of-trace-tree delimiter (mostly for testing)
        # f'------ {actor.uid!r} ------\n'
        f'\n'
        f'------ start-of-{actor.uid!r} ------\n'
        f'|\n'
        f'{tree_str}'
        # end-of-trace-tree delimiter (mostly for testing)
        f'|\n'
        f'|_____ end-of-{actor.uid!r} ______\n'
    )
    # TODO: can remove this right?
    # -[ ] was original code from author
    #
    # print(
    #     'DUMPING FROM PRINT\n'
    #     +
    #     content
    # )
    # import logging
    # try:
    #     with open("/dev/tty", "w") as tty:
    #         tty.write(tree_str)
    # except BaseException:
    #     logging.getLogger(
    #         "task_tree"
    #     ).exception("Error printing task tree")
| 
 |  | ||||||
# serialize concurrent handler invocations (the handler may fire
# re-entrantly, eg. when relayed down from a parent actor).
_handler_lock = RLock()
_tree_dumped: bool = False  # set once a dump has been attempted


def dump_tree_on_sig(
    sig: int,
    frame: object,

    relay_to_subs: bool = True,

) -> None:
    '''
    Signal handler which dumps the `stackscope` task-tree for this
    actor and then, by default, relays the same signal to all child
    sub-processes so the whole actor tree gets dumped.

    `sig`/`frame` follow the stdlib `signal` handler protocol.

    '''
    global _tree_dumped, _handler_lock
    with _handler_lock:
        # if _tree_dumped:
        #     log.warning(
        #         'Already dumped for this actor...??'
        #     )
        #     return

        _tree_dumped = True

        # actor: Actor = _state.current_actor()
        log.devx(
            'Trying to dump `stackscope` tree..\n'
        )
        try:
            dump_task_tree()
            # await actor._service_n.start_soon(
            #     partial(
            #         trio.to_thread.run_sync,
            #         dump_task_tree,
            #     )
            # )
            # trio.lowlevel.current_trio_token().run_sync_soon(
            #     dump_task_tree
            # )

        except RuntimeError:
            log.exception(
                'Failed to dump `stackscope` tree..\n'
            )
            # not in async context -- print a normal traceback
            # traceback.print_stack()
            raise

        except BaseException:
            log.exception(
                'Failed to dump `stackscope` tree..\n'
            )
            raise

        # log.devx(
        #     'Supposedly we dumped just fine..?'
        # )

    if not relay_to_subs:
        return

    # walk every sub-actor supervised by this actor and forward the
    # same signal so their handlers dump too.
    an: ActorNursery
    for an in _state.current_actor()._actoruid2nursery.values():
        subproc: ProcessType
        subactor: Actor
        for subactor, subproc, _ in an._children.values():
            log.warning(
                f'Relaying `SIGUSR1`[{sig}] to sub-actor\n'
                f'{subactor}\n'
                f' |_{subproc}\n'
            )

            # bc of course stdlib can't have a std API.. XD
            match subproc:
                case trio.Process():
                    subproc.send_signal(sig)

                case mp.Process():
                    subproc._send_signal(sig)
| 
 |  | ||||||
| 
 |  | ||||||
def enable_stack_on_sig(
    sig: int = SIGUSR1,
) -> ModuleType|None:
    '''
    Enable `stackscope` tracing on reception of a signal; by
    default this is SIGUSR1.

    HOT TIP: a task/ctx-tree dump can be triggered from a shell with
    fancy cmds.

    For ex. from `bash` using `pgrep` and cmd-substitution
    (https://www.gnu.org/software/bash/manual/bash.html#Command-Substitution)
    you could use:

    >> kill -SIGUSR1 $(pgrep -f <part-of-cmd: str>)

    OR without a sub-shell,

    >> pkill --signal SIGUSR1 -f <part-of-cmd: str>

    Returns the imported `stackscope` module on success, or `None`
    when the lib is missing or our handler was already installed.

    '''
    try:
        import stackscope
    except ImportError:
        log.warning(
            'The `stackscope` lib is not installed!\n'
            '`Ignoring enable_stack_on_sig() call!\n'
        )
        return None

    # don't double-install: bail if our handler is already registered
    # for this signal.
    handler: Callable|int = getsignal(sig)
    if handler is dump_tree_on_sig:
        log.devx(
            'A `SIGUSR1` handler already exists?\n'
            f'|_ {handler!r}\n'
        )
        return

    signal(
        sig,
        dump_tree_on_sig,
    )
    log.devx(
        f'Enabling trace-trees on `SIGUSR1` '
        f'since `stackscope` is installed @ \n'
        f'{stackscope!r}\n\n'
        f'With `SIGUSR1` handler\n'
        f'|_{dump_tree_on_sig}\n'
    )
    return stackscope
|  | @ -1,129 +0,0 @@ | ||||||
| # tractor: structured concurrent "actors". |  | ||||||
| # Copyright 2018-eternity Tyler Goodlet. |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| """ |  | ||||||
| CLI framework extensions for hacking on the actor runtime. |  | ||||||
| 
 |  | ||||||
| Currently popular frameworks supported are: |  | ||||||
| 
 |  | ||||||
|   - `typer` via the `@callback` API |  | ||||||
| 
 |  | ||||||
| """ |  | ||||||
| from __future__ import annotations |  | ||||||
| from typing import ( |  | ||||||
|     Any, |  | ||||||
|     Callable, |  | ||||||
| ) |  | ||||||
| from typing_extensions import Annotated |  | ||||||
| 
 |  | ||||||
| import typer |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _runtime_vars: dict[str, Any] = {} |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def load_runtime_vars(
    ctx: typer.Context,
    callback: Callable,
    pdb: bool = False,  # --pdb
    ll: Annotated[
        str,
        typer.Option(
            '--loglevel',
            '-l',
            help='BigD logging level',
        ),
    ] = 'cancel',  # -l info
):
    '''
    Maybe engage crash handling with `pdbp` when code inside
    a `typer` CLI endpoint cmd raises.

    To use this callback simply take your `app = typer.Typer()` instance
    and decorate this function with it like so:

    .. code:: python

        from tractor.devx import cli

        app = typer.Typer()

        # manual decoration to hook into `click`'s context system!
        # NOTE: `app.callback(...)` returns a *decorator*, so it must
        # be applied to the function, not assigned over it!
        cli.load_runtime_vars = app.callback(
            invoke_without_command=True,
        )(cli.load_runtime_vars)

    And then you can use the now augmented `click` CLI context as so,

    .. code:: python

        @app.command(
            context_settings={
                "allow_extra_args": True,
                "ignore_unknown_options": True,
            }
        )
        def my_cli_cmd(
            ctx: typer.Context,
        ):
            rtvars: dict = ctx.runtime_vars
            pdb: bool = rtvars['pdb']

            with tractor.devx.cli.maybe_open_crash_handler(pdb=pdb):
                trio.run(
                    partial(
                        my_tractor_main_task_func,
                        debug_mode=pdb,
                        loglevel=rtvars['ll'],
                    )
                )

    which will enable log level and debug mode globally for the entire
    `tractor` + `trio` runtime thereafter!

    Bo

    '''
    # Merge the CLI-provided flags into the module-global store so
    # that any later callers (and sub-commands) observe them.
    global _runtime_vars
    _runtime_vars |= {
        'pdb': pdb,
        'll': ll,
    }

    # Expose the merged vars on the `click`/`typer` context so
    # sub-command endpoints can read them as `ctx.runtime_vars`.
    ctx.runtime_vars: dict[str, Any] = _runtime_vars
    print(
        f'`typer` sub-cmd: {ctx.invoked_subcommand}\n'
        f'`tractor` runtime vars: {_runtime_vars}'
    )

    # XXX NOTE XXX: hackzone.. if no sub-cmd is specified (the
    # default if the user just invokes `bigd`) then we simply
    # invoke the sole `_bigd()` cmd passing in the "parent"
    # typer.Context directly to that call since we're treating it
    # as a "non sub-command" or wtv..
    # TODO: ideally typer would have some kinda built-in way to get
    # this behaviour without having to construct and manually
    # invoke our own cmd..
    if (
        ctx.invoked_subcommand is None
        or ctx.invoked_subcommand == callback.__name__
    ):
        # Build a throwaway command wrapping the provided callback
        # and invoke it by hand, forwarding the parent context as
        # its sole (keyword) param.
        cmd: typer.core.TyperCommand = typer.core.TyperCommand(
            name='bigd',
            callback=callback,
        )
        ctx.params = {'ctx': ctx}
        cmd.invoke(ctx)
Some files were not shown because too many files have changed in this diff Show More
		Loading…
	
		Reference in New Issue