Compare commits
	
		
			55 Commits 
		
	
	
		
			main
			...
			msgspec_in
		
	
	| Author | SHA1 | Date | 
|---|---|---|
|  | 57edf481e8 | |
|  | d6ddc47e58 | |
|  | 86f4f2df6f | |
|  | 2bd5ba76b9 | |
|  | a4859c969c | |
|  | 2dfa12c743 | |
|  | f812c344a7 | |
|  | e161f7bac0 | |
|  | 3fd28ee3a5 | |
|  | 8dba692ef5 | |
|  | 673aeef4e9 | |
|  | 9e6f75a592 | |
|  | 76f9ff608c | |
|  | cbdf23ee6b | |
|  | cb43c4c428 | |
|  | f154f492fc | |
|  | 0802736095 | |
|  | 68e5c2a95f | |
|  | 2adb59f40f | |
|  | 0bac1f3021 | |
|  | 25c19b9274 | |
|  | 86089800ab | |
|  | 92594d8222 | |
|  | 45a743cdd4 | |
|  | a2d119ab56 | |
|  | 24a63415ef | |
|  | 9b7a4a1cd5 | |
|  | d55671f68b | |
|  | 38d4fe31ac | |
|  | 73f814e0d8 | |
|  | e8b282810e | |
|  | 22383d1ed9 | |
|  | 0d41f1410f | |
|  | 6cf4a80fe4 | |
|  | c188008844 | |
|  | 593fd24a9e | |
|  | bb8452dbdb | |
|  | 82999d10df | |
|  | a085111173 | |
|  | c46bf6b3c4 | |
|  | b8b264ae54 | |
|  | c27b00687c | |
|  | fa6d9bef52 | |
|  | bdde646d4c | |
|  | 7d0541d864 | |
|  | 7888de6070 | |
|  | 3b2598a060 | |
|  | eb44244f24 | |
|  | 7b902b7e9c | |
|  | fdd2da238a | |
|  | bc6af2219e | |
|  | 5e03108211 | |
|  | 132b9651dd | |
|  | adc77861bb | |
|  | 93a83eab1c | 
|  | @ -1,168 +1,82 @@ | ||||||
| name: CI | name: CI | ||||||
| 
 | 
 | ||||||
| on: | on: push | ||||||
|   # any time someone pushes a new branch to origin |  | ||||||
|   push: |  | ||||||
| 
 |  | ||||||
|   # Allows you to run this workflow manually from the Actions tab |  | ||||||
|   workflow_dispatch: |  | ||||||
| 
 | 
 | ||||||
| jobs: | jobs: | ||||||
|   # ------ sdist ------ | 
 | ||||||
|   # test that we can generate a software distribution and install it |   mypy: | ||||||
|   # thus avoid missing file issues after packaging. |     name: 'MyPy' | ||||||
|   # |  | ||||||
|   # -[x] produce sdist with uv |  | ||||||
|   # ------ - ------ |  | ||||||
|   sdist-linux: |  | ||||||
|     name: 'sdist' |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| 
 | 
 | ||||||
|     steps: |     steps: | ||||||
|       - name: Checkout |       - name: Checkout | ||||||
|         uses: actions/checkout@v4 |         uses: actions/checkout@v2 | ||||||
| 
 | 
 | ||||||
|       - name: Install latest uv |       - name: Setup python | ||||||
|         uses: astral-sh/setup-uv@v6 |         uses: actions/setup-python@v2 | ||||||
|  |         with: | ||||||
|  |           python-version: '3.9' | ||||||
| 
 | 
 | ||||||
|       - name: Build sdist as tar.gz |       - name: Install dependencies | ||||||
|         run: uv build --sdist --python=3.13 |         run: pip install -U . --upgrade-strategy eager -r requirements-test.txt | ||||||
| 
 | 
 | ||||||
|       - name: Install sdist from .tar.gz |       - name: Run MyPy check | ||||||
|         run: python -m pip install dist/*.tar.gz |         run: mypy tractor/ --ignore-missing-imports | ||||||
| 
 | 
 | ||||||
|   # ------ type-check ------ |   testing: | ||||||
|   # mypy: |  | ||||||
|   #   name: 'MyPy' |  | ||||||
|   #   runs-on: ubuntu-latest |  | ||||||
| 
 | 
 | ||||||
|   #   steps: |  | ||||||
|   #     - name: Checkout |  | ||||||
|   #       uses: actions/checkout@v4 |  | ||||||
| 
 |  | ||||||
|   #     - name: Install latest uv |  | ||||||
|   #       uses: astral-sh/setup-uv@v6 |  | ||||||
| 
 |  | ||||||
|   #     # faster due to server caching? |  | ||||||
|   #     # https://docs.astral.sh/uv/guides/integration/github/#setting-up-python |  | ||||||
|   #     - name: "Set up Python" |  | ||||||
|   #       uses: actions/setup-python@v6 |  | ||||||
|   #       with: |  | ||||||
|   #         python-version-file: "pyproject.toml" |  | ||||||
| 
 |  | ||||||
|   #     # w uv |  | ||||||
|   #     # - name: Set up Python |  | ||||||
|   #     #   run: uv python install |  | ||||||
| 
 |  | ||||||
|   #     - name: Setup uv venv |  | ||||||
|   #       run: uv venv .venv --python=3.13 |  | ||||||
| 
 |  | ||||||
|   #     - name: Install |  | ||||||
|   #       run: uv sync --dev |  | ||||||
| 
 |  | ||||||
|   #     # TODO, ty cmd over repo |  | ||||||
|   #     # - name: type check with ty |  | ||||||
|   #     #   run: ty ./tractor/ |  | ||||||
| 
 |  | ||||||
|   #     # - uses: actions/cache@v3 |  | ||||||
|   #     #     name: Cache uv virtenv as default .venv |  | ||||||
|   #     #     with: |  | ||||||
|   #     #       path: ./.venv |  | ||||||
|   #     #       key: venv-${{ hashFiles('uv.lock') }} |  | ||||||
| 
 |  | ||||||
|   #     - name: Run MyPy check |  | ||||||
|   #       run: mypy tractor/ --ignore-missing-imports --show-traceback |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|   testing-linux: |  | ||||||
|     name: '${{ matrix.os }} Python ${{ matrix.python }} - ${{ matrix.spawn_backend }}' |     name: '${{ matrix.os }} Python ${{ matrix.python }} - ${{ matrix.spawn_backend }}' | ||||||
|  |     timeout-minutes: 9 | ||||||
|  |     runs-on: ${{ matrix.os }} | ||||||
|  | 
 | ||||||
|  |     strategy: | ||||||
|  |       fail-fast: false | ||||||
|  |       matrix: | ||||||
|  |         os: [ubuntu-latest, windows-latest] | ||||||
|  |         python: ['3.8', '3.9'] | ||||||
|  |         spawn_backend: ['trio', 'mp'] | ||||||
|  | 
 | ||||||
|  |     steps: | ||||||
|  | 
 | ||||||
|  |       - name: Checkout | ||||||
|  |         uses: actions/checkout@v2 | ||||||
|  | 
 | ||||||
|  |       - name: Setup python | ||||||
|  |         uses: actions/setup-python@v2 | ||||||
|  |         with: | ||||||
|  |           python-version: '${{ matrix.python }}' | ||||||
|  | 
 | ||||||
|  |       - name: Install dependencies | ||||||
|  |         run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||||
|  | 
 | ||||||
|  |       - name: Run tests | ||||||
|  |         run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rs | ||||||
|  | 
 | ||||||
|  |   testing-msgspec: | ||||||
|  |     # runs py3.9 jobs on all OS's but with optional `msgspec` dep installed | ||||||
|  |     name: '${{ matrix.os }} Python ${{ matrix.python }} - ${{ matrix.spawn_backend }} - msgspec' | ||||||
|     timeout-minutes: 10 |     timeout-minutes: 10 | ||||||
|     runs-on: ${{ matrix.os }} |     runs-on: ${{ matrix.os }} | ||||||
| 
 | 
 | ||||||
|     strategy: |     strategy: | ||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         os: [ubuntu-latest] |         os: [ubuntu-latest, windows-latest] | ||||||
|         python-version: ['3.13'] |         python: ['3.9'] | ||||||
|         spawn_backend: [ |         spawn_backend: ['trio', 'mp'] | ||||||
|           'trio', |  | ||||||
|           # 'mp_spawn', |  | ||||||
|           # 'mp_forkserver', |  | ||||||
|         ] |  | ||||||
| 
 | 
 | ||||||
|     steps: |     steps: | ||||||
| 
 | 
 | ||||||
|       - uses: actions/checkout@v4 |       - name: Checkout | ||||||
|  |         uses: actions/checkout@v2 | ||||||
| 
 | 
 | ||||||
|       - name: 'Install uv + py-${{ matrix.python-version }}' |       - name: Setup python | ||||||
|         uses: astral-sh/setup-uv@v6 |         uses: actions/setup-python@v2 | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ matrix.python-version }} |           python-version: '${{ matrix.python }}' | ||||||
| 
 | 
 | ||||||
|       # GH way.. faster? |       - name: Install dependencies | ||||||
|       # - name: setup-python@v6 |         run: pip install -U .[msgspec] -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||||
|       #   uses: actions/setup-python@v6 |  | ||||||
|       #   with: |  | ||||||
|       #     python-version: '${{ matrix.python-version }}' |  | ||||||
| 
 |  | ||||||
|       # consider caching for speedups? |  | ||||||
|       # https://docs.astral.sh/uv/guides/integration/github/#caching |  | ||||||
| 
 |  | ||||||
|       - name: Install the project w uv |  | ||||||
|         run: uv sync --all-extras --dev |  | ||||||
| 
 |  | ||||||
|       # - name: Install dependencies |  | ||||||
|       #   run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager |  | ||||||
| 
 |  | ||||||
|       - name: List deps tree |  | ||||||
|         run: uv tree |  | ||||||
| 
 | 
 | ||||||
|       - name: Run tests |       - name: Run tests | ||||||
|         run: uv run pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx |         run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rs | ||||||
| 
 |  | ||||||
|   # XXX legacy NOTE XXX |  | ||||||
|   # |  | ||||||
|   # We skip 3.10 on windows for now due to not having any collabs to |  | ||||||
|   # debug the CI failures. Anyone wanting to hack and solve them is very |  | ||||||
|   # welcome, but our primary user base is not using that OS. |  | ||||||
| 
 |  | ||||||
|   # TODO: use job filtering to accomplish instead of repeated |  | ||||||
|   # boilerplate as is above XD: |  | ||||||
|   # - https://docs.github.com/en/actions/learn-github-actions/managing-complex-workflows |  | ||||||
|   # - https://docs.github.com/en/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix |  | ||||||
|   # - https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idif |  | ||||||
|   # testing-windows: |  | ||||||
|   #   name: '${{ matrix.os }} Python ${{ matrix.python }} - ${{ matrix.spawn_backend }}' |  | ||||||
|   #   timeout-minutes: 12 |  | ||||||
|   #   runs-on: ${{ matrix.os }} |  | ||||||
| 
 |  | ||||||
|   #   strategy: |  | ||||||
|   #     fail-fast: false |  | ||||||
|   #     matrix: |  | ||||||
|   #       os: [windows-latest] |  | ||||||
|   #       python: ['3.10'] |  | ||||||
|   #       spawn_backend: ['trio', 'mp'] |  | ||||||
| 
 |  | ||||||
|   #   steps: |  | ||||||
| 
 |  | ||||||
|   #     - name: Checkout |  | ||||||
|   #       uses: actions/checkout@v2 |  | ||||||
| 
 |  | ||||||
|   #     - name: Setup python |  | ||||||
|   #       uses: actions/setup-python@v2 |  | ||||||
|   #       with: |  | ||||||
|   #         python-version: '${{ matrix.python }}' |  | ||||||
| 
 |  | ||||||
|   #     - name: Install dependencies |  | ||||||
|   #       run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager |  | ||||||
| 
 |  | ||||||
|   #     # TODO: pretty sure this solves debugger deps-issues on windows, but it needs to |  | ||||||
|   #     # be verified by someone with a native setup. |  | ||||||
|   #     # - name: Force pyreadline3 |  | ||||||
|   #     #   run: pip uninstall pyreadline; pip install -U pyreadline3 |  | ||||||
| 
 |  | ||||||
|   #     - name: List dependencies |  | ||||||
|   #       run: pip list |  | ||||||
| 
 |  | ||||||
|   #     - name: Run tests |  | ||||||
|   #       run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx |  | ||||||
|  |  | ||||||
							
								
								
									
										147
									
								
								LICENSE
								
								
								
								
							
							
						
						
									
										147
									
								
								LICENSE
								
								
								
								
							|  | @ -1,21 +1,23 @@ | ||||||
|                     GNU AFFERO GENERAL PUBLIC LICENSE |                     GNU GENERAL PUBLIC LICENSE | ||||||
|                        Version 3, 19 November 2007 |                        Version 3, 29 June 2007 | ||||||
| 
 | 
 | ||||||
|  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> |  Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> | ||||||
|  Everyone is permitted to copy and distribute verbatim copies |  Everyone is permitted to copy and distribute verbatim copies | ||||||
|  of this license document, but changing it is not allowed. |  of this license document, but changing it is not allowed. | ||||||
| 
 | 
 | ||||||
|                             Preamble |                             Preamble | ||||||
| 
 | 
 | ||||||
|   The GNU Affero General Public License is a free, copyleft license for |   The GNU General Public License is a free, copyleft license for | ||||||
| software and other kinds of works, specifically designed to ensure | software and other kinds of works. | ||||||
| cooperation with the community in the case of network server software. |  | ||||||
| 
 | 
 | ||||||
|   The licenses for most software and other practical works are designed |   The licenses for most software and other practical works are designed | ||||||
| to take away your freedom to share and change the works.  By contrast, | to take away your freedom to share and change the works.  By contrast, | ||||||
| our General Public Licenses are intended to guarantee your freedom to | the GNU General Public License is intended to guarantee your freedom to | ||||||
| share and change all versions of a program--to make sure it remains free | share and change all versions of a program--to make sure it remains free | ||||||
| software for all its users. | software for all its users.  We, the Free Software Foundation, use the | ||||||
|  | GNU General Public License for most of our software; it applies also to | ||||||
|  | any other work released this way by its authors.  You can apply it to | ||||||
|  | your programs, too. | ||||||
| 
 | 
 | ||||||
|   When we speak of free software, we are referring to freedom, not |   When we speak of free software, we are referring to freedom, not | ||||||
| price.  Our General Public Licenses are designed to make sure that you | price.  Our General Public Licenses are designed to make sure that you | ||||||
|  | @ -24,34 +26,44 @@ them if you wish), that you receive source code or can get it if you | ||||||
| want it, that you can change the software or use pieces of it in new | want it, that you can change the software or use pieces of it in new | ||||||
| free programs, and that you know you can do these things. | free programs, and that you know you can do these things. | ||||||
| 
 | 
 | ||||||
|   Developers that use our General Public Licenses protect your rights |   To protect your rights, we need to prevent others from denying you | ||||||
| with two steps: (1) assert copyright on the software, and (2) offer | these rights or asking you to surrender the rights.  Therefore, you have | ||||||
| you this License which gives you legal permission to copy, distribute | certain responsibilities if you distribute copies of the software, or if | ||||||
| and/or modify the software. | you modify it: responsibilities to respect the freedom of others. | ||||||
| 
 | 
 | ||||||
|   A secondary benefit of defending all users' freedom is that |   For example, if you distribute copies of such a program, whether | ||||||
| improvements made in alternate versions of the program, if they | gratis or for a fee, you must pass on to the recipients the same | ||||||
| receive widespread use, become available for other developers to | freedoms that you received.  You must make sure that they, too, receive | ||||||
| incorporate.  Many developers of free software are heartened and | or can get the source code.  And you must show them these terms so they | ||||||
| encouraged by the resulting cooperation.  However, in the case of | know their rights. | ||||||
| software used on network servers, this result may fail to come about. |  | ||||||
| The GNU General Public License permits making a modified version and |  | ||||||
| letting the public access it on a server without ever releasing its |  | ||||||
| source code to the public. |  | ||||||
| 
 | 
 | ||||||
|   The GNU Affero General Public License is designed specifically to |   Developers that use the GNU GPL protect your rights with two steps: | ||||||
| ensure that, in such cases, the modified source code becomes available | (1) assert copyright on the software, and (2) offer you this License | ||||||
| to the community.  It requires the operator of a network server to | giving you legal permission to copy, distribute and/or modify it. | ||||||
| provide the source code of the modified version running there to the |  | ||||||
| users of that server.  Therefore, public use of a modified version, on |  | ||||||
| a publicly accessible server, gives the public access to the source |  | ||||||
| code of the modified version. |  | ||||||
| 
 | 
 | ||||||
|   An older license, called the Affero General Public License and |   For the developers' and authors' protection, the GPL clearly explains | ||||||
| published by Affero, was designed to accomplish similar goals.  This is | that there is no warranty for this free software.  For both users' and | ||||||
| a different license, not a version of the Affero GPL, but Affero has | authors' sake, the GPL requires that modified versions be marked as | ||||||
| released a new version of the Affero GPL which permits relicensing under | changed, so that their problems will not be attributed erroneously to | ||||||
| this license. | authors of previous versions. | ||||||
|  | 
 | ||||||
|  |   Some devices are designed to deny users access to install or run | ||||||
|  | modified versions of the software inside them, although the manufacturer | ||||||
|  | can do so.  This is fundamentally incompatible with the aim of | ||||||
|  | protecting users' freedom to change the software.  The systematic | ||||||
|  | pattern of such abuse occurs in the area of products for individuals to | ||||||
|  | use, which is precisely where it is most unacceptable.  Therefore, we | ||||||
|  | have designed this version of the GPL to prohibit the practice for those | ||||||
|  | products.  If such problems arise substantially in other domains, we | ||||||
|  | stand ready to extend this provision to those domains in future versions | ||||||
|  | of the GPL, as needed to protect the freedom of users. | ||||||
|  | 
 | ||||||
|  |   Finally, every program is threatened constantly by software patents. | ||||||
|  | States should not allow patents to restrict development and use of | ||||||
|  | software on general-purpose computers, but in those that do, we wish to | ||||||
|  | avoid the special danger that patents applied to a free program could | ||||||
|  | make it effectively proprietary.  To prevent this, the GPL assures that | ||||||
|  | patents cannot be used to render the program non-free. | ||||||
| 
 | 
 | ||||||
|   The precise terms and conditions for copying, distribution and |   The precise terms and conditions for copying, distribution and | ||||||
| modification follow. | modification follow. | ||||||
|  | @ -60,7 +72,7 @@ modification follow. | ||||||
| 
 | 
 | ||||||
|   0. Definitions. |   0. Definitions. | ||||||
| 
 | 
 | ||||||
|   "This License" refers to version 3 of the GNU Affero General Public License. |   "This License" refers to version 3 of the GNU General Public License. | ||||||
| 
 | 
 | ||||||
|   "Copyright" also means copyright-like laws that apply to other kinds of |   "Copyright" also means copyright-like laws that apply to other kinds of | ||||||
| works, such as semiconductor masks. | works, such as semiconductor masks. | ||||||
|  | @ -537,45 +549,35 @@ to collect a royalty for further conveying from those to whom you convey | ||||||
| the Program, the only way you could satisfy both those terms and this | the Program, the only way you could satisfy both those terms and this | ||||||
| License would be to refrain entirely from conveying the Program. | License would be to refrain entirely from conveying the Program. | ||||||
| 
 | 
 | ||||||
|   13. Remote Network Interaction; Use with the GNU General Public License. |   13. Use with the GNU Affero General Public License. | ||||||
| 
 |  | ||||||
|   Notwithstanding any other provision of this License, if you modify the |  | ||||||
| Program, your modified version must prominently offer all users |  | ||||||
| interacting with it remotely through a computer network (if your version |  | ||||||
| supports such interaction) an opportunity to receive the Corresponding |  | ||||||
| Source of your version by providing access to the Corresponding Source |  | ||||||
| from a network server at no charge, through some standard or customary |  | ||||||
| means of facilitating copying of software.  This Corresponding Source |  | ||||||
| shall include the Corresponding Source for any work covered by version 3 |  | ||||||
| of the GNU General Public License that is incorporated pursuant to the |  | ||||||
| following paragraph. |  | ||||||
| 
 | 
 | ||||||
|   Notwithstanding any other provision of this License, you have |   Notwithstanding any other provision of this License, you have | ||||||
| permission to link or combine any covered work with a work licensed | permission to link or combine any covered work with a work licensed | ||||||
| under version 3 of the GNU General Public License into a single | under version 3 of the GNU Affero General Public License into a single | ||||||
| combined work, and to convey the resulting work.  The terms of this | combined work, and to convey the resulting work.  The terms of this | ||||||
| License will continue to apply to the part which is the covered work, | License will continue to apply to the part which is the covered work, | ||||||
| but the work with which it is combined will remain governed by version | but the special requirements of the GNU Affero General Public License, | ||||||
| 3 of the GNU General Public License. | section 13, concerning interaction through a network will apply to the | ||||||
|  | combination as such. | ||||||
| 
 | 
 | ||||||
|   14. Revised Versions of this License. |   14. Revised Versions of this License. | ||||||
| 
 | 
 | ||||||
|   The Free Software Foundation may publish revised and/or new versions of |   The Free Software Foundation may publish revised and/or new versions of | ||||||
| the GNU Affero General Public License from time to time.  Such new versions | the GNU General Public License from time to time.  Such new versions will | ||||||
| will be similar in spirit to the present version, but may differ in detail to | be similar in spirit to the present version, but may differ in detail to | ||||||
| address new problems or concerns. | address new problems or concerns. | ||||||
| 
 | 
 | ||||||
|   Each version is given a distinguishing version number.  If the |   Each version is given a distinguishing version number.  If the | ||||||
| Program specifies that a certain numbered version of the GNU Affero General | Program specifies that a certain numbered version of the GNU General | ||||||
| Public License "or any later version" applies to it, you have the | Public License "or any later version" applies to it, you have the | ||||||
| option of following the terms and conditions either of that numbered | option of following the terms and conditions either of that numbered | ||||||
| version or of any later version published by the Free Software | version or of any later version published by the Free Software | ||||||
| Foundation.  If the Program does not specify a version number of the | Foundation.  If the Program does not specify a version number of the | ||||||
| GNU Affero General Public License, you may choose any version ever published | GNU General Public License, you may choose any version ever published | ||||||
| by the Free Software Foundation. | by the Free Software Foundation. | ||||||
| 
 | 
 | ||||||
|   If the Program specifies that a proxy can decide which future |   If the Program specifies that a proxy can decide which future | ||||||
| versions of the GNU Affero General Public License can be used, that proxy's | versions of the GNU General Public License can be used, that proxy's | ||||||
| public statement of acceptance of a version permanently authorizes you | public statement of acceptance of a version permanently authorizes you | ||||||
| to choose that version for the Program. | to choose that version for the Program. | ||||||
| 
 | 
 | ||||||
|  | @ -633,29 +635,40 @@ the "copyright" line and a pointer to where the full notice is found. | ||||||
|     Copyright (C) <year>  <name of author> |     Copyright (C) <year>  <name of author> | ||||||
| 
 | 
 | ||||||
|     This program is free software: you can redistribute it and/or modify |     This program is free software: you can redistribute it and/or modify | ||||||
|     it under the terms of the GNU Affero General Public License as published by |     it under the terms of the GNU General Public License as published by | ||||||
|     the Free Software Foundation, either version 3 of the License, or |     the Free Software Foundation, either version 3 of the License, or | ||||||
|     (at your option) any later version. |     (at your option) any later version. | ||||||
| 
 | 
 | ||||||
|     This program is distributed in the hope that it will be useful, |     This program is distributed in the hope that it will be useful, | ||||||
|     but WITHOUT ANY WARRANTY; without even the implied warranty of |     but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|     GNU Affero General Public License for more details. |     GNU General Public License for more details. | ||||||
| 
 | 
 | ||||||
|     You should have received a copy of the GNU Affero General Public License |     You should have received a copy of the GNU General Public License | ||||||
|     along with this program.  If not, see <https://www.gnu.org/licenses/>. |     along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| Also add information on how to contact you by electronic and paper mail. | Also add information on how to contact you by electronic and paper mail. | ||||||
| 
 | 
 | ||||||
|   If your software can interact with users remotely through a computer |   If the program does terminal interaction, make it output a short | ||||||
| network, you should also make sure that it provides a way for users to | notice like this when it starts in an interactive mode: | ||||||
| get its source.  For example, if your program is a web application, its | 
 | ||||||
| interface could display a "Source" link that leads users to an archive |     <program>  Copyright (C) <year>  <name of author> | ||||||
| of the code.  There are many ways you could offer source, and different |     This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. | ||||||
| solutions will be better for different programs; see section 13 for the |     This is free software, and you are welcome to redistribute it | ||||||
| specific requirements. |     under certain conditions; type `show c' for details. | ||||||
|  | 
 | ||||||
|  | The hypothetical commands `show w' and `show c' should show the appropriate | ||||||
|  | parts of the General Public License.  Of course, your program's commands | ||||||
|  | might be different; for a GUI interface, you would use an "about box". | ||||||
| 
 | 
 | ||||||
|   You should also get your employer (if you work as a programmer) or school, |   You should also get your employer (if you work as a programmer) or school, | ||||||
| if any, to sign a "copyright disclaimer" for the program, if necessary. | if any, to sign a "copyright disclaimer" for the program, if necessary. | ||||||
| For more information on this, and how to apply and follow the GNU AGPL, see | For more information on this, and how to apply and follow the GNU GPL, see | ||||||
| <https://www.gnu.org/licenses/>. | <http://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  |   The GNU General Public License does not permit incorporating your program | ||||||
|  | into proprietary programs.  If your program is a subroutine library, you | ||||||
|  | may consider it more useful to permit linking proprietary applications with | ||||||
|  | the library.  If this is what you want to do, use the GNU Lesser General | ||||||
|  | Public License instead of this License.  But first, please read | ||||||
|  | <http://www.gnu.org/philosophy/why-not-lgpl.html>. | ||||||
|  |  | ||||||
|  | @ -1,2 +0,0 @@ | ||||||
| # https://packaging.python.org/en/latest/guides/using-manifest-in/#using-manifest-in |  | ||||||
| include docs/README.rst |  | ||||||
							
								
								
									
										425
									
								
								NEWS.rst
								
								
								
								
							
							
						
						
									
										425
									
								
								NEWS.rst
								
								
								
								
							|  | @ -4,431 +4,6 @@ Changelog | ||||||
| 
 | 
 | ||||||
| .. towncrier release notes start | .. towncrier release notes start | ||||||
| 
 | 
 | ||||||
| tractor 0.1.0a5 (2022-08-03) |  | ||||||
| ============================ |  | ||||||
| 
 |  | ||||||
| This is our final release supporting Python 3.9 since we will be moving |  | ||||||
| internals to the new `match:` syntax from 3.10 going forward and |  | ||||||
| further, we have officially dropped usage of the `msgpack` library and |  | ||||||
| happily adopted `msgspec`. |  | ||||||
| 
 |  | ||||||
| Features |  | ||||||
| -------- |  | ||||||
| 
 |  | ||||||
| - `#165 <https://github.com/goodboy/tractor/issues/165>`_: Add SIGINT |  | ||||||
|   protection to our `pdbpp` based debugger subystem such that for |  | ||||||
|   (single-depth) actor trees in debug mode we ignore interrupts in any |  | ||||||
|   actor currently holding the TTY lock thus avoiding clobbering IPC |  | ||||||
|   connections and/or task and process state when working in the REPL. |  | ||||||
| 
 |  | ||||||
|   As a big note currently so called "nested" actor trees (trees with |  | ||||||
|   actors having more then one parent/ancestor) are not fully supported |  | ||||||
|   since we don't yet have a mechanism to relay the debug mode knowledge |  | ||||||
|   "up" the actor tree (for eg. when handling a crash in a leaf actor). |  | ||||||
|   As such currently there is a set of tests and known scenarios which will |  | ||||||
|   result in process cloberring by the zombie repaing machinery and these |  | ||||||
|   have been documented in https://github.com/goodboy/tractor/issues/320. |  | ||||||
| 
 |  | ||||||
|   The implementation details include: |  | ||||||
| 
 |  | ||||||
|   - utilizing a custom SIGINT handler which we apply whenever an actor's |  | ||||||
|     runtime enters the debug machinery, which we also make sure the |  | ||||||
|     stdlib's `pdb` configuration doesn't override (which it does by |  | ||||||
|     default without special instance config). |  | ||||||
|   - litter the runtime with `maybe_wait_for_debugger()` mostly in spots |  | ||||||
|     where the root actor should block before doing embedded nursery |  | ||||||
|     teardown ops which both cancel potential-children-in-deubg as well |  | ||||||
|     as eventually trigger zombie reaping machinery. |  | ||||||
|   - hardening of the TTY locking semantics/API both in terms of IPC |  | ||||||
|     terminations and cancellation and lock release determinism from |  | ||||||
|     sync debugger instance methods. |  | ||||||
|   - factoring of locking infrastructure into a new `._debug.Lock` global |  | ||||||
|     which encapsulates all details of the ``trio`` sync primitives and |  | ||||||
|     task/actor uid management and tracking. |  | ||||||
| 
 |  | ||||||
|   We also add `ctrl-c` cases throughout the test suite though these are |  | ||||||
|   disabled for py3.9 (`pdbpp` UX differences that don't seem worth |  | ||||||
|   compensating for, especially since this will be our last 3.9 supported |  | ||||||
|   release) and there are a slew of marked cases that aren't expected to |  | ||||||
|   work in CI more generally (as mentioned in the "nested" tree note |  | ||||||
|   above) despite seemingly working  when run manually on linux. |  | ||||||
| 
 |  | ||||||
| - `#304 <https://github.com/goodboy/tractor/issues/304>`_: Add a new |  | ||||||
|   ``to_asyncio.LinkedTaskChannel.subscribe()`` which gives task-oriented |  | ||||||
|   broadcast functionality semantically equivalent to |  | ||||||
|   ``tractor.MsgStream.subscribe()`` this makes it possible for multiple |  | ||||||
|   ``trio``-side tasks to consume ``asyncio``-side task msgs in tandem. |  | ||||||
| 
 |  | ||||||
|   Further Improvements to the test suite were added in this patch set |  | ||||||
|   including a new scenario test for a sub-actor managed "service nursery" |  | ||||||
|   (implementing the basics of a "service manager") including use of |  | ||||||
|   *infected asyncio* mode. Further we added a lower level |  | ||||||
|   ``test_trioisms.py`` to start to track issues we need to work around in |  | ||||||
|   ``trio`` itself which in this case included a bug we were trying to |  | ||||||
|   solve related to https://github.com/python-trio/trio/issues/2258. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Bug Fixes |  | ||||||
| --------- |  | ||||||
| 
 |  | ||||||
| - `#318 <https://github.com/goodboy/tractor/issues/318>`_: Fix |  | ||||||
|   a previously undetected ``trio``-``asyncio`` task lifetime linking |  | ||||||
|   issue with the ``to_asyncio.open_channel_from()`` api where both sides |  | ||||||
|   where not properly waiting/signalling termination and it was possible |  | ||||||
|   for ``asyncio``-side errors to not propagate due to a race condition. |  | ||||||
| 
 |  | ||||||
|   The implementation fix summary is: |  | ||||||
|   - add state to signal the end of the ``trio`` side task to be |  | ||||||
|     read by the ``asyncio`` side and always cancel any ongoing |  | ||||||
|     task in such cases. |  | ||||||
|   - always wait on the ``asyncio`` task termination from the ``trio`` |  | ||||||
|     side on error before maybe raising said error. |  | ||||||
|   - always close the ``trio`` mem chan on exit to ensure the other |  | ||||||
|     side can detect it and follow. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Trivial/Internal Changes |  | ||||||
| ------------------------ |  | ||||||
| 
 |  | ||||||
| - `#248 <https://github.com/goodboy/tractor/issues/248>`_: Adjust the |  | ||||||
|   `tractor._spawn.soft_wait()` strategy to avoid sending an actor cancel |  | ||||||
|   request (via `Portal.cancel_actor()`) if either the child process is |  | ||||||
|   detected as having terminated or the IPC channel is detected to be |  | ||||||
|   closed. |  | ||||||
| 
 |  | ||||||
|   This ensures (even) more deterministic inter-actor cancellation by |  | ||||||
|   avoiding the timeout condition where possible when a child never |  |
|   successfully spawned, crashed, or became un-contactable over IPC. |  |
| 
 |  | ||||||
| - `#295 <https://github.com/goodboy/tractor/issues/295>`_: Add an |  | ||||||
|   experimental ``tractor.msg.NamespacePath`` type for passing Python |  | ||||||
|   objects by "reference" through a ``str``-subtype message and using the |  | ||||||
|   new ``pkgutil.resolve_name()`` for reference loading. |  | ||||||
| 
 |  | ||||||
| - `#298 <https://github.com/goodboy/tractor/issues/298>`_: Add a new |  | ||||||
|   `tractor.experimental` subpackage for staging new high level APIs and |  | ||||||
|   subsystems that we might eventually make built-ins. |  |
| 
 |  | ||||||
| - `#300 <https://github.com/goodboy/tractor/issues/300>`_: Update to and |  | ||||||
|   pin latest ``msgpack`` (1.0.3) and ``msgspec`` (0.4.0) both of which |  | ||||||
|   required adjustments for backwards incompatible API tweaks. |  |
| 
 |  | ||||||
| - `#303 <https://github.com/goodboy/tractor/issues/303>`_: Fence off |  | ||||||
|   ``multiprocessing`` imports until absolutely necessary in an effort to |  | ||||||
|   avoid "resource tracker" spawning side effects that seem to have |  | ||||||
|   varying degrees of unreliability per Python release. Port to new |  | ||||||
|   ``msgspec.DecodeError``. |  | ||||||
| 
 |  | ||||||
| - `#305 <https://github.com/goodboy/tractor/issues/305>`_: Add |  | ||||||
|   ``tractor.query_actor()`` an addr looker-upper which doesn't deliver |  | ||||||
|   a ``Portal`` instance and instead just a socket address ``tuple``. |  | ||||||
| 
 |  | ||||||
|   Sometimes it's handy to just have a simple way to figure out if |  | ||||||
|   a "service" actor is up, so add this discovery helper for that. We'll |  | ||||||
|   prolly just leave it undocumented for now until we figure out |  | ||||||
|   a longer-term/better discovery system. |  | ||||||
| 
 |  | ||||||
| - `#316 <https://github.com/goodboy/tractor/issues/316>`_: Run windows |  | ||||||
|   CI jobs on python 3.10 after some hacks for ``pdbpp`` dependency |  | ||||||
|   issues. |  | ||||||
| 
 |  | ||||||
|   Issue was to do with the now deprecated `pyreadline` project which |  | ||||||
|   should be changed over to `pyreadline3`. |  | ||||||
| 
 |  | ||||||
| - `#317 <https://github.com/goodboy/tractor/issues/317>`_: Drop use of |  | ||||||
|   the ``msgpack`` package and instead move fully to the ``msgspec`` |  | ||||||
|   codec library. |  | ||||||
| 
 |  | ||||||
|   We've now used ``msgspec`` extensively in production and there's no |  | ||||||
|   reason to not use it as default. Further this change preps us for the up |  | ||||||
|   and coming typed messaging semantics (#196), dialog-unprotocol system |  | ||||||
|   (#297), and caps-based messaging-protocols (#299) planned before our |  | ||||||
|   first beta. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| tractor 0.1.0a4 (2021-12-18) |  | ||||||
| ============================ |  | ||||||
| 
 |  | ||||||
| Features |  | ||||||
| -------- |  | ||||||
| - `#275 <https://github.com/goodboy/tractor/issues/275>`_: Re-license |  | ||||||
|   code base under AGPLv3. Also see `#274 |  | ||||||
|   <https://github.com/goodboy/tractor/pull/274>`_ for majority |  | ||||||
|   contributor consensus on this decision. |  | ||||||
| 
 |  | ||||||
| - `#121 <https://github.com/goodboy/tractor/issues/121>`_: Add |  | ||||||
|   "infected ``asyncio``" mode; a sub-system to spawn and control |  |
|   ``asyncio`` actors using ``trio``'s guest-mode. |  | ||||||
| 
 |  | ||||||
|   This gets us the following very interesting functionality: |  | ||||||
| 
 |  | ||||||
|   - ability to spawn an actor that has a process entry point of |  | ||||||
|     ``asyncio.run()`` by passing ``infect_asyncio=True`` to |  | ||||||
|     ``Portal.start_actor()`` (and friends). |  | ||||||
|   - the ``asyncio`` actor embeds ``trio`` using guest-mode and starts |  | ||||||
|     a main ``trio`` task which runs the ``tractor.Actor._async_main()`` |  | ||||||
|     entry point engages all the normal ``tractor`` runtime IPC/messaging |  | ||||||
|     machinery; for all purposes the actor is now running normally on |  | ||||||
|     a ``trio.run()``. |  | ||||||
|   - the actor can now make one-to-one task spawning requests to the |  | ||||||
|     underlying ``asyncio`` event loop using either of: |  | ||||||
| 
 |  | ||||||
|     * ``to_asyncio.run_task()`` to spawn and run an ``asyncio`` task to |  | ||||||
|       completion and block until a return value is delivered. |  | ||||||
|     * ``async with to_asyncio.open_channel_from():`` which spawns a task |  | ||||||
|       and hands it a pair of "memory channels" to allow for bi-directional |  | ||||||
|       streaming between the now SC-linked ``trio`` and ``asyncio`` tasks. |  | ||||||
| 
 |  | ||||||
|   The output from any call(s) to ``asyncio`` can be handled as normal in |  | ||||||
|   ``trio``/``tractor`` task operation with the caveat of the overhead due |  | ||||||
|   to guest-mode use. |  | ||||||
| 
 |  | ||||||
|   For more details see the `original PR |  | ||||||
|   <https://github.com/goodboy/tractor/pull/121>`_ and `issue |  | ||||||
|   <https://github.com/goodboy/tractor/issues/120>`_. |  | ||||||
| 
 |  | ||||||
| - `#257 <https://github.com/goodboy/tractor/issues/257>`_: Add |  | ||||||
|   ``trionics.maybe_open_context()`` an actor-scoped async multi-task |  | ||||||
|   context manager resource caching API. |  | ||||||
| 
 |  | ||||||
|   Adds an SC-safe caching async context manager api that only enters on |  |
|   the *first* task entry and only exits on the *last* task exit while in |  | ||||||
|   between delivering the same cached value per input key. Keys can be |  | ||||||
|   either an explicit ``key`` named arg provided by the user or a |  | ||||||
|   hashable ``kwargs`` dict (will be converted to a ``list[tuple]``) which |  | ||||||
|   is passed to the underlying manager function as input. |  | ||||||
| 
 |  | ||||||
| - `#261 <https://github.com/goodboy/tractor/issues/261>`_: Add |  | ||||||
|   cross-actor-task ``Context`` oriented error relay, a new stream |  | ||||||
|   overrun error-signal ``StreamOverrun``, and support disabling |  | ||||||
|   ``MsgStream`` backpressure as the default before a stream is opened or |  | ||||||
|   by choice of the user. |  | ||||||
| 
 |  | ||||||
|   We added stricter semantics around ``tractor.Context.open_stream():`` |  | ||||||
|   particularly to do with streams which are only opened at one end. |  | ||||||
|   Previously, if only one end opened a stream there was no way for that |  | ||||||
|   sender to know if msgs are being received until first, the feeder mem |  | ||||||
|   chan on the receiver side hit a backpressure state and then that |  | ||||||
|   condition delayed its msg loop processing task to eventually create |  | ||||||
|   backpressure on the associated IPC transport. This is non-ideal in the |  | ||||||
|   case where the receiver side never opened a stream by mistake since it |  | ||||||
|   results in silent block of the sender and no adherence to the underlying |  | ||||||
|   mem chan buffer size settings (which is still unsolved btw). |  | ||||||
| 
 |  | ||||||
|   To solve this we add non-backpressure style message pushing inside |  | ||||||
|   ``Actor._push_result()`` by default and only use the backpressure |  | ||||||
|   ``trio.MemorySendChannel.send()`` call **iff** the local end of the |  | ||||||
|   context has entered ``Context.open_stream():``. This way if the stream |  | ||||||
|   was never opened but the mem chan is overrun, we relay back to the |  | ||||||
|   sender a (new exception) ``StreamOverrun`` error which is raised in the |  |
|   sender's scope with a special error message about the stream never |  | ||||||
|   having been opened. Further, this behaviour (non-backpressure style |  | ||||||
|   where senders can expect an error on overruns) can now be enabled with |  | ||||||
|   ``.open_stream(backpressure=False)`` and the underlying mem chan size |  | ||||||
|   can be specified with a kwarg ``msg_buffer_size: int``. |  | ||||||
| 
 |  | ||||||
|   Further bug fixes and enhancements in this changeset include: |  | ||||||
| 
 |  | ||||||
|   - fix a race we were ignoring where if the callee task opened a context |  | ||||||
|     it could enter ``Context.open_stream()`` before calling |  | ||||||
|     ``.started()``. |  | ||||||
|   - Disallow calling ``Context.started()`` more than once. |  |
|   - Enable ``Context`` linked tasks error relaying via the new |  | ||||||
|     ``Context._maybe_raise_from_remote_msg()`` which (for now) uses |  | ||||||
|     a simple ``trio.Nursery.start_soon()`` to raise the error via closure |  | ||||||
|     in the local scope. |  | ||||||
| 
 |  | ||||||
| - `#267 <https://github.com/goodboy/tractor/issues/267>`_: This |  | ||||||
|   (finally) adds fully acknowledged remote cancellation messaging |  | ||||||
|   support for both explicit ``Portal.cancel_actor()`` calls as well as |  | ||||||
|   when there is a "runtime-wide" cancellations (eg. during KBI or |  | ||||||
|   general actor nursery exception handling which causes a full actor |  | ||||||
|   "crash"/termination). |  | ||||||
| 
 |  | ||||||
|   You can think of this as the most ideal case in 2-generals where the |  | ||||||
|   actor requesting the cancel of its child is able to always receive back |  | ||||||
|   the ACK to that request. This leads to a more deterministic shutdown of |  | ||||||
|   the child where the parent is able to wait for the child to fully |  | ||||||
|   respond to the request. On a localhost setup, where the parent can |  | ||||||
|   monitor the state of the child through process or other OS APIs instead |  | ||||||
|   of solely through IPC messaging, the parent can know whether or not the |  | ||||||
|   child decided to cancel with more certainty. In the case of separate |  | ||||||
|   hosts, we still rely on a simple timeout approach until such a time |  | ||||||
|   where we prefer to get "fancier". |  | ||||||
| 
 |  | ||||||
| - `#271 <https://github.com/goodboy/tractor/issues/271>`_: Add a per |  | ||||||
|   actor ``debug_mode: bool`` control to our nursery. |  | ||||||
| 
 |  | ||||||
|   This allows spawning actors via ``ActorNursery.start_actor()`` (and |  | ||||||
|   other dependent methods) with a ``debug_mode=True`` flag much like |  | ||||||
|   ``tractor.open_nursery():`` such that per process crash handling |  | ||||||
|   can be toggled for cases where a user does not need/want all child actors |  | ||||||
|   to drop into the debugger on error. This is often useful when you have |  | ||||||
|   actor-tasks which are expected to error often (and be re-run) but want |  | ||||||
|   to specifically interact with some (problematic) child. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Bugfixes |  | ||||||
| -------- |  | ||||||
| 
 |  | ||||||
| - `#239 <https://github.com/goodboy/tractor/issues/239>`_: Fix |  | ||||||
|   keyboard interrupt handling in ``Portal.open_context()`` blocks. |  | ||||||
| 
 |  | ||||||
|   Previously this was not triggering cancellation of the remote task |  | ||||||
|   context and could result in hangs if a stream was also opened. This |  | ||||||
|   fix is to accept `BaseException` since it is likely any other top |  | ||||||
|   level exception other than KBI (even though not expected) should also |  |
|   get this result. |  | ||||||
| 
 |  | ||||||
| - `#264 <https://github.com/goodboy/tractor/issues/264>`_: Fix |  | ||||||
|   ``Portal.run_in_actor()`` returns ``None`` result. |  | ||||||
| 
 |  | ||||||
|   ``None`` was being used as the cached result flag and obviously breaks |  | ||||||
|   on a ``None`` returned from the remote target task. This would cause an |  | ||||||
|   infinite hang if user code ever called ``Portal.result()`` *before* the |  | ||||||
|   nursery exit. The simple fix is to use the *return message* as the |  | ||||||
|   initial "no-result-received-yet" flag value and, once received, the |  | ||||||
|   return value is read from the message to avoid the cache logic error. |  | ||||||
| 
 |  | ||||||
| - `#266 <https://github.com/goodboy/tractor/issues/266>`_: Fix |  | ||||||
|   graceful cancellation of daemon actors |  | ||||||
| 
 |  | ||||||
|   Previously, this was a bug where if the soft wait on a sub-process (the |  |
|   ``await .proc.wait()``) in the reaper task teardown was cancelled we |  | ||||||
|   would fail over to the hard reaping sequence (meant for culling off any |  | ||||||
|   potential zombies via system kill signals). The hard reap has a timeout |  | ||||||
|   of 3s (currently though in theory we could make it shorter?) before |  | ||||||
|   system signalling kicks in. This means that any daemon actor still |  | ||||||
|   running during nursery exit would get hard reaped (3s later) instead of |  | ||||||
|   cancelled via IPC message. Now we catch the ``trio.Cancelled``, call |  | ||||||
|   ``Portal.cancel_actor()`` on the daemon and expect the child to |  | ||||||
|   self-terminate after the runtime cancels and shuts down the process. |  | ||||||
| 
 |  | ||||||
| - `#278 <https://github.com/goodboy/tractor/issues/278>`_: Repair |  | ||||||
|   inter-actor stream closure semantics to work correctly with |  | ||||||
|   ``tractor.trionics.BroadcastReceiver`` task fan out usage. |  | ||||||
| 
 |  | ||||||
|   A set of previously unknown bugs discovered in `#257 |  | ||||||
|   <https://github.com/goodboy/tractor/pull/257>`_ let graceful stream |  | ||||||
|   closure result in hanging consumer tasks that use the broadcast APIs. |  | ||||||
|   This adds better internal closure state tracking to the broadcast |  | ||||||
|   receiver and message stream APIs and in particular ensures that when an |  | ||||||
|   underlying stream/receive-channel (a broadcast receiver is receiving |  | ||||||
|   from) is closed, all consumer tasks waiting on that underlying channel |  | ||||||
|   are woken so they can receive the ``trio.EndOfChannel`` signal and |  | ||||||
|   promptly terminate. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| tractor 0.1.0a3 (2021-11-02) |  | ||||||
| ============================ |  | ||||||
| 
 |  | ||||||
| Features |  | ||||||
| -------- |  | ||||||
| 
 |  | ||||||
| - Switch to using the ``trio`` process spawner by default on windows. (#166) |  | ||||||
| 
 |  | ||||||
|   This gets windows users debugger support (manually tested) and in |  | ||||||
|   general a more resilient (nested) actor tree implementation. |  | ||||||
| 
 |  | ||||||
| - Add optional `msgspec <https://jcristharif.com/msgspec/>`_ support |  | ||||||
|   as an alternative, faster MessagePack codec. (#214) |  |
| 
 |  | ||||||
|   Provides us with a path toward supporting typed IPC message contracts. Further, |  | ||||||
|   ``msgspec`` structs may be a valid tool to start for formalizing our |  | ||||||
|   "SC dialog un-protocol" messages as described in `#36 |  | ||||||
|   <https://github.com/goodboy/tractor/issues/36>`_. |  | ||||||
| 
 |  | ||||||
| - Introduce a new ``tractor.trionics`` `sub-package`_ that exposes |  | ||||||
|   a selection of our relevant high(er) level trio primitives and |  | ||||||
|   goodies. (#241) |  | ||||||
| 
 |  | ||||||
|   At outset we offer a ``gather_contexts()`` context manager for |  | ||||||
|   concurrently entering a sequence of async context managers (much like |  | ||||||
|   a version of ``asyncio.gather()`` but for context managers) and use it |  | ||||||
|   in a new ``tractor.open_actor_cluster()`` manager-helper that can be |  | ||||||
|   entered to concurrently spawn a flat actor pool. We also now publicly |  | ||||||
|   expose our "broadcast channel" APIs (``open_broadcast_receiver()``) |  | ||||||
|   from here. |  | ||||||
| 
 |  | ||||||
| .. _sub-package: ../tractor/trionics |  | ||||||
| 
 |  | ||||||
| - Change the core message loop to handle task and actor-runtime cancel |  | ||||||
|   requests immediately instead of scheduling them as is done for rpc-task |  | ||||||
|   requests. (#245) |  | ||||||
| 
 |  | ||||||
|   In order to obtain more reliable teardown mechanics for (complex) actor |  | ||||||
|   trees it's important that we specially treat cancel requests as having |  | ||||||
|   higher priority. Previously, it was possible that task cancel requests |  | ||||||
|   could actually also themselves be cancelled if a "actor-runtime" cancel |  | ||||||
|   request was received (can happen during messy multi actor crashes that |  | ||||||
|   propagate). Instead cancels now block the msg loop until serviced and |  | ||||||
|   a response is relayed back to the requester. This also allows for |  | ||||||
|   improved debugger support since we have determinism guarantees about |  | ||||||
|   which processes must wait before hard killing their children. |  | ||||||
| 
 |  | ||||||
| - (`#248 <https://github.com/goodboy/tractor/pull/248>`_) Drop Python |  | ||||||
|   3.8 support in favour of rolling with two latest releases for the time |  | ||||||
|   being. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Misc |  | ||||||
| ---- |  | ||||||
| 
 |  | ||||||
| - (`#243 <https://github.com/goodboy/tractor/pull/243>`_) add a distinct |  | ||||||
|   ``'CANCEL'`` log level to allow the runtime to emit details about |  | ||||||
|   cancellation machinery statuses. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| tractor 0.1.0a2 (2021-09-07) |  | ||||||
| ============================ |  | ||||||
| 
 |  | ||||||
| Features |  | ||||||
| -------- |  | ||||||
| 
 |  | ||||||
| - Add `tokio-style broadcast channels |  | ||||||
|   <https://docs.rs/tokio/1.11.0/tokio/sync/broadcast/index.html>`_ as |  | ||||||
|   a solution for `#204 <https://github.com/goodboy/tractor/pull/204>`_ and |  | ||||||
|   discussed thoroughly in `trio/#987 |  | ||||||
|   <https://github.com/python-trio/trio/issues/987>`_. |  | ||||||
| 
 |  | ||||||
|   This gives us local task broadcast functionality using a new |  | ||||||
|   ``BroadcastReceiver`` type which can wrap ``trio.ReceiveChannel``  and |  | ||||||
|   provide fan-out copies of a stream of data to every subscribed consumer. |  | ||||||
|   We use this new machinery to provide a ``ReceiveMsgStream.subscribe()`` |  | ||||||
|   async context manager which can be used by actor-local consumer tasks |  |
|   to easily pull from a shared and dynamic IPC stream. (`#229 |  | ||||||
|   <https://github.com/goodboy/tractor/pull/229>`_) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Bugfixes |  | ||||||
| -------- |  | ||||||
| 
 |  | ||||||
| - Handle broken channel/stream faults where the root's tty lock is left |  | ||||||
|   acquired by some child actor who went MIA and the root ends up hanging |  | ||||||
|   indefinitely. (`#234 <https://github.com/goodboy/tractor/pull/234>`_) |  | ||||||
| 
 |  | ||||||
|   There's two parts here: we no longer shield wait on the lock and, |  | ||||||
|   now always do our best to release the lock on the expected worst |  | ||||||
|   case connection faults. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Deprecations and Removals |  | ||||||
| ------------------------- |  | ||||||
| 
 |  | ||||||
| - Drop stream "shielding" support which was originally added to sidestep |  | ||||||
|   a cancelled call to ``.receive()`` |  | ||||||
| 
 |  | ||||||
|   In the original api design a stream instance was returned directly from |  | ||||||
|   a call to ``Portal.run()`` and thus there was no "exit phase" to handle |  | ||||||
|   cancellations and errors which would trigger implicit closure. Now that |  | ||||||
|   we have said enter/exit semantics with ``Portal.open_stream_from()`` and |  | ||||||
|   ``Context.open_stream()`` we can drop this implicit (and arguably |  | ||||||
|   confusing) behavior. (`#230 <https://github.com/goodboy/tractor/pull/230>`_) |  | ||||||
| 
 |  | ||||||
| - Drop Python 3.7 support in preparation for supporting 3.9+ syntax. |  | ||||||
|   (`#232 <https://github.com/goodboy/tractor/pull/232>`_) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| tractor 0.1.0a1 (2021-08-01) | tractor 0.1.0a1 (2021-08-01) | ||||||
| ============================ | ============================ | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
							
								
								
									
										19
									
								
								default.nix
								
								
								
								
							
							
						
						
									
										19
									
								
								default.nix
								
								
								
								
							|  | @ -1,19 +0,0 @@ | ||||||
| { pkgs ? import <nixpkgs> {} }: |  | ||||||
| let |  | ||||||
|   nativeBuildInputs = with pkgs; [ |  | ||||||
|     stdenv.cc.cc.lib |  | ||||||
|     uv |  | ||||||
|   ]; |  | ||||||
| 
 |  | ||||||
| in |  | ||||||
| pkgs.mkShell { |  | ||||||
|   inherit nativeBuildInputs; |  | ||||||
| 
 |  | ||||||
|   LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath nativeBuildInputs; |  | ||||||
|   TMPDIR = "/tmp"; |  | ||||||
| 
 |  | ||||||
|   shellHook = '' |  | ||||||
|     set -e |  | ||||||
|     uv venv .venv --python=3.12 |  | ||||||
|   ''; |  | ||||||
| } |  | ||||||
							
								
								
									
										369
									
								
								docs/README.rst
								
								
								
								
							
							
						
						
									
										369
									
								
								docs/README.rst
								
								
								
								
							|  | @ -1,126 +1,37 @@ | ||||||
| |logo| ``tractor``: distributed structured concurrency | |logo| ``tractor``: next-gen Python parallelism
| 
 | 
 | ||||||
| ``tractor`` is a `structured concurrency`_ (SC), multi-processing_ runtime built on trio_. | |gh_actions| | ||||||
|  | |docs| | ||||||
| 
 | 
 | ||||||
| Fundamentally, ``tractor`` provides parallelism via | ``tractor`` is a `structured concurrent`_, multi-processing_ runtime built on trio_. | ||||||
| ``trio``-"*actors*": independent Python **processes** (i.e. |  | ||||||
| *non-shared-memory threads*) which can schedule ``trio`` tasks whilst |  | ||||||
| maintaining *end-to-end SC* inside a *distributed supervision tree*. |  | ||||||
| 
 | 
 | ||||||
| Cross-process (and thus cross-host) SC is accomplished through the | Fundamentally ``tractor`` gives you parallelism via ``trio``-"*actors*": | ||||||
| combined use of our, | our nurseries_ let you spawn new Python processes which each run a ``trio`` | ||||||
|  | scheduled runtime - a call to ``trio.run()``. | ||||||
| 
 | 
 | ||||||
| - "actor nurseries_" which provide for spawning multiple, and | We believe the system adheres to the `3 axioms`_ of an "`actor model`_"
|   possibly nested, Python processes each running a ``trio`` scheduled | but likely *does not* look like what *you* probably think an "actor | ||||||
|   runtime - a call to ``trio.run()``, | model" looks like, and that's *intentional*. | ||||||
| - an "SC-transitive supervision protocol" enforced as an |  | ||||||
|   IPC-message-spec encapsulating all RPC-dialogs. |  | ||||||
| 
 | 
 | ||||||
| We believe the system adheres to the `3 axioms`_ of an "`actor model`_" | The first step to grok ``tractor`` is to get the basics of ``trio`` down. | ||||||
| but likely **does not** look like what **you** probably *think* an "actor | A great place to start is the `trio docs`_ and this `blog post`_. | ||||||
| model" looks like, and that's **intentional**. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Where do i start!? |  | ||||||
| ------------------ |  | ||||||
| The first step to grok ``tractor`` is to get an intermediate |  | ||||||
| knowledge of ``trio`` and **structured concurrency** B) |  | ||||||
| 
 |  | ||||||
| Some great places to start are, |  | ||||||
| 
 |  | ||||||
| - the seminal `blog post`_ |  | ||||||
| - obviously the `trio docs`_ |  | ||||||
| - wikipedia's nascent SC_ page |  | ||||||
| - the fancy diagrams @ libdill-docs_ |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Features | Features | ||||||
| -------- | -------- | ||||||
| - **It's just** a ``trio`` API! | - **It's just** a ``trio`` API | ||||||
| - *Infinitely nestable* process trees running embedded ``trio`` tasks. | - *Infinitely nestable* process trees
| - Swappable, OS-specific, process spawning via multiple backends. | - Built-in inter-process streaming APIs | ||||||
| - Modular IPC stack, allowing for custom interchange formats (eg. | - A (first ever?) "native" multi-core debugger UX for Python using `pdb++`_ | ||||||
|   as offered from `msgspec`_), varied transport protocols (TCP, RUDP, | - Support for a swappable, OS specific, process spawning layer | ||||||
|   QUIC, wireguard), and OS-env specific higher-perf primitives (UDS, | - A modular transport stack, allowing for custom serialization (eg. | ||||||
|   shm-ring-buffers). |   `msgspec`_), communications protocols, and environment specific IPC | ||||||
| - Optionally distributed_: all IPC and RPC APIs work over multi-host |   primitives | ||||||
|   transports the same as local. | - `structured concurrency`_ from the ground up | ||||||
| - Builtin high-level streaming API that enables your app to easily |  | ||||||
|   leverage the benefits of a "`cheap or nasty`_" `(un)protocol`_. |  | ||||||
| - A "native UX" around a multi-process safe debugger REPL using |  | ||||||
|   `pdbp`_ (a fork & fix of `pdb++`_) |  | ||||||
| - "Infected ``asyncio``" mode: support for starting an actor's |  | ||||||
|   runtime as a `guest`_ on the ``asyncio`` loop allowing us to |  | ||||||
|   provide stringent SC-style ``trio.Task``-supervision around any |  | ||||||
|   ``asyncio.Task`` spawned via our ``tractor.to_asyncio`` APIs. |  | ||||||
| - A **very naive** and still very much work-in-progress inter-actor |  | ||||||
|   `discovery`_ sys with plans to support multiple `modern protocol`_ |  | ||||||
|   approaches. |  | ||||||
| - Various ``trio`` extension APIs via ``tractor.trionics`` such as, |  | ||||||
|   - task fan-out `broadcasting`_, |  | ||||||
|   - multi-task-single-resource-caching and fan-out-to-multi |  | ||||||
|     ``__aenter__()`` APIs for ``@acm`` functions, |  | ||||||
|   - (WIP) a ``TaskMngr``: one-cancels-one style nursery supervisor. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Status of `main` / infra |  | ||||||
| ------------------------ |  | ||||||
| 
 |  | ||||||
| - |gh_actions| |  | ||||||
| - |docs| |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Install |  | ||||||
| ------- |  | ||||||
| ``tractor`` is still in a *alpha-near-beta-stage* for many |  | ||||||
| of its subsystems, however we are very close to having a stable |  | ||||||
| lowlevel runtime and API. |  | ||||||
| 
 |  | ||||||
| As such, it's currently recommended that you clone and install the |  | ||||||
| repo from source:: |  | ||||||
| 
 |  | ||||||
|     pip install git+git://github.com/goodboy/tractor.git |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| We use the very hip `uv`_ for project mgmt:: |  | ||||||
| 
 |  | ||||||
|     git clone https://github.com/goodboy/tractor.git |  | ||||||
|     cd tractor |  | ||||||
|     uv sync --dev |  | ||||||
|     uv run python examples/rpc_bidir_streaming.py |  | ||||||
| 
 |  | ||||||
| Consider activating a virtual/project-env before starting to hack on |  | ||||||
| the code base:: |  | ||||||
| 
 |  | ||||||
|     # you could use plain ol' venvs |  | ||||||
|     # https://docs.astral.sh/uv/pip/environments/ |  | ||||||
|     uv venv tractor_py313 --python 3.13 |  | ||||||
| 
 |  | ||||||
|     # but @goodboy prefers the more explicit (and shell agnostic) |  | ||||||
|     # https://docs.astral.sh/uv/configuration/environment/#uv_project_environment |  | ||||||
|     UV_PROJECT_ENVIRONMENT="tractor_py313" |  |
| 
 |  | ||||||
|     # hint hint, enter @goodboy's fave shell B) |  | ||||||
|     uv run --dev xonsh |  | ||||||
| 
 |  | ||||||
| Alongside all this we ofc offer "releases" on PyPi:: |  | ||||||
| 
 |  | ||||||
|     pip install tractor |  | ||||||
| 
 |  | ||||||
| Just note that YMMV since the main git branch is often much further |  | ||||||
| ahead than any latest release. |  |
| 
 |  | ||||||
| 
 |  | ||||||
| Example codez |  | ||||||
| ------------- |  | ||||||
| In ``tractor``'s (very lacking) documentation we prefer to point to |  |
| example scripts in the repo over duplicating them in docs, but with |  | ||||||
| that in mind here are some definitive snippets to try and hook you |  | ||||||
| into digging deeper. |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Run a func in a process | Run a func in a process | ||||||
| *********************** | ----------------------- | ||||||
| Use ``trio``'s style of focussing on *tasks as functions*: | Use ``trio``'s style of focussing on *tasks as functions*: | ||||||
| 
 | 
 | ||||||
| .. code:: python | .. code:: python | ||||||
|  | @ -178,7 +89,7 @@ might want to check out `trio-parallel`_. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Zombie safe: self-destruct a process tree | Zombie safe: self-destruct a process tree | ||||||
| ***************************************** | ----------------------------------------- | ||||||
| ``tractor`` tries to protect you from zombies, no matter what. | ``tractor`` tries to protect you from zombies, no matter what. | ||||||
| 
 | 
 | ||||||
| .. code:: python | .. code:: python | ||||||
|  | @ -234,8 +145,8 @@ it **is a bug**. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| "Native" multi-process debugging | "Native" multi-process debugging | ||||||
| ******************************** | -------------------------------- | ||||||
| Using the magic of `pdbp`_ and our internal IPC, we've | Using the magic of `pdb++`_ and our internal IPC, we've | ||||||
| been able to create a native feeling debugging experience for | been able to create a native feeling debugging experience for | ||||||
| any (sub-)process in your ``tractor`` tree. | any (sub-)process in your ``tractor`` tree. | ||||||
| 
 | 
 | ||||||
|  | @ -289,7 +200,7 @@ We're hoping to add a respawn-from-repl system soon! | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| SC compatible bi-directional streaming | SC compatible bi-directional streaming | ||||||
| ************************************** | -------------------------------------- | ||||||
| Yes, you saw it here first; we provide 2-way streams | Yes, you saw it here first; we provide 2-way streams | ||||||
| with reliable, transitive setup/teardown semantics. | with reliable, transitive setup/teardown semantics. | ||||||
| 
 | 
 | ||||||
|  | @ -381,7 +292,7 @@ hear your thoughts on! | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Worker poolz are easy peasy | Worker poolz are easy peasy | ||||||
| *************************** | --------------------------- | ||||||
| The initial ask from most new users is *"how do I make a worker | The initial ask from most new users is *"how do I make a worker | ||||||
| pool thing?"*. | pool thing?"*. | ||||||
| 
 | 
 | ||||||
|  | @ -402,172 +313,26 @@ real time:: | ||||||
| This uses no extra threads, fancy semaphores or futures; all we need | This uses no extra threads, fancy semaphores or futures; all we need | ||||||
| is ``tractor``'s IPC! | is ``tractor``'s IPC! | ||||||
| 
 | 
 | ||||||
| "Infected ``asyncio``" mode |  | ||||||
| *************************** |  | ||||||
| Have a bunch of ``asyncio`` code you want to force to be SC at the process level? |  | ||||||
| 
 |  | ||||||
| Check out our experimental system for `guest`_-mode controlled |  | ||||||
| ``asyncio`` actors: |  | ||||||
| 
 |  | ||||||
| .. code:: python |  | ||||||
| 
 |  | ||||||
|     import asyncio |  | ||||||
|     from statistics import mean |  | ||||||
|     import time |  | ||||||
| 
 |  | ||||||
|     import trio |  | ||||||
|     import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     async def aio_echo_server( |  | ||||||
|         to_trio: trio.MemorySendChannel, |  | ||||||
|         from_trio: asyncio.Queue, |  | ||||||
|     ) -> None: |  | ||||||
| 
 |  | ||||||
|         # a first message must be sent **from** this ``asyncio`` |  | ||||||
|         # task or the ``trio`` side will never unblock from |  | ||||||
|         # ``tractor.to_asyncio.open_channel_from():`` |  | ||||||
|         to_trio.send_nowait('start') |  | ||||||
| 
 |  | ||||||
|         # XXX: this uses an ``from_trio: asyncio.Queue`` currently but we |  | ||||||
|         # should probably offer something better. |  | ||||||
|         while True: |  | ||||||
|             # echo the msg back |  | ||||||
|             to_trio.send_nowait(await from_trio.get()) |  | ||||||
|             await asyncio.sleep(0) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     @tractor.context |  | ||||||
|     async def trio_to_aio_echo_server( |  | ||||||
|         ctx: tractor.Context, |  | ||||||
|     ): |  | ||||||
|         # this will block until the ``asyncio`` task sends a "first" |  | ||||||
|         # message. |  | ||||||
|         async with tractor.to_asyncio.open_channel_from( |  | ||||||
|             aio_echo_server, |  | ||||||
|         ) as (first, chan): |  | ||||||
| 
 |  | ||||||
|             assert first == 'start' |  | ||||||
|             await ctx.started(first) |  | ||||||
| 
 |  | ||||||
|             async with ctx.open_stream() as stream: |  | ||||||
| 
 |  | ||||||
|                 async for msg in stream: |  | ||||||
|                     await chan.send(msg) |  | ||||||
| 
 |  | ||||||
|                     out = await chan.receive() |  | ||||||
|                     # echo back to parent actor-task |  | ||||||
|                     await stream.send(out) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
| 
 |  | ||||||
|         async with tractor.open_nursery() as n: |  | ||||||
|             p = await n.start_actor( |  | ||||||
|                 'aio_server', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|                 infect_asyncio=True, |  | ||||||
|             ) |  | ||||||
|             async with p.open_context( |  | ||||||
|                 trio_to_aio_echo_server, |  | ||||||
|             ) as (ctx, first): |  | ||||||
| 
 |  | ||||||
|                 assert first == 'start' |  | ||||||
| 
 |  | ||||||
|                 count = 0 |  | ||||||
|                 async with ctx.open_stream() as stream: |  | ||||||
| 
 |  | ||||||
|                     delays = [] |  | ||||||
|                     send = time.time() |  | ||||||
| 
 |  | ||||||
|                     await stream.send(count) |  | ||||||
|                     async for msg in stream: |  | ||||||
|                         recv = time.time() |  | ||||||
|                         delays.append(recv - send) |  | ||||||
|                         assert msg == count |  | ||||||
|                         count += 1 |  | ||||||
|                         send = time.time() |  | ||||||
|                         await stream.send(count) |  | ||||||
| 
 |  | ||||||
|                         if count >= 1e3: |  | ||||||
|                             break |  | ||||||
| 
 |  | ||||||
|             print(f'mean round trip rate (Hz): {1/mean(delays)}') |  | ||||||
|             await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     if __name__ == '__main__': |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Yes, we spawn a python process, run ``asyncio``, start ``trio`` on the |  | ||||||
| ``asyncio`` loop, then send commands to the ``trio`` scheduled tasks to |  | ||||||
| tell ``asyncio`` tasks what to do XD |  | ||||||
| 
 |  | ||||||
| We need help refining the `asyncio`-side channel API to be more |  | ||||||
| `trio`-like. Feel free to sling your opinion in `#273`_! |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| .. _#273: https://github.com/goodboy/tractor/issues/273 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Higher level "cluster" APIs |  | ||||||
| *************************** |  | ||||||
| To be extra terse the ``tractor`` devs have started hacking some "higher |  | ||||||
| level" APIs for managing actor trees/clusters. These interfaces should |  | ||||||
| generally be condsidered provisional for now but we encourage you to try |  | ||||||
| them and provide feedback. Here's a new API that let's you quickly |  | ||||||
| spawn a flat cluster: |  | ||||||
| 
 |  | ||||||
| .. code:: python |  | ||||||
| 
 |  | ||||||
|     import trio |  | ||||||
|     import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     async def sleepy_jane(): |  | ||||||
|         uid = tractor.current_actor().uid |  | ||||||
|         print(f'Yo i am actor {uid}') |  | ||||||
|         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         ''' |  | ||||||
|         Spawn a flat actor cluster, with one process per |  | ||||||
|         detected core. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         portal_map: dict[str, tractor.Portal] |  | ||||||
|         results: dict[str, str] |  | ||||||
| 
 |  | ||||||
|         # look at this hip new syntax! |  | ||||||
|         async with ( |  | ||||||
| 
 |  | ||||||
|             tractor.open_actor_cluster( |  | ||||||
|                 modules=[__name__] |  | ||||||
|             ) as portal_map, |  | ||||||
| 
 |  | ||||||
|             trio.open_nursery() as n, |  | ||||||
|         ): |  | ||||||
| 
 |  | ||||||
|             for (name, portal) in portal_map.items(): |  | ||||||
|                 n.start_soon(portal.run, sleepy_jane) |  | ||||||
| 
 |  | ||||||
|             await trio.sleep(0.5) |  | ||||||
| 
 |  | ||||||
|             # kill the cluster with a cancel |  | ||||||
|             raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     if __name__ == '__main__': |  | ||||||
|         try: |  | ||||||
|             trio.run(main) |  | ||||||
|         except KeyboardInterrupt: |  | ||||||
|             pass |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| .. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py | .. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py | ||||||
| 
 | 
 | ||||||
|  | Install | ||||||
|  | ------- | ||||||
|  | From PyPi:: | ||||||
|  | 
 | ||||||
|  |     pip install tractor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | To try out the (optionally) faster `msgspec`_ codec instead of the | ||||||
|  | default ``msgpack`` lib:: | ||||||
|  | 
 | ||||||
|  |     pip install tractor[msgspec] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | From git:: | ||||||
|  | 
 | ||||||
|  |     pip install git+git://github.com/goodboy/tractor.git | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| Under the hood | Under the hood | ||||||
| -------------- | -------------- | ||||||
|  | @ -632,22 +397,13 @@ properties of the system. | ||||||
| 
 | 
 | ||||||
| What's on the TODO: | What's on the TODO: | ||||||
| ------------------- | ------------------- | ||||||
| Help us push toward the future of distributed `Python`. | Help us push toward the future. | ||||||
| 
 | 
 | ||||||
| - Erlang-style supervisors via composed context managers (see `#22 | - (Soon to land) ``asyncio`` support allowing for "infected" actors where | ||||||
|   <https://github.com/goodboy/tractor/issues/22>`_) |   `trio` drives the `asyncio` scheduler via the astounding "`guest mode`_" | ||||||
| - Typed messaging protocols (ex. via ``msgspec.Struct``, see `#36 | - Typed messaging protocols (ex. via ``msgspec``, see `#36 | ||||||
|   <https://github.com/goodboy/tractor/issues/36>`_) |   <https://github.com/goodboy/tractor/issues/36>`_) | ||||||
| - Typed capability-based (dialog) protocols ( see `#196 | - Erlang-style supervisors via composed context managers | ||||||
|   <https://github.com/goodboy/tractor/issues/196>`_ with draft work |  | ||||||
|   started in `#311 <https://github.com/goodboy/tractor/pull/311>`_) |  | ||||||
| - We **recently disabled CI-testing on windows** and need help getting |  | ||||||
|   it running again! (see `#327 |  | ||||||
|   <https://github.com/goodboy/tractor/pull/327>`_). **We do have windows |  | ||||||
|   support** (and have for quite a while) but since no active hacker |  | ||||||
|   exists in the user-base to help test on that OS, for now we're not |  | ||||||
|   actively maintaining testing due to the added hassle and general |  | ||||||
|   latency.. |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| Feel like saying hi? | Feel like saying hi? | ||||||
|  | @ -659,45 +415,32 @@ say hi, please feel free to reach us in our `matrix channel`_.  If | ||||||
| matrix seems too hip, we're also mostly all in the the `trio gitter | matrix seems too hip, we're also mostly all in the the `trio gitter | ||||||
| channel`_! | channel`_! | ||||||
| 
 | 
 | ||||||
| .. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228 |  | ||||||
| .. _distributed: https://en.wikipedia.org/wiki/Distributed_computing |  | ||||||
| .. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing |  | ||||||
| .. _trio: https://github.com/python-trio/trio |  | ||||||
| .. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements | .. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements | ||||||
| .. _actor model: https://en.wikipedia.org/wiki/Actor_model | .. _actor model: https://en.wikipedia.org/wiki/Actor_model | ||||||
|  | .. _trio: https://github.com/python-trio/trio | ||||||
|  | .. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing | ||||||
| .. _trionic: https://trio.readthedocs.io/en/latest/design.html#high-level-design-principles | .. _trionic: https://trio.readthedocs.io/en/latest/design.html#high-level-design-principles | ||||||
| .. _async sandwich: https://trio.readthedocs.io/en/latest/tutorial.html#async-sandwich | .. _async sandwich: https://trio.readthedocs.io/en/latest/tutorial.html#async-sandwich | ||||||
|  | .. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228 | ||||||
| .. _3 axioms: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=162s | .. _3 axioms: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=162s | ||||||
| .. .. _3 axioms: https://en.wikipedia.org/wiki/Actor_model#Fundamental_concepts | .. .. _3 axioms: https://en.wikipedia.org/wiki/Actor_model#Fundamental_concepts | ||||||
| .. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s | .. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s | ||||||
| .. _trio gitter channel: https://gitter.im/python-trio/general | .. _trio gitter channel: https://gitter.im/python-trio/general | ||||||
| .. _matrix channel: https://matrix.to/#/!tractor:matrix.org | .. _matrix channel: https://matrix.to/#/!tractor:matrix.org | ||||||
| .. _broadcasting: https://github.com/goodboy/tractor/pull/229 |  | ||||||
| .. _modern procotol: https://en.wikipedia.org/wiki/Rendezvous_protocol |  | ||||||
| .. _pdbp: https://github.com/mdmintz/pdbp |  | ||||||
| .. _pdb++: https://github.com/pdbpp/pdbpp | .. _pdb++: https://github.com/pdbpp/pdbpp | ||||||
| .. _cheap or nasty: https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern | .. _guest mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||||
| .. _(un)protocol: https://zguide.zeromq.org/docs/chapter7/#Unprotocols |  | ||||||
| .. _discovery: https://zguide.zeromq.org/docs/chapter8/#Discovery |  | ||||||
| .. _modern protocol: https://en.wikipedia.org/wiki/Rendezvous_protocol |  | ||||||
| .. _messages: https://en.wikipedia.org/wiki/Message_passing | .. _messages: https://en.wikipedia.org/wiki/Message_passing | ||||||
| .. _trio docs: https://trio.readthedocs.io/en/latest/ | .. _trio docs: https://trio.readthedocs.io/en/latest/ | ||||||
| .. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/ | .. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/ | ||||||
| .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||||
| .. _SC: https://en.wikipedia.org/wiki/Structured_concurrency |  | ||||||
| .. _libdill-docs: https://sustrik.github.io/libdill/structured-concurrency.html |  | ||||||
| .. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony | .. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony | ||||||
| .. _async generators: https://www.python.org/dev/peps/pep-0525/ | .. _async generators: https://www.python.org/dev/peps/pep-0525/ | ||||||
| .. _trio-parallel: https://github.com/richardsheridan/trio-parallel | .. _trio-parallel: https://github.com/richardsheridan/trio-parallel | ||||||
| .. _uv: https://docs.astral.sh/uv/ |  | ||||||
| .. _msgspec: https://jcristharif.com/msgspec/ | .. _msgspec: https://jcristharif.com/msgspec/ | ||||||
| .. _guest: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops |  | ||||||
| 
 | 
 | ||||||
| .. | 
 | ||||||
|    NOTE, on generating badge links from the UI | .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fgoodboy%2Ftractor%2Fbadge&style=popout-square | ||||||
|    https://docs.github.com/en/actions/how-tos/monitoring-and-troubleshooting-workflows/monitoring-workflows/adding-a-workflow-status-badge?ref=gitguardian-blog-automated-secrets-detection#using-the-ui |     :target: https://actions-badge.atrox.dev/goodboy/tractor/goto | ||||||
| .. |gh_actions| image:: https://github.com/goodboy/tractor/actions/workflows/ci.yml/badge.svg?branch=main |  | ||||||
|     :target: https://github.com/goodboy/tractor/actions/workflows/ci.yml |  | ||||||
| 
 | 
 | ||||||
| .. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest | .. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest | ||||||
|     :target: https://tractor.readthedocs.io/en/latest/?badge=latest |     :target: https://tractor.readthedocs.io/en/latest/?badge=latest | ||||||
|  |  | ||||||
|  | @ -1,51 +0,0 @@ | ||||||
| Hot tips for ``tractor`` hackers |  | ||||||
| ================================ |  | ||||||
| 
 |  | ||||||
| This is a WIP guide for newcomers to the project mostly to do with |  | ||||||
| dev, testing, CI and release gotchas, reminders and best practises. |  | ||||||
| 
 |  | ||||||
| ``tractor`` is a fairly novel project compared to most since it is |  | ||||||
| effectively a new way of doing distributed computing in Python and is |  | ||||||
| much closer to working with an "application level runtime" (like erlang |  | ||||||
| OTP or scala's akka project) then it is a traditional Python library. |  | ||||||
| As such, having an arsenal of tools and recipes for figuring out the |  | ||||||
| right way to debug problems when they do arise is somewhat of |  | ||||||
| a necessity. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Making a Release |  | ||||||
| ---------------- |  | ||||||
| We currently do nothing special here except the traditional |  | ||||||
| PyPa release recipe as in `documented by twine`_. I personally |  | ||||||
| create sub-dirs within the generated `dist/` with an explicit |  | ||||||
| release name such as `alpha3/` when there's been a sequence of |  | ||||||
| releases I've made, but it really is up to you how you like to |  | ||||||
| organize generated sdists locally. |  | ||||||
| 
 |  | ||||||
| The resulting build cmds are approximately: |  | ||||||
| 
 |  | ||||||
| .. code:: bash |  | ||||||
| 
 |  | ||||||
|     python setup.py sdist -d ./dist/XXX.X/ |  | ||||||
| 
 |  | ||||||
|     twine upload -r testpypi dist/XXX.X/* |  | ||||||
| 
 |  | ||||||
|     twine upload dist/XXX.X/* |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| .. _documented by twine: https://twine.readthedocs.io/en/latest/#using-twine |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Debugging and monitoring actor trees |  | ||||||
| ------------------------------------ |  | ||||||
| TODO: but there are tips in the readme for some terminal commands |  | ||||||
| which can be used to see the process trees easily on Linux. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| Using the log system to trace `trio` task flow |  | ||||||
| ---------------------------------------------- |  | ||||||
| TODO: the logging system is meant to be oriented around |  | ||||||
| stack "layers" of the runtime such that you can track |  | ||||||
| "logical abstraction layers" in the code such as errors, cancellation, |  | ||||||
| IPC and streaming, and the low level transport and wire protocols. |  | ||||||
|  | @ -396,7 +396,7 @@ tasks spawned via multiple RPC calls to an actor can modify | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|         # a per process cache |         # a per process cache | ||||||
|         _actor_cache: dict[str, bool] = {} |         _actor_cache: Dict[str, bool] = {} | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|         def ping_endpoints(endpoints: List[str]): |         def ping_endpoints(endpoints: List[str]): | ||||||
|  |  | ||||||
|  | @ -1,262 +0,0 @@ | ||||||
| ''' |  | ||||||
| Complex edge case where during real-time streaming the IPC tranport |  | ||||||
| channels are wiped out (purposely in this example though it could have |  | ||||||
| been an outage) and we want to ensure that despite being in debug mode |  | ||||||
| (or not) the user can sent SIGINT once they notice the hang and the |  | ||||||
| actor tree will eventually be cancelled without leaving any zombies. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import asynccontextmanager as acm |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| from tractor import ( |  | ||||||
|     open_nursery, |  | ||||||
|     context, |  | ||||||
|     Context, |  | ||||||
|     ContextCancelled, |  | ||||||
|     MsgStream, |  | ||||||
|     _testing, |  | ||||||
|     trionics, |  | ||||||
| ) |  | ||||||
| import trio |  | ||||||
| import pytest |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def break_ipc_then_error( |  | ||||||
|     stream: MsgStream, |  | ||||||
|     break_ipc_with: str|None = None, |  | ||||||
|     pre_close: bool = False, |  | ||||||
| ): |  | ||||||
|     await _testing.break_ipc( |  | ||||||
|         stream=stream, |  | ||||||
|         method=break_ipc_with, |  | ||||||
|         pre_close=pre_close, |  | ||||||
|     ) |  | ||||||
|     async for msg in stream: |  | ||||||
|         await stream.send(msg) |  | ||||||
| 
 |  | ||||||
|     assert 0 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def iter_ipc_stream( |  | ||||||
|     stream: MsgStream, |  | ||||||
|     break_ipc_with: str|None = None, |  | ||||||
|     pre_close: bool = False, |  | ||||||
| ): |  | ||||||
|     async for msg in stream: |  | ||||||
|         await stream.send(msg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @context |  | ||||||
| async def recv_and_spawn_net_killers( |  | ||||||
| 
 |  | ||||||
|     ctx: Context, |  | ||||||
|     break_ipc_after: bool|int = False, |  | ||||||
|     pre_close: bool = False, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Receive stream msgs and spawn some IPC killers mid-stream. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     broke_ipc: bool = False |  | ||||||
|     await ctx.started() |  | ||||||
|     async with ( |  | ||||||
|         ctx.open_stream() as stream, |  | ||||||
|         trionics.collapse_eg(), |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
|         async for i in stream: |  | ||||||
|             print(f'child echoing {i}') |  | ||||||
|             if not broke_ipc: |  | ||||||
|                 await stream.send(i) |  | ||||||
|             else: |  | ||||||
|                 await trio.sleep(0.01) |  | ||||||
| 
 |  | ||||||
|             if ( |  | ||||||
|                 break_ipc_after |  | ||||||
|                 and |  | ||||||
|                 i >= break_ipc_after |  | ||||||
|             ): |  | ||||||
|                 broke_ipc = True |  | ||||||
|                 tn.start_soon( |  | ||||||
|                     iter_ipc_stream, |  | ||||||
|                     stream, |  | ||||||
|                 ) |  | ||||||
|                 tn.start_soon( |  | ||||||
|                     partial( |  | ||||||
|                         break_ipc_then_error, |  | ||||||
|                         stream=stream, |  | ||||||
|                         pre_close=pre_close, |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def stuff_hangin_ctlc(timeout: float = 1) -> None: |  | ||||||
| 
 |  | ||||||
|     with trio.move_on_after(timeout) as cs: |  | ||||||
|         yield timeout |  | ||||||
| 
 |  | ||||||
|     if cs.cancelled_caught: |  | ||||||
|         # pretend to be a user seeing no streaming action |  | ||||||
|         # thinking it's a hang, and then hitting ctl-c.. |  | ||||||
|         print( |  | ||||||
|             f"i'm a user on the PARENT side and thingz hangin " |  | ||||||
|             f'after timeout={timeout} ???\n\n' |  | ||||||
|             'MASHING CTlR-C..!?\n' |  | ||||||
|         ) |  | ||||||
|         raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     debug_mode: bool = False, |  | ||||||
|     start_method: str = 'trio', |  | ||||||
|     loglevel: str = 'cancel', |  | ||||||
| 
 |  | ||||||
|     # by default we break the parent IPC first (if configured to break |  | ||||||
|     # at all), but this can be changed so the child does first (even if |  | ||||||
|     # both are set to break). |  | ||||||
|     break_parent_ipc_after: int|bool = False, |  | ||||||
|     break_child_ipc_after: int|bool = False, |  | ||||||
|     pre_close: bool = False, |  | ||||||
|     tpt_proto: str = 'tcp', |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         open_nursery( |  | ||||||
|             start_method=start_method, |  | ||||||
| 
 |  | ||||||
|             # NOTE: even debugger is used we shouldn't get |  | ||||||
|             # a hang since it never engages due to broken IPC |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|             loglevel=loglevel, |  | ||||||
|             enable_transports=[tpt_proto], |  | ||||||
| 
 |  | ||||||
|         ) as an, |  | ||||||
|     ): |  | ||||||
|         sub_name: str = 'chitty_hijo' |  | ||||||
|         portal = await an.start_actor( |  | ||||||
|             sub_name, |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         async with ( |  | ||||||
|             stuff_hangin_ctlc(timeout=2) as timeout, |  | ||||||
|             _testing.expect_ctxc( |  | ||||||
|                 yay=( |  | ||||||
|                     break_parent_ipc_after |  | ||||||
|                     or |  | ||||||
|                     break_child_ipc_after |  | ||||||
|                 ), |  | ||||||
|                 # TODO: we CAN'T remove this right? |  | ||||||
|                 # since we need the ctxc to bubble up from either |  | ||||||
|                 # the stream API after the `None` msg is sent |  | ||||||
|                 # (which actually implicitly cancels all remote |  | ||||||
|                 # tasks in the hijo) or from simluated |  | ||||||
|                 # KBI-mash-from-user |  | ||||||
|                 # or should we expect that a KBI triggers the ctxc |  | ||||||
|                 # and KBI in an eg? |  | ||||||
|                 reraise=True, |  | ||||||
|             ), |  | ||||||
| 
 |  | ||||||
|             portal.open_context( |  | ||||||
|                 recv_and_spawn_net_killers, |  | ||||||
|                 break_ipc_after=break_child_ipc_after, |  | ||||||
|                 pre_close=pre_close, |  | ||||||
|             ) as (ctx, sent), |  | ||||||
|         ): |  | ||||||
|             rx_eoc: bool = False |  | ||||||
|             ipc_break_sent: bool = False |  | ||||||
|             async with ctx.open_stream() as stream: |  | ||||||
|                 for i in range(1000): |  | ||||||
| 
 |  | ||||||
|                     if ( |  | ||||||
|                         break_parent_ipc_after |  | ||||||
|                         and |  | ||||||
|                         i > break_parent_ipc_after |  | ||||||
|                         and |  | ||||||
|                         not ipc_break_sent |  | ||||||
|                     ): |  | ||||||
|                         print( |  | ||||||
|                             '#################################\n' |  | ||||||
|                             'Simulating PARENT-side IPC BREAK!\n' |  | ||||||
|                             '#################################\n' |  | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                         # TODO: other methods? see break func above. |  | ||||||
|                         # await stream._ctx.chan.send(None) |  | ||||||
|                         # await stream._ctx.chan.transport.stream.send_eof() |  | ||||||
|                         await stream._ctx.chan.transport.stream.aclose() |  | ||||||
|                         ipc_break_sent = True |  | ||||||
| 
 |  | ||||||
|                     # it actually breaks right here in the |  | ||||||
|                     # mp_spawn/forkserver backends and thus the |  | ||||||
|                     # zombie reaper never even kicks in? |  | ||||||
|                     try: |  | ||||||
|                         print(f'parent sending {i}') |  | ||||||
|                         await stream.send(i) |  | ||||||
|                     except ContextCancelled as ctxc: |  | ||||||
|                         print( |  | ||||||
|                             'parent received ctxc on `stream.send()`\n' |  | ||||||
|                             f'{ctxc}\n' |  | ||||||
|                         ) |  | ||||||
|                         assert 'root' in ctxc.canceller |  | ||||||
|                         assert sub_name in ctx.canceller |  | ||||||
| 
 |  | ||||||
|                         # TODO: is this needed or no? |  | ||||||
|                         raise |  | ||||||
| 
 |  | ||||||
|                     except trio.ClosedResourceError: |  | ||||||
|                         # NOTE: don't send if we already broke the |  | ||||||
|                         # connection to avoid raising a closed-error |  | ||||||
|                         # such that we drop through to the ctl-c |  | ||||||
|                         # mashing by user. |  | ||||||
|                         await trio.sleep(0.01) |  | ||||||
| 
 |  | ||||||
|                     # timeout: int = 1 |  | ||||||
|                     # with trio.move_on_after(timeout) as cs: |  | ||||||
|                     async with stuff_hangin_ctlc() as timeout: |  | ||||||
|                         print( |  | ||||||
|                             f'PARENT `stream.receive()` with timeout={timeout}\n' |  | ||||||
|                         ) |  | ||||||
|                         # NOTE: in the parent side IPC failure case this |  | ||||||
|                         # will raise an ``EndOfChannel`` after the child |  | ||||||
|                         # is killed and sends a stop msg back to it's |  | ||||||
|                         # caller/this-parent. |  | ||||||
|                         try: |  | ||||||
|                             rx = await stream.receive() |  | ||||||
|                             print( |  | ||||||
|                                 "I'm a happy PARENT user and echoed to me is\n" |  | ||||||
|                                 f'{rx}\n' |  | ||||||
|                             ) |  | ||||||
|                         except trio.EndOfChannel: |  | ||||||
|                             rx_eoc: bool = True |  | ||||||
|                             print('MsgStream got EoC for PARENT') |  | ||||||
|                             raise |  | ||||||
| 
 |  | ||||||
|             print( |  | ||||||
|                 'Streaming finished and we got Eoc.\n' |  | ||||||
|                 'Canceling `.open_context()` in root with\n' |  | ||||||
|                 'CTlR-C..' |  | ||||||
|             ) |  | ||||||
|             if rx_eoc: |  | ||||||
|                 assert stream.closed |  | ||||||
|                 try: |  | ||||||
|                     await stream.send(i) |  | ||||||
|                     pytest.fail('stream not closed?') |  | ||||||
|                 except ( |  | ||||||
|                     trio.ClosedResourceError, |  | ||||||
|                     trio.EndOfChannel, |  | ||||||
|                 ) as send_err: |  | ||||||
|                     if rx_eoc: |  | ||||||
|                         assert send_err is stream._eoc |  | ||||||
|                     else: |  | ||||||
|                         assert send_err is stream._closed |  | ||||||
| 
 |  | ||||||
|             raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,136 +0,0 @@ | ||||||
| ''' |  | ||||||
| Examples of using the builtin `breakpoint()` from an `asyncio.Task` |  | ||||||
| running in a subactor spawned with `infect_asyncio=True`. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import asyncio |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     to_asyncio, |  | ||||||
|     Portal, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def aio_sleep_forever(): |  | ||||||
|     await asyncio.sleep(float('inf')) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def bp_then_error( |  | ||||||
|     to_trio: trio.MemorySendChannel, |  | ||||||
|     from_trio: asyncio.Queue, |  | ||||||
| 
 |  | ||||||
|     raise_after_bp: bool = True, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     # sync with `trio`-side (caller) task |  | ||||||
|     to_trio.send_nowait('start') |  | ||||||
| 
 |  | ||||||
|     # NOTE: what happens here inside the hook needs some refinement.. |  | ||||||
|     # => seems like it's still `.debug._set_trace()` but |  | ||||||
|     #    we set `Lock.local_task_in_debug = 'sync'`, we probably want |  | ||||||
|     #    some further, at least, meta-data about the task/actor in debug |  | ||||||
|     #    in terms of making it clear it's `asyncio` mucking about. |  | ||||||
|     breakpoint()  # asyncio-side |  | ||||||
| 
 |  | ||||||
|     # short checkpoint / delay |  | ||||||
|     await asyncio.sleep(0.5)  # asyncio-side |  | ||||||
| 
 |  | ||||||
|     if raise_after_bp: |  | ||||||
|         raise ValueError('asyncio side error!') |  | ||||||
| 
 |  | ||||||
|     # TODO: test case with this so that it gets cancelled? |  | ||||||
|     else: |  | ||||||
|         # XXX NOTE: this is required in order to get the SIGINT-ignored |  | ||||||
|         # hang case documented in the module script section! |  | ||||||
|         await aio_sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def trio_ctx( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     bp_before_started: bool = False, |  | ||||||
| ): |  | ||||||
| 
 |  | ||||||
|     # this will block until the ``asyncio`` task sends a "first" |  | ||||||
|     # message, see first line in above func. |  | ||||||
|     async with ( |  | ||||||
|         to_asyncio.open_channel_from( |  | ||||||
|             bp_then_error, |  | ||||||
|             # raise_after_bp=not bp_before_started, |  | ||||||
|         ) as (first, chan), |  | ||||||
| 
 |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
|         assert first == 'start' |  | ||||||
| 
 |  | ||||||
|         if bp_before_started: |  | ||||||
|             await tractor.pause()  # trio-side |  | ||||||
| 
 |  | ||||||
|         await ctx.started(first)  # trio-side |  | ||||||
| 
 |  | ||||||
|         tn.start_soon( |  | ||||||
|             to_asyncio.run_task, |  | ||||||
|             aio_sleep_forever, |  | ||||||
|         ) |  | ||||||
|         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     bps_all_over: bool = True, |  | ||||||
| 
 |  | ||||||
|     # TODO, WHICH OF THESE HAZ BUGZ? |  | ||||||
|     cancel_from_root: bool = False, |  | ||||||
|     err_from_root: bool = False, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|         maybe_enable_greenback=True, |  | ||||||
|         # loglevel='devx', |  | ||||||
|     ) as an: |  | ||||||
|         ptl: Portal = await an.start_actor( |  | ||||||
|             'aio_daemon', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             infect_asyncio=True, |  | ||||||
|             debug_mode=True, |  | ||||||
|             # loglevel='cancel', |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         async with ptl.open_context( |  | ||||||
|             trio_ctx, |  | ||||||
|             bp_before_started=bps_all_over, |  | ||||||
|         ) as (ctx, first): |  | ||||||
| 
 |  | ||||||
|             assert first == 'start' |  | ||||||
| 
 |  | ||||||
|             # pause in parent to ensure no cross-actor |  | ||||||
|             # locking problems exist! |  | ||||||
|             await tractor.pause()  # trio-root |  | ||||||
| 
 |  | ||||||
|             if cancel_from_root: |  | ||||||
|                 await ctx.cancel() |  | ||||||
| 
 |  | ||||||
|             if err_from_root: |  | ||||||
|                 assert 0 |  | ||||||
|             else: |  | ||||||
|                 await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|         # TODO: case where we cancel from trio-side while asyncio task |  | ||||||
|         # has debugger lock? |  | ||||||
|         # await ptl.cancel_actor() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
| 
 |  | ||||||
|     # works fine B) |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
|     # will hang and ignores SIGINT !! |  | ||||||
|     # NOTE: you'll need to send a SIGQUIT (via ctl-\) to kill it |  | ||||||
|     # manually.. |  | ||||||
|     # trio.run(main, True) |  | ||||||
|  | @ -1,9 +0,0 @@ | ||||||
| ''' |  | ||||||
| Reproduce a bug where enabling debug mode for a sub-actor actually causes |  | ||||||
| a hang on teardown... |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import asyncio |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
|  | @ -1,5 +1,5 @@ | ||||||
| ''' | ''' | ||||||
| Fast fail test with a `Context`. | Fast fail test with a context. | ||||||
| 
 | 
 | ||||||
| Ensure the partially initialized sub-actor process | Ensure the partially initialized sub-actor process | ||||||
| doesn't cause a hang on error/cancel of the parent | doesn't cause a hang on error/cancel of the parent | ||||||
|  | @ -20,7 +20,7 @@ async def sleep( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def open_ctx( | async def open_ctx( | ||||||
|     n: tractor._supervise.ActorNursery |     n: tractor._trionics.ActorNursery | ||||||
| ): | ): | ||||||
| 
 | 
 | ||||||
|     # spawn both actors |     # spawn both actors | ||||||
|  |  | ||||||
|  | @ -4,15 +4,9 @@ import trio | ||||||
| 
 | 
 | ||||||
| async def breakpoint_forever(): | async def breakpoint_forever(): | ||||||
|     "Indefinitely re-enter debugger in child actor." |     "Indefinitely re-enter debugger in child actor." | ||||||
|     try: |  | ||||||
|     while True: |     while True: | ||||||
|         yield 'yo' |         yield 'yo' | ||||||
|             await tractor.pause() |         await tractor.breakpoint() | ||||||
|     except BaseException: |  | ||||||
|         tractor.log.get_console_log().exception( |  | ||||||
|             'Cancelled while trying to enter pause point!' |  | ||||||
|         ) |  | ||||||
|         raise |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def name_error(): | async def name_error(): | ||||||
|  | @ -21,14 +15,11 @@ async def name_error(): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     ''' |     """Test breakpoint in a streaming actor. | ||||||
|     Test breakpoint in a streaming actor. |     """ | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='cancel', |         loglevel='error', | ||||||
|         # loglevel='devx', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         p0 = await n.start_actor('bp_forever', enable_modules=[__name__]) |         p0 = await n.start_actor('bp_forever', enable_modules=[__name__]) | ||||||
|  | @ -36,17 +27,6 @@ async def main(): | ||||||
| 
 | 
 | ||||||
|         # retreive results |         # retreive results | ||||||
|         async with p0.open_stream_from(breakpoint_forever) as stream: |         async with p0.open_stream_from(breakpoint_forever) as stream: | ||||||
| 
 |  | ||||||
|             # triggers the first name error |  | ||||||
|             try: |  | ||||||
|                 await p1.run(name_error) |  | ||||||
|             except tractor.RemoteActorError as rae: |  | ||||||
|                 assert rae.boxed_type is NameError |  | ||||||
| 
 |  | ||||||
|             async for i in stream: |  | ||||||
| 
 |  | ||||||
|                 # a second time try the failing subactor and this tie |  | ||||||
|                 # let error propagate up to the parent/nursery. |  | ||||||
|             await p1.run(name_error) |             await p1.run(name_error) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -10,12 +10,7 @@ async def name_error(): | ||||||
| async def breakpoint_forever(): | async def breakpoint_forever(): | ||||||
|     "Indefinitely re-enter debugger in child actor." |     "Indefinitely re-enter debugger in child actor." | ||||||
|     while True: |     while True: | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 |  | ||||||
|         # NOTE: if the test never sent 'q'/'quit' commands |  | ||||||
|         # on the pdb repl, without this checkpoint line the |  | ||||||
|         # repl would spin in this actor forever. |  | ||||||
|         # await trio.sleep(0) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def spawn_until(depth=0): | async def spawn_until(depth=0): | ||||||
|  | @ -23,20 +18,12 @@ async def spawn_until(depth=0): | ||||||
|     """ |     """ | ||||||
|     async with tractor.open_nursery() as n: |     async with tractor.open_nursery() as n: | ||||||
|         if depth < 1: |         if depth < 1: | ||||||
| 
 |             # await n.run_in_actor('breakpoint_forever', breakpoint_forever) | ||||||
|             await n.run_in_actor(breakpoint_forever) |             await n.run_in_actor( | ||||||
| 
 |  | ||||||
|             p = await n.run_in_actor( |  | ||||||
|                 name_error, |                 name_error, | ||||||
|                 name='name_error' |                 name='name_error' | ||||||
|             ) |             ) | ||||||
|             await trio.sleep(0.5) |  | ||||||
|             # rx and propagate error from child |  | ||||||
|             await p.result() |  | ||||||
| 
 |  | ||||||
|         else: |         else: | ||||||
|             # recusrive call to spawn another process branching layer of |  | ||||||
|             # the tree |  | ||||||
|             depth -= 1 |             depth -= 1 | ||||||
|             await n.run_in_actor( |             await n.run_in_actor( | ||||||
|                 spawn_until, |                 spawn_until, | ||||||
|  | @ -45,7 +32,6 @@ async def spawn_until(depth=0): | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: notes on the new boxed-relayed errors through proxy actors |  | ||||||
| async def main(): | async def main(): | ||||||
|     """The main ``tractor`` routine. |     """The main ``tractor`` routine. | ||||||
| 
 | 
 | ||||||
|  | @ -67,7 +53,6 @@ async def main(): | ||||||
|     """ |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='cancel', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # spawn both actors |         # spawn both actors | ||||||
|  | @ -82,16 +67,8 @@ async def main(): | ||||||
|             name='spawner1', |             name='spawner1', | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # TODO: test this case as well where the parent don't see |  | ||||||
|         # the sub-actor errors by default and instead expect a user |  | ||||||
|         # ctrl-c to kill the root. |  | ||||||
|         with trio.move_on_after(3): |  | ||||||
|             await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|         # gah still an issue here. |         # gah still an issue here. | ||||||
|         await portal.result() |         await portal.result() | ||||||
| 
 |  | ||||||
|         # should never get here |  | ||||||
|         await portal1.result() |         await portal1.result() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -1,8 +1,3 @@ | ||||||
| ''' |  | ||||||
| Test that a nested nursery will avoid clobbering |  | ||||||
| the debugger latched by a broken child. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
|  | @ -40,7 +35,6 @@ async def main(): | ||||||
|     """ |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='devx', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # spawn both actors |         # spawn both actors | ||||||
|  |  | ||||||
|  | @ -6,7 +6,7 @@ async def breakpoint_forever(): | ||||||
|     "Indefinitely re-enter debugger in child actor." |     "Indefinitely re-enter debugger in child actor." | ||||||
|     while True: |     while True: | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def name_error(): | async def name_error(): | ||||||
|  | @ -38,7 +38,6 @@ async def main(): | ||||||
|     """ |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='runtime', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # Spawn both actors, don't bother with collecting results |         # Spawn both actors, don't bother with collecting results | ||||||
|  |  | ||||||
|  | @ -1,40 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def just_sleep( |  | ||||||
| 
 |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     **kwargs, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Start and sleep. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     await ctx.started() |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main() -> None: |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|     ) as n: |  | ||||||
|         portal = await n.start_actor( |  | ||||||
|             'ctx_child', |  | ||||||
| 
 |  | ||||||
|             # XXX: we don't enable the current module in order |  | ||||||
|             # to trigger `ModuleNotFound`. |  | ||||||
|             enable_modules=[], |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         async with portal.open_context( |  | ||||||
|             just_sleep,  # taken from pytest parameterization |  | ||||||
|         ) as (ctx, sent): |  | ||||||
|             raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,28 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| async def die(): |  | ||||||
|     raise RuntimeError |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     async with tractor.open_nursery() as tn: |  | ||||||
| 
 |  | ||||||
|         debug_actor = await tn.start_actor( |  | ||||||
|             'debugged_boi', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             debug_mode=True, |  | ||||||
|         ) |  | ||||||
|         crash_boi = await tn.start_actor( |  | ||||||
|             'crash_boi', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             # debug_mode=True, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         async with trio.open_nursery() as n: |  | ||||||
|             n.start_soon(debug_actor.run, die) |  | ||||||
|             n.start_soon(crash_boi.run, die) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,56 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def name_error( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Raise a `NameError`, catch it and enter `.post_mortem()`, then |  | ||||||
|     expect the `._rpc._invoke()` crash handler to also engage. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     try: |  | ||||||
|         getattr(doggypants)  # noqa (on purpose) |  | ||||||
|     except NameError: |  | ||||||
|         await tractor.post_mortem() |  | ||||||
|         raise |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     ''' |  | ||||||
|     Test 3 `PdbREPL` entries: |  | ||||||
|       - one in the child due to manual `.post_mortem()`, |  | ||||||
|       - another in the child due to runtime RPC crash handling. |  | ||||||
|       - final one here in parent from the RAE. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # XXX NOTE: ideally the REPL arrives at this frame in the parent |  | ||||||
|     # ONE UP FROM the inner ctx block below! |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|         # loglevel='cancel', |  | ||||||
|     ) as an: |  | ||||||
|         p: tractor.Portal = await an.start_actor( |  | ||||||
|             'child', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # XXX should raise `RemoteActorError[NameError]` |  | ||||||
|         # AND be the active frame when REPL enters! |  | ||||||
|         try: |  | ||||||
|             async with p.open_context(name_error) as (ctx, first): |  | ||||||
|                 assert first |  | ||||||
|         except tractor.RemoteActorError as rae: |  | ||||||
|             assert rae.boxed_type is NameError |  | ||||||
| 
 |  | ||||||
|             # manually handle in root's parent task |  | ||||||
|             await tractor.post_mortem() |  | ||||||
|             raise |  | ||||||
|         else: |  | ||||||
|             raise RuntimeError('IPC ctx should have remote errored!?') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,58 +0,0 @@ | ||||||
| import os |  | ||||||
| import sys |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| # ensure mod-path is correct! |  | ||||||
| from tractor.devx.debug import ( |  | ||||||
|     _sync_pause_from_builtin as _sync_pause_from_builtin, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main() -> None: |  | ||||||
| 
 |  | ||||||
|     # intially unset, no entry. |  | ||||||
|     orig_pybp_var: int = os.environ.get('PYTHONBREAKPOINT') |  | ||||||
|     assert orig_pybp_var in {None, "0"} |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|         loglevel='devx', |  | ||||||
|         maybe_enable_greenback=True, |  | ||||||
|         # ^XXX REQUIRED to enable `breakpoint()` support (from sync |  | ||||||
|         # fns) and thus required here to avoid an assertion err |  | ||||||
|         # on the next line |  | ||||||
|     ): |  | ||||||
|         assert ( |  | ||||||
|             (pybp_var := os.environ['PYTHONBREAKPOINT']) |  | ||||||
|             == |  | ||||||
|             'tractor.devx.debug._sync_pause_from_builtin' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # TODO: an assert that verifies the hook has indeed been, hooked |  | ||||||
|         # XD |  | ||||||
|         assert ( |  | ||||||
|             (pybp_hook := sys.breakpointhook) |  | ||||||
|             is not tractor.devx.debug._set_trace |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         print( |  | ||||||
|             f'$PYTHONOBREAKPOINT: {pybp_var!r}\n' |  | ||||||
|             f'`sys.breakpointhook`: {pybp_hook!r}\n' |  | ||||||
|         ) |  | ||||||
|         breakpoint()  # first bp, tractor hook set. |  | ||||||
| 
 |  | ||||||
|     # XXX AFTER EXIT (of actor-runtime) verify the hook is unset.. |  | ||||||
|     # |  | ||||||
|     # YES, this is weird but it's how stdlib docs say to do it.. |  | ||||||
|     # https://docs.python.org/3/library/sys.html#sys.breakpointhook |  | ||||||
|     assert os.environ.get('PYTHONBREAKPOINT') is orig_pybp_var |  | ||||||
|     assert sys.breakpointhook |  | ||||||
| 
 |  | ||||||
|     # now ensure a regular builtin pause still works |  | ||||||
|     breakpoint()  # last bp, stdlib hook restored |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -10,7 +10,7 @@ async def main(): | ||||||
| 
 | 
 | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
| 
 | 
 | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -2,16 +2,13 @@ import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main( | async def main(): | ||||||
|     registry_addrs: tuple[str, int]|None = None |  | ||||||
| ): |  | ||||||
| 
 | 
 | ||||||
|     async with tractor.open_root_actor( |     async with tractor.open_root_actor( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='runtime', |  | ||||||
|     ): |     ): | ||||||
|         while True: |         while True: | ||||||
|             await tractor.pause() |             await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|  |  | ||||||
|  | @ -24,9 +24,10 @@ async def spawn_until(depth=0): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     ''' |     """The main ``tractor`` routine. | ||||||
|     The process tree should look as approximately as follows when the | 
 | ||||||
|     debugger first engages: |     The process tree should look as approximately as follows when the debugger | ||||||
|  |     first engages: | ||||||
| 
 | 
 | ||||||
|     python examples/debugging/multi_nested_subactors_bp_forever.py |     python examples/debugging/multi_nested_subactors_bp_forever.py | ||||||
|     ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...) |     ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...) | ||||||
|  | @ -36,11 +37,10 @@ async def main(): | ||||||
|     └─ python -m tractor._child --uid ('spawner0', '1d42012b ...) |     └─ python -m tractor._child --uid ('spawner0', '1d42012b ...) | ||||||
|        └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...) |        └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...) | ||||||
| 
 | 
 | ||||||
|     ''' |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='devx', |         loglevel='warning' | ||||||
|         enable_transports=['uds'], |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # spawn both actors |         # spawn both actors | ||||||
|  |  | ||||||
|  | @ -1,35 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     async with tractor.open_root_actor( |  | ||||||
|         debug_mode=True, |  | ||||||
|         loglevel='cancel', |  | ||||||
|     ) as _root: |  | ||||||
| 
 |  | ||||||
|         # manually trigger self-cancellation and wait |  | ||||||
|         # for it to fully trigger. |  | ||||||
|         _root.cancel_soon() |  | ||||||
|         await _root._cancel_complete.wait() |  | ||||||
|         print('root cancelled') |  | ||||||
| 
 |  | ||||||
|         # now ensure we can still use the REPL |  | ||||||
|         try: |  | ||||||
|             await tractor.pause() |  | ||||||
|         except trio.Cancelled as _taskc: |  | ||||||
|             assert (root_cs := _root._root_tn.cancel_scope).cancel_called |  | ||||||
|             # NOTE^^ above logic but inside `open_root_actor()` and |  | ||||||
|             # passed to the `shield=` expression is effectively what |  | ||||||
|             # we're testing here! |  | ||||||
|             await tractor.pause(shield=root_cs.cancel_called) |  | ||||||
| 
 |  | ||||||
|         # XXX, if shield logic *is wrong* inside `open_root_actor()`'s |  | ||||||
|         # crash-handler block this should never be interacted, |  | ||||||
|         # instead `trio.Cancelled` would be bubbled up: the original |  | ||||||
|         # BUG. |  | ||||||
|         assert 0 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,84 +0,0 @@ | ||||||
| ''' |  | ||||||
| Verify we can dump a `stackscope` tree on a hang. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import os |  | ||||||
| import signal |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def start_n_shield_hang( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     # actor: tractor.Actor = tractor.current_actor() |  | ||||||
| 
 |  | ||||||
|     # sync to parent-side task |  | ||||||
|     await ctx.started(os.getpid()) |  | ||||||
| 
 |  | ||||||
|     print('Entering shield sleep..') |  | ||||||
|     with trio.CancelScope(shield=True): |  | ||||||
|         await trio.sleep_forever()  # in subactor |  | ||||||
| 
 |  | ||||||
|     # XXX NOTE ^^^ since this shields, we expect |  | ||||||
|     # the zombie reaper (aka T800) to engage on |  | ||||||
|     # SIGINT from the user and eventually hard-kill |  | ||||||
|     # this subprocess! |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     from_test: bool = False, |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         tractor.open_nursery( |  | ||||||
|             debug_mode=True, |  | ||||||
|             enable_stack_on_sig=True, |  | ||||||
|             # maybe_enable_greenback=False, |  | ||||||
|             loglevel='devx', |  | ||||||
|             enable_transports=['uds'], |  | ||||||
|         ) as an, |  | ||||||
|     ): |  | ||||||
|         ptl: tractor.Portal  = await an.start_actor( |  | ||||||
|             'hanger', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             debug_mode=True, |  | ||||||
|         ) |  | ||||||
|         async with ptl.open_context( |  | ||||||
|             start_n_shield_hang, |  | ||||||
|         ) as (ctx, cpid): |  | ||||||
| 
 |  | ||||||
|             _, proc, _ = an._children[ptl.chan.uid] |  | ||||||
|             assert cpid == proc.pid |  | ||||||
| 
 |  | ||||||
|             print( |  | ||||||
|                 'Yo my child hanging..?\n' |  | ||||||
|                 # "i'm a user who wants to see a `stackscope` tree!\n" |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # XXX simulate the wrapping test's "user actions" |  | ||||||
|             # (i.e. if a human didn't run this manually but wants to |  | ||||||
|             # know what they should do to reproduce test behaviour) |  | ||||||
|             if from_test: |  | ||||||
|                 print( |  | ||||||
|                     f'Sending SIGUSR1 to {cpid!r}!\n' |  | ||||||
|                 ) |  | ||||||
|                 os.kill( |  | ||||||
|                     cpid, |  | ||||||
|                     signal.SIGUSR1, |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 # simulate user cancelling program |  | ||||||
|                 await trio.sleep(0.5) |  | ||||||
|                 os.kill( |  | ||||||
|                     os.getpid(), |  | ||||||
|                     signal.SIGINT, |  | ||||||
|                 ) |  | ||||||
|             else: |  | ||||||
|                 # actually let user send the ctl-c |  | ||||||
|                 await trio.sleep_forever()  # in root |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,88 +0,0 @@ | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def cancellable_pause_loop( |  | ||||||
|     task_status: trio.TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED |  | ||||||
| ): |  | ||||||
|     with trio.CancelScope() as cs: |  | ||||||
|         task_status.started(cs) |  | ||||||
|         for _ in range(3): |  | ||||||
|             try: |  | ||||||
|                 # ON first entry, there is no level triggered |  | ||||||
|                 # cancellation yet, so this cp does a parent task |  | ||||||
|                 # ctx-switch so that this scope raises for the NEXT |  | ||||||
|                 # checkpoint we hit. |  | ||||||
|                 await trio.lowlevel.checkpoint() |  | ||||||
|                 await tractor.pause() |  | ||||||
| 
 |  | ||||||
|                 cs.cancel() |  | ||||||
| 
 |  | ||||||
|                 # parent should have called `cs.cancel()` by now |  | ||||||
|                 await trio.lowlevel.checkpoint() |  | ||||||
| 
 |  | ||||||
|             except trio.Cancelled: |  | ||||||
|                 print('INSIDE SHIELDED PAUSE') |  | ||||||
|                 await tractor.pause(shield=True) |  | ||||||
|         else: |  | ||||||
|             # should raise it again, bubbling up to parent |  | ||||||
|             print('BUBBLING trio.Cancelled to parent task-nursery') |  | ||||||
|             await trio.lowlevel.checkpoint() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def pm_on_cancelled(): |  | ||||||
|     async with trio.open_nursery() as tn: |  | ||||||
|         tn.cancel_scope.cancel() |  | ||||||
|         try: |  | ||||||
|             await trio.sleep_forever() |  | ||||||
|         except trio.Cancelled: |  | ||||||
|             # should also raise `Cancelled` since |  | ||||||
|             # we didn't pass `shield=True`. |  | ||||||
|             try: |  | ||||||
|                 await tractor.post_mortem(hide_tb=False) |  | ||||||
|             except trio.Cancelled as taskc: |  | ||||||
| 
 |  | ||||||
|                 # should enter just fine, in fact it should |  | ||||||
|                 # be debugging the internals of the previous |  | ||||||
|                 # sin-shield call above Bo |  | ||||||
|                 await tractor.post_mortem( |  | ||||||
|                     hide_tb=False, |  | ||||||
|                     shield=True, |  | ||||||
|                 ) |  | ||||||
|                 raise taskc |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             raise RuntimeError('Dint cancel as expected!?') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def cancelled_before_pause( |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that using a shielded pause works despite surrounding |  | ||||||
|     cancellation called state in the calling task. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async with trio.open_nursery() as tn: |  | ||||||
|         cs: trio.CancelScope = await tn.start(cancellable_pause_loop) |  | ||||||
|         await trio.sleep(0.1) |  | ||||||
| 
 |  | ||||||
|     assert cs.cancelled_caught |  | ||||||
| 
 |  | ||||||
|     await pm_on_cancelled() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|     ) as n: |  | ||||||
|         portal: tractor.Portal = await n.run_in_actor( |  | ||||||
|             cancelled_before_pause, |  | ||||||
|         ) |  | ||||||
|         await portal.result() |  | ||||||
| 
 |  | ||||||
|         # ensure the same works in the root actor! |  | ||||||
|         await pm_on_cancelled() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,53 +0,0 @@ | ||||||
| import tractor |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def gen(): |  | ||||||
|     yield 'yo' |  | ||||||
|     await tractor.pause() |  | ||||||
|     yield 'yo' |  | ||||||
|     await tractor.pause() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def just_bp( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     await ctx.started() |  | ||||||
|     await tractor.pause() |  | ||||||
| 
 |  | ||||||
|     # TODO: bps and errors in this call.. |  | ||||||
|     async for val in gen(): |  | ||||||
|         print(val) |  | ||||||
| 
 |  | ||||||
|     # await trio.sleep(0.5) |  | ||||||
| 
 |  | ||||||
|     # prematurely destroy the connection |  | ||||||
|     await ctx.chan.aclose() |  | ||||||
| 
 |  | ||||||
|     # THIS CAUSES AN UNRECOVERABLE HANG |  | ||||||
|     # without latest ``pdbpp``: |  | ||||||
|     assert 0 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery( |  | ||||||
|         debug_mode=True, |  | ||||||
|         enable_transports=['uds'], |  | ||||||
|         loglevel='devx', |  | ||||||
|     ) as n: |  | ||||||
|         p = await n.start_actor( |  | ||||||
|             'bp_boi', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|         ) |  | ||||||
|         async with p.open_context( |  | ||||||
|             just_bp, |  | ||||||
|         ) as (ctx, first): |  | ||||||
|             await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -3,20 +3,17 @@ import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def breakpoint_forever(): | async def breakpoint_forever(): | ||||||
|     ''' |     """Indefinitely re-enter debugger in child actor. | ||||||
|     Indefinitely re-enter debugger in child actor. |     """ | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     while True: |     while True: | ||||||
|         await trio.sleep(0.1) |         await trio.sleep(0.1) | ||||||
|         await tractor.pause() |         await tractor.breakpoint() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
| 
 | 
 | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         loglevel='cancel', |  | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         portal = await n.run_in_actor( |         portal = await n.run_in_actor( | ||||||
|  |  | ||||||
|  | @ -3,26 +3,16 @@ import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def name_error(): | async def name_error(): | ||||||
|     getattr(doggypants)  # noqa (on purpose) |     getattr(doggypants) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='transport', |     ) as n: | ||||||
|     ) as an: |  | ||||||
| 
 | 
 | ||||||
|         # TODO: ideally the REPL arrives at this frame in the parent, |         portal = await n.run_in_actor(name_error) | ||||||
|         # ABOVE the @api_frame of `Portal.run_in_actor()` (which |         await portal.result() | ||||||
|         # should eventually not even be a portal method ... XD) |  | ||||||
|         # await tractor.pause() |  | ||||||
|         p: tractor.Portal = await an.run_in_actor(name_error) |  | ||||||
| 
 |  | ||||||
|         # with this style, should raise on this line |  | ||||||
|         await p.result() |  | ||||||
| 
 |  | ||||||
|         # with this alt style should raise at `open_nusery()` |  | ||||||
|         # return await p.result() |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|  |  | ||||||
|  | @ -1,169 +0,0 @@ | ||||||
| from functools import partial |  | ||||||
| import time |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| # TODO: only import these when not running from test harness? |  | ||||||
| # can we detect `pexpect` usage maybe? |  | ||||||
| # from tractor.devx.debug import ( |  | ||||||
| #     get_lock, |  | ||||||
| #     get_debug_req, |  | ||||||
| # ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def sync_pause( |  | ||||||
|     use_builtin: bool = False, |  | ||||||
|     error: bool = False, |  | ||||||
|     hide_tb: bool = True, |  | ||||||
|     pre_sleep: float|None = None, |  | ||||||
| ): |  | ||||||
|     if pre_sleep: |  | ||||||
|         time.sleep(pre_sleep) |  | ||||||
| 
 |  | ||||||
|     if use_builtin: |  | ||||||
|         breakpoint(hide_tb=hide_tb) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         # TODO: maybe for testing some kind of cm style interface |  | ||||||
|         # where the `._set_trace()` call doesn't happen until block |  | ||||||
|         # exit? |  | ||||||
|         # assert get_lock().ctx_in_debug is None |  | ||||||
|         # assert get_debug_req().repl is None |  | ||||||
|         tractor.pause_from_sync() |  | ||||||
|         # assert get_debug_req().repl is None |  | ||||||
| 
 |  | ||||||
|     if error: |  | ||||||
|         raise RuntimeError('yoyo sync code error') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def start_n_sync_pause( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     actor: tractor.Actor = tractor.current_actor() |  | ||||||
| 
 |  | ||||||
|     # sync to parent-side task |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     print(f'Entering `sync_pause()` in subactor: {actor.uid}\n') |  | ||||||
|     sync_pause() |  | ||||||
|     print(f'Exited `sync_pause()` in subactor: {actor.uid}\n') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main() -> None: |  | ||||||
|     async with ( |  | ||||||
|         tractor.open_nursery( |  | ||||||
|             debug_mode=True, |  | ||||||
|             maybe_enable_greenback=True, |  | ||||||
|             enable_stack_on_sig=True, |  | ||||||
|             # loglevel='warning', |  | ||||||
|             # loglevel='devx', |  | ||||||
|         ) as an, |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
|         # just from root task |  | ||||||
|         sync_pause() |  | ||||||
| 
 |  | ||||||
|         p: tractor.Portal  = await an.start_actor( |  | ||||||
|             'subactor', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             # infect_asyncio=True, |  | ||||||
|             debug_mode=True, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # TODO: 3 sub-actor usage cases: |  | ||||||
|         # -[x] via a `.open_context()` |  | ||||||
|         # -[ ] via a `.run_in_actor()` call |  | ||||||
|         # -[ ] via a `.run()` |  | ||||||
|         # -[ ] via a `.to_thread.run_sync()` in subactor |  | ||||||
|         async with p.open_context( |  | ||||||
|             start_n_sync_pause, |  | ||||||
|         ) as (ctx, first): |  | ||||||
|             assert first is None |  | ||||||
| 
 |  | ||||||
|             # TODO: handle bg-thread-in-root-actor special cases! |  | ||||||
|             # |  | ||||||
|             # there are a couple very subtle situations possible here |  | ||||||
|             # and they are likely to become more important as cpython |  | ||||||
|             # moves to support no-GIL. |  | ||||||
|             # |  | ||||||
|             # Cases: |  | ||||||
|             # 1. root-actor bg-threads that call `.pause_from_sync()` |  | ||||||
|             #   whilst an in-tree subactor also is using ` .pause()`. |  | ||||||
|             # |_ since the root-actor bg thread can not |  | ||||||
|             #   `Lock._debug_lock.acquire_nowait()` without running |  | ||||||
|             #   a `trio.Task`, AND because the |  | ||||||
|             #   `PdbREPL.set_continue()` is called from that |  | ||||||
|             #   bg-thread, we can not `._debug_lock.release()` |  | ||||||
|             #   either! |  | ||||||
|             #  |_ this results in no actor-tree `Lock` being used |  | ||||||
|             #    on behalf of the bg-thread and thus the subactor's |  | ||||||
|             #    task and the thread trying to to use stdio |  | ||||||
|             #    simultaneously which results in the classic TTY |  | ||||||
|             #    clobbering! |  | ||||||
|             # |  | ||||||
|             # 2. mutiple sync-bg-threads that call |  | ||||||
|             #   `.pause_from_sync()` where one is scheduled via |  | ||||||
|             #   `Nursery.start_soon(to_thread.run_sync)` in a bg |  | ||||||
|             #   task. |  | ||||||
|             # |  | ||||||
|             #   Due to the GIL, the threads never truly try to step |  | ||||||
|             #   through the REPL simultaneously, BUT their `logging` |  | ||||||
|             #   and traceback outputs are interleaved since the GIL |  | ||||||
|             #   (seemingly) on every REPL-input from the user |  | ||||||
|             #   switches threads.. |  | ||||||
|             # |  | ||||||
|             #   Soo, the context switching semantics of the GIL |  | ||||||
|             #   result in a very confusing and messy interaction UX |  | ||||||
|             #   since eval and (tb) print output is NOT synced to |  | ||||||
|             #   each REPL-cycle (like we normally make it via |  | ||||||
|             #   a `.set_continue()` callback triggering the |  | ||||||
|             #   `Lock.release()`). Ideally we can solve this |  | ||||||
|             #   usability issue NOW because this will of course be |  | ||||||
|             #   that much more important when eventually there is no |  | ||||||
|             #   GIL! |  | ||||||
| 
 |  | ||||||
|             # XXX should cause double REPL entry and thus TTY |  | ||||||
|             # clobbering due to case 1. above! |  | ||||||
|             tn.start_soon( |  | ||||||
|                 partial( |  | ||||||
|                     trio.to_thread.run_sync, |  | ||||||
|                     partial( |  | ||||||
|                         sync_pause, |  | ||||||
|                         use_builtin=False, |  | ||||||
|                         # pre_sleep=0.5, |  | ||||||
|                     ), |  | ||||||
|                     abandon_on_cancel=True, |  | ||||||
|                     thread_name='start_soon_root_bg_thread', |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             await tractor.pause() |  | ||||||
| 
 |  | ||||||
|             # XXX should cause double REPL entry and thus TTY |  | ||||||
|             # clobbering due to case 2. above! |  | ||||||
|             await trio.to_thread.run_sync( |  | ||||||
|                 partial( |  | ||||||
|                     sync_pause, |  | ||||||
|                     # NOTE this already works fine since in the new |  | ||||||
|                     # thread the `breakpoint()` built-in is never |  | ||||||
|                     # overloaded, thus NO locking is used, HOWEVER |  | ||||||
|                     # the case 2. from above still exists! |  | ||||||
|                     use_builtin=True, |  | ||||||
|                 ), |  | ||||||
|                 # TODO: with this `False` we can hang!??! |  | ||||||
|                 # abandon_on_cancel=False, |  | ||||||
|                 abandon_on_cancel=True, |  | ||||||
|                 thread_name='inline_root_bg_thread', |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         await ctx.cancel() |  | ||||||
| 
 |  | ||||||
|         # TODO: case where we cancel from trio-side while asyncio task |  | ||||||
|         # has debugger lock? |  | ||||||
|         await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,34 +1,25 @@ | ||||||
| import time | import time | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor import ( |  | ||||||
|     ActorNursery, |  | ||||||
|     MsgStream, |  | ||||||
|     Portal, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # this is the first 2 actors, streamer_1 and streamer_2 | # this is the first 2 actors, streamer_1 and streamer_2 | ||||||
| async def stream_data(seed): | async def stream_data(seed): | ||||||
|     for i in range(seed): |     for i in range(seed): | ||||||
|         yield i |         yield i | ||||||
|         await trio.sleep(0.0001)  # trigger scheduler |         await trio.sleep(0)  # trigger scheduler | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # this is the third actor; the aggregator | # this is the third actor; the aggregator | ||||||
| async def aggregate(seed): | async def aggregate(seed): | ||||||
|     ''' |     """Ensure that the two streams we receive match but only stream | ||||||
|     Ensure that the two streams we receive match but only stream |  | ||||||
|     a single set of values to the parent. |     a single set of values to the parent. | ||||||
| 
 |     """ | ||||||
|     ''' |     async with tractor.open_nursery() as nursery: | ||||||
|     an: ActorNursery |         portals = [] | ||||||
|     async with tractor.open_nursery() as an: |  | ||||||
|         portals: list[Portal] = [] |  | ||||||
|         for i in range(1, 3): |         for i in range(1, 3): | ||||||
| 
 |             # fork point | ||||||
|             # fork/spawn call |             portal = await nursery.start_actor( | ||||||
|             portal = await an.start_actor( |  | ||||||
|                 name=f'streamer_{i}', |                 name=f'streamer_{i}', | ||||||
|                 enable_modules=[__name__], |                 enable_modules=[__name__], | ||||||
|             ) |             ) | ||||||
|  | @ -52,11 +43,7 @@ async def aggregate(seed): | ||||||
|         async with trio.open_nursery() as n: |         async with trio.open_nursery() as n: | ||||||
| 
 | 
 | ||||||
|             for portal in portals: |             for portal in portals: | ||||||
|                 n.start_soon( |                 n.start_soon(push_to_chan, portal, send_chan.clone()) | ||||||
|                     push_to_chan, |  | ||||||
|                     portal, |  | ||||||
|                     send_chan.clone(), |  | ||||||
|                 ) |  | ||||||
| 
 | 
 | ||||||
|             # close this local task's reference to send side |             # close this local task's reference to send side | ||||||
|             await send_chan.aclose() |             await send_chan.aclose() | ||||||
|  | @ -73,36 +60,26 @@ async def aggregate(seed): | ||||||
| 
 | 
 | ||||||
|             print("FINISHED ITERATING in aggregator") |             print("FINISHED ITERATING in aggregator") | ||||||
| 
 | 
 | ||||||
|         await an.cancel() |         await nursery.cancel() | ||||||
|         print("WAITING on `ActorNursery` to finish") |         print("WAITING on `ActorNursery` to finish") | ||||||
|     print("AGGREGATOR COMPLETE!") |     print("AGGREGATOR COMPLETE!") | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main() -> list[int]: | # this is the main actor and *arbiter* | ||||||
|     ''' | async def main(): | ||||||
|     This is the "root" actor's main task's entrypoint. |     # a nursery which spawns "actors" | ||||||
| 
 |  | ||||||
|     By default (and if not otherwise specified) that root process |  | ||||||
|     also acts as a "registry actor" / "registrar" on the localhost |  | ||||||
|     for the purposes of multi-actor "service discovery". |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # yes, a nursery which spawns `trio`-"actors" B) |  | ||||||
|     an: ActorNursery |  | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         loglevel='cancel', |         arbiter_addr=('127.0.0.1', 1616) | ||||||
|         # debug_mode=True, |     ) as nursery: | ||||||
|     ) as an: |  | ||||||
| 
 | 
 | ||||||
|         seed = int(1e3) |         seed = int(1e3) | ||||||
|         pre_start = time.time() |         pre_start = time.time() | ||||||
| 
 | 
 | ||||||
|         portal: Portal = await an.start_actor( |         portal = await nursery.start_actor( | ||||||
|             name='aggregator', |             name='aggregator', | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         stream: MsgStream |  | ||||||
|         async with portal.open_stream_from( |         async with portal.open_stream_from( | ||||||
|             aggregate, |             aggregate, | ||||||
|             seed=seed, |             seed=seed, | ||||||
|  | @ -111,12 +88,11 @@ async def main() -> list[int]: | ||||||
|             start = time.time() |             start = time.time() | ||||||
|             # the portal call returns exactly what you'd expect |             # the portal call returns exactly what you'd expect | ||||||
|             # as if the remote "aggregate" function was called locally |             # as if the remote "aggregate" function was called locally | ||||||
|             result_stream: list[int] = [] |             result_stream = [] | ||||||
|             async for value in stream: |             async for value in stream: | ||||||
|                 result_stream.append(value) |                 result_stream.append(value) | ||||||
| 
 | 
 | ||||||
|         cancelled: bool = await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
|         assert cancelled |  | ||||||
| 
 | 
 | ||||||
|         print(f"STREAM TIME = {time.time() - start}") |         print(f"STREAM TIME = {time.time() - start}") | ||||||
|         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") |         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") | ||||||
|  |  | ||||||
|  | @ -1,92 +0,0 @@ | ||||||
| ''' |  | ||||||
| An SC compliant infected ``asyncio`` echo server. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import asyncio |  | ||||||
| from statistics import mean |  | ||||||
| import time |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def aio_echo_server( |  | ||||||
|     to_trio: trio.MemorySendChannel, |  | ||||||
|     from_trio: asyncio.Queue, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     # a first message must be sent **from** this ``asyncio`` |  | ||||||
|     # task or the ``trio`` side will never unblock from |  | ||||||
|     # ``tractor.to_asyncio.open_channel_from():`` |  | ||||||
|     to_trio.send_nowait('start') |  | ||||||
| 
 |  | ||||||
|     # XXX: this uses an ``from_trio: asyncio.Queue`` currently but we |  | ||||||
|     # should probably offer something better. |  | ||||||
|     while True: |  | ||||||
|         # echo the msg back |  | ||||||
|         to_trio.send_nowait(await from_trio.get()) |  | ||||||
|         await asyncio.sleep(0) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def trio_to_aio_echo_server( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     # this will block until the ``asyncio`` task sends a "first" |  | ||||||
|     # message. |  | ||||||
|     async with tractor.to_asyncio.open_channel_from( |  | ||||||
|         aio_echo_server, |  | ||||||
|     ) as (first, chan): |  | ||||||
| 
 |  | ||||||
|         assert first == 'start' |  | ||||||
|         await ctx.started(first) |  | ||||||
| 
 |  | ||||||
|         async with ctx.open_stream() as stream: |  | ||||||
| 
 |  | ||||||
|             async for msg in stream: |  | ||||||
|                 await chan.send(msg) |  | ||||||
| 
 |  | ||||||
|                 out = await chan.receive() |  | ||||||
|                 # echo back to parent actor-task |  | ||||||
|                 await stream.send(out) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_nursery() as n: |  | ||||||
|         p = await n.start_actor( |  | ||||||
|             'aio_server', |  | ||||||
|             enable_modules=[__name__], |  | ||||||
|             infect_asyncio=True, |  | ||||||
|         ) |  | ||||||
|         async with p.open_context( |  | ||||||
|             trio_to_aio_echo_server, |  | ||||||
|         ) as (ctx, first): |  | ||||||
| 
 |  | ||||||
|             assert first == 'start' |  | ||||||
| 
 |  | ||||||
|             count = 0 |  | ||||||
|             async with ctx.open_stream() as stream: |  | ||||||
| 
 |  | ||||||
|                 delays = [] |  | ||||||
|                 send = time.time() |  | ||||||
| 
 |  | ||||||
|                 await stream.send(count) |  | ||||||
|                 async for msg in stream: |  | ||||||
|                     recv = time.time() |  | ||||||
|                     delays.append(recv - send) |  | ||||||
|                     assert msg == count |  | ||||||
|                     count += 1 |  | ||||||
|                     send = time.time() |  | ||||||
|                     await stream.send(count) |  | ||||||
| 
 |  | ||||||
|                     if count >= 1e3: |  | ||||||
|                         break |  | ||||||
| 
 |  | ||||||
|         print(f'mean round trip rate (Hz): {1/mean(delays)}') |  | ||||||
|         await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,49 +0,0 @@ | ||||||
| import trio |  | ||||||
| import click |  | ||||||
| import tractor |  | ||||||
| import pydantic |  | ||||||
| # from multiprocessing import shared_memory |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def just_sleep( |  | ||||||
| 
 |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     **kwargs, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Test a small ping-pong 2-way streaming server. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     await ctx.started() |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main() -> None: |  | ||||||
| 
 |  | ||||||
|     proc = await trio.open_process( ( |  | ||||||
|         'python', |  | ||||||
|         '-c', |  | ||||||
|         'import trio; trio.run(trio.sleep_forever)', |  | ||||||
|     )) |  | ||||||
|     await proc.wait() |  | ||||||
|     # await trio.sleep_forever() |  | ||||||
|     # async with tractor.open_nursery() as n: |  | ||||||
| 
 |  | ||||||
|     #     portal = await n.start_actor( |  | ||||||
|     #         'rpc_server', |  | ||||||
|     #         enable_modules=[__name__], |  | ||||||
|     #     ) |  | ||||||
| 
 |  | ||||||
|     #     async with portal.open_context( |  | ||||||
|     #         just_sleep,  # taken from pytest parameterization |  | ||||||
|     #     ) as (ctx, sent): |  | ||||||
|     #         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     import time |  | ||||||
|     # time.sleep(999) |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -8,17 +8,15 @@ This uses no extra threads, fancy semaphores or futures; all we need | ||||||
| is ``tractor``'s channels. | is ``tractor``'s channels. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from contextlib import ( | from contextlib import asynccontextmanager | ||||||
|     asynccontextmanager as acm, | from typing import List, Callable | ||||||
|     aclosing, |  | ||||||
| ) |  | ||||||
| from typing import Callable |  | ||||||
| import itertools | import itertools | ||||||
| import math | import math | ||||||
| import time | import time | ||||||
| 
 | 
 | ||||||
| import tractor | import tractor | ||||||
| import trio | import trio | ||||||
|  | from async_generator import aclosing | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| PRIMES = [ | PRIMES = [ | ||||||
|  | @ -46,7 +44,7 @@ async def is_prime(n): | ||||||
|     return True |     return True | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @asynccontextmanager | ||||||
| async def worker_pool(workers=4): | async def worker_pool(workers=4): | ||||||
|     """Though it's a trivial special case for ``tractor``, the well |     """Though it's a trivial special case for ``tractor``, the well | ||||||
|     known "worker pool" seems to be the defacto "but, I want this |     known "worker pool" seems to be the defacto "but, I want this | ||||||
|  | @ -73,8 +71,8 @@ async def worker_pool(workers=4): | ||||||
| 
 | 
 | ||||||
|         async def _map( |         async def _map( | ||||||
|             worker_func: Callable[[int], bool], |             worker_func: Callable[[int], bool], | ||||||
|             sequence: list[int] |             sequence: List[int] | ||||||
|         ) -> list[bool]: |         ) -> List[bool]: | ||||||
| 
 | 
 | ||||||
|             # define an async (local) task to collect results from workers |             # define an async (local) task to collect results from workers | ||||||
|             async def send_result(func, value, portal): |             async def send_result(func, value, portal): | ||||||
|  |  | ||||||
|  | @ -1,46 +0,0 @@ | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def sleepy_jane() -> None: |  | ||||||
|     uid: tuple = tractor.current_actor().uid |  | ||||||
|     print(f'Yo i am actor {uid}') |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main(): |  | ||||||
|     ''' |  | ||||||
|     Spawn a flat actor cluster, with one process per detected core. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     portal_map: dict[str, tractor.Portal] |  | ||||||
| 
 |  | ||||||
|     # look at this hip new syntax! |  | ||||||
|     async with ( |  | ||||||
| 
 |  | ||||||
|         tractor.open_actor_cluster( |  | ||||||
|             modules=[__name__] |  | ||||||
|         ) as portal_map, |  | ||||||
| 
 |  | ||||||
|         tractor.trionics.collapse_eg(), |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
| 
 |  | ||||||
|         for (name, portal) in portal_map.items(): |  | ||||||
|             tn.start_soon( |  | ||||||
|                 portal.run, |  | ||||||
|                 sleepy_jane, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         await trio.sleep(0.5) |  | ||||||
| 
 |  | ||||||
|         # kill the cluster with a cancel |  | ||||||
|         raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     try: |  | ||||||
|         trio.run(main) |  | ||||||
|     except KeyboardInterrupt: |  | ||||||
|         print('trio cancelled by KBI') |  | ||||||
|  | @ -13,7 +13,7 @@ async def simple_rpc( | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # signal to parent that we're up much like |     # signal to parent that we're up much like | ||||||
|     # ``trio.TaskStatus.started()`` |     # ``trio_typing.TaskStatus.started()`` | ||||||
|     await ctx.started(data + 1) |     await ctx.started(data + 1) | ||||||
| 
 | 
 | ||||||
|     async with ctx.open_stream() as stream: |     async with ctx.open_stream() as stream: | ||||||
|  |  | ||||||
|  | @ -9,7 +9,7 @@ async def main(service_name): | ||||||
|     async with tractor.open_nursery() as an: |     async with tractor.open_nursery() as an: | ||||||
|         await an.start_actor(service_name) |         await an.start_actor(service_name) | ||||||
| 
 | 
 | ||||||
|         async with tractor.get_registry() as portal: |         async with tractor.get_arbiter('127.0.0.1', 1616) as portal: | ||||||
|             print(f"Arbiter is listening on {portal.channel}") |             print(f"Arbiter is listening on {portal.channel}") | ||||||
| 
 | 
 | ||||||
|         async with tractor.wait_for_actor(service_name) as sockaddr: |         async with tractor.wait_for_actor(service_name) as sockaddr: | ||||||
|  |  | ||||||
|  | @ -1,85 +0,0 @@ | ||||||
| from contextlib import ( |  | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| log = tractor.log.get_logger( |  | ||||||
|     name=__name__ |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| _lock: trio.Lock|None = None |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def acquire_singleton_lock( |  | ||||||
| ) -> None: |  | ||||||
|     global _lock |  | ||||||
|     if _lock is None: |  | ||||||
|         log.info('Allocating LOCK') |  | ||||||
|         _lock = trio.Lock() |  | ||||||
| 
 |  | ||||||
|     log.info('TRYING TO LOCK ACQUIRE') |  | ||||||
|     async with _lock: |  | ||||||
|         log.info('ACQUIRED') |  | ||||||
|         yield _lock |  | ||||||
| 
 |  | ||||||
|     log.info('RELEASED') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def hold_lock_forever( |  | ||||||
|     task_status=trio.TASK_STATUS_IGNORED |  | ||||||
| ): |  | ||||||
|     async with ( |  | ||||||
|         tractor.trionics.maybe_raise_from_masking_exc(), |  | ||||||
|         acquire_singleton_lock() as lock, |  | ||||||
|     ): |  | ||||||
|         task_status.started(lock) |  | ||||||
|         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     ignore_special_cases: bool, |  | ||||||
|     loglevel: str = 'info', |  | ||||||
|     debug_mode: bool = True, |  | ||||||
| ): |  | ||||||
|     async with ( |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
| 
 |  | ||||||
|         # tractor.trionics.maybe_raise_from_masking_exc() |  | ||||||
|         # ^^^ XXX NOTE, interestingly putting the unmasker |  | ||||||
|         # here does not exhibit the same behaviour ?? |  | ||||||
|     ): |  | ||||||
|         if not ignore_special_cases: |  | ||||||
|             from tractor.trionics import _taskc |  | ||||||
|             _taskc._mask_cases.clear() |  | ||||||
| 
 |  | ||||||
|         _lock = await tn.start( |  | ||||||
|             hold_lock_forever, |  | ||||||
|         ) |  | ||||||
|         with trio.move_on_after(0.2): |  | ||||||
|             await tn.start( |  | ||||||
|                 hold_lock_forever, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         tn.cancel_scope.cancel() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # XXX, manual test as script |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     tractor.log.get_console_log(level='info') |  | ||||||
|     for case in [True, False]: |  | ||||||
|         log.info( |  | ||||||
|             f'\n' |  | ||||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' |  | ||||||
|             f'ignore_special_cases: {case!r}\n' |  | ||||||
|         ) |  | ||||||
|         trio.run(partial( |  | ||||||
|             main, |  | ||||||
|             ignore_special_cases=case, |  | ||||||
|             loglevel='info', |  | ||||||
|         )) |  | ||||||
|  | @ -1,195 +0,0 @@ | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
|     # TODO, any diff in async case(s)?? |  | ||||||
|     # asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| log = tractor.log.get_logger( |  | ||||||
|     name=__name__ |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @cm |  | ||||||
| def teardown_on_exc( |  | ||||||
|     raise_from_handler: bool = False, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     You could also have a teardown handler which catches any exc and |  | ||||||
|     does some required teardown. In this case the problem is |  | ||||||
|     compounded UNLESS you ensure the handler's scope is OUTSIDE the |  | ||||||
|     `ux.aclose()`.. that is in the caller's enclosing scope. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     try: |  | ||||||
|         yield |  | ||||||
|     except BaseException as _berr: |  | ||||||
|         berr = _berr |  | ||||||
|         log.exception( |  | ||||||
|             f'Handling termination teardown in child due to,\n' |  | ||||||
|             f'{berr!r}\n' |  | ||||||
|         ) |  | ||||||
|         if raise_from_handler: |  | ||||||
|             # XXX teardown ops XXX |  | ||||||
|             # on termination these steps say need to be run to |  | ||||||
|             # ensure wider system consistency (like the state of |  | ||||||
|             # remote connections/services). |  | ||||||
|             # |  | ||||||
|             # HOWEVER, any bug in this teardown code is also |  | ||||||
|             # masked by the `tx.aclose()`! |  | ||||||
|             # this is also true if `_tn.cancel_scope` is |  | ||||||
|             # `.cancel_called` by the parent in a graceful |  | ||||||
|             # request case.. |  | ||||||
| 
 |  | ||||||
|             # simulate a bug in teardown handler. |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 'woopsie teardown bug!' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         raise  # no teardown bug. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def finite_stream_to_rent( |  | ||||||
|     tx: trio.abc.SendChannel, |  | ||||||
|     child_errors_mid_stream: bool, |  | ||||||
|     raise_unmasked: bool, |  | ||||||
| 
 |  | ||||||
|     task_status: trio.TaskStatus[ |  | ||||||
|         trio.CancelScope, |  | ||||||
|     ] = trio.TASK_STATUS_IGNORED, |  | ||||||
| ): |  | ||||||
|     async with ( |  | ||||||
|         # XXX without this unmasker the mid-streaming RTE is never |  | ||||||
|         # reported since it is masked by the `tx.aclose()` |  | ||||||
|         # call which in turn raises `Cancelled`! |  | ||||||
|         # |  | ||||||
|         # NOTE, this is WITHOUT doing any exception handling |  | ||||||
|         # inside the child  task! |  | ||||||
|         # |  | ||||||
|         # TODO, uncomment next LoC to see the supprsessed beg[RTE]! |  | ||||||
|         tractor.trionics.maybe_raise_from_masking_exc( |  | ||||||
|             raise_unmasked=raise_unmasked, |  | ||||||
|         ), |  | ||||||
| 
 |  | ||||||
|         tx as tx,  # .aclose() is the guilty masker chkpt! |  | ||||||
| 
 |  | ||||||
|         # XXX, this ONLY matters in the |  | ||||||
|         # `child_errors_mid_stream=False` case oddly!? |  | ||||||
|         # THAT IS, if no tn is opened in that case then the |  | ||||||
|         # test will not fail; it raises the RTE correctly? |  | ||||||
|         # |  | ||||||
|         # -> so it seems this new scope somehow affects the form of |  | ||||||
|         #    eventual in the parent EG? |  | ||||||
|         tractor.trionics.maybe_open_nursery( |  | ||||||
|             nursery=( |  | ||||||
|                 None |  | ||||||
|                 if not child_errors_mid_stream |  | ||||||
|                 else True |  | ||||||
|             ), |  | ||||||
|         ) as _tn, |  | ||||||
|     ): |  | ||||||
|         # pass our scope back to parent for supervision\ |  | ||||||
|         # control. |  | ||||||
|         cs: trio.CancelScope|None = ( |  | ||||||
|             None |  | ||||||
|             if _tn is True |  | ||||||
|             else _tn.cancel_scope |  | ||||||
|         ) |  | ||||||
|         task_status.started(cs) |  | ||||||
| 
 |  | ||||||
|         with teardown_on_exc( |  | ||||||
|             raise_from_handler=not child_errors_mid_stream, |  | ||||||
|         ): |  | ||||||
|             for i in range(100): |  | ||||||
|                 log.debug( |  | ||||||
|                     f'Child tx {i!r}\n' |  | ||||||
|                 ) |  | ||||||
|                 if ( |  | ||||||
|                     child_errors_mid_stream |  | ||||||
|                     and |  | ||||||
|                     i == 66 |  | ||||||
|                 ): |  | ||||||
|                     # oh wait but WOOPS there's a bug |  | ||||||
|                     # in that teardown code!? |  | ||||||
|                     raise RuntimeError( |  | ||||||
|                         'woopsie, a mid-streaming bug!?' |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|                 await tx.send(i) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def main( |  | ||||||
|     # TODO! toggle this for the 2 cases! |  | ||||||
|     # 1. child errors mid-stream while parent is also requesting |  | ||||||
|     #   (graceful) cancel of that child streamer. |  | ||||||
|     # |  | ||||||
|     # 2. child contains a teardown handler which contains a |  | ||||||
|     #   bug and raises. |  | ||||||
|     # |  | ||||||
|     child_errors_mid_stream: bool, |  | ||||||
| 
 |  | ||||||
|     raise_unmasked: bool = False, |  | ||||||
|     loglevel: str = 'info', |  | ||||||
| ): |  | ||||||
|     tractor.log.get_console_log(level=loglevel) |  | ||||||
| 
 |  | ||||||
|     # the `.aclose()` being checkpoints on these |  | ||||||
|     # is the source of the problem.. |  | ||||||
|     tx, rx = trio.open_memory_channel(1) |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         tractor.trionics.collapse_eg(), |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|         rx as rx, |  | ||||||
|     ): |  | ||||||
|         _child_cs = await tn.start( |  | ||||||
|             partial( |  | ||||||
|                 finite_stream_to_rent, |  | ||||||
|                 child_errors_mid_stream=child_errors_mid_stream, |  | ||||||
|                 raise_unmasked=raise_unmasked, |  | ||||||
|                 tx=tx, |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         async for msg in rx: |  | ||||||
|             log.debug( |  | ||||||
|                 f'Rent rx {msg!r}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # simulate some external cancellation |  | ||||||
|             # request **JUST BEFORE** the child errors. |  | ||||||
|             if msg == 65: |  | ||||||
|                 log.cancel( |  | ||||||
|                     f'Cancelling parent on,\n' |  | ||||||
|                     f'msg={msg}\n' |  | ||||||
|                     f'\n' |  | ||||||
|                     f'Simulates OOB cancel request!\n' |  | ||||||
|                 ) |  | ||||||
|                 tn.cancel_scope.cancel() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # XXX, manual test as script |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     tractor.log.get_console_log(level='info') |  | ||||||
|     for case in [True, False]: |  | ||||||
|         log.info( |  | ||||||
|             f'\n' |  | ||||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' |  | ||||||
|             f'child_errors_midstream: {case!r}\n' |  | ||||||
|         ) |  | ||||||
|         try: |  | ||||||
|             trio.run(partial( |  | ||||||
|                 main, |  | ||||||
|                 child_errors_mid_stream=case, |  | ||||||
|                 # raise_unmasked=True, |  | ||||||
|                 loglevel='info', |  | ||||||
|             )) |  | ||||||
|         except Exception as _exc: |  | ||||||
|             exc = _exc |  | ||||||
|             log.exception( |  | ||||||
|                 'Should have raised an RTE or Cancelled?\n' |  | ||||||
|             ) |  | ||||||
|             breakpoint() |  | ||||||
|  | @ -0,0 +1,9 @@ | ||||||
|  | Add optional `msgspec <https://jcristharif.com/msgspec/>`_ support over | ||||||
|  | TCP streams as an alernative, faster MessagePack codec. | ||||||
|  | 
 | ||||||
|  | This get's us moving toward typed messaging/IPC protocols. Further, | ||||||
|  | ``msgspec`` structs may be a valid tool to start for formalizing our "SC | ||||||
|  | dialog un-protocol" messages as described in `#36 | ||||||
|  | <https://github.com/goodboy/tractor/issues/36>`_`. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @ -0,0 +1,12 @@ | ||||||
|  | Add `tokio-style broadcast channels | ||||||
|  | <https://docs.rs/tokio/1.11.0/tokio/sync/broadcast/index.html>`_ as | ||||||
|  | a solution for `#204 <https://github.com/goodboy/tractor/pull/204>`_ and | ||||||
|  | discussed thoroughly in `trio/#987 | ||||||
|  | <https://github.com/python-trio/trio/issues/987>`_. | ||||||
|  | 
 | ||||||
|  | This gives us local task broadcast functionality using a new | ||||||
|  | ``BroadcastReceiver`` type which can wrap ``trio.ReceiveChannel``  and | ||||||
|  | provide fan-out copies of a stream of data to every subscribed consumer. | ||||||
|  | We use this new machinery to provide a ``ReceiveMsgStream.subscribe()`` | ||||||
|  | async context manager which can be used by actor-local concumers tasks | ||||||
|  | to easily pull from a shared and dynamic IPC stream. | ||||||
|  | @ -0,0 +1,9 @@ | ||||||
|  | Drop stream "shielding" support which was originally added to sidestep | ||||||
|  | a cancelled call to ``.receive()`` | ||||||
|  | 
 | ||||||
|  | In the original api design a stream instance was returned directly from | ||||||
|  | a call to ``Portal.run()`` and thus there was no "exit phase" to handle | ||||||
|  | cancellations and errors which would trigger implicit closure. Now that | ||||||
|  | we have said enter/exit semantics with ``Portal.open_stream_from()`` and | ||||||
|  | ``Context.open_stream()`` we can drop this implicit (and arguably | ||||||
|  | confusing) behavior. | ||||||
|  | @ -0,0 +1 @@ | ||||||
|  | Drop Python 3.7 support in preparation for supporting 3.9+ syntax. | ||||||
|  | @ -0,0 +1,6 @@ | ||||||
|  | Handle broken channel/stream faults where the root's tty lock is left acquired by some | ||||||
|  | child actor who went MIA and the root ends up hanging indefinitely. | ||||||
|  | 
 | ||||||
|  | There's two parts here: | ||||||
|  | - Don't shield wait on the lock | ||||||
|  | - Always do our best to release the lock on the expected worst case connection faults | ||||||
|  | @ -4,5 +4,5 @@ now and use the default `fragment set`_. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| .. _towncrier docs: https://github.com/twisted/towncrier#quick-start | .. _towncrier docs: https://github.com/twisted/towncrier#quick-start | ||||||
| .. _pluggy release readme: https://github.com/pytest-dev/pluggy/blob/main/changelog/README.rst | .. _pluggy release readme: https://github.com/twisted/towncrier#quick-start | ||||||
| .. _fragment set: https://github.com/twisted/towncrier#news-fragments | .. _fragment set: https://github.com/twisted/towncrier#news-fragments | ||||||
|  | @ -1,16 +0,0 @@ | ||||||
| Strictly support Python 3.10+, start runtime machinery reorg |  | ||||||
| 
 |  | ||||||
| Since we want to push forward using the new `match:` syntax for our |  | ||||||
| internal RPC-msg loops, we officially drop 3.9 support for the next |  | ||||||
| release which should coincide well with the first release of 3.11. |  | ||||||
| 
 |  | ||||||
| This patch set also officially removes the ``tractor.run()`` API (which |  | ||||||
| has been deprecated for some time) as well as starts an initial re-org |  | ||||||
| of the internal runtime core by: |  | ||||||
| - renaming ``tractor._actor`` -> ``._runtime`` |  | ||||||
| - moving the ``._runtime.ActorActor._process_messages()`` and |  | ||||||
|   ``._async_main()`` to be module level singleton-task-functions since |  | ||||||
|   they are only started once for each connection and actor spawn |  | ||||||
|   respectively; this internal API thus looks more similar to (at the |  | ||||||
|   time of writing) the ``trio``-internals in ``trio._core._run``. |  | ||||||
| - officially remove ``tractor.run()``, now deprecated for some time. |  | ||||||
|  | @ -1,4 +0,0 @@ | ||||||
| Only set `._debug.Lock.local_pdb_complete` if has been created. |  | ||||||
| 
 |  | ||||||
| This can be triggered by a very rare race condition (and thus we have no |  | ||||||
| working test yet) but it is known to exist in (a) consumer project(s). |  | ||||||
|  | @ -1,25 +0,0 @@ | ||||||
| Add support for ``trio >= 0.22`` and support for the new Python 3.11 |  | ||||||
| ``[Base]ExceptionGroup`` from `pep 654`_ via the backported |  | ||||||
| `exceptiongroup`_ package and some final fixes to the debug mode |  | ||||||
| subsystem. |  | ||||||
| 
 |  | ||||||
| This port ended up driving some (hopefully) final fixes to our debugger |  | ||||||
| subsystem including the solution to all lingering stdstreams locking |  | ||||||
| race-conditions and deadlock scenarios. This includes extending the |  | ||||||
| debugger tests suite as well as cancellation and ``asyncio`` mode cases. |  | ||||||
| Some of the notable details: |  | ||||||
| 
 |  | ||||||
| - always reverting to the ``trio`` SIGINT handler when leaving debug |  | ||||||
|   mode. |  | ||||||
| - bypassing child attempts to acquire the debug lock when detected |  | ||||||
|   to be amdist actor-runtime-cancellation. |  | ||||||
| - allowing the root actor to cancel local but IPC-stale subactor |  | ||||||
|   requests-tasks for the debug lock when in a "no IPC peers" state. |  | ||||||
| 
 |  | ||||||
| Further we refined our ``ActorNursery`` semantics to be more similar to |  | ||||||
| ``trio`` in the sense that parent task errors are always packed into the |  | ||||||
| actor-nursery emitted exception group and adjusted all tests and |  | ||||||
| examples accordingly. |  | ||||||
| 
 |  | ||||||
| .. _pep 654: https://peps.python.org/pep-0654/#handling-exception-groups |  | ||||||
| .. _exceptiongroup: https://github.com/python-trio/exceptiongroup |  | ||||||
|  | @ -1,5 +0,0 @@ | ||||||
| Establish an explicit "backend spawning" method table; use it from CI |  | ||||||
| 
 |  | ||||||
| More clearly lays out the current set of (3) backends: ``['trio', |  | ||||||
| 'mp_spawn', 'mp_forkserver']`` and adjusts the ``._spawn.py`` internals |  | ||||||
| as well as the test suite to accommodate. |  | ||||||
|  | @ -1,4 +0,0 @@ | ||||||
| Add ``key: Callable[..., Hashable]`` support to ``.trionics.maybe_open_context()`` |  | ||||||
| 
 |  | ||||||
| Gives users finer grained control over cache hit behaviour using |  | ||||||
| a callable which receives the input ``kwargs: dict``. |  | ||||||
|  | @ -1,41 +0,0 @@ | ||||||
| Add support for debug-lock blocking using a ``._debug.Lock._blocked: |  | ||||||
| set[tuple]`` and add ids when no-more IPC connections with the |  | ||||||
| root actor are detected. |  | ||||||
| 
 |  | ||||||
| This is an enhancement which (mostly) solves a lingering debugger |  | ||||||
| locking race case we needed to handle: |  | ||||||
| 
 |  | ||||||
| - child crashes acquires TTY lock in root and attaches to ``pdb`` |  | ||||||
| - child IPC goes down such that all channels to the root are broken |  | ||||||
|   / non-functional. |  | ||||||
| - root is stuck thinking the child is still in debug even though it |  | ||||||
|   can't be contacted and the child actor machinery hasn't been |  | ||||||
|   cancelled by its parent. |  | ||||||
| - root get's stuck in deadlock with child since it won't send a cancel |  | ||||||
|   request until the child is finished debugging (to avoid clobbering |  | ||||||
|   a child that is actually using the debugger), but the child can't |  | ||||||
|   unlock the debugger bc IPC is down and it can't contact the root. |  | ||||||
| 
 |  | ||||||
| To avoid this scenario add debug lock blocking list via |  | ||||||
| `._debug.Lock._blocked: set[tuple]` which holds actor uids for any actor |  | ||||||
| that is detected by the root as having no transport channel connections |  | ||||||
| (of which at least one should exist if this sub-actor at some point |  | ||||||
| acquired the debug lock). The root consequently checks this list for any |  | ||||||
| actor that tries to (re)acquire the lock and blocks with |  | ||||||
| a ``ContextCancelled``. Further, when a debug condition is tested in |  | ||||||
| ``._runtime._invoke``, the context's ``._enter_debugger_on_cancel`` is |  | ||||||
| set to `False` if the actor was put on the block list then all |  | ||||||
| post-mortem / crash handling will be bypassed for that task. |  | ||||||
| 
 |  | ||||||
| In theory this approach to block list management may cause problems |  | ||||||
| where some nested child actor acquires and releases the lock multiple |  | ||||||
| times and it gets stuck on the block list after the first use? If this |  | ||||||
| turns out to be an issue we can try changing the strat so blocks are |  | ||||||
| only added when the root has zero IPC peers left? |  | ||||||
| 
 |  | ||||||
| Further, this adds a root-locking-task side cancel scope, |  | ||||||
| ``Lock._root_local_task_cs_in_debug``, which can be ``.cancel()``-ed by the root |  | ||||||
| runtime when a stale lock is detected during the IPC channel testing. |  | ||||||
| However, right now we're NOT using this since it seems to cause test |  | ||||||
| failures likely due to causing pre-mature cancellation and maybe needs |  | ||||||
| a bit more experimenting? |  | ||||||
|  | @ -1,19 +0,0 @@ | ||||||
| Rework our ``.trionics.BroadcastReceiver`` internals to avoid method |  | ||||||
| recursion and approach a design and interface closer to ``trio``'s |  | ||||||
| ``MemoryReceiveChannel``. |  | ||||||
| 
 |  | ||||||
| The details of the internal changes include: |  | ||||||
| 
 |  | ||||||
| - implementing a ``BroadcastReceiver.receive_nowait()`` and using it |  | ||||||
|   within the async ``.receive()`` thus avoiding recursion from |  | ||||||
|   ``.receive()``. |  | ||||||
| - failing over to an internal ``._receive_from_underlying()`` when the |  | ||||||
|   ``_nowait()`` call raises ``trio.WouldBlock`` |  | ||||||
| - adding ``BroadcastState.statistics()`` for debugging and testing both |  | ||||||
|   internals and by users. |  | ||||||
| - add an internal ``BroadcastReceiver._raise_on_lag: bool`` which can be |  | ||||||
|   set to avoid ``Lagged`` raising for possible use cases where a user |  | ||||||
|   wants to choose between a [cheap or nasty |  | ||||||
|   pattern](https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern) |  | ||||||
|   the the particular stream (we use this in ``piker``'s dark clearing |  | ||||||
|   engine to avoid fast feeds breaking during HFT periods). |  | ||||||
|  | @ -1,11 +0,0 @@ | ||||||
| Always ``list``-cast the ``mngrs`` input to |  | ||||||
| ``.trionics.gather_contexts()`` and ensure its size otherwise raise |  | ||||||
| a ``ValueError``. |  | ||||||
| 
 |  | ||||||
| Turns out that trying to pass an inline-style generator comprehension |  | ||||||
| doesn't seem to work inside the ``async with`` expression? Further, in |  | ||||||
| such a case we can get a hang waiting on the all-entered event |  | ||||||
| completion when the internal mngrs iteration is a noop. Instead we |  | ||||||
| always greedily check a size and error on empty input; the lazy |  | ||||||
| iteration of a generator input is not beneficial anyway since we're |  | ||||||
| entering all manager instances in concurrent tasks. |  | ||||||
|  | @ -1,15 +0,0 @@ | ||||||
| Fixes to ensure IPC (channel) breakage doesn't result in hung actor |  | ||||||
| trees; the zombie reaping and general supervision machinery will always |  | ||||||
| clean up and terminate. |  | ||||||
| 
 |  | ||||||
| This includes not only the (mostly minor) fixes to solve these cases but |  | ||||||
| also a new extensive test suite in `test_advanced_faults.py` with an |  | ||||||
| accompanying highly configurable example module-script in |  | ||||||
| `examples/advanced_faults/ipc_failure_during_stream.py`. Tests ensure we |  | ||||||
| never get hang or zombies despite operating in debug mode and attempt to |  | ||||||
| simulate all possible IPC transport failure cases for a local-host actor |  | ||||||
| tree. |  | ||||||
| 
 |  | ||||||
| Further we simplify `Context.open_stream.__aexit__()` to just call |  | ||||||
| `MsgStream.aclose()` directly more or less avoiding a pure duplicate |  | ||||||
| code path. |  | ||||||
|  | @ -1,10 +0,0 @@ | ||||||
| Always redraw the `pdbpp` prompt on `SIGINT` during REPL use. |  | ||||||
| 
 |  | ||||||
| There was recent changes todo with Python 3.10 that required us to pin |  | ||||||
| to a specific commit in `pdbpp` which have recently been fixed minus |  | ||||||
| this last issue with `SIGINT` shielding: not clobbering or not |  | ||||||
| showing the `(Pdb++)` prompt on ctlr-c by the user. This repairs all |  | ||||||
| that by firstly removing the standard KBI intercepting of the std lib's |  | ||||||
| `pdb.Pdb._cmdloop()` as well as ensuring that only the actor with REPL |  | ||||||
| control ever reports `SIGINT` handler log msgs and prompt redraws. With |  | ||||||
| this we move back to using pypi `pdbpp` release. |  | ||||||
|  | @ -1,7 +0,0 @@ | ||||||
| Drop `trio.Process.aclose()` usage, copy into our spawning code. |  | ||||||
| 
 |  | ||||||
| The details are laid out in https://github.com/goodboy/tractor/issues/330. |  | ||||||
| `trio` changed is process running quite some time ago, this just copies |  | ||||||
| out the small bit we needed (from the old `.aclose()`) for hard kills |  | ||||||
| where a soft runtime cancel request fails and our "zombie killer" |  | ||||||
| implementation kicks in. |  | ||||||
|  | @ -1,15 +0,0 @@ | ||||||
| Switch to using the fork & fix of `pdb++`, `pdbp`: |  | ||||||
| https://github.com/mdmintz/pdbp |  | ||||||
| 
 |  | ||||||
| Allows us to sidestep a variety of issues that aren't being maintained |  | ||||||
| in the upstream project thanks to the hard work of @mdmintz! |  | ||||||
| 
 |  | ||||||
| We also include some default settings adjustments as per recent |  | ||||||
| development on the fork: |  | ||||||
| 
 |  | ||||||
| - sticky mode is still turned on by default but now activates when |  | ||||||
|   a using the `ll` repl command. |  | ||||||
| - turn off line truncation by default to avoid inter-line gaps when |  | ||||||
|   resizing the terimnal during use. |  | ||||||
| - when using the backtrace cmd either by `w` or `bt`, the config |  | ||||||
|   automatically switches to non-sticky mode. |  | ||||||
|  | @ -1,37 +0,0 @@ | ||||||
| {% for section in sections %} |  | ||||||
| {% set underline = "-" %} |  | ||||||
| {% if section %} |  | ||||||
| {{section}} |  | ||||||
| {{ underline * section|length }}{% set underline = "~" %} |  | ||||||
| 
 |  | ||||||
| {% endif %} |  | ||||||
| {% if sections[section] %} |  | ||||||
| {% for category, val in definitions.items() if category in sections[section] %} |  | ||||||
| 
 |  | ||||||
| {{ definitions[category]['name'] }} |  | ||||||
| {{ underline * definitions[category]['name']|length }} |  | ||||||
| 
 |  | ||||||
| {% if definitions[category]['showcontent'] %} |  | ||||||
| {% for text, values in sections[section][category]|dictsort(by='value') %} |  | ||||||
| {% set issue_joiner = joiner(', ') %} |  | ||||||
| - {% for value in values|sort %}{{ issue_joiner() }}`{{ value }} <https://github.com/goodboy/tractor/issues/{{ value[1:] }}>`_{% endfor %}: {{ text }} |  | ||||||
| 
 |  | ||||||
| {% endfor %} |  | ||||||
| {% else %} |  | ||||||
| - {{ sections[section][category]['']|sort|join(', ') }} |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| {% endif %} |  | ||||||
| {% if sections[section][category]|length == 0 %} |  | ||||||
| 
 |  | ||||||
| No significant changes. |  | ||||||
| 
 |  | ||||||
| {% else %} |  | ||||||
| {% endif %} |  | ||||||
| {% endfor %} |  | ||||||
| {% else %} |  | ||||||
| 
 |  | ||||||
| No significant changes. |  | ||||||
| 
 |  | ||||||
| {% endif %} |  | ||||||
| {% endfor %} |  | ||||||
|  | @ -1,18 +0,0 @@ | ||||||
| First generate a built disti: |  | ||||||
| 
 |  | ||||||
| ``` |  | ||||||
| python -m pip install --upgrade build |  | ||||||
| python -m build --sdist --outdir dist/alpha5/ |  | ||||||
| ``` |  | ||||||
| 
 |  | ||||||
| Then try a test ``pypi`` upload: |  | ||||||
| 
 |  | ||||||
| ``` |  | ||||||
| python -m twine upload --repository testpypi dist/alpha5/* |  | ||||||
| ``` |  | ||||||
| 
 |  | ||||||
| The push to `pypi` for realz. |  | ||||||
| 
 |  | ||||||
| ``` |  | ||||||
| python -m twine upload --repository testpypi dist/alpha5/* |  | ||||||
| ``` |  | ||||||
							
								
								
									
										164
									
								
								pyproject.toml
								
								
								
								
							
							
						
						
									
										164
									
								
								pyproject.toml
								
								
								
								
							|  | @ -1,164 +0,0 @@ | ||||||
| [build-system] |  | ||||||
| requires = ["hatchling"] |  | ||||||
| build-backend = "hatchling.build" |  | ||||||
| 
 |  | ||||||
| # ------ build-system ------ |  | ||||||
| 
 |  | ||||||
| [project] |  | ||||||
| name = "tractor" |  | ||||||
| version = "0.1.0a6dev0" |  | ||||||
| description = 'structured concurrent `trio`-"actors"' |  | ||||||
| authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }] |  | ||||||
| requires-python = ">= 3.11" |  | ||||||
| readme = "docs/README.rst" |  | ||||||
| license = "AGPL-3.0-or-later" |  | ||||||
| keywords = [ |  | ||||||
|   "trio", |  | ||||||
|   "async", |  | ||||||
|   "concurrency", |  | ||||||
|   "structured concurrency", |  | ||||||
|   "actor model", |  | ||||||
|   "distributed", |  | ||||||
|   "multiprocessing", |  | ||||||
| ] |  | ||||||
| classifiers = [ |  | ||||||
|   "Development Status :: 3 - Alpha", |  | ||||||
|   "Operating System :: POSIX :: Linux", |  | ||||||
|   "Framework :: Trio", |  | ||||||
|   "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", |  | ||||||
|   "Programming Language :: Python :: Implementation :: CPython", |  | ||||||
|   "Programming Language :: Python :: 3 :: Only", |  | ||||||
|   "Programming Language :: Python :: 3.11", |  | ||||||
|   "Topic :: System :: Distributed Computing", |  | ||||||
| ] |  | ||||||
| dependencies = [ |  | ||||||
|   # trio runtime and friends |  | ||||||
|   # (poetry) proper range specs, |  | ||||||
|   # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 |  | ||||||
|   # TODO, for 3.13 we must go go `0.27` which means we have to |  | ||||||
|   # disable strict egs or port to handling them internally! |  | ||||||
|   "trio>0.27", |  | ||||||
|   "tricycle>=0.4.1,<0.5", |  | ||||||
|   "wrapt>=1.16.0,<2", |  | ||||||
|   "colorlog>=6.8.2,<7", |  | ||||||
|   # built-in multi-actor `pdb` REPL |  | ||||||
|   "pdbp>=1.6,<2", # windows only (from `pdbp`) |  | ||||||
|   # typed IPC msging |  | ||||||
|   "msgspec>=0.19.0", |  | ||||||
|   "cffi>=1.17.1", |  | ||||||
|   "bidict>=0.23.1", |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| # ------ project ------ |  | ||||||
| 
 |  | ||||||
| [dependency-groups] |  | ||||||
| dev = [ |  | ||||||
|   # test suite |  | ||||||
|   # TODO: maybe some of these layout choices? |  | ||||||
|   # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules |  | ||||||
|   "pytest>=8.3.5", |  | ||||||
|   "pexpect>=4.9.0,<5", |  | ||||||
|   # `tractor.devx` tooling |  | ||||||
|   "greenback>=1.2.1,<2", |  | ||||||
|   "stackscope>=0.2.2,<0.3", |  | ||||||
|   # ^ requires this? |  | ||||||
|   "typing-extensions>=4.14.1", |  | ||||||
| 
 |  | ||||||
|   "pyperclip>=1.9.0", |  | ||||||
|   "prompt-toolkit>=3.0.50", |  | ||||||
|   "xonsh>=0.19.2", |  | ||||||
|   "psutil>=7.0.0", |  | ||||||
| ] |  | ||||||
| # TODO, add these with sane versions; were originally in |  | ||||||
| # `requirements-docs.txt`.. |  | ||||||
| # docs = [ |  | ||||||
| #   "sphinx>=" |  | ||||||
| #   "sphinx_book_theme>=" |  | ||||||
| # ] |  | ||||||
| 
 |  | ||||||
| # ------ dependency-groups ------ |  | ||||||
| 
 |  | ||||||
| # ------ dependency-groups ------ |  | ||||||
| 
 |  | ||||||
| [tool.uv.sources] |  | ||||||
| # XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)` |  | ||||||
| # for the `pp` alias.. |  | ||||||
| # pdbp = { path = "../pdbp", editable = true } |  | ||||||
| 
 |  | ||||||
| # ------ tool.uv.sources ------ |  | ||||||
| # TODO, distributed (multi-host) extensions |  | ||||||
| # linux kernel networking |  | ||||||
| # 'pyroute2 |  | ||||||
| 
 |  | ||||||
| # ------ tool.uv.sources ------ |  | ||||||
| 
 |  | ||||||
| [tool.uv] |  | ||||||
| # XXX NOTE, prefer the sys python bc apparently the distis from |  | ||||||
| # `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s |  | ||||||
| # likely due to linking against `libedit` over `readline`.. |  | ||||||
| # |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions |  | ||||||
| # |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux |  | ||||||
| # |  | ||||||
| # https://docs.astral.sh/uv/reference/settings/#python-preference |  | ||||||
| python-preference = 'system' |  | ||||||
| 
 |  | ||||||
| # ------ tool.uv ------ |  | ||||||
| 
 |  | ||||||
| [tool.hatch.build.targets.sdist] |  | ||||||
| include = ["tractor"] |  | ||||||
| 
 |  | ||||||
| [tool.hatch.build.targets.wheel] |  | ||||||
| include = ["tractor"] |  | ||||||
| 
 |  | ||||||
| # ------ tool.hatch ------ |  | ||||||
| 
 |  | ||||||
| [tool.towncrier] |  | ||||||
| package = "tractor" |  | ||||||
| filename = "NEWS.rst" |  | ||||||
| directory = "nooz/" |  | ||||||
| version = "0.1.0a6" |  | ||||||
| title_format = "tractor {version} ({project_date})" |  | ||||||
| template = "nooz/_template.rst" |  | ||||||
| all_bullets = true |  | ||||||
| 
 |  | ||||||
| [[tool.towncrier.type]] |  | ||||||
|   directory = "feature" |  | ||||||
|   name = "Features" |  | ||||||
|   showcontent = true |  | ||||||
| 
 |  | ||||||
| [[tool.towncrier.type]] |  | ||||||
|   directory = "bugfix" |  | ||||||
|   name = "Bug Fixes" |  | ||||||
|   showcontent = true |  | ||||||
| 
 |  | ||||||
| [[tool.towncrier.type]] |  | ||||||
|   directory = "doc" |  | ||||||
|   name = "Improved Documentation" |  | ||||||
|   showcontent = true |  | ||||||
| 
 |  | ||||||
| [[tool.towncrier.type]] |  | ||||||
|   directory = "trivial" |  | ||||||
|   name = "Trivial/Internal Changes" |  | ||||||
|   showcontent = true |  | ||||||
| 
 |  | ||||||
| # ------ tool.towncrier ------ |  | ||||||
| 
 |  | ||||||
| [tool.pytest.ini_options] |  | ||||||
| minversion = '6.0' |  | ||||||
| testpaths = [ |  | ||||||
|   'tests' |  | ||||||
| ] |  | ||||||
| addopts = [ |  | ||||||
|   # TODO: figure out why this isn't working.. |  | ||||||
|   '--rootdir=./tests', |  | ||||||
| 
 |  | ||||||
|   '--import-mode=importlib', |  | ||||||
|   # don't show frickin captured logs AGAIN in the report.. |  | ||||||
|   '--show-capture=no', |  | ||||||
| ] |  | ||||||
| log_cli = false |  | ||||||
| # TODO: maybe some of these layout choices? |  | ||||||
| # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules |  | ||||||
| # pythonpath = "src" |  | ||||||
| 
 |  | ||||||
| # ------ tool.pytest ------ |  | ||||||
|  | @ -1,8 +0,0 @@ | ||||||
| # vim: ft=ini |  | ||||||
| # pytest.ini for tractor |  | ||||||
| 
 |  | ||||||
| [pytest] |  | ||||||
| # don't show frickin captured logs AGAIN in the report.. |  | ||||||
| addopts = --show-capture='no' |  | ||||||
| log_cli = false |  | ||||||
| ; minversion = 6.0 |  | ||||||
|  | @ -0,0 +1,2 @@ | ||||||
|  | sphinx | ||||||
|  | sphinx_book_theme | ||||||
|  | @ -0,0 +1,6 @@ | ||||||
|  | pytest | ||||||
|  | pytest-trio | ||||||
|  | pdbpp | ||||||
|  | mypy | ||||||
|  | trio_typing | ||||||
|  | pexpect | ||||||
							
								
								
									
										82
									
								
								ruff.toml
								
								
								
								
							
							
						
						
									
										82
									
								
								ruff.toml
								
								
								
								
							|  | @ -1,82 +0,0 @@ | ||||||
| # from default `ruff.toml` @ |  | ||||||
| # https://docs.astral.sh/ruff/configuration/ |  | ||||||
| 
 |  | ||||||
| # Exclude a variety of commonly ignored directories. |  | ||||||
| exclude = [ |  | ||||||
|     ".bzr", |  | ||||||
|     ".direnv", |  | ||||||
|     ".eggs", |  | ||||||
|     ".git", |  | ||||||
|     ".git-rewrite", |  | ||||||
|     ".hg", |  | ||||||
|     ".ipynb_checkpoints", |  | ||||||
|     ".mypy_cache", |  | ||||||
|     ".nox", |  | ||||||
|     ".pants.d", |  | ||||||
|     ".pyenv", |  | ||||||
|     ".pytest_cache", |  | ||||||
|     ".pytype", |  | ||||||
|     ".ruff_cache", |  | ||||||
|     ".svn", |  | ||||||
|     ".tox", |  | ||||||
|     ".venv", |  | ||||||
|     ".vscode", |  | ||||||
|     "__pypackages__", |  | ||||||
|     "_build", |  | ||||||
|     "buck-out", |  | ||||||
|     "build", |  | ||||||
|     "dist", |  | ||||||
|     "node_modules", |  | ||||||
|     "site-packages", |  | ||||||
|     "venv", |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| # Same as Black. |  | ||||||
| line-length = 88 |  | ||||||
| indent-width = 4 |  | ||||||
| 
 |  | ||||||
| # Assume Python 3.9 |  | ||||||
| target-version = "py311" |  | ||||||
| 
 |  | ||||||
| [lint] |  | ||||||
| # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`)  codes by default. |  | ||||||
| # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or |  | ||||||
| # McCabe complexity (`C901`) by default. |  | ||||||
| select = ["E4", "E7", "E9", "F"] |  | ||||||
| ignore = [ |  | ||||||
|   'E402',  # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/ |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| # Allow fix for all enabled rules (when `--fix`) is provided. |  | ||||||
| fixable = ["ALL"] |  | ||||||
| unfixable = [] |  | ||||||
| 
 |  | ||||||
| # Allow unused variables when underscore-prefixed. |  | ||||||
| # dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" |  | ||||||
| 
 |  | ||||||
| [format] |  | ||||||
| # Use single quotes in `ruff format`. |  | ||||||
| quote-style = "single" |  | ||||||
| 
 |  | ||||||
| # Like Black, indent with spaces, rather than tabs. |  | ||||||
| indent-style = "space" |  | ||||||
| 
 |  | ||||||
| # Like Black, respect magic trailing commas. |  | ||||||
| skip-magic-trailing-comma = false |  | ||||||
| 
 |  | ||||||
| # Like Black, automatically detect the appropriate line ending. |  | ||||||
| line-ending = "auto" |  | ||||||
| 
 |  | ||||||
| # Enable auto-formatting of code examples in docstrings. Markdown, |  | ||||||
| # reStructuredText code/literal blocks and doctests are all supported. |  | ||||||
| # |  | ||||||
| # This is currently disabled by default, but it is planned for this |  | ||||||
| # to be opt-out in the future. |  | ||||||
| docstring-code-format = false |  | ||||||
| 
 |  | ||||||
| # Set the line length limit used when formatting code snippets in |  | ||||||
| # docstrings. |  | ||||||
| # |  | ||||||
| # This only has an effect when the `docstring-code-format` setting is |  | ||||||
| # enabled. |  | ||||||
| docstring-code-line-length = "dynamic" |  | ||||||
|  | @ -0,0 +1,90 @@ | ||||||
|  | #!/usr/bin/env python | ||||||
|  | # | ||||||
|  | # tractor: a trionic actor model built on `multiprocessing` and `trio` | ||||||
|  | # | ||||||
|  | # Copyright (C) 2018-2020  Tyler Goodlet | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU General Public License | ||||||
|  | # along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||||
|  | from setuptools import setup | ||||||
|  | 
 | ||||||
|  | with open('docs/README.rst', encoding='utf-8') as f: | ||||||
|  |     readme = f.read() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | setup( | ||||||
|  |     name="tractor", | ||||||
|  |     version='0.1.0a2.dev0',  # alpha zone | ||||||
|  |     description='structured concurrrent "actors"', | ||||||
|  |     long_description=readme, | ||||||
|  |     license='GPLv3', | ||||||
|  |     author='Tyler Goodlet', | ||||||
|  |     maintainer='Tyler Goodlet', | ||||||
|  |     maintainer_email='jgbt@protonmail.com', | ||||||
|  |     url='https://github.com/goodboy/tractor', | ||||||
|  |     platforms=['linux', 'windows'], | ||||||
|  |     packages=[ | ||||||
|  |         'tractor', | ||||||
|  |         'tractor.testing', | ||||||
|  |     ], | ||||||
|  |     install_requires=[ | ||||||
|  | 
 | ||||||
|  |         # trio related | ||||||
|  |         'trio>0.8', | ||||||
|  |         'async_generator', | ||||||
|  |         'trio_typing', | ||||||
|  | 
 | ||||||
|  |         # tooling | ||||||
|  |         'tricycle', | ||||||
|  |         'trio_typing', | ||||||
|  | 
 | ||||||
|  |         # tooling | ||||||
|  |         'colorlog', | ||||||
|  |         'wrapt', | ||||||
|  |         'pdbpp', | ||||||
|  | 
 | ||||||
|  |         # serialization | ||||||
|  |         'msgpack', | ||||||
|  | 
 | ||||||
|  |     ], | ||||||
|  |     extras_require={ | ||||||
|  | 
 | ||||||
|  |         # serialization | ||||||
|  |         'msgspec': ["msgspec >= 0.3.2'; python_version >= '3.9'"], | ||||||
|  | 
 | ||||||
|  |     }, | ||||||
|  |     tests_require=['pytest'], | ||||||
|  |     python_requires=">=3.8", | ||||||
|  |     keywords=[ | ||||||
|  |         'trio', | ||||||
|  |         "async", | ||||||
|  |         "concurrency", | ||||||
|  |         "actor model", | ||||||
|  |         "distributed", | ||||||
|  |         'multiprocessing' | ||||||
|  |     ], | ||||||
|  |     classifiers=[ | ||||||
|  |         "Development Status :: 3 - Alpha", | ||||||
|  |         "Operating System :: POSIX :: Linux", | ||||||
|  |         "Operating System :: Microsoft :: Windows", | ||||||
|  |         "Framework :: Trio", | ||||||
|  |         "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", | ||||||
|  |         "Programming Language :: Python :: Implementation :: CPython", | ||||||
|  |         "Programming Language :: Python :: 3 :: Only", | ||||||
|  |         "Programming Language :: Python :: 3.8", | ||||||
|  |         "Programming Language :: Python :: 3.9", | ||||||
|  |         "Intended Audience :: Science/Research", | ||||||
|  |         "Intended Audience :: Developers", | ||||||
|  |         "Topic :: System :: Distributed Computing", | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | @ -1,26 +1,23 @@ | ||||||
| """ | """ | ||||||
| Top level of the testing suites! | ``tractor`` testing!! | ||||||
| 
 |  | ||||||
| """ | """ | ||||||
| from __future__ import annotations |  | ||||||
| import sys | import sys | ||||||
| import subprocess | import subprocess | ||||||
| import os | import os | ||||||
|  | import random | ||||||
| import signal | import signal | ||||||
| import platform | import platform | ||||||
| import time | import time | ||||||
| 
 | 
 | ||||||
| import pytest | import pytest | ||||||
| from tractor._testing import ( | import tractor | ||||||
|     examples_dir as examples_dir, |  | ||||||
|     tractor_test as tractor_test, |  | ||||||
|     expect_ctxc as expect_ctxc, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| pytest_plugins: list[str] = [ | # export for tests | ||||||
|     'pytester', | from tractor.testing import tractor_test  # noqa | ||||||
|     'tractor._testing.pytest', | 
 | ||||||
| ] | 
 | ||||||
|  | pytest_plugins = ['pytester'] | ||||||
|  | _arb_addr = '127.0.0.1', random.randint(1000, 9999) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | ||||||
|  | @ -33,11 +30,7 @@ else: | ||||||
|     _KILL_SIGNAL = signal.SIGKILL |     _KILL_SIGNAL = signal.SIGKILL | ||||||
|     _INT_SIGNAL = signal.SIGINT |     _INT_SIGNAL = signal.SIGINT | ||||||
|     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value |     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value | ||||||
|     _PROC_SPAWN_WAIT = ( |     _PROC_SPAWN_WAIT = 0.6 if sys.version_info < (3, 7) else 0.4 | ||||||
|         0.6 |  | ||||||
|         if sys.version_info < (3, 7) |  | ||||||
|         else 0.4 |  | ||||||
|     ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| no_windows = pytest.mark.skipif( | no_windows = pytest.mark.skipif( | ||||||
|  | @ -46,172 +39,124 @@ no_windows = pytest.mark.skipif( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def pytest_addoption( | def repodir(): | ||||||
|     parser: pytest.Parser, |     """Return the abspath to the repo directory. | ||||||
| ): |     """ | ||||||
|     # ?TODO? should this be exposed from our `._testing.pytest` |     dirname = os.path.dirname | ||||||
|     # plugin or should we make it more explicit with `--tl` for |     dirpath = os.path.abspath( | ||||||
|     # tractor logging like we do in other client projects? |         dirname(dirname(os.path.realpath(__file__))) | ||||||
|     parser.addoption( |  | ||||||
|         "--ll", |  | ||||||
|         action="store", |  | ||||||
|         dest='loglevel', |  | ||||||
|         default='ERROR', help="logging level to set when testing" |  | ||||||
|         ) |         ) | ||||||
|  |     return dirpath | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def pytest_addoption(parser): | ||||||
|  |     parser.addoption( | ||||||
|  |         "--ll", action="store", dest='loglevel', | ||||||
|  |         default=None, help="logging level to set when testing" | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     parser.addoption( | ||||||
|  |         "--spawn-backend", action="store", dest='spawn_backend', | ||||||
|  |         default='trio', | ||||||
|  |         help="Processing spawning backend to use for test run", | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def pytest_configure(config): | ||||||
|  |     backend = config.option.spawn_backend | ||||||
|  | 
 | ||||||
|  |     if backend == 'mp': | ||||||
|  |         tractor._spawn.try_set_start_method('spawn') | ||||||
|  |     elif backend == 'trio': | ||||||
|  |         tractor._spawn.try_set_start_method(backend) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.fixture(scope='session', autouse=True) | @pytest.fixture(scope='session', autouse=True) | ||||||
| def loglevel(request): | def loglevel(request): | ||||||
|     import tractor |  | ||||||
|     orig = tractor.log._default_loglevel |     orig = tractor.log._default_loglevel | ||||||
|     level = tractor.log._default_loglevel = request.config.option.loglevel |     level = tractor.log._default_loglevel = request.config.option.loglevel | ||||||
|     tractor.log.get_console_log(level) |  | ||||||
|     yield level |     yield level | ||||||
|     tractor.log._default_loglevel = orig |     tractor.log._default_loglevel = orig | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _ci_env: bool = os.environ.get('CI', False) | @pytest.fixture(scope='session') | ||||||
|  | def spawn_backend(request): | ||||||
|  |     return request.config.option.spawn_backend | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.fixture(scope='session') | @pytest.fixture(scope='session') | ||||||
| def ci_env() -> bool: | def ci_env() -> bool: | ||||||
|     ''' |     """Detect CI envoirment. | ||||||
|     Detect CI environment. |     """ | ||||||
| 
 |     return os.environ.get('TRAVIS', False) or os.environ.get('CI', False) | ||||||
|     ''' |  | ||||||
|     return _ci_env |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def sig_prog( | @pytest.fixture(scope='session') | ||||||
|     proc: subprocess.Popen, | def arb_addr(): | ||||||
|     sig: int, |     return _arb_addr | ||||||
|     canc_timeout: float = 0.1, | 
 | ||||||
| ) -> int: | 
 | ||||||
|  | def pytest_generate_tests(metafunc): | ||||||
|  |     spawn_backend = metafunc.config.option.spawn_backend | ||||||
|  |     if not spawn_backend: | ||||||
|  |         # XXX some weird windows bug with `pytest`? | ||||||
|  |         spawn_backend = 'mp' | ||||||
|  |     assert spawn_backend in ('mp', 'trio') | ||||||
|  | 
 | ||||||
|  |     if 'start_method' in metafunc.fixturenames: | ||||||
|  |         if spawn_backend == 'mp': | ||||||
|  |             from multiprocessing import get_all_start_methods | ||||||
|  |             methods = get_all_start_methods() | ||||||
|  |             if 'fork' in methods: | ||||||
|  |                 # fork not available on windows, so check before | ||||||
|  |                 # removing XXX: the fork method is in general | ||||||
|  |                 # incompatible with trio's global scheduler state | ||||||
|  |                 methods.remove('fork') | ||||||
|  |         elif spawn_backend == 'trio': | ||||||
|  |             methods = ['trio'] | ||||||
|  | 
 | ||||||
|  |         metafunc.parametrize("start_method", methods, scope='module') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def sig_prog(proc, sig): | ||||||
|     "Kill the actor-process with ``sig``." |     "Kill the actor-process with ``sig``." | ||||||
|     proc.send_signal(sig) |     proc.send_signal(sig) | ||||||
|     time.sleep(canc_timeout) |     time.sleep(0.1) | ||||||
|     if not proc.poll(): |     if not proc.poll(): | ||||||
|         # TODO: why sometimes does SIGINT not work on teardown? |         # TODO: why sometimes does SIGINT not work on teardown? | ||||||
|         # seems to happen only when trace logging enabled? |         # seems to happen only when trace logging enabled? | ||||||
|         proc.send_signal(_KILL_SIGNAL) |         proc.send_signal(_KILL_SIGNAL) | ||||||
|     ret: int = proc.wait() |     ret = proc.wait() | ||||||
|     assert ret |     assert ret | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: factor into @cm and move to `._testing`? |  | ||||||
| @pytest.fixture | @pytest.fixture | ||||||
| def daemon( | def daemon(loglevel, testdir, arb_addr): | ||||||
|     debug_mode: bool, |     """Run a daemon actor as a "remote arbiter". | ||||||
|     loglevel: str, |     """ | ||||||
|     testdir: pytest.Pytester, |  | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
|     tpt_proto: str, |  | ||||||
| 
 |  | ||||||
| ) -> subprocess.Popen: |  | ||||||
|     ''' |  | ||||||
|     Run a daemon root actor as a separate actor-process tree and |  | ||||||
|     "remote registrar" for discovery-protocol related tests. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if loglevel in ('trace', 'debug'): |     if loglevel in ('trace', 'debug'): | ||||||
|         # XXX: too much logging will lock up the subproc (smh) |         # too much logging will lock up the subproc (smh) | ||||||
|         loglevel: str = 'info' |         loglevel = 'info' | ||||||
| 
 | 
 | ||||||
|     code: str = ( |     cmdargs = [ | ||||||
|         "import tractor; " |         sys.executable, '-c', | ||||||
|         "tractor.run_daemon([], " |         "import tractor; tractor.run_daemon([], arbiter_addr={}, loglevel={})" | ||||||
|         "registry_addrs={reg_addrs}, " |         .format( | ||||||
|         "debug_mode={debug_mode}, " |             arb_addr, | ||||||
|         "loglevel={ll})" |             "'{}'".format(loglevel) if loglevel else None) | ||||||
|     ).format( |  | ||||||
|         reg_addrs=str([reg_addr]), |  | ||||||
|         ll="'{}'".format(loglevel) if loglevel else None, |  | ||||||
|         debug_mode=debug_mode, |  | ||||||
|     ) |  | ||||||
|     cmd: list[str] = [ |  | ||||||
|         sys.executable, |  | ||||||
|         '-c', code, |  | ||||||
|     ] |     ] | ||||||
|     # breakpoint() |     kwargs = dict() | ||||||
|     kwargs = {} |  | ||||||
|     if platform.system() == 'Windows': |     if platform.system() == 'Windows': | ||||||
|         # without this, tests hang on windows forever |         # without this, tests hang on windows forever | ||||||
|         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP |         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP | ||||||
| 
 | 
 | ||||||
|     proc: subprocess.Popen = testdir.popen( |     proc = testdir.popen( | ||||||
|         cmd, |         cmdargs, | ||||||
|  |         stdout=subprocess.PIPE, | ||||||
|  |         stderr=subprocess.PIPE, | ||||||
|         **kwargs, |         **kwargs, | ||||||
|     ) |     ) | ||||||
| 
 |  | ||||||
|     # UDS sockets are **really** fast to bind()/listen()/connect() |  | ||||||
|     # so it's often required that we delay a bit more starting |  | ||||||
|     # the first actor-tree.. |  | ||||||
|     if tpt_proto == 'uds': |  | ||||||
|         global _PROC_SPAWN_WAIT |  | ||||||
|         _PROC_SPAWN_WAIT = 0.6 |  | ||||||
| 
 |  | ||||||
|     time.sleep(_PROC_SPAWN_WAIT) |  | ||||||
| 
 |  | ||||||
|     assert not proc.returncode |     assert not proc.returncode | ||||||
|  |     time.sleep(_PROC_SPAWN_WAIT) | ||||||
|     yield proc |     yield proc | ||||||
|     sig_prog(proc, _INT_SIGNAL) |     sig_prog(proc, _INT_SIGNAL) | ||||||
| 
 |  | ||||||
|     # XXX! yeah.. just be reaaal careful with this bc sometimes it |  | ||||||
|     # can lock up on the `_io.BufferedReader` and hang.. |  | ||||||
|     stderr: str = proc.stderr.read().decode() |  | ||||||
|     if stderr: |  | ||||||
|         print( |  | ||||||
|             f'Daemon actor tree produced STDERR:\n' |  | ||||||
|             f'{proc.args}\n' |  | ||||||
|             f'\n' |  | ||||||
|             f'{stderr}\n' |  | ||||||
|         ) |  | ||||||
|     if proc.returncode != -2: |  | ||||||
|         raise RuntimeError( |  | ||||||
|             'Daemon actor tree failed !?\n' |  | ||||||
|             f'{proc.args}\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # @pytest.fixture(autouse=True) |  | ||||||
| # def shared_last_failed(pytestconfig): |  | ||||||
| #     val = pytestconfig.cache.get("example/value", None) |  | ||||||
| #     breakpoint() |  | ||||||
| #     if val is None: |  | ||||||
| #         pytestconfig.cache.set("example/value", val) |  | ||||||
| #     return val |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO: a way to let test scripts (like from `examples/`) |  | ||||||
| # guarantee they won't `registry_addrs` collide! |  | ||||||
| # -[ ] maybe use some kinda standard `def main()` arg-spec that |  | ||||||
| #     we can introspect from a fixture that is called from the test |  | ||||||
| #     body? |  | ||||||
| # -[ ] test and figure out typing for below prototype! Bp |  | ||||||
| # |  | ||||||
| # @pytest.fixture |  | ||||||
| # def set_script_runtime_args( |  | ||||||
| #     reg_addr: tuple, |  | ||||||
| # ) -> Callable[[...], None]: |  | ||||||
| 
 |  | ||||||
| #     def import_n_partial_in_args_n_triorun( |  | ||||||
| #         script: Path,  # under examples? |  | ||||||
| #         **runtime_args, |  | ||||||
| #     ) -> Callable[[], Any]:  # a `partial`-ed equiv of `trio.run()` |  | ||||||
| 
 |  | ||||||
| #         # NOTE, below is taken from |  | ||||||
| #         # `.test_advanced_faults.test_ipc_channel_break_during_stream` |  | ||||||
| #         mod: ModuleType = import_path( |  | ||||||
| #             examples_dir() / 'advanced_faults' |  | ||||||
| #             / 'ipc_failure_during_stream.py', |  | ||||||
| #             root=examples_dir(), |  | ||||||
| #             consider_namespace_packages=False, |  | ||||||
| #         ) |  | ||||||
| #         return partial( |  | ||||||
| #             trio.run, |  | ||||||
| #             partial( |  | ||||||
| #                 mod.main, |  | ||||||
| #                 **runtime_args, |  | ||||||
| #             ) |  | ||||||
| #         ) |  | ||||||
| #     return import_n_partial_in_args_n_triorun |  | ||||||
|  |  | ||||||
|  | @ -1,253 +0,0 @@ | ||||||
| ''' |  | ||||||
| `tractor.devx.*` tooling sub-pkg test space. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| import time |  | ||||||
| from typing import ( |  | ||||||
|     Callable, |  | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| from pexpect.exceptions import ( |  | ||||||
|     TIMEOUT, |  | ||||||
| ) |  | ||||||
| from pexpect.spawnbase import SpawnBase |  | ||||||
| 
 |  | ||||||
| from tractor._testing import ( |  | ||||||
|     mk_cmd, |  | ||||||
| ) |  | ||||||
| from tractor.devx.debug import ( |  | ||||||
|     _pause_msg as _pause_msg, |  | ||||||
|     _crash_msg as _crash_msg, |  | ||||||
|     _repl_fail_msg as _repl_fail_msg, |  | ||||||
|     _ctlc_ignore_header as _ctlc_ignore_header, |  | ||||||
| ) |  | ||||||
| from ..conftest import ( |  | ||||||
|     _ci_env, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from pexpect import pty_spawn |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # a fn that sub-instantiates a `pexpect.spawn()` |  | ||||||
| # and returns it. |  | ||||||
| type PexpectSpawner = Callable[[str], pty_spawn.spawn] |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture |  | ||||||
| def spawn( |  | ||||||
|     start_method: str, |  | ||||||
|     testdir: pytest.Pytester, |  | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
| 
 |  | ||||||
| ) -> PexpectSpawner: |  | ||||||
|     ''' |  | ||||||
|     Use the `pexpect` module shipped via `testdir.spawn()` to |  | ||||||
|     run an `./examples/..` script by name. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if start_method != 'trio': |  | ||||||
|         pytest.skip( |  | ||||||
|             '`pexpect` based tests only supported on `trio` backend' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     def unset_colors(): |  | ||||||
|         ''' |  | ||||||
|         Python 3.13 introduced colored tracebacks that break patt |  | ||||||
|         matching, |  | ||||||
| 
 |  | ||||||
|         https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS |  | ||||||
|         https://docs.python.org/3/using/cmdline.html#using-on-controlling-color |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         import os |  | ||||||
|         os.environ['PYTHON_COLORS'] = '0' |  | ||||||
| 
 |  | ||||||
|     def _spawn( |  | ||||||
|         cmd: str, |  | ||||||
|         **mkcmd_kwargs, |  | ||||||
|     ) -> pty_spawn.spawn: |  | ||||||
|         unset_colors() |  | ||||||
|         return testdir.spawn( |  | ||||||
|             cmd=mk_cmd( |  | ||||||
|                 cmd, |  | ||||||
|                 **mkcmd_kwargs, |  | ||||||
|             ), |  | ||||||
|             expect_timeout=3, |  | ||||||
|             # preexec_fn=unset_colors, |  | ||||||
|             # ^TODO? get `pytest` core to expose underlying |  | ||||||
|             # `pexpect.spawn()` stuff? |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # such that test-dep can pass input script name. |  | ||||||
|     return _spawn  # the `PexpectSpawner`, type alias. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.fixture( |  | ||||||
|     params=[False, True], |  | ||||||
|     ids='ctl-c={}'.format, |  | ||||||
| ) |  | ||||||
| def ctlc( |  | ||||||
|     request, |  | ||||||
|     ci_env: bool, |  | ||||||
| 
 |  | ||||||
| ) -> bool: |  | ||||||
| 
 |  | ||||||
|     use_ctlc = request.param |  | ||||||
| 
 |  | ||||||
|     node = request.node |  | ||||||
|     markers = node.own_markers |  | ||||||
|     for mark in markers: |  | ||||||
|         if mark.name == 'has_nested_actors': |  | ||||||
|             pytest.skip( |  | ||||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' |  | ||||||
|                 f'The test can sometimes run fine locally but until' |  | ||||||
|                 ' we solve' 'this issue this CI test will be xfail:\n' |  | ||||||
|                 'https://github.com/goodboy/tractor/issues/320' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         if mark.name == 'ctlcs_bish': |  | ||||||
|             pytest.skip( |  | ||||||
|                 f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n' |  | ||||||
|                 f'The test and/or underlying example script can *sometimes* run fine ' |  | ||||||
|                 f'locally but more then likely until the cpython peeps get their sh#$ together, ' |  | ||||||
|                 f'this test will definitely not behave like `trio` under SIGINT..\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     if use_ctlc: |  | ||||||
|         # XXX: disable pygments highlighting for auto-tests |  | ||||||
|         # since some envs (like actions CI) will struggle |  | ||||||
|         # the the added color-char encoding.. |  | ||||||
|         from tractor.devx.debug import TractorConfig |  | ||||||
|         TractorConfig.use_pygements = False |  | ||||||
| 
 |  | ||||||
|     yield use_ctlc |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def expect( |  | ||||||
|     child, |  | ||||||
| 
 |  | ||||||
|     # normally a `pdb` prompt by default |  | ||||||
|     patt: str, |  | ||||||
| 
 |  | ||||||
|     **kwargs, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Expect wrapper that prints last seen console |  | ||||||
|     data before failing. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     try: |  | ||||||
|         child.expect( |  | ||||||
|             patt, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|     except TIMEOUT: |  | ||||||
|         before = str(child.before.decode()) |  | ||||||
|         print(before) |  | ||||||
|         raise |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| PROMPT = r"\(Pdb\+\)" |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def in_prompt_msg( |  | ||||||
|     child: SpawnBase, |  | ||||||
|     parts: list[str], |  | ||||||
| 
 |  | ||||||
|     pause_on_false: bool = False, |  | ||||||
|     err_on_false: bool = False, |  | ||||||
|     print_prompt_on_false: bool = True, |  | ||||||
| 
 |  | ||||||
| ) -> bool: |  | ||||||
|     ''' |  | ||||||
|     Predicate check if (the prompt's) std-streams output has all |  | ||||||
|     `str`-parts in it. |  | ||||||
| 
 |  | ||||||
|     Can be used in test asserts for bulk matching expected |  | ||||||
|     log/REPL output for a given `pdb` interact point. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     __tracebackhide__: bool = False |  | ||||||
| 
 |  | ||||||
|     before: str = str(child.before.decode()) |  | ||||||
|     for part in parts: |  | ||||||
|         if part not in before: |  | ||||||
|             if pause_on_false: |  | ||||||
|                 import pdbp |  | ||||||
|                 pdbp.set_trace() |  | ||||||
| 
 |  | ||||||
|             if print_prompt_on_false: |  | ||||||
|                 print(before) |  | ||||||
| 
 |  | ||||||
|             if err_on_false: |  | ||||||
|                 raise ValueError( |  | ||||||
|                     f'Could not find pattern in `before` output?\n' |  | ||||||
|                     f'part: {part!r}\n' |  | ||||||
|                 ) |  | ||||||
|             return False |  | ||||||
| 
 |  | ||||||
|     return True |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO: todo support terminal color-chars stripping so we can match |  | ||||||
| # against call stack frame output from the the 'll' command the like! |  | ||||||
| # -[ ] SO answer for stipping ANSI codes: https://stackoverflow.com/a/14693789 |  | ||||||
| def assert_before( |  | ||||||
|     child: SpawnBase, |  | ||||||
|     patts: list[str], |  | ||||||
| 
 |  | ||||||
|     **kwargs, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     __tracebackhide__: bool = False |  | ||||||
| 
 |  | ||||||
|     assert in_prompt_msg( |  | ||||||
|         child=child, |  | ||||||
|         parts=patts, |  | ||||||
| 
 |  | ||||||
|         # since this is an "assert" helper ;) |  | ||||||
|         err_on_false=True, |  | ||||||
|         **kwargs |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def do_ctlc(
    child,
    count: int = 3,
    delay: float = 0.1,
    patt: str|None = None,

    # expect repl UX to reprint the prompt after every
    # ctrl-c send.
    # XXX: no idea but, in CI this never seems to work even on 3.10 so
    # needs some further investigation potentially...
    expect_prompt: bool = not _ci_env,

) -> str|None:
    '''
    Send `count` ctl-c (SIGINT) key-presses to the `pexpect` child,
    sleeping `delay` around each send, and (by default outside CI)
    verify the REPL re-renders its prompt after every one.

    Returns the console content preceding the final prompt, or
    `None` when `expect_prompt` is unset.

    '''
    before: str|None = None

    # make sure ctl-c sends don't do anything but repeat output
    for _ in range(count):
        time.sleep(delay)
        child.sendcontrol('c')

        # TODO: figure out why this makes CI fail..
        # if you run this test manually it works just fine..
        if not expect_prompt:
            continue

        time.sleep(delay)
        child.expect(PROMPT)
        before = str(child.before.decode())
        time.sleep(delay)

        if patt:
            # should see the last line on console
            assert patt in before

    # return the console content up to the final prompt
    return before
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -1,381 +0,0 @@ | ||||||
| ''' |  | ||||||
| That "foreign loop/thread" debug REPL support better ALSO WORK! |  | ||||||
| 
 |  | ||||||
| Same as `test_native_pause.py`. |  | ||||||
| All these tests can be understood (somewhat) by running the |  | ||||||
| equivalent `examples/debugging/` scripts manually. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
| ) |  | ||||||
| # from functools import partial |  | ||||||
| # import itertools |  | ||||||
| import time |  | ||||||
| # from typing import ( |  | ||||||
| #     Iterator, |  | ||||||
| # ) |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| from pexpect.exceptions import ( |  | ||||||
|     TIMEOUT, |  | ||||||
|     EOF, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from .conftest import ( |  | ||||||
|     # _ci_env, |  | ||||||
|     do_ctlc, |  | ||||||
|     PROMPT, |  | ||||||
|     # expect, |  | ||||||
|     in_prompt_msg, |  | ||||||
|     assert_before, |  | ||||||
|     _pause_msg, |  | ||||||
|     _crash_msg, |  | ||||||
|     _ctlc_ignore_header, |  | ||||||
|     # _repl_fail_msg, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
@cm
def maybe_expect_timeout(
    ctlc: bool = False,
) -> None:
    '''
    Ctx-manager which converts a `pexpect` `TIMEOUT` raised in its
    body into an xfail when ctl-c testing is active (a known flaky
    threading-SIGINT issue), otherwise re-raises it.

    '''
    try:
        yield
    except TIMEOUT:
        if not ctlc:
            raise

        pytest.xfail(
            'Some kinda redic threading SIGINT bug i think?\n'
            'See the notes in `examples/debugging/sync_bp.py`..\n'
        )
        raise
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.ctlcs_bish
def test_pause_from_sync(
    spawn,
    ctlc: bool,
):
    '''
    Verify we can use the `pdbp` REPL from sync functions AND from
    any thread spawned with `trio.to_thread.run_sync()`.

    `examples/debugging/sync_bp.py`

    '''
    child = spawn('sync_bp')

    # first `sync_pause()` after nurseries open
    child.expect(PROMPT)
    assert_before(
        child,
        [
            # pre-prompt line
            _pause_msg,
            "<Task '__main__.main'",
            "('root'",
        ]
    )
    if ctlc:
        do_ctlc(child)
        # ^NOTE^ subactor not spawned yet; don't need extra delay.

    child.sendline('c')

    # first `await tractor.pause()` inside `p.open_context()` body
    child.expect(PROMPT)

    # XXX shouldn't see gb loaded message with PDB loglevel!
    # assert not in_prompt_msg(
    #     child,
    #     ['`greenback` portal opened!'],
    # )
    # should be same root task
    assert_before(
        child,
        [
            _pause_msg,
            "<Task '__main__.main'",
            "('root'",
        ]
    )

    if ctlc:
        do_ctlc(
            child,
            # NOTE: setting this to 0 (or some other sufficient
            # small val) can cause the test to fail since the
            # `subactor` suffers a race where the root/parent
            # sends an actor-cancel prior to it hitting its pause
            # point; by def the value is 0.1
            delay=0.4,
        )

    # XXX, fwiw without a brief sleep here the SIGINT might actually
    # trigger "subactor" cancellation by its parent  before the
    # shield-handler is engaged.
    #
    # => similar to the `delay` input to `do_ctlc()` below, setting
    # this too low can cause the test to fail since the `subactor`
    # suffers a race where the root/parent sends an actor-cancel
    # prior to the context task hitting its pause point (and thus
    # engaging the `sigint_shield()` handler in time); this value
    # seems be good enuf?
    time.sleep(0.6)

    # one of the bg thread or subactor should have
    # `Lock.acquire()`-ed
    # (NOT both, which will result in REPL clobbering!)
    attach_patts: dict[str, list[str]] = {
        'subactor': [
            "'start_n_sync_pause'",
            "('subactor'",
        ],
        'inline_root_bg_thread': [
            "<Thread(inline_root_bg_thread",
            "('root'",
        ],
        'start_soon_root_bg_thread': [
            "<Thread(start_soon_root_bg_thread",
            "('root'",
        ],
    }
    conts: int = 0  # for debugging below matching logic on failure

    # continue until every expected prompt-owner (subactor or one of
    # the 2 root bg-threads) has been matched exactly once; each
    # match `.pop()`s its key so the loop terminates when all 3 have
    # REPL-ed (in whatever race order they acquired the debug lock).
    while attach_patts:
        child.sendline('c')
        conts += 1
        child.expect(PROMPT)
        before = str(child.before.decode())
        for key in attach_patts:
            if key in before:
                attach_key: str = key
                expected_patts: str = attach_patts.pop(key)
                assert_before(
                    child,
                    [_pause_msg]
                    +
                    expected_patts
                )
                break
        else:
            # no registered pattern matched this prompt's output
            pytest.fail(
                f'No keys found?\n\n'
                f'{attach_patts.keys()}\n\n'
                f'{before}\n'
            )

        # ensure no other task/threads engaged a REPL
        # at the same time as the one that was detected above.
        for key, other_patts in attach_patts.copy().items():
            assert not in_prompt_msg(
                child,
                other_patts,
            )

        if ctlc:
            do_ctlc(
                child,
                patt=attach_key,
                # NOTE same as comment above
                delay=0.4,
            )

    child.sendline('c')

    # XXX TODO, weird threading bug it seems despite the
    # `abandon_on_cancel: bool` setting to
    # `trio.to_thread.run_sync()`..
    with maybe_expect_timeout(
        ctlc=ctlc,
    ):
        child.expect(EOF)
| 
 |  | ||||||
| 
 |  | ||||||
def expect_any_of(
    attach_patts: dict[str, list[str]],
    child,   # what type?
    ctlc: bool = False,
    prompt: str = _ctlc_ignore_header,
    ctlc_delay: float = .4,

) -> list[str]:
    '''
    Receive any of a `list[str]` of patterns provided in
    `attach_patts`.

    Used to test racing prompts from multiple actors and/or
    tasks using a common root process' `pdbp` REPL.

    '''
    assert attach_patts

    child.expect(PROMPT)
    before = str(child.before.decode())

    # find which registered key matched the console output,
    # then `.pop()` it so callers can loop until all are seen.
    matched_key: str|None = None
    for key in attach_patts:
        if key in before:
            matched_key = key
            break

    if matched_key is None:
        pytest.fail(
            f'No keys found?\n\n'
            f'{attach_patts.keys()}\n\n'
            f'{before}\n'
        )

    expected_patts: list[str] = attach_patts.pop(matched_key)
    assert_before(
        child,
        expected_patts
    )

    # ensure no other task/threads engaged a REPL
    # at the same time as the one that was detected above.
    for _key, other_patts in attach_patts.copy().items():
        assert not in_prompt_msg(
            child,
            other_patts,
        )

    if ctlc:
        do_ctlc(
            child,
            patt=prompt,
            # NOTE same as comment above
            delay=ctlc_delay,
        )

    return expected_patts
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.ctlcs_bish
def test_sync_pause_from_aio_task(
    spawn,

    ctlc: bool
    # ^TODO, fix for `asyncio`!!
):
    '''
    Verify we can use the `pdbp` REPL from an `asyncio.Task` spawned using
    APIs in `.to_asyncio`.

    `examples/debugging/asycio_bp.py`

    '''
    child = spawn('asyncio_bp')

    # RACE on whether trio/asyncio task bps first
    attach_patts: dict[str, list[str]] = {

        # first pause in guest-mode (aka "infecting")
        # `trio.Task`.
        'trio-side': [
            _pause_msg,
            "<Task 'trio_ctx'",
            "('aio_daemon'",
        ],

        # `breakpoint()` from `asyncio.Task`.
        'asyncio-side': [
            _pause_msg,
            "<Task pending name='Task-2' coro=<greenback_shim()",
            "('aio_daemon'",
        ],
    }

    # `expect_any_of()` pops each matched key, so this drains both
    # racing prompts (in whichever order they land) then exits.
    while attach_patts:
        expect_any_of(
            attach_patts=attach_patts,
            child=child,
            ctlc=ctlc,
        )
        child.sendline('c')

    # NOW in race order,
    # - the asyncio-task will error
    # - the root-actor parent task will pause
    #
    attach_patts: dict[str, list[str]] = {

        # error raised in `asyncio.Task`
        "raise ValueError('asyncio side error!')": [
            _crash_msg,
            "<Task 'trio_ctx'",
            "@ ('aio_daemon'",
            "ValueError: asyncio side error!",

            # XXX, we no longer show this frame by default!
            # 'return await chan.receive()',  # `.to_asyncio` impl internals in tb
        ],

        # parent-side propagation via actor-nursery/portal
        # "tractor._exceptions.RemoteActorError: remote task raised a 'ValueError'": [
        "remote task raised a 'ValueError'": [
            _crash_msg,
            "src_uid=('aio_daemon'",
            "('aio_daemon'",
        ],

        # a final pause in root-actor
        "<Task '__main__.main'": [
            _pause_msg,
            "<Task '__main__.main'",
            "('root'",
        ],
    }
    while attach_patts:
        expect_any_of(
            attach_patts=attach_patts,
            child=child,
            ctlc=ctlc,
        )
        child.sendline('c')

    assert not attach_patts

    # final boxed error propagates to root
    assert_before(
        child,
        [
            _crash_msg,
            "<Task '__main__.main'",
            "('root'",
            "remote task raised a 'ValueError'",
            "ValueError: asyncio side error!",
        ]
    )

    if ctlc:
        do_ctlc(
            child,
            # NOTE: setting this to 0 (or some other sufficient
            # small val) can cause the test to fail since the
            # `subactor` suffers a race where the root/parent
            # sends an actor-cancel prior to it hitting its pause
            # point; by def the value is 0.1
            delay=0.4,
        )

    child.sendline('c')
    # with maybe_expect_timeout():
    child.expect(EOF)
| 
 |  | ||||||
| 
 |  | ||||||
def test_sync_pause_from_non_greenbacked_aio_task():
    '''
    Placeholder: the `breakpoint()` caller task is NOT spawned by
    `tractor.to_asyncio` and thus never activates
    a `greenback.ensure_portal()` beforehand, presumably bc the task
    was started by some lib/dep as in often seen in the field.

    Ensure sync pausing works when the pause is in,

    - the root actor running in infected-mode?
      |_ since we don't need any IPC to acquire the debug lock?
      |_ is there some way to handle this like the non-main-thread case?

    All other cases need to error out appropriately right?

    - for any subactor we can't avoid needing the repl lock..
      |_ is there a way to hook into `asyncio.ensure_future(obj)`?

    '''
    # TODO: impl the above cases!
    ...
|  | @ -1,306 +0,0 @@ | ||||||
| ''' |  | ||||||
| That "native" runtime-hackin toolset better be dang useful! |  | ||||||
| 
 |  | ||||||
| Verify the funtion of a variety of "developer-experience" tools we |  | ||||||
| offer from the `.devx` sub-pkg: |  | ||||||
| 
 |  | ||||||
| - use of the lovely `stackscope` for dumping actor `trio`-task trees |  | ||||||
|   during operation and hangs. |  | ||||||
| 
 |  | ||||||
| TODO: |  | ||||||
| - demonstration of `CallerInfo` call stack frame filtering such that |  | ||||||
|   for logging and REPL purposes a user sees exactly the layers needed |  | ||||||
|   when debugging a problem inside the stack vs. in their app. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
| ) |  | ||||||
| import os |  | ||||||
| import signal |  | ||||||
| import time |  | ||||||
| from typing import ( |  | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from .conftest import ( |  | ||||||
|     expect, |  | ||||||
|     assert_before, |  | ||||||
|     in_prompt_msg, |  | ||||||
|     PROMPT, |  | ||||||
|     _pause_msg, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| from pexpect.exceptions import ( |  | ||||||
|     # TIMEOUT, |  | ||||||
|     EOF, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from ..conftest import PexpectSpawner |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def test_shield_pause(
    spawn: PexpectSpawner,
):
    '''
    Verify the `tractor.pause()/.post_mortem()` API works inside an
    already cancelled `trio.CancelScope` and that you can step to the
    next checkpoint wherein the cancelled will get raised.

    Drives `examples/debugging/shield_hang_in_sub.py` via `pexpect`:
    sends `SIGUSR1` to trigger `stackscope` tree dumps for both the
    root and the 'hanger' sub-actor, then `SIGINT` to verify the
    zombie-terminator ("T-800") reaps the shield-blocked sub.

    '''
    child = spawn(
        'shield_hang_in_sub'
    )
    expect(
        child,
        'Yo my child hanging..?',
    )
    assert_before(
        child,
        [
            'Entering shield sleep..',
            'Enabling trace-trees on `SIGUSR1` since `stackscope` is installed @',
        ]
    )

    script_pid: int = child.pid
    print(
        f'Sending SIGUSR1 to {script_pid}\n'
        f'(kill -s SIGUSR1 {script_pid})\n'
    )
    os.kill(
        script_pid,
        signal.SIGUSR1,
    )
    time.sleep(0.2)
    expect(
        child,
        # end-of-tree delimiter
        # NOTE: raw-string since `\(` is an *invalid escape
        # sequence* in a regular str (a `SyntaxWarning` since
        # py3.12, slated to become an error); the runtime value
        # (a regex with an escaped paren) is unchanged.
        r"end-of-\('root'",
    )
    assert_before(
        child,
        [
            # 'Srying to dump `stackscope` tree..',
            # 'Dumping `stackscope` tree for actor',
            "('root'",  # uid line

            # TODO!? this used to show?
            # -[ ] mk reproducable for @oremanj?
            #
            # parent block point (non-shielded)
            # 'await trio.sleep_forever()  # in root',
        ]
    )
    expect(
        child,
        # end-of-tree delimiter
        # NOTE: raw-string for the `\(` escape, same as above.
        r"end-of-\('hanger'",
    )
    assert_before(
        child,
        [
            # relay to the sub should be reported
            'Relaying `SIGUSR1`[10] to sub-actor',

            "('hanger'",  # uid line

            # TODO!? SEE ABOVE
            # hanger LOC where it's shield-halted
            # 'await trio.sleep_forever()  # in subactor',
        ]
    )

    # simulate the user sending a ctl-c to the hanging program.
    # this should result in the terminator kicking in since
    # the sub is shield blocking and can't respond to SIGINT.
    os.kill(
        child.pid,
        signal.SIGINT,
    )
    from tractor._supervise import _shutdown_msg
    expect(
        child,
        # 'Shutting down actor runtime',
        _shutdown_msg,
        timeout=6,
    )
    assert_before(
        child,
        [
            'raise KeyboardInterrupt',
            # 'Shutting down actor runtime',
            '#T-800 deployed to collect zombie B0',
            "'--uid', \"('hanger',",
        ]
    )
| 
 |  | ||||||
| 
 |  | ||||||
def test_breakpoint_hook_restored(
    spawn: PexpectSpawner,
):
    '''
    Ensures our actor runtime sets a custom `breakpoint()` hook
    on open then restores the stdlib's default on close.

    The hook state validation is done via `assert`s inside the
    invoked script with only `breakpoint()` (not `tractor.pause()`)
    calls used.

    '''
    child = spawn('restore_builtin_breakpoint')

    # first `breakpoint()` while the runtime is up: expect the
    # tractor-flavored pause output on the prompt.
    child.expect(PROMPT)
    try:
        assert_before(
            child,
            [
                _pause_msg,
                "<Task '__main__.main'",
                "('root'",
                "first bp, tractor hook set",
            ]
        )
    # XXX if the above raises `AssertionError`, without sending
    # the final 'continue' cmd to the REPL-active sub-process,
    # we'll hang waiting for that pexpect instance to terminate..
    finally:
        child.sendline('c')

    # second `breakpoint()` after runtime close: stdlib hook
    # should be back in place.
    child.expect(PROMPT)
    assert_before(
        child,
        [
            "last bp, stdlib hook restored",
        ]
    )

    # since the stdlib hook was already restored there should be NO
    # `tractor` `log.pdb()` content from console!
    assert not in_prompt_msg(
        child,
        [
            _pause_msg,
            "<Task '__main__.main'",
            "('root'",
        ],
    )
    child.sendline('c')
    child.expect(EOF)
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
# shared exc instance so the `raise_on_exit` parametrization below
# can reference its exact type via `type(_to_raise)`.
_to_raise = Exception('Triggering a crash')


@pytest.mark.parametrize(
    'to_raise',
    [
        None,
        _to_raise,
        RuntimeError('Never crash handle this!'),
    ],
)
@pytest.mark.parametrize(
    'raise_on_exit',
    [
        True,
        [type(_to_raise)],
        False,
    ]
)
def test_crash_handler_cms(
    debug_mode: bool,
    to_raise: Exception,
    raise_on_exit: bool|list[Exception],
):
    '''
    Verify the `.devx.open_crash_handler()` API(s) by also
    (conveniently enough) testing its `repl_fixture: ContextManager`
    param support which for this suite allows us to avoid use of
    a `pexpect`-style-test since we use the fixture to avoid actually
    entering `PdbpREPL.interact()` :smirk:

    '''
    import tractor
    # import trio

    # state flags
    repl_acquired: bool = False
    repl_released: bool = False

    @cm
    def block_repl_ux(
        repl: tractor.devx.debug.PdbREPL,
        maybe_bxerr: (
            # NOTE(review): mixed `.devx.debug` vs `.devx._debug`
            # module paths across these annotations — confirm which
            # is canonical (lazy-eval'd due to future-annotations so
            # it doesn't crash at runtime either way).
            tractor.devx._debug.BoxedMaybeException
            |None
        ) = None,
        enter_repl: bool = True,

    ) -> bool:
        '''
        Set pre/post-REPL state vars and bypass actual console
        interaction.

        '''
        nonlocal repl_acquired, repl_released

        # task: trio.Task = trio.lowlevel.current_task()
        # print(f'pre-REPL active_task={task.name}')

        print('pre-REPL')
        repl_acquired = True
        yield False  # never actually .interact()
        print('post-REPL')
        repl_released = True

    try:
        # TODO, with runtime's `debug_mode` setting
        # -[ ] need to open runtime tho obvi..
        #
        # with tractor.devx.maybe_open_crash_handler(
        #     pdb=True,

        with tractor.devx.open_crash_handler(
            raise_on_exit=raise_on_exit,
            repl_fixture=block_repl_ux
        ) as bxerr:
            if to_raise is not None:
                raise to_raise

    # only reached when the handler re-raised, i.e. `raise_on_exit`
    # is `True` or lists `to_raise`'s type; with
    # `raise_on_exit=False` the exc is swallowed into `bxerr` and
    # the `else:` branch below runs instead.
    except Exception as _exc:
        exc = _exc
        if (
            raise_on_exit is True
            or
            type(to_raise) in raise_on_exit
        ):
            # the exact same instance must be what propagated AND
            # what got boxed on the handler.
            assert (
                exc
                is
                to_raise
                is
                bxerr.value
            )

        else:
            raise
    else:
        # no propagation: either nothing was raised or the handler
        # was configured to swallow it.
        assert (
            to_raise is None
            or
            not raise_on_exit
            or
            type(to_raise) not in raise_on_exit
        )
        assert bxerr.value is to_raise

    assert bxerr.raise_on_exit == raise_on_exit

    # the repl-fixture must have been entered/exited iff an exc
    # actually triggered crash handling.
    if to_raise is not None:
        assert repl_acquired
        assert repl_released
|  | @ -1,4 +0,0 @@ | ||||||
| ''' |  | ||||||
| `tractor.ipc` subsystem(s)/unit testing suites. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
|  | @ -1,114 +0,0 @@ | ||||||
| ''' |  | ||||||
| Unit-ish tests for specific IPC transport protocol backends. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| from pathlib import Path |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Actor, |  | ||||||
|     _state, |  | ||||||
|     _addr, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.fixture
def bindspace_dir_str() -> str:
    '''
    Yield the path (as `str`) to a not-yet-existing UDS "bindspace"
    dir under the tractor runtime dir, removing it on teardown if
    the suite created it.

    NOTE(review): this is a *generator* fixture so `-> str`
    describes the yielded value, not the return type proper.

    '''
    rt_dir: Path = tractor._state.get_rt_dir()
    bs_dir: Path = rt_dir / 'doggy'
    bs_dir_str: str = str(bs_dir)
    # tests rely on the transport layer creating this dir
    # implicitly, so it must NOT already exist!
    assert not bs_dir.is_dir()

    yield bs_dir_str

    # delete it on suite teardown.
    # ?TODO? should we support this internally
    # or is leaking it ok?
    if bs_dir.is_dir():
        bs_dir.rmdir()
| 
 |  | ||||||
| 
 |  | ||||||
def test_uds_bindspace_created_implicitly(
    debug_mode: bool,
    bindspace_dir_str: str,
):
    '''
    The UDS bindspace dir (the fs dir holding the socket file)
    should be created implicitly by the transport layer when the
    root actor binds a registry addr under it.

    '''
    registry_addr: tuple = (
        f'{bindspace_dir_str}',
        'registry@doggy.sock',
    )
    bs_dir_str: str = registry_addr[0]

    # XXX, ensure bindspace-dir DNE beforehand!
    assert not Path(bs_dir_str).is_dir()

    async def main():
        async with tractor.open_nursery(
            enable_transports=['uds'],
            registry_addrs=[registry_addr],
            debug_mode=debug_mode,
        ) as _an:

            # XXX MUST be created implicitly by
            # `.ipc._uds.start_listener()`!
            assert Path(bs_dir_str).is_dir()

            root: Actor = tractor.current_actor()
            assert root.is_registrar

            # the registry addr should be reflected in both the
            # actor's own state and the global runtime-vars.
            assert registry_addr in root.reg_addrs
            assert (
                registry_addr
                in
                _state._runtime_vars['_registry_addrs']
            )
            assert (
                _addr.wrap_address(registry_addr)
                in
                root.registry_addrs
            )

    trio.run(main)
| 
 |  | ||||||
| 
 |  | ||||||
def test_uds_double_listen_raises_connerr(
    debug_mode: bool,
    bindspace_dir_str: str,
):
    '''
    Binding a second UDS listener on an addr the root actor already
    bound should raise a `ConnectionError` chained from the
    underlying `OSError('Address already in use')`.

    '''
    registry_addr: tuple = (
        f'{bindspace_dir_str}',
        'registry@doggy.sock',
    )

    async def main():
        async with tractor.open_nursery(
            enable_transports=['uds'],
            registry_addrs=[registry_addr],
            debug_mode=debug_mode,
        ) as _an:

            # runtime up
            root: Actor = tractor.current_actor()

            from tractor.ipc._uds import (
                start_listener,
                UDSAddress,
            )
            # the addr the runtime already bound above.
            ya_bound_addr: UDSAddress = root.registry_addrs[0]
            try:
                await start_listener(
                    addr=ya_bound_addr,
                )
            except ConnectionError as connerr:
                # the src exc must be the OS-level bind failure.
                assert type(src_exc := connerr.__context__) is OSError
                assert 'Address already in use' in src_exc.args
                # complete, exit test.

            else:
                pytest.fail('It dint raise a connerr !?')


    trio.run(main)
|  | @ -1,95 +0,0 @@ | ||||||
| ''' |  | ||||||
| Verify the `enable_transports` param drives various |  | ||||||
| per-root/sub-actor IPC endpoint/server settings. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Actor, |  | ||||||
|     Portal, |  | ||||||
|     ipc, |  | ||||||
|     msg, |  | ||||||
|     _state, |  | ||||||
|     _addr, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
@tractor.context
async def chk_tpts(
    ctx: tractor.Context,
    tpt_proto_key: str,
):
    '''
    Sub-actor side checker: verify the runtime-vars, the received
    spawn-spec and the live IPC server all agree on the enabled
    transport protocol, then report our accept addrs to the parent.

    '''
    rtvars = _state._runtime_vars
    assert tpt_proto_key in rtvars['_enable_tpts']

    actor: Actor = tractor.current_actor()
    spec: msg.types.SpawnSpec = actor._spawn_spec
    assert spec._runtime_vars == rtvars

    # every bound endpoint address must match the requested proto
    serv: ipc._server.Server = actor.ipc_server
    addr: ipc._types.Address
    for addr in serv.addrs:
        assert addr.proto_key == tpt_proto_key

    # `Actor`'s delegate props must mirror the server's
    assert actor.accept_addrs == serv.accept_addrs

    await ctx.started(serv.accept_addrs)
| 
 |  | ||||||
| 
 |  | ||||||
# TODO, parametrize over mis-matched-proto-typed `registry_addrs`
# since it seems to work in `piker`, but we're not exactly sure
# whether both tcp & uds are actually being deployed in that case?
#
@pytest.mark.parametrize(
    'tpt_proto_key',
    ['tcp', 'uds'],
    ids=lambda item: f'ipc_tpt={item!r}'
)
def test_root_passes_tpt_to_sub(
    tpt_proto_key: str,
    reg_addr: tuple,
    debug_mode: bool,
):
    '''
    Root enables a single transport proto; a spawned sub-actor must
    inherit and apply the same setting (checked via `chk_tpts`).

    '''
    async def main():
        async with tractor.open_nursery(
            enable_transports=[tpt_proto_key],
            registry_addrs=[reg_addr],
            debug_mode=debug_mode,
        ) as an:
            # the root's own runtime-vars carry the requested proto
            assert tpt_proto_key in _state._runtime_vars['_enable_tpts']

            ptl: Portal = await an.start_actor(
                name='sub',
                enable_modules=[__name__],
            )
            async with ptl.open_context(
                chk_tpts,
                tpt_proto_key=tpt_proto_key,
            ) as (ctx, accept_addrs):
                # each reported (unwrapped) addr must wrap validly
                uw_addr: tuple
                for uw_addr in accept_addrs:
                    assert _addr.wrap_address(uw_addr).is_valid

            # shutdown sub-actor(s)
            await an.cancel()

    trio.run(main)
|  | @ -1,72 +0,0 @@ | ||||||
| ''' |  | ||||||
| High-level `.ipc._server` unit tests. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from __future__ import annotations |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| from tractor import ( |  | ||||||
|     devx, |  | ||||||
|     ipc, |  | ||||||
|     log, |  | ||||||
| ) |  | ||||||
| from tractor._testing.addr import ( |  | ||||||
|     get_rando_addr, |  | ||||||
| ) |  | ||||||
| # TODO, use/check-roundtripping with some of these wrapper types? |  | ||||||
| # |  | ||||||
| # from .._addr import Address |  | ||||||
| # from ._chan import Channel |  | ||||||
| # from ._transport import MsgTransport |  | ||||||
| # from ._uds import UDSAddress |  | ||||||
| # from ._tcp import TCPAddress |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    '_tpt_proto',
    ['uds', 'tcp']
)
def test_basic_ipc_server(
    _tpt_proto: str,
    debug_mode: bool,
    loglevel: str,
):
    '''
    Bring up a bare `ipc._server.Server`, listen on a random addr,
    verify its fresh-start internal state, then cancel it.

    '''
    # so we see the socket-listener reporting on console
    log.get_console_log("INFO")

    rando_addr: tuple = get_rando_addr(
        tpt_proto=_tpt_proto,
    )

    async def main():
        async with ipc._server.open_ipc_server() as server:
            # a fresh server re-uses its parent-tn as the
            # stream-handler nursery and has no connected peers.
            assert server._parent_tn
            assert server._parent_tn is server._stream_handler_tn
            assert server._no_more_peers.is_set()

            eps: list[ipc._server.Endpoint] = await server.listen_on(
                accept_addrs=[rando_addr],
                stream_handler_nursery=None,
            )
            # exactly one endpoint, bound but with no peer tpts yet
            assert len(eps) == 1
            ep = eps[0]
            assert ep._listener
            assert not ep.peer_tpts

            server._parent_tn.cancel_scope.cancel()

        # !TODO! actually make a bg-task connection from a client
        # using `ipc._chan._connect_chan()`

    with devx.maybe_open_crash_handler(
        pdb=debug_mode,
    ):
        trio.run(main)
|  | @ -1,11 +1,382 @@ | ||||||
| """ | """ | ||||||
| Bidirectional streaming. | Bidirectional streaming and context API. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| import pytest | import pytest | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
|  | from conftest import tractor_test | ||||||
|  | 
 | ||||||
|  | # the general stream semantics are | ||||||
|  | # - normal termination: far end relays a stop message which | ||||||
|  | # terminates an ongoing ``MsgStream`` iteration | ||||||
|  | # - cancel termination: context is cancelled on either side cancelling | ||||||
|  | #  the "linked" inter-actor task context | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _state: bool = False | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
@tractor.context
async def simple_setup_teardown(
    ctx: tractor.Context,
    data: int,
    block_forever: bool = False,
) -> None:
    '''
    Flip the module-level `_state` flag on entry and ack the parent
    with `data + 1`; either block until cancelled or return 'yo',
    always clearing the flag on the way out.

    '''
    # startup phase: mark ourselves as "up"
    global _state
    _state = True

    # signal to parent that we're up
    await ctx.started(data + 1)

    try:
        if not block_forever:
            return 'yo'
        # block until cancelled by the parent side
        await trio.sleep_forever()
    finally:
        # teardown phase, runs on return OR cancellation
        _state = False
|  | 
 | ||||||
|  | 
 | ||||||
async def assert_state(value: bool):
    '''
    Check the module-level `_state` flag equals `value`.

    '''
    assert _state == value
|  | 
 | ||||||
|  | 
 | ||||||
@pytest.mark.parametrize(
    'error_parent',
    [False, True],
)
@pytest.mark.parametrize(
    'callee_blocks_forever',
    [False, True],
)
def test_simple_context(
    error_parent,
    callee_blocks_forever,
):
    '''
    Open a context to a daemon sub-actor, verify the `.started()`
    value round-trips, then either cancel the blocked callee or
    collect its result; optionally raise from the parent side and
    require that the error propagates out of `trio.run()`.

    '''
    async def main():
        async with tractor.open_nursery() as n:

            portal = await n.start_actor(
                'simple_context',
                enable_modules=[__name__],
            )

            async with portal.open_context(
                simple_setup_teardown,
                data=10,
                block_forever=callee_blocks_forever,
            ) as (ctx, sent):

                # callee acks with `data + 1`
                assert sent == 11

                if callee_blocks_forever:
                    await portal.run(assert_state, value=True)
                    await ctx.cancel()
                else:
                    assert await ctx.result() == 'yo'

            # after cancellation the callee's teardown ran
            await portal.run(assert_state, value=False)

            if error_parent:
                raise ValueError

            # shut down daemon
            await portal.cancel_actor()

    if error_parent:
        # XXX fix: previously a bare `try/except ValueError: pass`
        # which would ALSO pass silently if no error propagated at
        # all; `pytest.raises` makes propagation mandatory.
        with pytest.raises(ValueError):
            trio.run(main)
    else:
        trio.run(main)
|  | 
 | ||||||
|  | 
 | ||||||
|  | # basic stream terminations: | ||||||
|  | # - callee context closes without using stream | ||||||
|  | # - caller context closes without using stream | ||||||
|  | # - caller context calls `Context.cancel()` while streaming | ||||||
|  | #   is ongoing resulting in callee being cancelled | ||||||
|  | # - callee calls `Context.cancel()` while streaming and caller | ||||||
|  | #   sees stream terminated in `RemoteActorError` | ||||||
|  | 
 | ||||||
|  | # TODO: future possible features | ||||||
|  | # - restart request: far end raises `ContextRestart` | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
@tractor.context
async def close_ctx_immediately(
    ctx: tractor.Context,
) -> None:
    '''
    Signal `.started()` then open-and-immediately-close the stream,
    exercising the "callee closes without using stream" path.

    '''
    await ctx.started()
    # NOTE: removed a dead `global _state` declaration; this fn
    # never reads or writes `_state`.

    async with ctx.open_stream():
        pass
|  | 
 | ||||||
|  | 
 | ||||||
@tractor_test
async def test_callee_closes_ctx_after_stream_open():
    '''
    callee context closes without using stream

    '''
    async with tractor.open_nursery() as n:

        portal = await n.start_actor(
            'fast_stream_closer',
            enable_modules=[__name__],
        )

        async with portal.open_context(
            close_ctx_immediately,

            # flag to avoid waiting the final result
            # cancel_on_exit=True,

        ) as (ctx, sent):

            # `close_ctx_immediately` passes no value to `.started()`
            assert sent is None

            # the callee exits its ctx body right away so the stream
            # should end-of-channel quickly; fail if it hangs.
            with trio.fail_after(0.5):
                async with ctx.open_stream() as stream:

                    # should fall through since ``StopAsyncIteration``
                    # should be raised through translation of
                    # a ``trio.EndOfChannel`` by
                    # ``trio.abc.ReceiveChannel.__anext__()``
                    async for _ in stream:
                        assert 0
                    else:

                        # verify stream is now closed
                        try:
                            await stream.receive()
                        except trio.EndOfChannel:
                            pass

            # TODO: should be just raise the closed resource err
            # directly here to enforce not allowing a re-open
            # of a stream to the context (at least until a time of
            # if/when we decide that's a good idea?)
            try:
                async with ctx.open_stream() as stream:
                    pass
            except trio.ClosedResourceError:
                pass

        await portal.cancel_actor()
|  | 
 | ||||||
|  | 
 | ||||||
@tractor.context
async def expect_cancelled(
    ctx: tractor.Context,
) -> None:
    '''
    Echo-server ctx which *must* be cancelled by its caller; flips
    the module `_state` flag so the caller can poll our lifecycle.

    '''
    global _state
    _state = True

    await ctx.started()

    try:
        async with ctx.open_stream() as stream:
            async for msg in stream:
                # echo back whatever the caller sends
                await stream.send(msg)
    except trio.Cancelled:
        # expected case: mark teardown then propagate
        _state = False
        raise
    else:
        assert 0, "Wasn't cancelled!?"
|  | 
 | ||||||
|  | 
 | ||||||
@pytest.mark.parametrize(
    'use_ctx_cancel_method',
    [False, True],
)
@tractor_test
async def test_caller_closes_ctx_after_callee_opens_stream(
    use_ctx_cancel_method: bool,
):
    'caller context closes without using stream'

    async with tractor.open_nursery() as n:

        portal = await n.start_actor(
            'ctx_cancelled',
            enable_modules=[__name__],
        )

        async with portal.open_context(
            expect_cancelled,
        ) as (ctx, sent):
            # callee flipped the module flag in its startup phase
            await portal.run(assert_state, value=True)

            # `expect_cancelled` passes no value to `.started()`
            assert sent is None

            # call cancel explicitly
            if use_ctx_cancel_method:

                await ctx.cancel()

                try:
                    async with ctx.open_stream() as stream:
                        async for msg in stream:
                            pass

                except tractor.ContextCancelled:
                    raise  # XXX: must be propagated to __aexit__

                else:
                    assert 0, "Should have context cancelled?"

                # channel should still be up
                assert portal.channel.connected()

                # ctx is closed here
                await portal.run(assert_state, value=False)

            else:
                # no explicit cancel: the callee blocks forever so
                # `.result()` must time out, then we cancel.
                try:
                    with trio.fail_after(0.2):
                        await ctx.result()
                        assert 0, "Callee should have blocked!?"
                except trio.TooSlowError:
                    await ctx.cancel()
        # NOTE(review): this `try` sits OUTSIDE the `open_context`
        # block (dedented) — presumably to verify a stream re-open
        # after ctx close errors out; confirm the dedent is
        # intentional and not an indentation slip.
        try:
            async with ctx.open_stream() as stream:
                async for msg in stream:
                    pass
        except tractor.ContextCancelled:
            pass
        else:
            assert 0, "Should have received closed resource error?"

        # ctx is closed here
        await portal.run(assert_state, value=False)

        # channel should not have been destroyed yet, only the
        # inter-actor-task context
        assert portal.channel.connected()

        # teardown the actor
        await portal.cancel_actor()
|  | 
 | ||||||
|  | 
 | ||||||
@tractor_test
async def test_multitask_caller_cancels_from_nonroot_task():
    '''
    A sibling (non-root) caller task cancels the ctx while another
    task iterates the stream; the stream terminates but the channel
    stays connected until explicit actor teardown.

    '''
    async with tractor.open_nursery() as an:

        portal = await an.start_actor(
            'ctx_cancelled',
            enable_modules=[__name__],
        )

        async with portal.open_context(
            expect_cancelled,
        ) as (ctx, sent):

            await portal.run(assert_state, value=True)
            assert sent is None

            async with ctx.open_stream() as stream:

                async def send_msg_then_cancel():
                    await stream.send('yo')
                    await portal.run(assert_state, value=True)
                    await ctx.cancel()
                    await portal.run(assert_state, value=False)

                # XXX fix: the inner task-nursery previously shadowed
                # the outer actor-nursery's name `n`; renamed both
                # for clarity (`an` actor-nursery, `tn` task-nursery).
                async with trio.open_nursery() as tn:
                    tn.start_soon(send_msg_then_cancel)

                    try:
                        async for msg in stream:
                            # the callee echoes our payload back
                            assert msg == 'yo'

                    except tractor.ContextCancelled:
                        raise  # XXX: must be propagated to __aexit__

                # channel should still be up
                assert portal.channel.connected()

                # ctx is closed here
                await portal.run(assert_state, value=False)

        # channel should not have been destroyed yet, only the
        # inter-actor-task context
        assert portal.channel.connected()

        # teardown the actor
        await portal.cancel_actor()
|  | 
 | ||||||
|  | 
 | ||||||
@tractor.context
async def cancel_self(
    ctx: tractor.Context,
) -> None:
    '''
    Cancel our own ctx immediately after startup and verify the
    local task actually gets cancelled (rather than timing out).

    '''
    global _state
    _state = True

    await ctx.cancel()
    try:
        with trio.fail_after(0.1):
            await trio.sleep_forever()
    except trio.TooSlowError:
        # should never get here: cancellation must win the race
        assert 0
    except trio.Cancelled:
        # expected: self-cancel landed, propagate it
        raise
|  | 
 | ||||||
|  | 
 | ||||||
@tractor_test
async def test_callee_cancels_before_started():
    '''
    Callee calls `Context.cancel()` while streaming and caller
    sees stream terminated in `ContextCancelled`.

    '''
    async with tractor.open_nursery() as n:

        portal = await n.start_actor(
            'cancels_self',
            enable_modules=[__name__],
        )
        try:

            async with portal.open_context(
                cancel_self,
            ) as (ctx, sent):
                async with ctx.open_stream():

                    await trio.sleep_forever()

        # raises a special cancel signal
        except tractor.ContextCancelled as ce:
            # XXX fix: was a bare comparison expression with no
            # effect; actually assert the boxed source-error type.
            assert ce.type == trio.Cancelled

        # teardown the actor
        await portal.cancel_actor()
|  | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def simple_rpc( | async def simple_rpc( | ||||||
|  | @ -14,10 +385,9 @@ async def simple_rpc( | ||||||
|     data: int, |     data: int, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     """Test a small ping-pong server. | ||||||
|     Test a small ping-pong server. |  | ||||||
| 
 | 
 | ||||||
|     ''' |     """ | ||||||
|     # signal to parent that we're up |     # signal to parent that we're up | ||||||
|     await ctx.started(data + 1) |     await ctx.started(data + 1) | ||||||
| 
 | 
 | ||||||
|  | @ -75,10 +445,9 @@ async def simple_rpc_with_forloop( | ||||||
|     [simple_rpc, simple_rpc_with_forloop], |     [simple_rpc, simple_rpc_with_forloop], | ||||||
| ) | ) | ||||||
| def test_simple_rpc(server_func, use_async_for): | def test_simple_rpc(server_func, use_async_for): | ||||||
|     ''' |     """The simplest request response pattern. | ||||||
|     The simplest request response pattern. |  | ||||||
| 
 | 
 | ||||||
|     ''' |     """ | ||||||
|     async def main(): |     async def main(): | ||||||
|         async with tractor.open_nursery() as n: |         async with tractor.open_nursery() as n: | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -1,309 +0,0 @@ | ||||||
| ''' |  | ||||||
| Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la |  | ||||||
| cancelacion?.. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from functools import partial |  | ||||||
| from types import ModuleType |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| from _pytest.pathlib import import_path |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     TransportClosed, |  | ||||||
| ) |  | ||||||
| from tractor._testing import ( |  | ||||||
|     examples_dir, |  | ||||||
|     break_ipc, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    'pre_aclose_msgstream',
    [
        False,
        True,
    ],
    ids=[
        'no_msgstream_aclose',
        'pre_aclose_msgstream',
    ],
)
@pytest.mark.parametrize(
    'ipc_break',
    [
        # no breaks
        {
            'break_parent_ipc_after': False,
            'break_child_ipc_after': False,
        },

        # only parent breaks
        {
            'break_parent_ipc_after': 500,
            'break_child_ipc_after': False,
        },

        # only child breaks
        {
            'break_parent_ipc_after': False,
            'break_child_ipc_after': 500,
        },

        # both: break parent first
        {
            'break_parent_ipc_after': 500,
            'break_child_ipc_after': 800,
        },
        # both: break child first
        {
            'break_parent_ipc_after': 800,
            'break_child_ipc_after': 500,
        },

    ],
    ids=[
        'no_break',
        'break_parent',
        'break_child',
        'break_both_parent_first',
        'break_both_child_first',
    ],
)
def test_ipc_channel_break_during_stream(
    debug_mode: bool,
    loglevel: str,
    spawn_backend: str,
    ipc_break: dict|None,
    pre_aclose_msgstream: bool,
    tpt_proto: str,
):
    '''
    Ensure we can have an IPC channel break its connection during
    streaming and it's still possible for the (simulated) user to kill
    the actor tree using SIGINT.

    We also verify the type of connection error expected in the parent
    depending on which side if the IPC breaks first.

    '''
    # by def we expect KBI from user after a simulated "hang
    # period" wherein the user eventually hits ctl-c to kill the
    # root-actor tree.
    #
    # XXX fix: these defaults MUST be initialized *before* the
    # spawn-backend branch below; previously they were assigned
    # afterwards which unconditionally clobbered the non-`trio`
    # `TransportClosed` expectation.
    expect_final_exc: BaseException = KeyboardInterrupt
    expect_final_cause: BaseException|None = None

    if spawn_backend != 'trio':
        if debug_mode:
            pytest.skip('`debug_mode` only supported on `trio` spawner')

        # non-`trio` spawners should never hit the hang condition that
        # requires the user to do ctl-c to cancel the actor tree.
        # expect_final_exc = trio.ClosedResourceError
        expect_final_exc = TransportClosed

    # load the example script as a module so we can drive its `main`
    mod: ModuleType = import_path(
        examples_dir() / 'advanced_faults'
        / 'ipc_failure_during_stream.py',
        root=examples_dir(),
        consider_namespace_packages=False,
    )

    if (
        # only expect EoC if trans is broken on the child side,
        ipc_break['break_child_ipc_after'] is not False
        # AND we tell the child to call `MsgStream.aclose()`.
        and pre_aclose_msgstream
    ):
        # expect_final_exc = trio.EndOfChannel
        # ^XXX NOPE! XXX^ since now `.open_stream()` absorbs this
        # gracefully!
        expect_final_exc = KeyboardInterrupt

    # NOTE when ONLY the child breaks or it breaks BEFORE the
    # parent we expect the parent to get a closed resource error
    # on the next `MsgStream.receive()` and then fail out and
    # cancel the child from there.
    #
    # ONLY CHILD breaks
    if (
        ipc_break['break_child_ipc_after']
        and
        ipc_break['break_parent_ipc_after'] is False
    ):
        # NOTE: we DO NOT expect this any more since
        # the child side's channel will be broken silently
        # and nothing on the parent side will indicate this!
        # expect_final_exc = trio.ClosedResourceError

        # NOTE: child will send a 'stop' msg before it breaks
        # the transport channel BUT, that will be absorbed by the
        # `ctx.open_stream()` block and thus the `.open_context()`
        # should hang, after which the test script simulates
        # a user sending ctl-c by raising a KBI.
        if pre_aclose_msgstream:
            expect_final_exc = KeyboardInterrupt
            if tpt_proto == 'uds':
                expect_final_exc = TransportClosed
                expect_final_cause = trio.BrokenResourceError

            # XXX OLD XXX
            # if child calls `MsgStream.aclose()` then expect EoC.
            # ^ XXX not any more ^ since eoc is always absorbed
            # gracefully and NOT bubbled to the `.open_context()`
            # block!
            # expect_final_exc = trio.EndOfChannel

    # BOTH but, CHILD breaks FIRST
    elif (
        ipc_break['break_child_ipc_after'] is not False
        and (
            ipc_break['break_parent_ipc_after']
            > ipc_break['break_child_ipc_after']
        )
    ):
        if pre_aclose_msgstream:
            expect_final_exc = KeyboardInterrupt

            if tpt_proto == 'uds':
                expect_final_exc = TransportClosed
                expect_final_cause = trio.BrokenResourceError

    # NOTE when the parent IPC side dies (even if the child does as well
    # but the child fails BEFORE the parent) we always expect the
    # IPC layer to raise a closed-resource, NEVER do we expect
    # a stop msg since the parent-side ctx apis will error out
    # IMMEDIATELY before the child ever sends any 'stop' msg.
    #
    # ONLY PARENT breaks
    elif (
        ipc_break['break_parent_ipc_after']
        and
        ipc_break['break_child_ipc_after'] is False
    ):
        expect_final_exc = tractor.TransportClosed
        expect_final_cause = trio.ClosedResourceError

    # BOTH but, PARENT breaks FIRST
    elif (
        ipc_break['break_parent_ipc_after'] is not False
        and (
            ipc_break['break_child_ipc_after']
            >
            ipc_break['break_parent_ipc_after']
        )
    ):
        expect_final_exc = tractor.TransportClosed
        expect_final_cause = trio.ClosedResourceError

    with pytest.raises(
        expected_exception=(
            expect_final_exc,
            ExceptionGroup,
        ),
    ) as excinfo:
        try:
            trio.run(
                partial(
                    mod.main,
                    debug_mode=debug_mode,
                    start_method=spawn_backend,
                    loglevel=loglevel,
                    pre_close=pre_aclose_msgstream,
                    tpt_proto=tpt_proto,
                    **ipc_break,
                )
            )
        except KeyboardInterrupt as _kbi:
            kbi = _kbi
            if expect_final_exc is not KeyboardInterrupt:
                pytest.fail(
                    'Rxed unexpected KBI !?\n'
                    f'{repr(kbi)}'
                )

            raise

        except tractor.TransportClosed as _tc:
            tc = _tc
            if expect_final_exc is KeyboardInterrupt:
                pytest.fail(
                    'Unexpected transport failure !?\n'
                    f'{repr(tc)}'
                )
            cause: Exception = tc.__cause__
            assert (
                # type(cause) is trio.ClosedResourceError
                type(cause) is expect_final_cause

                # TODO, should we expect a certain exc-message (per
                # tpt) as well??
                # and
                # cause.args[0] == 'another task closed this fd'
            )

            raise

    # get raw instance from pytest wrapper
    value = excinfo.value
    if isinstance(value, ExceptionGroup):
        excs = value.exceptions
        assert len(excs) == 1
        final_exc = excs[0]
        assert isinstance(final_exc, expect_final_exc)
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def break_ipc_after_started( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ) -> None: |  | ||||||
|     await ctx.started() |  | ||||||
|     async with ctx.open_stream() as stream: |  | ||||||
| 
 |  | ||||||
|         # TODO: make a test which verifies the error |  | ||||||
|         # for this, i.e. raises a `MsgTypeError` |  | ||||||
|         # await ctx.chan.send(None) |  | ||||||
| 
 |  | ||||||
|         await break_ipc( |  | ||||||
|             stream=stream, |  | ||||||
|             pre_close=True, |  | ||||||
|         ) |  | ||||||
|         print('child broke IPC and terminating') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): |  | ||||||
|     ''' |  | ||||||
|     Verify that is a subactor's IPC goes down just after bringing up |  | ||||||
|     a stream the parent can trigger a SIGINT and the child will be |  | ||||||
|     reaped out-of-IPC by the localhost process supervision machinery: |  | ||||||
|     aka "zombie lord". |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async def main(): |  | ||||||
|         with trio.fail_after(3): |  | ||||||
|             async with tractor.open_nursery() as an: |  | ||||||
|                 portal = await an.start_actor( |  | ||||||
|                     'ipc_breaker', |  | ||||||
|                     enable_modules=[__name__], |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 with trio.move_on_after(1): |  | ||||||
|                     async with ( |  | ||||||
|                         portal.open_context( |  | ||||||
|                             break_ipc_after_started |  | ||||||
|                         ) as (ctx, sent), |  | ||||||
|                     ): |  | ||||||
|                         async with ctx.open_stream(): |  | ||||||
|                             await trio.sleep(0.5) |  | ||||||
| 
 |  | ||||||
|                         print('parent waiting on context') |  | ||||||
| 
 |  | ||||||
|                 print( |  | ||||||
|                     'parent exited context\n' |  | ||||||
|                     'parent raising KBI..\n' |  | ||||||
|                 ) |  | ||||||
|                 raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(KeyboardInterrupt): |  | ||||||
|         trio.run(main) |  | ||||||
|  | @ -1,21 +1,15 @@ | ||||||
| ''' | """ | ||||||
| Advanced streaming patterns using bidirectional streams and contexts. | Advanced streaming patterns using bidirectional streams and contexts. | ||||||
| 
 | 
 | ||||||
| ''' | """ | ||||||
| from collections import Counter |  | ||||||
| import itertools | import itertools | ||||||
| import platform | from typing import Set, Dict, List | ||||||
| 
 | 
 | ||||||
| import pytest |  | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def is_win(): | _registry: Dict[str, Set[tractor.ReceiveMsgStream]] = { | ||||||
|     return platform.system() == 'Windows' |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _registry: dict[str, set[tractor.MsgStream]] = { |  | ||||||
|     'even': set(), |     'even': set(), | ||||||
|     'odd': set(), |     'odd': set(), | ||||||
| } | } | ||||||
|  | @ -77,7 +71,7 @@ async def subscribe( | ||||||
| 
 | 
 | ||||||
| async def consumer( | async def consumer( | ||||||
| 
 | 
 | ||||||
|     subs: list[str], |     subs: List[str], | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|  | @ -144,16 +138,8 @@ def test_dynamic_pub_sub(): | ||||||
| 
 | 
 | ||||||
|     try: |     try: | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
|     except ( |     except trio.TooSlowError: | ||||||
|         trio.TooSlowError, |         pass | ||||||
|         ExceptionGroup, |  | ||||||
|     ) as err: |  | ||||||
|         if isinstance(err, ExceptionGroup): |  | ||||||
|             for suberr in err.exceptions: |  | ||||||
|                 if isinstance(suberr, trio.TooSlowError): |  | ||||||
|                     break |  | ||||||
|             else: |  | ||||||
|                 pytest.fail('Never got a `TooSlowError` ?') |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -186,22 +172,14 @@ async def one_task_streams_and_one_handles_reqresp( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_reqresp_ontopof_streaming(): | def test_reqresp_ontopof_streaming(): | ||||||
|     ''' |     '''Test a subactor that both streams with one task and | ||||||
|     Test a subactor that both streams with one task and |  | ||||||
|     spawns another which handles a small requests-response |     spawns another which handles a small requests-response | ||||||
|     dialogue over the same bidir-stream. |     dialogue over the same bidir-stream. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     async def main(): |     async def main(): | ||||||
| 
 | 
 | ||||||
|         # flat to make sure we get at least one pong |         with trio.move_on_after(2): | ||||||
|         got_pong: bool = False |  | ||||||
|         timeout: int = 2 |  | ||||||
| 
 |  | ||||||
|         if is_win():  # smh |  | ||||||
|             timeout = 4 |  | ||||||
| 
 |  | ||||||
|         with trio.move_on_after(timeout): |  | ||||||
|             async with tractor.open_nursery() as n: |             async with tractor.open_nursery() as n: | ||||||
| 
 | 
 | ||||||
|                 # name of this actor will be same as target func |                 # name of this actor will be same as target func | ||||||
|  | @ -210,6 +188,9 @@ def test_reqresp_ontopof_streaming(): | ||||||
|                     enable_modules=[__name__] |                     enable_modules=[__name__] | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  |                 # flat to make sure we get at least one pong | ||||||
|  |                 got_pong: bool = False | ||||||
|  | 
 | ||||||
|                 async with portal.open_context( |                 async with portal.open_context( | ||||||
|                     one_task_streams_and_one_handles_reqresp, |                     one_task_streams_and_one_handles_reqresp, | ||||||
| 
 | 
 | ||||||
|  | @ -237,188 +218,3 @@ def test_reqresp_ontopof_streaming(): | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
|     except trio.TooSlowError: |     except trio.TooSlowError: | ||||||
|         pass |         pass | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def async_gen_stream(sequence): |  | ||||||
|     for i in sequence: |  | ||||||
|         yield i |  | ||||||
|         await trio.sleep(0.1) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def echo_ctx_stream( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ) -> None: |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     async with ctx.open_stream() as stream: |  | ||||||
|         async for msg in stream: |  | ||||||
|             await stream.send(msg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_sigint_both_stream_types(): |  | ||||||
|     '''Verify that running a bi-directional and recv only stream |  | ||||||
|     side-by-side will cancel correctly from SIGINT. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     timeout: float = 2 |  | ||||||
|     if is_win():  # smh |  | ||||||
|         timeout += 1 |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         with trio.fail_after(timeout): |  | ||||||
|             async with tractor.open_nursery() as n: |  | ||||||
|                 # name of this actor will be same as target func |  | ||||||
|                 portal = await n.start_actor( |  | ||||||
|                     '2_way', |  | ||||||
|                     enable_modules=[__name__] |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 async with portal.open_context(echo_ctx_stream) as (ctx, _): |  | ||||||
|                     async with ctx.open_stream() as stream: |  | ||||||
|                         async with portal.open_stream_from( |  | ||||||
|                             async_gen_stream, |  | ||||||
|                             sequence=list(range(1)), |  | ||||||
|                         ) as gen_stream: |  | ||||||
| 
 |  | ||||||
|                             msg = await gen_stream.receive() |  | ||||||
|                             await stream.send(msg) |  | ||||||
|                             resp = await stream.receive() |  | ||||||
|                             assert resp == msg |  | ||||||
|                             raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
|     try: |  | ||||||
|         trio.run(main) |  | ||||||
|         assert 0, "Didn't receive KBI!?" |  | ||||||
|     except KeyboardInterrupt: |  | ||||||
|         pass |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def inf_streamer( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Stream increasing ints until terminated with a 'done' msg. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         ctx.open_stream() as stream, |  | ||||||
| 
 |  | ||||||
|         # XXX TODO, INTERESTING CASE!! |  | ||||||
|         # - if we don't collapse the eg then the embedded |  | ||||||
|         # `trio.EndOfChannel` doesn't propagate directly to the above |  | ||||||
|         # .open_stream() parent, resulting in it also raising instead |  | ||||||
|         # of gracefully absorbing as normal.. so how to handle? |  | ||||||
|         tractor.trionics.collapse_eg(), |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
|         async def close_stream_on_sentinel(): |  | ||||||
|             async for msg in stream: |  | ||||||
|                 if msg == 'done': |  | ||||||
|                     print( |  | ||||||
|                         'streamer RXed "done" sentinel msg!\n' |  | ||||||
|                         'CLOSING `MsgStream`!' |  | ||||||
|                     ) |  | ||||||
|                     await stream.aclose() |  | ||||||
|                 else: |  | ||||||
|                     print(f'streamer received {msg}') |  | ||||||
|             else: |  | ||||||
|                 print('streamer exited recv loop') |  | ||||||
| 
 |  | ||||||
|         # start termination detector |  | ||||||
|         tn.start_soon(close_stream_on_sentinel) |  | ||||||
| 
 |  | ||||||
|         cap: int = 10000  # so that we don't spin forever when bug.. |  | ||||||
|         for val in range(cap): |  | ||||||
|             try: |  | ||||||
|                 print(f'streamer sending {val}') |  | ||||||
|                 await stream.send(val) |  | ||||||
|                 if val > cap: |  | ||||||
|                     raise RuntimeError( |  | ||||||
|                         'Streamer never cancelled by setinel?' |  | ||||||
|                     ) |  | ||||||
|                 await trio.sleep(0.001) |  | ||||||
| 
 |  | ||||||
|             # close out the stream gracefully |  | ||||||
|             except trio.ClosedResourceError: |  | ||||||
|                 print('transport closed on streamer side!') |  | ||||||
|                 assert stream.closed |  | ||||||
|                 break |  | ||||||
|         else: |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 'Streamer not cancelled before finished sending?' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|     print('streamer exited .open_streamer() block') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_local_task_fanout_from_stream( |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Single stream with multiple local consumer tasks using the |  | ||||||
|     ``MsgStream.subscribe()` api. |  | ||||||
| 
 |  | ||||||
|     Ensure all tasks receive all values after stream completes |  | ||||||
|     sending. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     consumers: int = 22 |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
| 
 |  | ||||||
|         counts = Counter() |  | ||||||
| 
 |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|         ) as tn: |  | ||||||
|             p: tractor.Portal = await tn.start_actor( |  | ||||||
|                 'inf_streamer', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             async with ( |  | ||||||
|                 p.open_context(inf_streamer) as (ctx, _), |  | ||||||
|                 ctx.open_stream() as stream, |  | ||||||
|             ): |  | ||||||
|                 async def pull_and_count(name: str): |  | ||||||
|                     # name = trio.lowlevel.current_task().name |  | ||||||
|                     async with stream.subscribe() as recver: |  | ||||||
|                         assert isinstance( |  | ||||||
|                             recver, |  | ||||||
|                             tractor.trionics.BroadcastReceiver |  | ||||||
|                         ) |  | ||||||
|                         async for val in recver: |  | ||||||
|                             print(f'bx {name} rx: {val}') |  | ||||||
|                             counts[name] += 1 |  | ||||||
| 
 |  | ||||||
|                         print(f'{name} bcaster ended') |  | ||||||
| 
 |  | ||||||
|                     print(f'{name} completed') |  | ||||||
| 
 |  | ||||||
|                 with trio.fail_after(3): |  | ||||||
|                     async with trio.open_nursery() as nurse: |  | ||||||
|                         for i in range(consumers): |  | ||||||
|                             nurse.start_soon( |  | ||||||
|                                 pull_and_count, |  | ||||||
|                                 i, |  | ||||||
|                             ) |  | ||||||
| 
 |  | ||||||
|                         # delay to let bcast consumers pull msgs |  | ||||||
|                         await trio.sleep(0.5) |  | ||||||
|                         print('terminating nursery of bcast rxer consumers!') |  | ||||||
|                         await stream.send('done') |  | ||||||
| 
 |  | ||||||
|             print('closed stream connection') |  | ||||||
| 
 |  | ||||||
|             assert len(counts) == consumers |  | ||||||
|             mx = max(counts.values()) |  | ||||||
|             # make sure each task received all stream values |  | ||||||
|             assert all(val == mx for val in counts.values()) |  | ||||||
| 
 |  | ||||||
|             await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  |  | ||||||
|  | @ -1,6 +1,5 @@ | ||||||
| """ | """ | ||||||
| Cancellation and error propagation | Cancellation and error propagation | ||||||
| 
 |  | ||||||
| """ | """ | ||||||
| import os | import os | ||||||
| import signal | import signal | ||||||
|  | @ -11,14 +10,8 @@ from itertools import repeat | ||||||
| import pytest | import pytest | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor._testing import ( |  | ||||||
|     tractor_test, |  | ||||||
| ) |  | ||||||
| from .conftest import no_windows |  | ||||||
| 
 | 
 | ||||||
| 
 | from conftest import tractor_test, no_windows | ||||||
| def is_win(): |  | ||||||
|     return platform.system() == 'Windows' |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def assert_err(delay=0): | async def assert_err(delay=0): | ||||||
|  | @ -45,82 +38,45 @@ async def do_nuthin(): | ||||||
|     ], |     ], | ||||||
|     ids=['no_args', 'unexpected_args'], |     ids=['no_args', 'unexpected_args'], | ||||||
| ) | ) | ||||||
| def test_remote_error(reg_addr, args_err): | def test_remote_error(arb_addr, args_err): | ||||||
|     ''' |     """Verify an error raised in a subactor that is propagated | ||||||
|     Verify an error raised in a subactor that is propagated |  | ||||||
|     to the parent nursery, contains the underlying boxed builtin |     to the parent nursery, contains the underlying boxed builtin | ||||||
|     error type info and causes cancellation and reraising all the |     error type info and causes cancellation and reraising all the | ||||||
|     way up the stack. |     way up the stack. | ||||||
| 
 |     """ | ||||||
|     ''' |  | ||||||
|     args, errtype = args_err |     args, errtype = args_err | ||||||
| 
 | 
 | ||||||
|     async def main(): |     async def main(): | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|         ) as nursery: |         ) as nursery: | ||||||
| 
 | 
 | ||||||
|             # on a remote type error caused by bad input args |  | ||||||
|             # this should raise directly which means we **don't** get |  | ||||||
|             # an exception group outside the nursery since the error |  | ||||||
|             # here and the far end task error are one in the same? |  | ||||||
|             portal = await nursery.run_in_actor( |             portal = await nursery.run_in_actor( | ||||||
|                 assert_err, |                 assert_err, name='errorer', **args | ||||||
|                 name='errorer', |  | ||||||
|                 **args |  | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # get result(s) from main task |             # get result(s) from main task | ||||||
|             try: |             try: | ||||||
|                 # this means the root actor will also raise a local |  | ||||||
|                 # parent task error and thus an eg will propagate out |  | ||||||
|                 # of this actor nursery. |  | ||||||
|                 await portal.result() |                 await portal.result() | ||||||
|             except tractor.RemoteActorError as err: |             except tractor.RemoteActorError as err: | ||||||
|                 assert err.boxed_type == errtype |                 assert err.type == errtype | ||||||
|                 print("Look Maa that actor failed hard, hehh") |                 print("Look Maa that actor failed hard, hehh") | ||||||
|                 raise |                 raise | ||||||
| 
 | 
 | ||||||
|     # ensure boxed errors |  | ||||||
|     if args: |  | ||||||
|     with pytest.raises(tractor.RemoteActorError) as excinfo: |     with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 | 
 | ||||||
|         assert excinfo.value.boxed_type == errtype |     # ensure boxed error is correct | ||||||
| 
 |     assert excinfo.value.type == errtype | ||||||
|     else: |  | ||||||
|         # the root task will also error on the `Portal.result()` |  | ||||||
|         # call so we expect an error from there AND the child. |  | ||||||
|         # |_ tho seems like on new `trio` this doesn't always |  | ||||||
|         #    happen? |  | ||||||
|         with pytest.raises(( |  | ||||||
|             BaseExceptionGroup, |  | ||||||
|             tractor.RemoteActorError, |  | ||||||
|         )) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|         # ensure boxed errors are `errtype` |  | ||||||
|         err: BaseException = excinfo.value |  | ||||||
|         if isinstance(err, BaseExceptionGroup): |  | ||||||
|             suberrs: list[BaseException] = err.exceptions |  | ||||||
|         else: |  | ||||||
|             suberrs: list[BaseException] = [err] |  | ||||||
| 
 |  | ||||||
|         for exc in suberrs: |  | ||||||
|             assert exc.boxed_type == errtype |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_multierror( | def test_multierror(arb_addr): | ||||||
|     reg_addr: tuple[str, int], |     """Verify we raise a ``trio.MultiError`` out of a nursery where | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify we raise a ``BaseExceptionGroup`` out of a nursery where |  | ||||||
|     more then one actor errors. |     more then one actor errors. | ||||||
| 
 |     """ | ||||||
|     ''' |  | ||||||
|     async def main(): |     async def main(): | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|         ) as nursery: |         ) as nursery: | ||||||
| 
 | 
 | ||||||
|             await nursery.run_in_actor(assert_err, name='errorer1') |             await nursery.run_in_actor(assert_err, name='errorer1') | ||||||
|  | @ -130,14 +86,14 @@ def test_multierror( | ||||||
|             try: |             try: | ||||||
|                 await portal2.result() |                 await portal2.result() | ||||||
|             except tractor.RemoteActorError as err: |             except tractor.RemoteActorError as err: | ||||||
|                 assert err.boxed_type is AssertionError |                 assert err.type == AssertionError | ||||||
|                 print("Look Maa that first actor failed hard, hehh") |                 print("Look Maa that first actor failed hard, hehh") | ||||||
|                 raise |                 raise | ||||||
| 
 | 
 | ||||||
|         # here we should get a ``BaseExceptionGroup`` containing exceptions |         # here we should get a `trio.MultiError` containing exceptions | ||||||
|         # from both subactors |         # from both subactors | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(BaseExceptionGroup): |     with pytest.raises(trio.MultiError): | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -145,14 +101,14 @@ def test_multierror( | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'num_subactors', range(25, 26), |     'num_subactors', range(25, 26), | ||||||
| ) | ) | ||||||
| def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | def test_multierror_fast_nursery(arb_addr, start_method, num_subactors, delay): | ||||||
|     """Verify we raise a ``BaseExceptionGroup`` out of a nursery where |     """Verify we raise a ``trio.MultiError`` out of a nursery where | ||||||
|     more then one actor errors and also with a delay before failure |     more then one actor errors and also with a delay before failure | ||||||
|     to test failure during an ongoing spawning. |     to test failure during an ongoing spawning. | ||||||
|     """ |     """ | ||||||
|     async def main(): |     async def main(): | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|         ) as nursery: |         ) as nursery: | ||||||
| 
 | 
 | ||||||
|             for i in range(num_subactors): |             for i in range(num_subactors): | ||||||
|  | @ -162,27 +118,22 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | ||||||
|                     delay=delay |                     delay=delay | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|     # with pytest.raises(trio.MultiError) as exc_info: |     with pytest.raises(trio.MultiError) as exc_info: | ||||||
|     with pytest.raises(BaseExceptionGroup) as exc_info: |  | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 | 
 | ||||||
|     assert exc_info.type == ExceptionGroup |     assert exc_info.type == tractor.MultiError | ||||||
|     err = exc_info.value |     err = exc_info.value | ||||||
|     exceptions = err.exceptions |     exceptions = err.exceptions | ||||||
| 
 | 
 | ||||||
|     if len(exceptions) == 2: |     if len(exceptions) == 2: | ||||||
|         # sometimes oddly now there's an embedded BrokenResourceError ? |         # sometimes oddly now there's an embedded BrokenResourceError ? | ||||||
|         for exc in exceptions: |         exceptions = exceptions[1].exceptions | ||||||
|             excs = getattr(exc, 'exceptions', None) |  | ||||||
|             if excs: |  | ||||||
|                 exceptions = excs |  | ||||||
|                 break |  | ||||||
| 
 | 
 | ||||||
|     assert len(exceptions) == num_subactors |     assert len(exceptions) == num_subactors | ||||||
| 
 | 
 | ||||||
|     for exc in exceptions: |     for exc in exceptions: | ||||||
|         assert isinstance(exc, tractor.RemoteActorError) |         assert isinstance(exc, tractor.RemoteActorError) | ||||||
|         assert exc.boxed_type is AssertionError |         assert exc.type == AssertionError | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def do_nothing(): | async def do_nothing(): | ||||||
|  | @ -190,20 +141,15 @@ async def do_nothing(): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize('mechanism', ['nursery_cancel', KeyboardInterrupt]) | @pytest.mark.parametrize('mechanism', ['nursery_cancel', KeyboardInterrupt]) | ||||||
| def test_cancel_single_subactor(reg_addr, mechanism): | def test_cancel_single_subactor(arb_addr, mechanism): | ||||||
|     ''' |     """Ensure a ``ActorNursery.start_actor()`` spawned subactor | ||||||
|     Ensure a ``ActorNursery.start_actor()`` spawned subactor |  | ||||||
|     cancels when the nursery is cancelled. |     cancels when the nursery is cancelled. | ||||||
| 
 |     """ | ||||||
|     ''' |  | ||||||
|     async def spawn_actor(): |     async def spawn_actor(): | ||||||
|         ''' |         """Spawn an actor that blocks indefinitely. | ||||||
|         Spawn an actor that blocks indefinitely then cancel via |         """ | ||||||
|         either `ActorNursery.cancel()` or an exception raise. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|         ) as nursery: |         ) as nursery: | ||||||
| 
 | 
 | ||||||
|             portal = await nursery.start_actor( |             portal = await nursery.start_actor( | ||||||
|  | @ -236,10 +182,7 @@ async def stream_forever(): | ||||||
| async def test_cancel_infinite_streamer(start_method): | async def test_cancel_infinite_streamer(start_method): | ||||||
| 
 | 
 | ||||||
|     # stream for at most 1 seconds |     # stream for at most 1 seconds | ||||||
|     with ( |     with trio.move_on_after(1) as cancel_scope: | ||||||
|         trio.fail_after(4), |  | ||||||
|         trio.move_on_after(1) as cancel_scope |  | ||||||
|     ): |  | ||||||
|         async with tractor.open_nursery() as n: |         async with tractor.open_nursery() as n: | ||||||
|             portal = await n.start_actor( |             portal = await n.start_actor( | ||||||
|                 'donny', |                 'donny', | ||||||
|  | @ -262,8 +205,8 @@ async def test_cancel_infinite_streamer(start_method): | ||||||
|     [ |     [ | ||||||
|         # daemon actors sit idle while single task actors error out |         # daemon actors sit idle while single task actors error out | ||||||
|         (1, tractor.RemoteActorError, AssertionError, (assert_err, {}), None), |         (1, tractor.RemoteActorError, AssertionError, (assert_err, {}), None), | ||||||
|         (2, BaseExceptionGroup, AssertionError, (assert_err, {}), None), |         (2, tractor.MultiError, AssertionError, (assert_err, {}), None), | ||||||
|         (3, BaseExceptionGroup, AssertionError, (assert_err, {}), None), |         (3, tractor.MultiError, AssertionError, (assert_err, {}), None), | ||||||
| 
 | 
 | ||||||
|         # 1 daemon actor errors out while single task actors sleep forever |         # 1 daemon actor errors out while single task actors sleep forever | ||||||
|         (3, tractor.RemoteActorError, AssertionError, (sleep_forever, {}), |         (3, tractor.RemoteActorError, AssertionError, (sleep_forever, {}), | ||||||
|  | @ -274,7 +217,7 @@ async def test_cancel_infinite_streamer(start_method): | ||||||
|          (do_nuthin, {}), (assert_err, {'delay': 1}, True)), |          (do_nuthin, {}), (assert_err, {'delay': 1}, True)), | ||||||
|         # daemon complete quickly delay while single task |         # daemon complete quickly delay while single task | ||||||
|         # actors error after brief delay |         # actors error after brief delay | ||||||
|         (3, BaseExceptionGroup, AssertionError, |         (3, tractor.MultiError, AssertionError, | ||||||
|          (assert_err, {'delay': 1}), (do_nuthin, {}, False)), |          (assert_err, {'delay': 1}), (do_nuthin, {}, False)), | ||||||
|     ], |     ], | ||||||
|     ids=[ |     ids=[ | ||||||
|  | @ -287,32 +230,20 @@ async def test_cancel_infinite_streamer(start_method): | ||||||
|     ], |     ], | ||||||
| ) | ) | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_some_cancels_all( | async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel): | ||||||
|     num_actors_and_errs: tuple, |     """Verify a subset of failed subactors causes all others in | ||||||
|     start_method: str, |  | ||||||
|     loglevel: str, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify a subset of failed subactors causes all others in |  | ||||||
|     the nursery to be cancelled just like the strategy in trio. |     the nursery to be cancelled just like the strategy in trio. | ||||||
| 
 | 
 | ||||||
|     This is the first and only supervisory strategy at the moment. |     This is the first and only supervisory strategy at the moment. | ||||||
| 
 |     """ | ||||||
|     ''' |     num_actors, first_err, err_type, ria_func, da_func = num_actors_and_errs | ||||||
|     ( |  | ||||||
|         num_actors, |  | ||||||
|         first_err, |  | ||||||
|         err_type, |  | ||||||
|         ria_func, |  | ||||||
|         da_func, |  | ||||||
|     ) = num_actors_and_errs |  | ||||||
|     try: |     try: | ||||||
|         async with tractor.open_nursery() as an: |         async with tractor.open_nursery() as n: | ||||||
| 
 | 
 | ||||||
|             # spawn the same number of deamon actors which should be cancelled |             # spawn the same number of deamon actors which should be cancelled | ||||||
|             dactor_portals = [] |             dactor_portals = [] | ||||||
|             for i in range(num_actors): |             for i in range(num_actors): | ||||||
|                 dactor_portals.append(await an.start_actor( |                 dactor_portals.append(await n.start_actor( | ||||||
|                     f'deamon_{i}', |                     f'deamon_{i}', | ||||||
|                     enable_modules=[__name__], |                     enable_modules=[__name__], | ||||||
|                 )) |                 )) | ||||||
|  | @ -322,7 +253,7 @@ async def test_some_cancels_all( | ||||||
|             for i in range(num_actors): |             for i in range(num_actors): | ||||||
|                 # start actor(s) that will fail immediately |                 # start actor(s) that will fail immediately | ||||||
|                 riactor_portals.append( |                 riactor_portals.append( | ||||||
|                     await an.run_in_actor( |                     await n.run_in_actor( | ||||||
|                         func, |                         func, | ||||||
|                         name=f'actor_{i}', |                         name=f'actor_{i}', | ||||||
|                         **kwargs |                         **kwargs | ||||||
|  | @ -338,7 +269,7 @@ async def test_some_cancels_all( | ||||||
|                         await portal.run(func, **kwargs) |                         await portal.run(func, **kwargs) | ||||||
| 
 | 
 | ||||||
|                     except tractor.RemoteActorError as err: |                     except tractor.RemoteActorError as err: | ||||||
|                         assert err.boxed_type == err_type |                         assert err.type == err_type | ||||||
|                         # we only expect this first error to propogate |                         # we only expect this first error to propogate | ||||||
|                         # (all other daemons are cancelled before they |                         # (all other daemons are cancelled before they | ||||||
|                         # can be scheduled) |                         # can be scheduled) | ||||||
|  | @ -352,20 +283,19 @@ async def test_some_cancels_all( | ||||||
| 
 | 
 | ||||||
|         # should error here with a ``RemoteActorError`` or ``MultiError`` |         # should error here with a ``RemoteActorError`` or ``MultiError`` | ||||||
| 
 | 
 | ||||||
|     except first_err as _err: |     except first_err as err: | ||||||
|         err = _err |         if isinstance(err, tractor.MultiError): | ||||||
|         if isinstance(err, BaseExceptionGroup): |  | ||||||
|             assert len(err.exceptions) == num_actors |             assert len(err.exceptions) == num_actors | ||||||
|             for exc in err.exceptions: |             for exc in err.exceptions: | ||||||
|                 if isinstance(exc, tractor.RemoteActorError): |                 if isinstance(exc, tractor.RemoteActorError): | ||||||
|                     assert exc.boxed_type == err_type |                     assert exc.type == err_type | ||||||
|                 else: |                 else: | ||||||
|                     assert isinstance(exc, trio.Cancelled) |                     assert isinstance(exc, trio.Cancelled) | ||||||
|         elif isinstance(err, tractor.RemoteActorError): |         elif isinstance(err, tractor.RemoteActorError): | ||||||
|             assert err.boxed_type == err_type |             assert err.type == err_type | ||||||
| 
 | 
 | ||||||
|         assert an.cancelled is True |         assert n.cancelled is True | ||||||
|         assert not an._children |         assert not n._children | ||||||
|     else: |     else: | ||||||
|         pytest.fail("Should have gotten a remote assertion error?") |         pytest.fail("Should have gotten a remote assertion error?") | ||||||
| 
 | 
 | ||||||
|  | @ -397,12 +327,10 @@ async def spawn_and_error(breadth, depth) -> None: | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_nested_multierrors(loglevel, start_method): | async def test_nested_multierrors(loglevel, start_method): | ||||||
|     ''' |     """Test that failed actor sets are wrapped in `trio.MultiError`s. | ||||||
|     Test that failed actor sets are wrapped in `BaseExceptionGroup`s. This |     This test goes only 2 nurseries deep but we should eventually have tests | ||||||
|     test goes only 2 nurseries deep but we should eventually have tests |  | ||||||
|     for arbitrary n-depth actor trees. |     for arbitrary n-depth actor trees. | ||||||
| 
 |     """ | ||||||
|     ''' |  | ||||||
|     if start_method == 'trio': |     if start_method == 'trio': | ||||||
|         depth = 3 |         depth = 3 | ||||||
|         subactor_breadth = 2 |         subactor_breadth = 2 | ||||||
|  | @ -426,36 +354,24 @@ async def test_nested_multierrors(loglevel, start_method): | ||||||
|                         breadth=subactor_breadth, |                         breadth=subactor_breadth, | ||||||
|                         depth=depth, |                         depth=depth, | ||||||
|                     ) |                     ) | ||||||
|         except BaseExceptionGroup as err: |         except trio.MultiError as err: | ||||||
|             assert len(err.exceptions) == subactor_breadth |             assert len(err.exceptions) == subactor_breadth | ||||||
|             for subexc in err.exceptions: |             for subexc in err.exceptions: | ||||||
| 
 | 
 | ||||||
|                 # verify first level actor errors are wrapped as remote |                 # verify first level actor errors are wrapped as remote | ||||||
|                 if is_win(): |                 if platform.system() == 'Windows': | ||||||
| 
 | 
 | ||||||
|                     # windows is often too slow and cancellation seems |                     # windows is often too slow and cancellation seems | ||||||
|                     # to happen before an actor is spawned |                     # to happen before an actor is spawned | ||||||
|                     if isinstance(subexc, trio.Cancelled): |                     if isinstance(subexc, trio.Cancelled): | ||||||
|                         continue |                         continue | ||||||
| 
 |                     else: | ||||||
|                     elif isinstance(subexc, tractor.RemoteActorError): |  | ||||||
|                         # on windows it seems we can't exactly be sure wtf |                         # on windows it seems we can't exactly be sure wtf | ||||||
|                         # will happen.. |                         # will happen.. | ||||||
|                         assert subexc.boxed_type in ( |                         assert subexc.type in ( | ||||||
|                             tractor.RemoteActorError, |                             tractor.RemoteActorError, | ||||||
|                             trio.Cancelled, |                             trio.Cancelled, | ||||||
|                             BaseExceptionGroup, |                             trio.MultiError | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                     elif isinstance(subexc, BaseExceptionGroup): |  | ||||||
|                         for subsub in subexc.exceptions: |  | ||||||
| 
 |  | ||||||
|                             if subsub in (tractor.RemoteActorError,): |  | ||||||
|                                 subsub = subsub.boxed_type |  | ||||||
| 
 |  | ||||||
|                             assert type(subsub) in ( |  | ||||||
|                                 trio.Cancelled, |  | ||||||
|                                 BaseExceptionGroup, |  | ||||||
|                         ) |                         ) | ||||||
|                 else: |                 else: | ||||||
|                     assert isinstance(subexc, tractor.RemoteActorError) |                     assert isinstance(subexc, tractor.RemoteActorError) | ||||||
|  | @ -464,21 +380,14 @@ async def test_nested_multierrors(loglevel, start_method): | ||||||
|                     # XXX not sure what's up with this.. |                     # XXX not sure what's up with this.. | ||||||
|                     # on windows sometimes spawning is just too slow and |                     # on windows sometimes spawning is just too slow and | ||||||
|                     # we get back the (sent) cancel signal instead |                     # we get back the (sent) cancel signal instead | ||||||
|                     if is_win(): |                     if platform.system() == 'Windows': | ||||||
|                         if isinstance(subexc, tractor.RemoteActorError): |                         assert (subexc.type is trio.MultiError) or ( | ||||||
|                             assert subexc.boxed_type in ( |                             subexc.type is tractor.RemoteActorError) | ||||||
|                                 BaseExceptionGroup, |  | ||||||
|                                 tractor.RemoteActorError |  | ||||||
|                             ) |  | ||||||
|                     else: |                     else: | ||||||
|                             assert isinstance(subexc, BaseExceptionGroup) |                         assert subexc.type is trio.MultiError | ||||||
|                 else: |                 else: | ||||||
|                         assert subexc.boxed_type is ExceptionGroup |                     assert (subexc.type is tractor.RemoteActorError) or ( | ||||||
|                 else: |                         subexc.type is trio.Cancelled) | ||||||
|                     assert subexc.boxed_type in ( |  | ||||||
|                         tractor.RemoteActorError, |  | ||||||
|                         trio.Cancelled |  | ||||||
|                     ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @no_windows | @no_windows | ||||||
|  | @ -496,7 +405,7 @@ def test_cancel_via_SIGINT( | ||||||
|         with trio.fail_after(2): |         with trio.fail_after(2): | ||||||
|             async with tractor.open_nursery() as tn: |             async with tractor.open_nursery() as tn: | ||||||
|                 await tn.start_actor('sucka') |                 await tn.start_actor('sucka') | ||||||
|                 if 'mp' in spawn_backend: |                 if spawn_backend == 'mp': | ||||||
|                     time.sleep(0.1) |                     time.sleep(0.1) | ||||||
|                 os.kill(pid, signal.SIGINT) |                 os.kill(pid, signal.SIGINT) | ||||||
|                 await trio.sleep_forever() |                 await trio.sleep_forever() | ||||||
|  | @ -516,13 +425,8 @@ def test_cancel_via_SIGINT_other_task( | ||||||
|     from a seperate ``trio`` child  task. |     from a seperate ``trio`` child  task. | ||||||
|     """ |     """ | ||||||
|     pid = os.getpid() |     pid = os.getpid() | ||||||
|     timeout: float = 2 |  | ||||||
|     if is_win():  # smh |  | ||||||
|         timeout += 1 |  | ||||||
| 
 | 
 | ||||||
|     async def spawn_and_sleep_forever( |     async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED): | ||||||
|         task_status=trio.TASK_STATUS_IGNORED |  | ||||||
|     ): |  | ||||||
|         async with tractor.open_nursery() as tn: |         async with tractor.open_nursery() as tn: | ||||||
|             for i in range(3): |             for i in range(3): | ||||||
|                 await tn.run_in_actor( |                 await tn.run_in_actor( | ||||||
|  | @ -534,204 +438,51 @@ def test_cancel_via_SIGINT_other_task( | ||||||
| 
 | 
 | ||||||
|     async def main(): |     async def main(): | ||||||
|         # should never timeout since SIGINT should cancel the current program |         # should never timeout since SIGINT should cancel the current program | ||||||
|         with trio.fail_after(timeout): |         with trio.fail_after(2): | ||||||
|             async with ( |             async with trio.open_nursery() as n: | ||||||
| 
 |                 await n.start(spawn_and_sleep_forever) | ||||||
|                 # XXX ?TODO? why no work!? |                 if spawn_backend == 'mp': | ||||||
|                 # tractor.trionics.collapse_eg(), |  | ||||||
|                 trio.open_nursery( |  | ||||||
|                     strict_exception_groups=False, |  | ||||||
|                 ) as tn, |  | ||||||
|             ): |  | ||||||
|                 await tn.start(spawn_and_sleep_forever) |  | ||||||
|                 if 'mp' in spawn_backend: |  | ||||||
|                     time.sleep(0.1) |                     time.sleep(0.1) | ||||||
|                 os.kill(pid, signal.SIGINT) |                 os.kill(pid, signal.SIGINT) | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(KeyboardInterrupt): |     with pytest.raises(KeyboardInterrupt): | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| async def spin_for(period=3): | async def spin_for(period=3): | ||||||
|     "Sync sleep." |     "Sync sleep." | ||||||
|     print(f'sync sleeping in sub-sub for {period}\n') |  | ||||||
|     time.sleep(period) |     time.sleep(period) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def spawn_sub_with_sync_blocking_task(): | async def spawn(): | ||||||
|     async with tractor.open_nursery() as an: |     async with tractor.open_nursery() as tn: | ||||||
|         print('starting sync blocking subactor..\n') |         await tn.run_in_actor( | ||||||
|         await an.run_in_actor( |  | ||||||
|             spin_for, |             spin_for, | ||||||
|             name='sleeper', |             name='sleeper', | ||||||
|         ) |         ) | ||||||
|         print('exiting first subactor layer..\n') |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'man_cancel_outer', |  | ||||||
|     [ |  | ||||||
|         False,  # passes if delay != 2 |  | ||||||
| 
 |  | ||||||
|         # always causes an unexpected eg-w-embedded-assert-err? |  | ||||||
|         pytest.param(True, |  | ||||||
|              marks=pytest.mark.xfail( |  | ||||||
|                  reason=( |  | ||||||
|                     'always causes an unexpected eg-w-embedded-assert-err?' |  | ||||||
|                 ) |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| @no_windows | @no_windows | ||||||
| def test_cancel_while_childs_child_in_sync_sleep( | def test_cancel_while_childs_child_in_sync_sleep( | ||||||
|     loglevel: str, |     loglevel, | ||||||
|     start_method: str, |     start_method, | ||||||
|     spawn_backend: str, |     spawn_backend, | ||||||
|     debug_mode: bool, |  | ||||||
|     reg_addr: tuple, |  | ||||||
|     man_cancel_outer: bool, |  | ||||||
| ): | ): | ||||||
|     ''' |     """Verify that a child cancelled while executing sync code is torn | ||||||
|     Verify that a child cancelled while executing sync code is torn |  | ||||||
|     down even when that cancellation is triggered by the parent |     down even when that cancellation is triggered by the parent | ||||||
|     2 nurseries "up". |     2 nurseries "up". | ||||||
| 
 |     """ | ||||||
|     Though the grandchild should stay blocking its actor runtime, its |  | ||||||
|     parent should issue a "zombie reaper" to hard kill it after |  | ||||||
|     sufficient timeout. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if start_method == 'forkserver': |     if start_method == 'forkserver': | ||||||
|         pytest.skip("Forksever sux hard at resuming from sync sleep...") |         pytest.skip("Forksever sux hard at resuming from sync sleep...") | ||||||
| 
 | 
 | ||||||
|     async def main(): |     async def main(): | ||||||
|         # |         with trio.fail_after(2): | ||||||
|         # XXX BIG TODO NOTE XXX |             async with tractor.open_nursery() as tn: | ||||||
|         # |                 await tn.run_in_actor( | ||||||
|         # it seems there's a strange race that can happen |                     spawn, | ||||||
|         # where where the fail-after will trigger outer scope |                     name='spawn', | ||||||
|         # .cancel() which then causes the inner scope to raise, |  | ||||||
|         # |  | ||||||
|         # BaseExceptionGroup('Exceptions from Trio nursery', [ |  | ||||||
|         #   BaseExceptionGroup('Exceptions from Trio nursery', |  | ||||||
|         #   [ |  | ||||||
|         #       Cancelled(), |  | ||||||
|         #       Cancelled(), |  | ||||||
|         #   ] |  | ||||||
|         #   ), |  | ||||||
|         #   AssertionError('assert 0') |  | ||||||
|         # ]) |  | ||||||
|         # |  | ||||||
|         # WHY THIS DOESN'T MAKE SENSE: |  | ||||||
|         # --------------------------- |  | ||||||
|         # - it should raise too-slow-error when too slow.. |  | ||||||
|         #  * verified that using simple-cs and manually cancelling |  | ||||||
|         #    you get same outcome -> indicates that the fail-after |  | ||||||
|         #    can have its TooSlowError overriden! |  | ||||||
|         #  |_ to check this it's easy, simplly decrease the timeout |  | ||||||
|         #     as per the var below. |  | ||||||
|         # |  | ||||||
|         # - when using the manual simple-cs the outcome is different |  | ||||||
|         #   DESPITE the `assert 0` which means regardless of the |  | ||||||
|         #   inner scope effectively failing in the same way, the |  | ||||||
|         #   bubbling up **is NOT the same**. |  | ||||||
|         # |  | ||||||
|         # delays trigger diff outcomes.. |  | ||||||
|         # --------------------------- |  | ||||||
|         # as seen by uncommenting various lines below there is from |  | ||||||
|         # my POV an unexpected outcome due to the delay=2 case. |  | ||||||
|         # |  | ||||||
|         # delay = 1  # no AssertionError in eg, TooSlowError raised. |  | ||||||
|         # delay = 2  # is AssertionError in eg AND no TooSlowError !? |  | ||||||
|         delay = 4  # is AssertionError in eg AND no _cs cancellation. |  | ||||||
| 
 |  | ||||||
|         with trio.fail_after(delay) as _cs: |  | ||||||
|         # with trio.CancelScope() as cs: |  | ||||||
|         # ^XXX^ can be used instead to see same outcome. |  | ||||||
| 
 |  | ||||||
|             async with ( |  | ||||||
|                 # tractor.trionics.collapse_eg(),  # doesn't help |  | ||||||
|                 tractor.open_nursery( |  | ||||||
|                     hide_tb=False, |  | ||||||
|                     debug_mode=debug_mode, |  | ||||||
|                     registry_addrs=[reg_addr], |  | ||||||
|                 ) as an, |  | ||||||
|             ): |  | ||||||
|                 await an.run_in_actor( |  | ||||||
|                     spawn_sub_with_sync_blocking_task, |  | ||||||
|                     name='sync_blocking_sub', |  | ||||||
|                 ) |                 ) | ||||||
|                 await trio.sleep(1) |                 await trio.sleep(1) | ||||||
| 
 |  | ||||||
|                 if man_cancel_outer: |  | ||||||
|                     print('Cancelling manually in root') |  | ||||||
|                     _cs.cancel() |  | ||||||
| 
 |  | ||||||
|                 # trigger exc-srced taskc down |  | ||||||
|                 # the actor tree. |  | ||||||
|                 print('RAISING IN ROOT') |  | ||||||
|                 assert 0 |                 assert 0 | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(AssertionError): |     with pytest.raises(AssertionError): | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon( |  | ||||||
|     start_method, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     This is a very subtle test which demonstrates how cancellation |  | ||||||
|     during process collection can result in non-optimal teardown |  | ||||||
|     performance on daemon actors. The fix for this test was to handle |  | ||||||
|     ``trio.Cancelled`` specially in the spawn task waiting in |  | ||||||
|     `proc.wait()` such that ``Portal.cancel_actor()`` is called before |  | ||||||
|     executing the "hard reap" sequence (which has an up to 3 second |  | ||||||
|     delay currently). |  | ||||||
| 
 |  | ||||||
|     In other words, if we can cancel the actor using a graceful remote |  | ||||||
|     cancellation, and it's faster, we might as well do it. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     kbi_delay = 0.5 |  | ||||||
|     timeout: float = 2.9 |  | ||||||
| 
 |  | ||||||
|     if is_win():  # smh |  | ||||||
|         timeout += 1 |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         start = time.time() |  | ||||||
|         try: |  | ||||||
|             async with trio.open_nursery() as nurse: |  | ||||||
|                 async with tractor.open_nursery() as tn: |  | ||||||
|                     p = await tn.start_actor( |  | ||||||
|                         'fast_boi', |  | ||||||
|                         enable_modules=[__name__], |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|                     async def delayed_kbi(): |  | ||||||
|                         await trio.sleep(kbi_delay) |  | ||||||
|                         print(f'RAISING KBI after {kbi_delay} s') |  | ||||||
|                         raise KeyboardInterrupt |  | ||||||
| 
 |  | ||||||
|                     # start task which raises a kbi **after** |  | ||||||
|                     # the actor nursery ``__aexit__()`` has |  | ||||||
|                     # been run. |  | ||||||
|                     nurse.start_soon(delayed_kbi) |  | ||||||
| 
 |  | ||||||
|                     await p.run(do_nuthin) |  | ||||||
| 
 |  | ||||||
|         # need to explicitly re-raise the lone kbi..now |  | ||||||
|         except* KeyboardInterrupt as kbi_eg: |  | ||||||
|             assert (len(excs := kbi_eg.exceptions) == 1) |  | ||||||
|             raise excs[0] |  | ||||||
| 
 |  | ||||||
|         finally: |  | ||||||
|             duration = time.time() - start |  | ||||||
|             if duration > timeout: |  | ||||||
|                 raise trio.TooSlowError( |  | ||||||
|                     'daemon cancel was slower then necessary..' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(KeyboardInterrupt): |  | ||||||
|         trio.run(main) |  | ||||||
|  |  | ||||||
|  | @ -1,177 +0,0 @@ | ||||||
| ''' |  | ||||||
| Test a service style daemon that maintains a nursery for spawning |  | ||||||
| "remote async tasks" including both spawning other long living |  | ||||||
| sub-sub-actor daemons. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from typing import Optional |  | ||||||
| import asyncio |  | ||||||
| from contextlib import ( |  | ||||||
|     asynccontextmanager as acm, |  | ||||||
|     aclosing, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import RemoteActorError |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def aio_streamer( |  | ||||||
|     from_trio: asyncio.Queue, |  | ||||||
|     to_trio: trio.abc.SendChannel, |  | ||||||
| ) -> trio.abc.ReceiveChannel: |  | ||||||
| 
 |  | ||||||
|     # required first msg to sync caller |  | ||||||
|     to_trio.send_nowait(None) |  | ||||||
| 
 |  | ||||||
|     from itertools import cycle |  | ||||||
|     for i in cycle(range(10)): |  | ||||||
|         to_trio.send_nowait(i) |  | ||||||
|         await asyncio.sleep(0.01) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def trio_streamer(): |  | ||||||
|     from itertools import cycle |  | ||||||
|     for i in cycle(range(10)): |  | ||||||
|         yield i |  | ||||||
|         await trio.sleep(0.01) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def trio_sleep_and_err(delay: float = 0.5): |  | ||||||
|     await trio.sleep(delay) |  | ||||||
|     # name error |  | ||||||
|     doggy()  # noqa |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _cached_stream: Optional[ |  | ||||||
|     trio.abc.ReceiveChannel |  | ||||||
| ] = None |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def wrapper_mngr( |  | ||||||
| ): |  | ||||||
|     from tractor.trionics import broadcast_receiver |  | ||||||
|     global _cached_stream |  | ||||||
|     in_aio = tractor.current_actor().is_infected_aio() |  | ||||||
| 
 |  | ||||||
|     if in_aio: |  | ||||||
|         if _cached_stream: |  | ||||||
| 
 |  | ||||||
|             from_aio = _cached_stream |  | ||||||
| 
 |  | ||||||
|             # if we already have a cached feed deliver a rx side clone |  | ||||||
|             # to consumer |  | ||||||
|             async with broadcast_receiver(from_aio, 6) as from_aio: |  | ||||||
|                 yield from_aio |  | ||||||
|                 return |  | ||||||
|         else: |  | ||||||
|             async with tractor.to_asyncio.open_channel_from( |  | ||||||
|                 aio_streamer, |  | ||||||
|             ) as (first, from_aio): |  | ||||||
|                 assert not first |  | ||||||
| 
 |  | ||||||
|                 # cache it so next task uses broadcast receiver |  | ||||||
|                 _cached_stream = from_aio |  | ||||||
| 
 |  | ||||||
|                 yield from_aio |  | ||||||
|     else: |  | ||||||
|         async with aclosing(trio_streamer()) as stream: |  | ||||||
|             # cache it so next task uses broadcast receiver |  | ||||||
|             _cached_stream = stream |  | ||||||
|             yield stream |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _nursery: trio.Nursery = None |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def trio_main( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     # sync |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     # stash a "service nursery" as "actor local" (aka a Python global) |  | ||||||
|     global _nursery |  | ||||||
|     tn = _nursery |  | ||||||
|     assert tn |  | ||||||
| 
 |  | ||||||
|     async def consume_stream(): |  | ||||||
|         async with wrapper_mngr() as stream: |  | ||||||
|             async for msg in stream: |  | ||||||
|                 print(msg) |  | ||||||
| 
 |  | ||||||
|     # run 2 tasks to ensure broadcaster chan use |  | ||||||
|     tn.start_soon(consume_stream) |  | ||||||
|     tn.start_soon(consume_stream) |  | ||||||
| 
 |  | ||||||
|     tn.start_soon(trio_sleep_and_err) |  | ||||||
| 
 |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def open_actor_local_nursery( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| ): |  | ||||||
|     global _nursery |  | ||||||
|     async with ( |  | ||||||
|         tractor.trionics.collapse_eg(), |  | ||||||
|         trio.open_nursery() as tn |  | ||||||
|     ): |  | ||||||
|         _nursery = tn |  | ||||||
|         await ctx.started() |  | ||||||
|         await trio.sleep(10) |  | ||||||
|         # await trio.sleep(1) |  | ||||||
| 
 |  | ||||||
|         # XXX: this causes the hang since |  | ||||||
|         # the caller does not unblock from its own |  | ||||||
|         # ``trio.sleep_forever()``. |  | ||||||
| 
 |  | ||||||
|         # TODO: we need to test a simple ctx task starting remote tasks |  | ||||||
|         # that error and then blocking on a ``Nursery.start()`` which |  | ||||||
|         # never yields back.. aka a scenario where the |  | ||||||
|         # ``tractor.context`` task IS NOT in the service n's cancel |  | ||||||
|         # scope. |  | ||||||
|         tn.cancel_scope.cancel() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'asyncio_mode', |  | ||||||
|     [True, False], |  | ||||||
|     ids='asyncio_mode={}'.format, |  | ||||||
| ) |  | ||||||
| def test_actor_managed_trio_nursery_task_error_cancels_aio( |  | ||||||
|     asyncio_mode: bool, |  | ||||||
|     reg_addr: tuple, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that a ``trio`` nursery created managed in a child actor |  | ||||||
|     correctly relays errors to the parent actor when one of its spawned |  | ||||||
|     tasks errors even when running in infected asyncio mode and using |  | ||||||
|     broadcast receivers for multi-task-per-actor subscription. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async def main(): |  | ||||||
| 
 |  | ||||||
|         # cancel the nursery shortly after boot |  | ||||||
|         async with tractor.open_nursery() as n: |  | ||||||
|             p = await n.start_actor( |  | ||||||
|                 'nursery_mngr', |  | ||||||
|                 infect_asyncio=asyncio_mode,  # TODO, is this enabling debug mode? |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             async with ( |  | ||||||
|                 p.open_context(open_actor_local_nursery) as (ctx, first), |  | ||||||
|                 p.open_context(trio_main) as (ctx, first), |  | ||||||
|             ): |  | ||||||
|                 await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(RemoteActorError) as excinfo: |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
|     # verify boxed error |  | ||||||
|     err = excinfo.value |  | ||||||
|     assert err.boxed_type is NameError |  | ||||||
|  | @ -1,80 +0,0 @@ | ||||||
| import itertools |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import open_actor_cluster |  | ||||||
| from tractor.trionics import gather_contexts |  | ||||||
| from tractor._testing import tractor_test |  | ||||||
| 
 |  | ||||||
| MESSAGE = 'tractoring at full speed' |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_empty_mngrs_input_raises() -> None: |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         with trio.fail_after(3): |  | ||||||
|             async with ( |  | ||||||
|                 open_actor_cluster( |  | ||||||
|                     modules=[__name__], |  | ||||||
| 
 |  | ||||||
|                     # NOTE: ensure we can passthrough runtime opts |  | ||||||
|                     loglevel='cancel', |  | ||||||
|                     debug_mode=False, |  | ||||||
| 
 |  | ||||||
|                 ) as portals, |  | ||||||
| 
 |  | ||||||
|                 gather_contexts(mngrs=()), |  | ||||||
|             ): |  | ||||||
|                 # should fail before this? |  | ||||||
|                 assert portals |  | ||||||
| 
 |  | ||||||
|                 # test should fail if we mk it here! |  | ||||||
|                 assert 0, 'Should have raised val-err !?' |  | ||||||
| 
 |  | ||||||
|     with pytest.raises(ValueError): |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def worker( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     async with ctx.open_stream( |  | ||||||
|         allow_overruns=True, |  | ||||||
|     ) as stream: |  | ||||||
| 
 |  | ||||||
|         # TODO: this with the below assert causes a hang bug? |  | ||||||
|         # with trio.move_on_after(1): |  | ||||||
| 
 |  | ||||||
|         async for msg in stream: |  | ||||||
|             # do something with msg |  | ||||||
|             print(msg) |  | ||||||
|             assert msg == MESSAGE |  | ||||||
| 
 |  | ||||||
|         # TODO: does this ever cause a hang |  | ||||||
|         # assert 0 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor_test |  | ||||||
| async def test_streaming_to_actor_cluster() -> None: |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         open_actor_cluster(modules=[__name__]) as portals, |  | ||||||
| 
 |  | ||||||
|         gather_contexts( |  | ||||||
|             mngrs=[p.open_context(worker) for p in portals.values()], |  | ||||||
|         ) as contexts, |  | ||||||
| 
 |  | ||||||
|         gather_contexts( |  | ||||||
|             mngrs=[ctx[0].open_stream() for ctx in contexts], |  | ||||||
|         ) as streams, |  | ||||||
| 
 |  | ||||||
|     ): |  | ||||||
|         with trio.move_on_after(1): |  | ||||||
|             for stream in itertools.cycle(streams): |  | ||||||
|                 await stream.send(MESSAGE) |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -0,0 +1,523 @@ | ||||||
|  | """ | ||||||
|  | That native debug better work! | ||||||
|  | 
 | ||||||
|  | All these tests can be understood (somewhat) by running the equivalent | ||||||
|  | `examples/debugging/` scripts manually. | ||||||
|  | 
 | ||||||
|  | TODO: None of these tests have been run successfully on windows yet. | ||||||
|  | """ | ||||||
|  | import time | ||||||
|  | from os import path | ||||||
|  | import platform | ||||||
|  | 
 | ||||||
|  | import pytest | ||||||
|  | import pexpect | ||||||
|  | 
 | ||||||
|  | from conftest import repodir | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO: The next great debugger audit could be done by you! | ||||||
|  | # - recurrent entry to breakpoint() from single actor *after* and an | ||||||
|  | #   error in another task? | ||||||
|  | # - root error before child errors | ||||||
|  | # - root error after child errors | ||||||
|  | # - root error before child breakpoint | ||||||
|  | # - root error after child breakpoint | ||||||
|  | # - recurrent root errors | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | if platform.system() == 'Windows': | ||||||
|  |     pytest.skip( | ||||||
|  |         'Debugger tests have no windows support (yet)', | ||||||
|  |         allow_module_level=True, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def examples_dir(): | ||||||
|  |     """Return the abspath to the examples directory. | ||||||
|  |     """ | ||||||
|  |     return path.join(repodir(), 'examples', 'debugging/') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def mk_cmd(ex_name: str) -> str: | ||||||
|  |     """Generate a command suitable to pass to ``pexpect.spawn()``. | ||||||
|  |     """ | ||||||
|  |     return ' '.join( | ||||||
|  |         ['python', | ||||||
|  |          path.join(examples_dir(), f'{ex_name}.py')] | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.fixture | ||||||
|  | def spawn( | ||||||
|  |     start_method, | ||||||
|  |     testdir, | ||||||
|  |     arb_addr, | ||||||
|  | ) -> 'pexpect.spawn': | ||||||
|  | 
 | ||||||
|  |     if start_method != 'trio': | ||||||
|  |         pytest.skip( | ||||||
|  |             "Debugger tests are only supported on the trio backend" | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def _spawn(cmd): | ||||||
|  |         return testdir.spawn( | ||||||
|  |             cmd=mk_cmd(cmd), | ||||||
|  |             expect_timeout=3, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     return _spawn | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'user_in_out', | ||||||
|  |     [ | ||||||
|  |         ('c', 'AssertionError'), | ||||||
|  |         ('q', 'AssertionError'), | ||||||
|  |     ], | ||||||
|  |     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||||
|  | ) | ||||||
|  | def test_root_actor_error(spawn, user_in_out): | ||||||
|  |     """Demonstrate crash handler entering pdbpp from basic error in root actor. | ||||||
|  |     """ | ||||||
|  |     user_input, expect_err_str = user_in_out | ||||||
|  | 
 | ||||||
|  |     child = spawn('root_actor_error') | ||||||
|  | 
 | ||||||
|  |     # scan for the pdbpp prompt | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  | 
 | ||||||
|  |     # make sure expected logging and error arrives | ||||||
|  |     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||||
|  |     assert 'AssertionError' in before | ||||||
|  | 
 | ||||||
|  |     # send user command | ||||||
|  |     child.sendline(user_input) | ||||||
|  | 
 | ||||||
|  |     # process should exit | ||||||
|  |     child.expect(pexpect.EOF) | ||||||
|  |     assert expect_err_str in str(child.before) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'user_in_out', | ||||||
|  |     [ | ||||||
|  |         ('c', None), | ||||||
|  |         ('q', 'bdb.BdbQuit'), | ||||||
|  |     ], | ||||||
|  |     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||||
|  | ) | ||||||
|  | def test_root_actor_bp(spawn, user_in_out): | ||||||
|  |     """Demonstrate breakpoint from in root actor. | ||||||
|  |     """ | ||||||
|  |     user_input, expect_err_str = user_in_out | ||||||
|  |     child = spawn('root_actor_breakpoint') | ||||||
|  | 
 | ||||||
|  |     # scan for the pdbpp prompt | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     assert 'Error' not in str(child.before) | ||||||
|  | 
 | ||||||
|  |     # send user command | ||||||
|  |     child.sendline(user_input) | ||||||
|  |     child.expect('\r\n') | ||||||
|  | 
 | ||||||
|  |     # process should exit | ||||||
|  |     child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  |     if expect_err_str is None: | ||||||
|  |         assert 'Error' not in str(child.before) | ||||||
|  |     else: | ||||||
|  |         assert expect_err_str in str(child.before) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_root_actor_bp_forever(spawn): | ||||||
|  |     "Re-enter a breakpoint from the root actor-task." | ||||||
|  |     child = spawn('root_actor_breakpoint_forever') | ||||||
|  | 
 | ||||||
|  |     # do some "next" commands to demonstrate recurrent breakpoint | ||||||
|  |     # entries | ||||||
|  |     for _ in range(10): | ||||||
|  |         child.sendline('next') | ||||||
|  |         child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # do one continue which should trigger a new task to lock the tty | ||||||
|  |     child.sendline('continue') | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # XXX: this previously caused a bug! | ||||||
|  |     child.sendline('n') | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     child.sendline('n') | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_subactor_error(spawn): | ||||||
|  |     "Single subactor raising an error" | ||||||
|  | 
 | ||||||
|  |     child = spawn('subactor_error') | ||||||
|  | 
 | ||||||
|  |     # scan for the pdbpp prompt | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "Attaching to pdb in crashed actor: ('name_error'" in before | ||||||
|  | 
 | ||||||
|  |     # send user command | ||||||
|  |     # (in this case it's the same for 'continue' vs. 'quit') | ||||||
|  |     child.sendline('continue') | ||||||
|  | 
 | ||||||
|  |     # the debugger should enter a second time in the nursery | ||||||
|  |     # creating actor | ||||||
|  | 
 | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  | 
 | ||||||
|  |     # root actor gets debugger engaged | ||||||
|  |     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||||
|  | 
 | ||||||
|  |     # error is a remote error propagated from the subactor | ||||||
|  |     assert "RemoteActorError: ('name_error'" in before | ||||||
|  | 
 | ||||||
|  |     child.sendline('c') | ||||||
|  |     child.expect('\r\n') | ||||||
|  | 
 | ||||||
|  |     # process should exit | ||||||
|  |     child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_subactor_breakpoint(spawn): | ||||||
|  |     "Single subactor with an infinite breakpoint loop" | ||||||
|  | 
 | ||||||
|  |     child = spawn('subactor_breakpoint') | ||||||
|  | 
 | ||||||
|  |     # scan for the pdbpp prompt | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||||
|  | 
 | ||||||
|  |     # do some "next" commands to demonstrate recurrent breakpoint | ||||||
|  |     # entries | ||||||
|  |     for _ in range(10): | ||||||
|  |         child.sendline('next') | ||||||
|  |         child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # now run some "continues" to show re-entries | ||||||
|  |     for _ in range(5): | ||||||
|  |         child.sendline('continue') | ||||||
|  |         child.expect(r"\(Pdb\+\+\)") | ||||||
|  |         before = str(child.before.decode()) | ||||||
|  |         assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||||
|  | 
 | ||||||
|  |     # finally quit the loop | ||||||
|  |     child.sendline('q') | ||||||
|  | 
 | ||||||
|  |     # child process should exit but parent will capture pdb.BdbQuit | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||||
|  |     assert 'bdb.BdbQuit' in before | ||||||
|  | 
 | ||||||
|  |     # quit the parent | ||||||
|  |     child.sendline('c') | ||||||
|  | 
 | ||||||
|  |     # process should exit | ||||||
|  |     child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||||
|  |     assert 'bdb.BdbQuit' in before | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_multi_subactors(spawn): | ||||||
|  |     """Multiple subactors, both erroring and breakpointing as well as | ||||||
|  |     a nested subactor erroring. | ||||||
|  |     """ | ||||||
|  |     child = spawn(r'multi_subactors') | ||||||
|  | 
 | ||||||
|  |     # scan for the pdbpp prompt | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||||
|  | 
 | ||||||
|  |     # do some "next" commands to demonstrate recurrent breakpoint | ||||||
|  |     # entries | ||||||
|  |     for _ in range(10): | ||||||
|  |         child.sendline('next') | ||||||
|  |         child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # continue to next error | ||||||
|  |     child.sendline('c') | ||||||
|  | 
 | ||||||
|  |     # first name_error failure | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "NameError" in before | ||||||
|  | 
 | ||||||
|  |     # continue again | ||||||
|  |     child.sendline('c') | ||||||
|  | 
 | ||||||
|  |     # 2nd name_error failure | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "NameError" in before | ||||||
|  | 
 | ||||||
|  |     # breakpoint loop should re-engage | ||||||
|  |     child.sendline('c') | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||||
|  | 
 | ||||||
|  |     # now run some "continues" to show re-entries | ||||||
|  |     for _ in range(5): | ||||||
|  |         child.sendline('c') | ||||||
|  |         child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # quit the loop and expect parent to attach | ||||||
|  |     child.sendline('q') | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||||
|  |     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||||
|  |     assert 'bdb.BdbQuit' in before | ||||||
|  | 
 | ||||||
|  |     # process should exit | ||||||
|  |     child.sendline('c') | ||||||
|  |     child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||||
|  |     assert 'bdb.BdbQuit' in before | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_multi_daemon_subactors(spawn, loglevel): | ||||||
|  |     """Multiple daemon subactors, both erroring and breakpointing within a | ||||||
|  |     stream. | ||||||
|  |     """ | ||||||
|  |     child = spawn('multi_daemon_subactors') | ||||||
|  | 
 | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # there is a race for which subactor will acquire | ||||||
|  |     # the root's tty lock first | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  | 
 | ||||||
|  |     bp_forever_msg = "Attaching pdb to actor: ('bp_forever'" | ||||||
|  |     name_error_msg = "NameError" | ||||||
|  | 
 | ||||||
|  |     if bp_forever_msg in before: | ||||||
|  |         next_msg = name_error_msg | ||||||
|  | 
 | ||||||
|  |     elif name_error_msg in before: | ||||||
|  |         next_msg = bp_forever_msg | ||||||
|  | 
 | ||||||
|  |     else: | ||||||
|  |         raise ValueError("Neither log msg was found !?") | ||||||
|  | 
 | ||||||
|  |     # NOTE: previously since we did not have clobber prevention | ||||||
|  |     # in the root actor this final resume could result in the debugger | ||||||
|  |     # tearing down since both child actors would be cancelled and it was | ||||||
|  |     # unlikely that `bp_forever` would re-acquire the tty lock again. | ||||||
|  |     # Now, we should have a final resumption in the root plus a possible | ||||||
|  |     # second entry by `bp_forever`. | ||||||
|  | 
 | ||||||
|  |     child.sendline('c') | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  | 
 | ||||||
|  |     assert next_msg in before | ||||||
|  | 
 | ||||||
|  |     # XXX: hooray the root clobbering the child here was fixed! | ||||||
|  |     # IMO, this demonstrates the true power of SC system design. | ||||||
|  | 
 | ||||||
|  |     # now the root actor won't clobber the bp_forever child | ||||||
|  |     # during it's first access to the debug lock, but will instead | ||||||
|  |     # wait for the lock to release, by the edge triggered | ||||||
|  |     # ``_debug._no_remote_has_tty`` event before sending cancel messages | ||||||
|  |     # (via portals) to its underlings B) | ||||||
|  | 
 | ||||||
|  |     # at some point here there should have been some warning msg from | ||||||
|  |     # the root announcing it avoided a clobber of the child's lock, but | ||||||
|  |     # it seems unreliable in testing here to gnab it: | ||||||
|  |     # assert "in use by child ('bp_forever'," in before | ||||||
|  | 
 | ||||||
|  |     # wait for final error in root | ||||||
|  |     while True: | ||||||
|  | 
 | ||||||
|  |         child.sendline('c') | ||||||
|  |         child.expect(r"\(Pdb\+\+\)") | ||||||
|  |         before = str(child.before.decode()) | ||||||
|  |         try: | ||||||
|  | 
 | ||||||
|  |             # root error should be packed as remote error | ||||||
|  |             assert "_exceptions.RemoteActorError: ('name_error'" in before | ||||||
|  |             break | ||||||
|  | 
 | ||||||
|  |         except AssertionError: | ||||||
|  |             assert bp_forever_msg in before | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         child.sendline('c') | ||||||
|  |         child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  |     except pexpect.exceptions.TIMEOUT: | ||||||
|  |         # Failed to exit using continue..? | ||||||
|  |         child.sendline('q') | ||||||
|  |         child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_multi_subactors_root_errors(spawn): | ||||||
|  |     """Multiple subactors, both erroring and breakpointing as well as | ||||||
|  |     a nested subactor erroring. | ||||||
|  |     """ | ||||||
|  |     child = spawn('multi_subactor_root_errors') | ||||||
|  | 
 | ||||||
|  |     # scan for the pdbpp prompt | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # at most one subactor should attach before the root is cancelled | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "NameError: name 'doggypants' is not defined" in before | ||||||
|  | 
 | ||||||
|  |     # continue again | ||||||
|  |     child.sendline('c') | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     # should now get attached in root with assert error | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  | 
 | ||||||
|  |     # should have come just after priot prompt | ||||||
|  |     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||||
|  |     assert "AssertionError" in before | ||||||
|  | 
 | ||||||
|  |     # warnings assert we probably don't need | ||||||
|  |     # assert "Cancelling nursery in ('spawn_error'," in before | ||||||
|  | 
 | ||||||
|  |     # continue again | ||||||
|  |     child.sendline('c') | ||||||
|  |     child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "AssertionError" in before | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_multi_nested_subactors_error_through_nurseries(spawn): | ||||||
|  |     """Verify deeply nested actors that error trigger debugger entries | ||||||
|  |     at each actor nurserly (level) all the way up the tree. | ||||||
|  | 
 | ||||||
|  |     """ | ||||||
|  |     # NOTE: previously, inside this script was a bug where if the | ||||||
|  |     # parent errors before a 2-levels-lower actor has released the lock, | ||||||
|  |     # the parent tries to cancel it but it's stuck in the debugger? | ||||||
|  |     # A test (below) has now been added to explicitly verify this is | ||||||
|  |     # fixed. | ||||||
|  | 
 | ||||||
|  |     child = spawn('multi_nested_subactors_error_up_through_nurseries') | ||||||
|  | 
 | ||||||
|  |     timed_out_early: bool = False | ||||||
|  | 
 | ||||||
|  |     for i in range(12): | ||||||
|  |         try: | ||||||
|  |             child.expect(r"\(Pdb\+\+\)") | ||||||
|  |             child.sendline('c') | ||||||
|  |             time.sleep(0.1) | ||||||
|  | 
 | ||||||
|  |         except pexpect.exceptions.EOF: | ||||||
|  | 
 | ||||||
|  |             # race conditions on how fast the continue is sent? | ||||||
|  |             print(f"Failed early on {i}?") | ||||||
|  |             timed_out_early = True | ||||||
|  |             break | ||||||
|  |     else: | ||||||
|  |         child.expect(pexpect.EOF) | ||||||
|  | 
 | ||||||
|  |     if not timed_out_early: | ||||||
|  |         before = str(child.before.decode()) | ||||||
|  |         assert "NameError" in before | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_root_nursery_cancels_before_child_releases_tty_lock( | ||||||
|  |     spawn, | ||||||
|  |     start_method | ||||||
|  | ): | ||||||
|  |     """Test that when the root sends a cancel message before a nested | ||||||
|  |     child has unblocked (which can happen when it has the tty lock and | ||||||
|  |     is engaged in pdb) it is indeed cancelled after exiting the debugger. | ||||||
|  |     """ | ||||||
|  |     timed_out_early = False | ||||||
|  | 
 | ||||||
|  |     child = spawn('root_cancelled_but_child_is_in_tty_lock') | ||||||
|  | 
 | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "NameError: name 'doggypants' is not defined" in before | ||||||
|  |     assert "tractor._exceptions.RemoteActorError: ('name_error'" not in before | ||||||
|  |     time.sleep(0.5) | ||||||
|  | 
 | ||||||
|  |     child.sendline('c') | ||||||
|  | 
 | ||||||
|  |     for i in range(4): | ||||||
|  |         time.sleep(0.5) | ||||||
|  |         try: | ||||||
|  |             child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |         except ( | ||||||
|  |             pexpect.exceptions.EOF, | ||||||
|  |             pexpect.exceptions.TIMEOUT, | ||||||
|  |         ): | ||||||
|  |             # races all over.. | ||||||
|  | 
 | ||||||
|  |             print(f"Failed early on {i}?") | ||||||
|  |             before = str(child.before.decode()) | ||||||
|  | 
 | ||||||
|  |             timed_out_early = True | ||||||
|  | 
 | ||||||
|  |             # race conditions on how fast the continue is sent? | ||||||
|  |             break | ||||||
|  | 
 | ||||||
|  |         before = str(child.before.decode()) | ||||||
|  |         assert "NameError: name 'doggypants' is not defined" in before | ||||||
|  | 
 | ||||||
|  |         child.sendline('c') | ||||||
|  | 
 | ||||||
|  |     while True: | ||||||
|  |         try: | ||||||
|  |             child.expect(pexpect.EOF) | ||||||
|  |             break | ||||||
|  |         except pexpect.exceptions.TIMEOUT: | ||||||
|  |             child.sendline('c') | ||||||
|  |             print('child was able to grab tty lock again?') | ||||||
|  | 
 | ||||||
|  |     if not timed_out_early: | ||||||
|  | 
 | ||||||
|  |         before = str(child.before.decode()) | ||||||
|  |         assert "tractor._exceptions.RemoteActorError: ('spawner0'" in before | ||||||
|  |         assert "tractor._exceptions.RemoteActorError: ('name_error'" in before | ||||||
|  |         assert "NameError: name 'doggypants' is not defined" in before | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_root_cancels_child_context_during_startup( | ||||||
|  |     spawn, | ||||||
|  | ): | ||||||
|  |     '''Verify a fast fail in the root doesn't lock up the child reaping | ||||||
|  |     and all while using the new context api. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     child = spawn('fast_error_in_root_after_spawn') | ||||||
|  | 
 | ||||||
|  |     child.expect(r"\(Pdb\+\+\)") | ||||||
|  | 
 | ||||||
|  |     before = str(child.before.decode()) | ||||||
|  |     assert "AssertionError" in before | ||||||
|  | 
 | ||||||
|  |     child.sendline('c') | ||||||
|  |     child.expect(pexpect.EOF) | ||||||
|  | @ -7,29 +7,27 @@ import platform | ||||||
| from functools import partial | from functools import partial | ||||||
| import itertools | import itertools | ||||||
| 
 | 
 | ||||||
| import psutil |  | ||||||
| import pytest | import pytest | ||||||
| import subprocess |  | ||||||
| import tractor | import tractor | ||||||
| from tractor.trionics import collapse_eg |  | ||||||
| from tractor._testing import tractor_test |  | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
|  | from conftest import tractor_test | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_reg_then_unreg(reg_addr): | async def test_reg_then_unreg(arb_addr): | ||||||
|     actor = tractor.current_actor() |     actor = tractor.current_actor() | ||||||
|     assert actor.is_arbiter |     assert actor.is_arbiter | ||||||
|     assert len(actor._registry) == 1  # only self is registered |     assert len(actor._registry) == 1  # only self is registered | ||||||
| 
 | 
 | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         registry_addrs=[reg_addr], |         arbiter_addr=arb_addr, | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         portal = await n.start_actor('actor', enable_modules=[__name__]) |         portal = await n.start_actor('actor', enable_modules=[__name__]) | ||||||
|         uid = portal.channel.uid |         uid = portal.channel.uid | ||||||
| 
 | 
 | ||||||
|         async with tractor.get_registry(reg_addr) as aportal: |         async with tractor.get_arbiter(*arb_addr) as aportal: | ||||||
|             # this local actor should be the arbiter |             # this local actor should be the arbiter | ||||||
|             assert actor is aportal.actor |             assert actor is aportal.actor | ||||||
| 
 | 
 | ||||||
|  | @ -55,27 +53,15 @@ async def hi(): | ||||||
|     return the_line.format(tractor.current_actor().name) |     return the_line.format(tractor.current_actor().name) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def say_hello( | async def say_hello(other_actor): | ||||||
|     other_actor: str, |  | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
| ): |  | ||||||
|     await trio.sleep(1)  # wait for other actor to spawn |     await trio.sleep(1)  # wait for other actor to spawn | ||||||
|     async with tractor.find_actor( |     async with tractor.find_actor(other_actor) as portal: | ||||||
|         other_actor, |  | ||||||
|         registry_addrs=[reg_addr], |  | ||||||
|     ) as portal: |  | ||||||
|         assert portal is not None |         assert portal is not None | ||||||
|         return await portal.run(__name__, 'hi') |         return await portal.run(__name__, 'hi') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def say_hello_use_wait( | async def say_hello_use_wait(other_actor): | ||||||
|     other_actor: str, |     async with tractor.wait_for_actor(other_actor) as portal: | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
| ): |  | ||||||
|     async with tractor.wait_for_actor( |  | ||||||
|         other_actor, |  | ||||||
|         registry_addr=reg_addr, |  | ||||||
|     ) as portal: |  | ||||||
|         assert portal is not None |         assert portal is not None | ||||||
|         result = await portal.run(__name__, 'hi') |         result = await portal.run(__name__, 'hi') | ||||||
|         return result |         return result | ||||||
|  | @ -83,29 +69,21 @@ async def say_hello_use_wait( | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| @pytest.mark.parametrize('func', [say_hello, say_hello_use_wait]) | @pytest.mark.parametrize('func', [say_hello, say_hello_use_wait]) | ||||||
| async def test_trynamic_trio( | async def test_trynamic_trio(func, start_method, arb_addr): | ||||||
|     func, |     """Main tractor entry point, the "master" process (for now | ||||||
|     start_method, |     acts as the "director"). | ||||||
|     reg_addr, |     """ | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Root actor acting as the "director" and running one-shot-task-actors |  | ||||||
|     for the directed subs. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async with tractor.open_nursery() as n: |     async with tractor.open_nursery() as n: | ||||||
|         print("Alright... Action!") |         print("Alright... Action!") | ||||||
| 
 | 
 | ||||||
|         donny = await n.run_in_actor( |         donny = await n.run_in_actor( | ||||||
|             func, |             func, | ||||||
|             other_actor='gretchen', |             other_actor='gretchen', | ||||||
|             reg_addr=reg_addr, |  | ||||||
|             name='donny', |             name='donny', | ||||||
|         ) |         ) | ||||||
|         gretchen = await n.run_in_actor( |         gretchen = await n.run_in_actor( | ||||||
|             func, |             func, | ||||||
|             other_actor='donny', |             other_actor='donny', | ||||||
|             reg_addr=reg_addr, |  | ||||||
|             name='gretchen', |             name='gretchen', | ||||||
|         ) |         ) | ||||||
|         print(await gretchen.result()) |         print(await gretchen.result()) | ||||||
|  | @ -138,42 +116,17 @@ async def stream_from(portal): | ||||||
|             print(value) |             print(value) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def unpack_reg(actor_or_portal): |  | ||||||
|     ''' |  | ||||||
|     Get and unpack a "registry" RPC request from the "arbiter" registry |  | ||||||
|     system. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if getattr(actor_or_portal, 'get_registry', None): |  | ||||||
|         msg = await actor_or_portal.get_registry() |  | ||||||
|     else: |  | ||||||
|         msg = await actor_or_portal.run_from_ns('self', 'get_registry') |  | ||||||
| 
 |  | ||||||
|     return {tuple(key.split('.')): val for key, val in msg.items()} |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def spawn_and_check_registry( | async def spawn_and_check_registry( | ||||||
|     reg_addr: tuple, |     arb_addr: tuple, | ||||||
|     use_signal: bool, |     use_signal: bool, | ||||||
|     debug_mode: bool = False, |  | ||||||
|     remote_arbiter: bool = False, |     remote_arbiter: bool = False, | ||||||
|     with_streaming: bool = False, |     with_streaming: bool = False, | ||||||
|     maybe_daemon: tuple[ |  | ||||||
|         subprocess.Popen, |  | ||||||
|         psutil.Process, |  | ||||||
|     ]|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     if maybe_daemon: |  | ||||||
|         popen, proc = maybe_daemon |  | ||||||
|         # breakpoint() |  | ||||||
| 
 |  | ||||||
|     async with tractor.open_root_actor( |     async with tractor.open_root_actor( | ||||||
|         registry_addrs=[reg_addr], |         arbiter_addr=arb_addr, | ||||||
|         debug_mode=debug_mode, |  | ||||||
|     ): |     ): | ||||||
|         async with tractor.get_registry(reg_addr) as portal: |         async with tractor.get_arbiter(*arb_addr) as portal: | ||||||
|             # runtime needs to be up to call this |             # runtime needs to be up to call this | ||||||
|             actor = tractor.current_actor() |             actor = tractor.current_actor() | ||||||
| 
 | 
 | ||||||
|  | @ -181,38 +134,38 @@ async def spawn_and_check_registry( | ||||||
|                 assert not actor.is_arbiter |                 assert not actor.is_arbiter | ||||||
| 
 | 
 | ||||||
|             if actor.is_arbiter: |             if actor.is_arbiter: | ||||||
|                 extra = 1  # arbiter is local root actor |  | ||||||
|                 get_reg = partial(unpack_reg, actor) |  | ||||||
| 
 | 
 | ||||||
|  |                 async def get_reg(): | ||||||
|  |                     return await actor.get_registry() | ||||||
|  | 
 | ||||||
|  |                 extra = 1  # arbiter is local root actor | ||||||
|             else: |             else: | ||||||
|                 get_reg = partial(unpack_reg, portal) |                 get_reg = partial(portal.run_from_ns, 'self', 'get_registry') | ||||||
|                 extra = 2  # local root actor + remote arbiter |                 extra = 2  # local root actor + remote arbiter | ||||||
| 
 | 
 | ||||||
|             # ensure current actor is registered |             # ensure current actor is registered | ||||||
|             registry: dict = await get_reg() |             registry = await get_reg() | ||||||
|             assert actor.uid in registry |             assert actor.uid in registry | ||||||
| 
 | 
 | ||||||
|             try: |             try: | ||||||
|                 async with tractor.open_nursery() as an: |                 async with tractor.open_nursery() as n: | ||||||
|                     async with ( |                     async with trio.open_nursery() as trion: | ||||||
|                         collapse_eg(), | 
 | ||||||
|                         trio.open_nursery() as trion, |  | ||||||
|                     ): |  | ||||||
|                         portals = {} |                         portals = {} | ||||||
|                         for i in range(3): |                         for i in range(3): | ||||||
|                             name = f'a{i}' |                             name = f'a{i}' | ||||||
|                             if with_streaming: |                             if with_streaming: | ||||||
|                                 portals[name] = await an.start_actor( |                                 portals[name] = await n.start_actor( | ||||||
|                                     name=name, enable_modules=[__name__]) |                                     name=name, enable_modules=[__name__]) | ||||||
| 
 | 
 | ||||||
|                             else:  # no streaming |                             else:  # no streaming | ||||||
|                                 portals[name] = await an.run_in_actor( |                                 portals[name] = await n.run_in_actor( | ||||||
|                                     trio.sleep_forever, name=name) |                                     trio.sleep_forever, name=name) | ||||||
| 
 | 
 | ||||||
|                         # wait on last actor to come up |                         # wait on last actor to come up | ||||||
|                         async with tractor.wait_for_actor(name): |                         async with tractor.wait_for_actor(name): | ||||||
|                             registry = await get_reg() |                             registry = await get_reg() | ||||||
|                             for uid in an._children: |                             for uid in n._children: | ||||||
|                                 assert uid in registry |                                 assert uid in registry | ||||||
| 
 | 
 | ||||||
|                         assert len(portals) + extra == len(registry) |                         assert len(portals) + extra == len(registry) | ||||||
|  | @ -245,24 +198,20 @@ async def spawn_and_check_registry( | ||||||
| @pytest.mark.parametrize('use_signal', [False, True]) | @pytest.mark.parametrize('use_signal', [False, True]) | ||||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | @pytest.mark.parametrize('with_streaming', [False, True]) | ||||||
| def test_subactors_unregister_on_cancel( | def test_subactors_unregister_on_cancel( | ||||||
|     debug_mode: bool, |  | ||||||
|     start_method, |     start_method, | ||||||
|     use_signal, |     use_signal, | ||||||
|     reg_addr, |     arb_addr, | ||||||
|     with_streaming, |     with_streaming, | ||||||
| ): | ): | ||||||
|     ''' |     """Verify that cancelling a nursery results in all subactors | ||||||
|     Verify that cancelling a nursery results in all subactors |  | ||||||
|     deregistering themselves with the arbiter. |     deregistering themselves with the arbiter. | ||||||
| 
 |     """ | ||||||
|     ''' |  | ||||||
|     with pytest.raises(KeyboardInterrupt): |     with pytest.raises(KeyboardInterrupt): | ||||||
|         trio.run( |         trio.run( | ||||||
|             partial( |             partial( | ||||||
|                 spawn_and_check_registry, |                 spawn_and_check_registry, | ||||||
|                 reg_addr, |                 arb_addr, | ||||||
|                 use_signal, |                 use_signal, | ||||||
|                 debug_mode=debug_mode, |  | ||||||
|                 remote_arbiter=False, |                 remote_arbiter=False, | ||||||
|                 with_streaming=with_streaming, |                 with_streaming=with_streaming, | ||||||
|             ), |             ), | ||||||
|  | @ -272,11 +221,10 @@ def test_subactors_unregister_on_cancel( | ||||||
| @pytest.mark.parametrize('use_signal', [False, True]) | @pytest.mark.parametrize('use_signal', [False, True]) | ||||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | @pytest.mark.parametrize('with_streaming', [False, True]) | ||||||
| def test_subactors_unregister_on_cancel_remote_daemon( | def test_subactors_unregister_on_cancel_remote_daemon( | ||||||
|     daemon: subprocess.Popen, |     daemon, | ||||||
|     debug_mode: bool, |  | ||||||
|     start_method, |     start_method, | ||||||
|     use_signal, |     use_signal, | ||||||
|     reg_addr, |     arb_addr, | ||||||
|     with_streaming, |     with_streaming, | ||||||
| ): | ): | ||||||
|     """Verify that cancelling a nursery results in all subactors |     """Verify that cancelling a nursery results in all subactors | ||||||
|  | @ -287,15 +235,10 @@ def test_subactors_unregister_on_cancel_remote_daemon( | ||||||
|         trio.run( |         trio.run( | ||||||
|             partial( |             partial( | ||||||
|                 spawn_and_check_registry, |                 spawn_and_check_registry, | ||||||
|                 reg_addr, |                 arb_addr, | ||||||
|                 use_signal, |                 use_signal, | ||||||
|                 debug_mode=debug_mode, |  | ||||||
|                 remote_arbiter=True, |                 remote_arbiter=True, | ||||||
|                 with_streaming=with_streaming, |                 with_streaming=with_streaming, | ||||||
|                 maybe_daemon=( |  | ||||||
|                     daemon, |  | ||||||
|                     psutil.Process(daemon.pid) |  | ||||||
|                 ), |  | ||||||
|             ), |             ), | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  | @ -306,7 +249,7 @@ async def streamer(agen): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def close_chans_before_nursery( | async def close_chans_before_nursery( | ||||||
|     reg_addr: tuple, |     arb_addr: tuple, | ||||||
|     use_signal: bool, |     use_signal: bool, | ||||||
|     remote_arbiter: bool = False, |     remote_arbiter: bool = False, | ||||||
| ) -> None: | ) -> None: | ||||||
|  | @ -319,11 +262,11 @@ async def close_chans_before_nursery( | ||||||
|         entries_at_end = 1 |         entries_at_end = 1 | ||||||
| 
 | 
 | ||||||
|     async with tractor.open_root_actor( |     async with tractor.open_root_actor( | ||||||
|         registry_addrs=[reg_addr], |         arbiter_addr=arb_addr, | ||||||
|     ): |     ): | ||||||
|         async with tractor.get_registry(reg_addr) as aportal: |         async with tractor.get_arbiter(*arb_addr) as aportal: | ||||||
|             try: |             try: | ||||||
|                 get_reg = partial(unpack_reg, aportal) |                 get_reg = partial(aportal.run_from_ns, 'self', 'get_registry') | ||||||
| 
 | 
 | ||||||
|                 async with tractor.open_nursery() as tn: |                 async with tractor.open_nursery() as tn: | ||||||
|                     portal1 = await tn.start_actor( |                     portal1 = await tn.start_actor( | ||||||
|  | @ -339,12 +282,9 @@ async def close_chans_before_nursery( | ||||||
|                         async with portal2.open_stream_from( |                         async with portal2.open_stream_from( | ||||||
|                             stream_forever |                             stream_forever | ||||||
|                         ) as agen2: |                         ) as agen2: | ||||||
|                             async with ( |                             async with trio.open_nursery() as n: | ||||||
|                                 collapse_eg(), |                                 n.start_soon(streamer, agen1) | ||||||
|                                 trio.open_nursery() as tn, |                                 n.start_soon(cancel, use_signal, .5) | ||||||
|                             ): |  | ||||||
|                                 tn.start_soon(streamer, agen1) |  | ||||||
|                                 tn.start_soon(cancel, use_signal, .5) |  | ||||||
|                                 try: |                                 try: | ||||||
|                                     await streamer(agen2) |                                     await streamer(agen2) | ||||||
|                                 finally: |                                 finally: | ||||||
|  | @ -376,7 +316,7 @@ async def close_chans_before_nursery( | ||||||
| def test_close_channel_explicit( | def test_close_channel_explicit( | ||||||
|     start_method, |     start_method, | ||||||
|     use_signal, |     use_signal, | ||||||
|     reg_addr, |     arb_addr, | ||||||
| ): | ): | ||||||
|     """Verify that closing a stream explicitly and killing the actor's |     """Verify that closing a stream explicitly and killing the actor's | ||||||
|     "root nursery" **before** the containing nursery tears down also |     "root nursery" **before** the containing nursery tears down also | ||||||
|  | @ -386,7 +326,7 @@ def test_close_channel_explicit( | ||||||
|         trio.run( |         trio.run( | ||||||
|             partial( |             partial( | ||||||
|                 close_chans_before_nursery, |                 close_chans_before_nursery, | ||||||
|                 reg_addr, |                 arb_addr, | ||||||
|                 use_signal, |                 use_signal, | ||||||
|                 remote_arbiter=False, |                 remote_arbiter=False, | ||||||
|             ), |             ), | ||||||
|  | @ -395,10 +335,10 @@ def test_close_channel_explicit( | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize('use_signal', [False, True]) | @pytest.mark.parametrize('use_signal', [False, True]) | ||||||
| def test_close_channel_explicit_remote_arbiter( | def test_close_channel_explicit_remote_arbiter( | ||||||
|     daemon: subprocess.Popen, |     daemon, | ||||||
|     start_method, |     start_method, | ||||||
|     use_signal, |     use_signal, | ||||||
|     reg_addr, |     arb_addr, | ||||||
| ): | ): | ||||||
|     """Verify that closing a stream explicitly and killing the actor's |     """Verify that closing a stream explicitly and killing the actor's | ||||||
|     "root nursery" **before** the containing nursery tears down also |     "root nursery" **before** the containing nursery tears down also | ||||||
|  | @ -408,7 +348,7 @@ def test_close_channel_explicit_remote_arbiter( | ||||||
|         trio.run( |         trio.run( | ||||||
|             partial( |             partial( | ||||||
|                 close_chans_before_nursery, |                 close_chans_before_nursery, | ||||||
|                 reg_addr, |                 arb_addr, | ||||||
|                 use_signal, |                 use_signal, | ||||||
|                 remote_arbiter=True, |                 remote_arbiter=True, | ||||||
|             ), |             ), | ||||||
|  |  | ||||||
|  | @ -1,7 +1,6 @@ | ||||||
| ''' | """ | ||||||
| Let's make sure them docs work yah? | Let's make sure them docs work yah? | ||||||
| 
 | """ | ||||||
| ''' |  | ||||||
| from contextlib import contextmanager | from contextlib import contextmanager | ||||||
| import itertools | import itertools | ||||||
| import os | import os | ||||||
|  | @ -11,17 +10,18 @@ import platform | ||||||
| import shutil | import shutil | ||||||
| 
 | 
 | ||||||
| import pytest | import pytest | ||||||
| from tractor._testing import ( | 
 | ||||||
|     examples_dir, | from conftest import repodir | ||||||
| ) | 
 | ||||||
|  | 
 | ||||||
|  | def examples_dir(): | ||||||
|  |     """Return the abspath to the examples directory. | ||||||
|  |     """ | ||||||
|  |     return os.path.join(repodir(), 'examples') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.fixture | @pytest.fixture | ||||||
| def run_example_in_subproc( | def run_example_in_subproc(loglevel, testdir, arb_addr): | ||||||
|     loglevel: str, |  | ||||||
|     testdir: pytest.Pytester, |  | ||||||
|     reg_addr: tuple[str, int], |  | ||||||
| ): |  | ||||||
| 
 | 
 | ||||||
|     @contextmanager |     @contextmanager | ||||||
|     def run(script_code): |     def run(script_code): | ||||||
|  | @ -31,8 +31,8 @@ def run_example_in_subproc( | ||||||
|             # on windows we need to create a special __main__.py which will |             # on windows we need to create a special __main__.py which will | ||||||
|             # be executed with ``python -m <modulename>`` on windows.. |             # be executed with ``python -m <modulename>`` on windows.. | ||||||
|             shutil.copyfile( |             shutil.copyfile( | ||||||
|                 examples_dir() / '__main__.py', |                 os.path.join(examples_dir(), '__main__.py'), | ||||||
|                 str(testdir / '__main__.py'), |                 os.path.join(str(testdir), '__main__.py') | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # drop the ``if __name__ == '__main__'`` guard onwards from |             # drop the ``if __name__ == '__main__'`` guard onwards from | ||||||
|  | @ -66,9 +66,6 @@ def run_example_in_subproc( | ||||||
|         # due to backpressure!!! |         # due to backpressure!!! | ||||||
|         proc = testdir.popen( |         proc = testdir.popen( | ||||||
|             cmdargs, |             cmdargs, | ||||||
|             stdin=subprocess.PIPE, |  | ||||||
|             stdout=subprocess.PIPE, |  | ||||||
|             stderr=subprocess.PIPE, |  | ||||||
|             **kwargs, |             **kwargs, | ||||||
|         ) |         ) | ||||||
|         assert not proc.returncode |         assert not proc.returncode | ||||||
|  | @ -83,38 +80,24 @@ def run_example_in_subproc( | ||||||
|     'example_script', |     'example_script', | ||||||
| 
 | 
 | ||||||
|     # walk yields: (dirpath, dirnames, filenames) |     # walk yields: (dirpath, dirnames, filenames) | ||||||
|     [ |     [(p[0], f) for p in os.walk(examples_dir()) for f in p[2] | ||||||
|         (p[0], f) |  | ||||||
|         for p in os.walk(examples_dir()) |  | ||||||
|         for f in p[2] |  | ||||||
| 
 | 
 | ||||||
|         if ( |         if '__' not in f | ||||||
|             '__' not in f |  | ||||||
|         and f[0] != '_' |         and f[0] != '_' | ||||||
|             and 'debugging' not in p[0] |         and 'debugging' not in p[0]], | ||||||
|             and 'integration' not in p[0] | 
 | ||||||
|             and 'advanced_faults' not in p[0] |  | ||||||
|             and 'multihost' not in p[0] |  | ||||||
|             and 'trio' not in p[0] |  | ||||||
|         ) |  | ||||||
|     ], |  | ||||||
|     ids=lambda t: t[1], |     ids=lambda t: t[1], | ||||||
| ) | ) | ||||||
| def test_example( | def test_example(run_example_in_subproc, example_script): | ||||||
|     run_example_in_subproc, |     """Load and run scripts from this repo's ``examples/`` dir as a user | ||||||
|     example_script, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Load and run scripts from this repo's ``examples/`` dir as a user |  | ||||||
|     would copy and pasing them into their editor. |     would copy and pasing them into their editor. | ||||||
| 
 | 
 | ||||||
|     On windows a little more "finessing" is done to make |     On windows a little more "finessing" is done to make | ||||||
|     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the |     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the | ||||||
|     test directory and invoke the script as a module with ``python -m |     test directory and invoke the script as a module with ``python -m | ||||||
|     test_example``. |     test_example``. | ||||||
| 
 |     """ | ||||||
|     ''' |     ex_file = os.path.join(*example_script) | ||||||
|     ex_file: str = os.path.join(*example_script) |  | ||||||
| 
 | 
 | ||||||
|     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): |     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): | ||||||
|         pytest.skip("2-way streaming example requires py3.9 async with syntax") |         pytest.skip("2-way streaming example requires py3.9 async with syntax") | ||||||
|  | @ -123,30 +106,15 @@ def test_example( | ||||||
|         code = ex.read() |         code = ex.read() | ||||||
| 
 | 
 | ||||||
|         with run_example_in_subproc(code) as proc: |         with run_example_in_subproc(code) as proc: | ||||||
|             err = None |             proc.wait() | ||||||
|             try: |             err, _ = proc.stderr.read(), proc.stdout.read() | ||||||
|                 if not proc.poll(): |             # print(f'STDERR: {err}') | ||||||
|                     _, err = proc.communicate(timeout=15) |             # print(f'STDOUT: {out}') | ||||||
| 
 |  | ||||||
|             except subprocess.TimeoutExpired as e: |  | ||||||
|                 proc.kill() |  | ||||||
|                 err = e.stderr |  | ||||||
| 
 | 
 | ||||||
|             # if we get some gnarly output let's aggregate and raise |             # if we get some gnarly output let's aggregate and raise | ||||||
|             if err: |  | ||||||
|             errmsg = err.decode() |             errmsg = err.decode() | ||||||
|             errlines = errmsg.splitlines() |             errlines = errmsg.splitlines() | ||||||
|                 last_error = errlines[-1] |             if err and 'Error' in errlines[-1]: | ||||||
|                 if ( |  | ||||||
|                     'Error' in last_error |  | ||||||
| 
 |  | ||||||
|                     # XXX: currently we print this to console, but maybe |  | ||||||
|                     # shouldn't eventually once we figure out what's |  | ||||||
|                     # a better way to be explicit about aio side |  | ||||||
|                     # cancels? |  | ||||||
|                     and |  | ||||||
|                     'asyncio.exceptions.CancelledError' not in last_error |  | ||||||
|                 ): |  | ||||||
|                 raise Exception(errmsg) |                 raise Exception(errmsg) | ||||||
| 
 | 
 | ||||||
|             assert proc.returncode == 0 |             assert proc.returncode == 0 | ||||||
|  |  | ||||||
|  | @ -1,946 +0,0 @@ | ||||||
| ''' |  | ||||||
| Low-level functional audits for our |  | ||||||
| "capability based messaging"-spec feats. |  | ||||||
| 
 |  | ||||||
| B~) |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import ( |  | ||||||
|     contextmanager as cm, |  | ||||||
|     # nullcontext, |  | ||||||
| ) |  | ||||||
| import importlib |  | ||||||
| from typing import ( |  | ||||||
|     Any, |  | ||||||
|     Type, |  | ||||||
|     Union, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from msgspec import ( |  | ||||||
|     # structs, |  | ||||||
|     # msgpack, |  | ||||||
|     Raw, |  | ||||||
|     # Struct, |  | ||||||
|     ValidationError, |  | ||||||
| ) |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Actor, |  | ||||||
|     # _state, |  | ||||||
|     MsgTypeError, |  | ||||||
|     Context, |  | ||||||
| ) |  | ||||||
| from tractor.msg import ( |  | ||||||
|     _codec, |  | ||||||
|     _ctxvar_MsgCodec, |  | ||||||
|     _exts, |  | ||||||
| 
 |  | ||||||
|     NamespacePath, |  | ||||||
|     MsgCodec, |  | ||||||
|     MsgDec, |  | ||||||
|     mk_codec, |  | ||||||
|     mk_dec, |  | ||||||
|     apply_codec, |  | ||||||
|     current_codec, |  | ||||||
| ) |  | ||||||
| from tractor.msg.types import ( |  | ||||||
|     log, |  | ||||||
|     Started, |  | ||||||
|     # _payload_msgs, |  | ||||||
|     # PayloadMsg, |  | ||||||
|     # mk_msg_spec, |  | ||||||
| ) |  | ||||||
| from tractor.msg._ops import ( |  | ||||||
|     limit_plds, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| def enc_nsp(obj: Any) -> Any: |  | ||||||
|     actor: Actor = tractor.current_actor( |  | ||||||
|         err_on_no_runtime=False, |  | ||||||
|     ) |  | ||||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid |  | ||||||
|     print(f'{uid} ENC HOOK') |  | ||||||
| 
 |  | ||||||
|     match obj: |  | ||||||
|         # case NamespacePath()|str(): |  | ||||||
|         case NamespacePath(): |  | ||||||
|             encoded: str = str(obj) |  | ||||||
|             print( |  | ||||||
|                 f'----- ENCODING `NamespacePath` as `str` ------\n' |  | ||||||
|                 f'|_obj:{type(obj)!r} = {obj!r}\n' |  | ||||||
|                 f'|_encoded: str = {encoded!r}\n' |  | ||||||
|             ) |  | ||||||
|             # if type(obj) != NamespacePath: |  | ||||||
|             #     breakpoint() |  | ||||||
|             return encoded |  | ||||||
|         case _: |  | ||||||
|             logmsg: str = ( |  | ||||||
|                 f'{uid}\n' |  | ||||||
|                 'FAILED ENCODE\n' |  | ||||||
|                 f'obj-> `{obj}: {type(obj)}`\n' |  | ||||||
|             ) |  | ||||||
|             raise NotImplementedError(logmsg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def dec_nsp( |  | ||||||
|     obj_type: Type, |  | ||||||
|     obj: Any, |  | ||||||
| 
 |  | ||||||
| ) -> Any: |  | ||||||
|     # breakpoint() |  | ||||||
|     actor: Actor = tractor.current_actor( |  | ||||||
|         err_on_no_runtime=False, |  | ||||||
|     ) |  | ||||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid |  | ||||||
|     print( |  | ||||||
|         f'{uid}\n' |  | ||||||
|         'CUSTOM DECODE\n' |  | ||||||
|         f'type-arg-> {obj_type}\n' |  | ||||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n' |  | ||||||
|     ) |  | ||||||
|     nsp = None |  | ||||||
|     # XXX, never happens right? |  | ||||||
|     if obj_type is Raw: |  | ||||||
|         breakpoint() |  | ||||||
| 
 |  | ||||||
|     if ( |  | ||||||
|         obj_type is NamespacePath |  | ||||||
|         and isinstance(obj, str) |  | ||||||
|         and ':' in obj |  | ||||||
|     ): |  | ||||||
|         nsp = NamespacePath(obj) |  | ||||||
|         # TODO: we could built a generic handler using |  | ||||||
|         # JUST matching the obj_type part? |  | ||||||
|         # nsp = obj_type(obj) |  | ||||||
| 
 |  | ||||||
|     if nsp: |  | ||||||
|         print(f'Returning NSP instance: {nsp}') |  | ||||||
|         return nsp |  | ||||||
| 
 |  | ||||||
|     logmsg: str = ( |  | ||||||
|         f'{uid}\n' |  | ||||||
|         'FAILED DECODE\n' |  | ||||||
|         f'type-> {obj_type}\n' |  | ||||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n\n' |  | ||||||
|         f'current codec:\n' |  | ||||||
|         f'{current_codec()}\n' |  | ||||||
|     ) |  | ||||||
|     # TODO: figure out the ignore subsys for this! |  | ||||||
|     # -[ ] option whether to defense-relay backc the msg |  | ||||||
|     #   inside an `Invalid`/`Ignore` |  | ||||||
|     # -[ ] how to make this handling pluggable such that a |  | ||||||
|     #   `Channel`/`MsgTransport` can intercept and process |  | ||||||
|     #   back msgs either via exception handling or some other |  | ||||||
|     #   signal? |  | ||||||
|     log.warning(logmsg) |  | ||||||
|     # NOTE: this delivers the invalid |  | ||||||
|     # value up to `msgspec`'s decoding |  | ||||||
|     # machinery for error raising. |  | ||||||
|     return obj |  | ||||||
|     # raise NotImplementedError(logmsg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def ex_func(*args): |  | ||||||
|     ''' |  | ||||||
|     A mod level func we can ref and load via our `NamespacePath` |  | ||||||
|     python-object pointer `str` subtype. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     print(f'ex_func({args})') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'add_codec_hooks', |  | ||||||
|     [ |  | ||||||
|         True, |  | ||||||
|         False, |  | ||||||
|     ], |  | ||||||
|     ids=['use_codec_hooks', 'no_codec_hooks'], |  | ||||||
| ) |  | ||||||
| def test_custom_extension_types( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     add_codec_hooks: bool |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that a `MsgCodec` (used for encoding all outbound IPC msgs |  | ||||||
|     and decoding all inbound `PayloadMsg`s) and a paired `MsgDec` |  | ||||||
|     (used for decoding the `PayloadMsg.pld: Raw` received within a given |  | ||||||
|     task's ipc `Context` scope) can both send and receive "extension types" |  | ||||||
|     as supported via custom converter hooks passed to `msgspec`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     nsp_pld_dec: MsgDec = mk_dec( |  | ||||||
|         spec=None,  # ONLY support the ext type |  | ||||||
|         dec_hook=dec_nsp if add_codec_hooks else None, |  | ||||||
|         ext_types=[NamespacePath], |  | ||||||
|     ) |  | ||||||
|     nsp_codec: MsgCodec = mk_codec( |  | ||||||
|         # ipc_pld_spec=Raw,  # default! |  | ||||||
| 
 |  | ||||||
|         # NOTE XXX: the encode hook MUST be used no matter what since |  | ||||||
|         # our `NamespacePath` is not any of a `Any` native type nor |  | ||||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know |  | ||||||
|         # how to encode it unless we provide the custom hook. |  | ||||||
|         # |  | ||||||
|         # AGAIN that is, regardless of whether we spec an |  | ||||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) |  | ||||||
|         # how to enc `NamespacePath` (nsp), so we add a custom |  | ||||||
|         # hook to do that ALWAYS. |  | ||||||
|         enc_hook=enc_nsp if add_codec_hooks else None, |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to |  | ||||||
|         # `Decoder`? so it won't work in tandem with the |  | ||||||
|         # `ipc_pld_spec` passed above? |  | ||||||
|         ext_types=[NamespacePath], |  | ||||||
| 
 |  | ||||||
|         # TODO? is it useful to have the `.pld` decoded *prior* to |  | ||||||
|         # the `PldRx`?? like perf or mem related? |  | ||||||
|         # ext_dec=nsp_pld_dec, |  | ||||||
|     ) |  | ||||||
|     if add_codec_hooks: |  | ||||||
|         assert nsp_codec.dec.dec_hook is None |  | ||||||
| 
 |  | ||||||
|         # TODO? if we pass `ext_dec` above? |  | ||||||
|         # assert nsp_codec.dec.dec_hook is dec_nsp |  | ||||||
| 
 |  | ||||||
|         assert nsp_codec.enc.enc_hook is enc_nsp |  | ||||||
| 
 |  | ||||||
|     nsp = NamespacePath.from_ref(ex_func) |  | ||||||
| 
 |  | ||||||
|     try: |  | ||||||
|         nsp_bytes: bytes = nsp_codec.encode(nsp) |  | ||||||
|         nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes) |  | ||||||
|         nsp_rt_sin_msg.load_ref() is ex_func |  | ||||||
|     except TypeError: |  | ||||||
|         if not add_codec_hooks: |  | ||||||
|             pass |  | ||||||
| 
 |  | ||||||
|     try: |  | ||||||
|         msg_bytes: bytes = nsp_codec.encode( |  | ||||||
|             Started( |  | ||||||
|                 cid='cid', |  | ||||||
|                 pld=nsp, |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         # since the ext-type obj should also be set as the msg.pld |  | ||||||
|         assert nsp_bytes in msg_bytes |  | ||||||
|         started_rt: Started = nsp_codec.decode(msg_bytes) |  | ||||||
|         pld: Raw = started_rt.pld |  | ||||||
|         assert isinstance(pld, Raw) |  | ||||||
|         nsp_rt: NamespacePath = nsp_pld_dec.decode(pld) |  | ||||||
|         assert isinstance(nsp_rt, NamespacePath) |  | ||||||
|         # in obj comparison terms they should be the same |  | ||||||
|         assert nsp_rt == nsp |  | ||||||
|         # ensure we've decoded to ext type! |  | ||||||
|         assert nsp_rt.load_ref() is ex_func |  | ||||||
| 
 |  | ||||||
|     except TypeError: |  | ||||||
|         if not add_codec_hooks: |  | ||||||
|             pass |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def sleep_forever_in_sub( |  | ||||||
|     ctx: Context, |  | ||||||
| ) -> None: |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def mk_custom_codec( |  | ||||||
|     add_hooks: bool, |  | ||||||
| 
 |  | ||||||
| ) -> tuple[ |  | ||||||
|     MsgCodec,  # encode to send |  | ||||||
|     MsgDec,  # pld receive-n-decode |  | ||||||
| ]: |  | ||||||
|     ''' |  | ||||||
|     Create custom `msgpack` enc/dec-hooks and set a `Decoder` |  | ||||||
|     which only loads `pld_spec` (like `NamespacePath`) types. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
| 
 |  | ||||||
|     # XXX NOTE XXX: despite defining `NamespacePath` as a type |  | ||||||
|     # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair |  | ||||||
|     # to cast to/from that type on the wire. See the docs: |  | ||||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types |  | ||||||
| 
 |  | ||||||
|     # if pld_spec is Any: |  | ||||||
|     #     pld_spec = Raw |  | ||||||
| 
 |  | ||||||
|     nsp_codec: MsgCodec = mk_codec( |  | ||||||
|         # ipc_pld_spec=Raw,  # default! |  | ||||||
| 
 |  | ||||||
|         # NOTE XXX: the encode hook MUST be used no matter what since |  | ||||||
|         # our `NamespacePath` is not any of a `Any` native type nor |  | ||||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know |  | ||||||
|         # how to encode it unless we provide the custom hook. |  | ||||||
|         # |  | ||||||
|         # AGAIN that is, regardless of whether we spec an |  | ||||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) |  | ||||||
|         # how to enc `NamespacePath` (nsp), so we add a custom |  | ||||||
|         # hook to do that ALWAYS. |  | ||||||
|         enc_hook=enc_nsp if add_hooks else None, |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to |  | ||||||
|         # `Decoder`? so it won't work in tandem with the |  | ||||||
|         # `ipc_pld_spec` passed above? |  | ||||||
|         ext_types=[NamespacePath], |  | ||||||
|     ) |  | ||||||
|     # dec_hook=dec_nsp if add_hooks else None, |  | ||||||
|     return nsp_codec |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    'limit_plds_args',
    [
        (
            {'dec_hook': None, 'ext_types': None},
            None,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': None},
            TypeError,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': [NamespacePath]},
            None,
        ),
        (
            {'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]},
            None,
        ),
    ],
    ids=[
        'no_hook_no_ext_types',
        'only_hook',
        'hook_and_ext_types',
        'hook_and_ext_types_w_null',
    ]
)
def test_pld_limiting_usage(
    limit_plds_args: tuple[dict, Exception|None],
):
    '''
    Verify `dec_hook()` and `ext_types` need to either both be
    provided or we raise an explanatory type-error.

    '''
    kwargs, maybe_err = limit_plds_args

    async def main():
        async with tractor.open_nursery() as an:  # just to open runtime

            # XXX SHOULD NEVER WORK outside an ipc ctx scope!
            try:
                with limit_plds(**kwargs):
                    pass
            except RuntimeError:
                pass

            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )
            async with (
                p.open_context(
                    sleep_forever_in_sub
                ) as (ctx, first),
            ):
                # XXX BUGFIX: previously this was a bare
                # `except maybe_err` which (a) blows up when
                # `maybe_err is None` (you can't catch `None`) and
                # (b) silently passes when the expected error is
                # NOT raised; `pytest.raises()` pins both cases.
                if maybe_err is not None:
                    with pytest.raises(maybe_err) as excinfo:
                        with limit_plds(**kwargs):
                            pass
                    assert type(excinfo.value) is maybe_err
                else:
                    with limit_plds(**kwargs):
                        pass

                # gracefully cancel the (never-returning) ctx task
                # so exiting `open_context()` doesn't hang forever.
                await ctx.cancel()

            # tear down the sub-actor now that we're done with it.
            await p.cancel_actor()

    # XXX BUGFIX: `main()` was defined but never run, making this
    # test pass vacuously no matter what `limit_plds()` did!
    trio.run(main)
| 
 |  | ||||||
| 
 |  | ||||||
def chk_codec_applied(
    expect_codec: MsgCodec|None,
    enter_value: MsgCodec|None = None,

) -> MsgCodec:
    '''
    Sanity-check that the IPC channel's context-vars currently
    resolve to the expected codec, and that the ctx-var wrapper
    APIs all agree with one another.

    '''
    # a `None` expectation implies the `apply_codec()` enter-value
    # must be unset as well; nothing more to check.
    if expect_codec is None:
        assert enter_value is None
        return

    # read the active codec back through both lookup APIs
    # (currently a `RunVar` under the hood).
    curr_codec: MsgCodec = current_codec()
    last_read_codec = _ctxvar_MsgCodec.get()

    # NOTE: decomposed (but equivalent) form of the original
    # 5-operand chained identity comparison,
    #   `expect is curr is last is not def_msgspec is not def_tractor`
    same_codec = expect_codec
    assert same_codec is curr_codec
    assert curr_codec is last_read_codec
    # neither should be the stock `msgspec`/`tractor` defaults.
    assert last_read_codec is not _codec._def_msgspec_codec
    assert _codec._def_msgspec_codec is not _codec._def_tractor_codec

    if enter_value:
        assert enter_value is same_codec
| 
 |  | ||||||
| 
 |  | ||||||
@tractor.context
async def send_back_values(
    ctx: Context,
    rent_pld_spec_type_strs: list[str],
    add_hooks: bool,

) -> None:
    '''
    Child-side ctx endpoint: set up a custom codec to load
    instances of `NamespacePath` and ensure we can round trip
    a func ref with our parent.

    `rent_pld_spec_type_strs` is the parent's pld-spec serialized
    as type-name strings (decoded below via `_exts.dec_type_union()`);
    `add_hooks` requests installing the `NamespacePath` enc/dec
    hooks on this side.

    '''
    uid: tuple = tractor.current_actor().uid

    # init state in sub-actor should be default
    chk_codec_applied(
        expect_codec=_codec._def_tractor_codec,
    )

    # load pld spec from input str, resolving type names against
    # THIS module's namespace.
    rent_pld_spec = _exts.dec_type_union(
        rent_pld_spec_type_strs,
        mods=[
            importlib.import_module(__name__),
        ],
    )
    rent_pld_spec_types: set[Type] = _codec.unpack_spec_types(
        rent_pld_spec,
    )

    # ONLY add ext-hooks if the rent specified a non-std type!
    # (i.e. downgrade `add_hooks` when the parent's spec doesn't
    # include `NamespacePath` at all)
    add_hooks: bool = (
        NamespacePath in rent_pld_spec_types
        and
        add_hooks
    )

    # same as on parent side config.
    nsp_codec: MsgCodec|None = None
    if add_hooks:
        nsp_codec = mk_codec(
            enc_hook=enc_nsp,
            ext_types=[NamespacePath],
        )

    # install the codec (when built) AND limit this ctx's pld-spec
    # for the duration of the roundtrip checks below.
    with (
        maybe_apply_codec(nsp_codec) as codec,
        limit_plds(
            rent_pld_spec,
            dec_hook=dec_nsp if add_hooks else None,
            ext_types=[NamespacePath]  if add_hooks else None,
        ) as pld_dec,
    ):
        # ?XXX? SHOULD WE NOT be swapping the global codec since it
        # breaks `Context.started()` roundtripping checks??
        chk_codec_applied(
            expect_codec=nsp_codec,
            enter_value=codec,
        )

        # ?TODO, mismatch case(s)?
        #
        # ensure pld spec matches on both sides: the limiter must
        # have installed its dec on this ctx's pld-rx and the
        # child's spec-type set must cover the parent's.
        ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
        assert pld_dec is ctx_pld_dec
        child_pld_spec: Type = pld_dec.spec
        child_pld_spec_types: set[Type] = _codec.unpack_spec_types(
            child_pld_spec,
        )
        assert (
            child_pld_spec_types.issuperset(
                rent_pld_spec_types
            )
        )

        # ?TODO, try loop for each of the types in pld-superset?
        #
        # for send_value in [
        #     nsp,
        #     str(nsp),
        #     None,
        # ]:
        nsp = NamespacePath.from_ref(ex_func)
        try:
            print(
                f'{uid}: attempting to `.started({nsp})`\n'
                f'\n'
                f'rent_pld_spec: {rent_pld_spec}\n'
                f'child_pld_spec: {child_pld_spec}\n'
                f'codec: {codec}\n'
            )
            # await tractor.pause()
            await ctx.started(nsp)

        # `.started()` can fail on the DECODE side (parent rejects
        # the pld) which surfaces here as an MTE..
        except tractor.MsgTypeError as _mte:
            mte = _mte

            # false -ve case: with hooks installed the value SHOULD
            # have round-tripped, so escalate.
            if add_hooks:
                raise RuntimeError(
                    f'EXPECTED to `.started()` value given spec ??\n\n'
                    f'child_pld_spec -> {child_pld_spec}\n'
                    f'value = {nsp}: {type(nsp)}\n'
                )

            # true -ve case: no hooks means failure is expected;
            # re-raise so the parent sees the boxed error.
            raise mte

        # TODO: maybe we should add our own wrapper error so as to
        # be interchange-lib agnostic?
        # -[ ] the error type is wtv is raised from the hook so we
        #   could also require a type-class of errors for
        #   indicating whether the hook-failure can be handled by
        #   a nasty-dialog-unprot sub-sys?
        #
        # ..or on the ENCODE side (no `enc_hook` for `NamespacePath`)
        # which currently surfaces as a plain `TypeError`.
        except TypeError as typerr:
            # false -ve
            if add_hooks:
                raise RuntimeError('Should have been able to send `nsp`??')

            # true -ve
            print('Failed to send `nsp` due to no ext hooks set!')
            raise typerr

        # now try sending a set of valid and invalid plds to ensure
        # the pld spec is respected.
        sent: list[Any] = []
        async with ctx.open_stream() as ipc:
            print(
                f'{uid}: streaming all pld types to rent..'
            )

            # for send_value, expect_send in iter_send_val_items:
            for send_value in [
                nsp,
                str(nsp),
                None,
            ]:
                send_type: Type = type(send_value)
                print(
                    f'{uid}: SENDING NEXT pld\n'
                    f'send_type: {send_type}\n'
                    f'send_value: {send_value}\n'
                )
                try:
                    await ipc.send(send_value)
                    sent.append(send_value)

                except ValidationError as valerr:
                    print(f'{uid} FAILED TO SEND {send_value}!')

                    # false -ve
                    if add_hooks:
                        raise RuntimeError(
                            f'EXPECTED to roundtrip value given spec:\n'
                            f'rent_pld_spec -> {rent_pld_spec}\n'
                            f'child_pld_spec -> {child_pld_spec}\n'
                            f'value = {send_value}: {send_type}\n'
                        )

                    # true -ve
                    raise valerr
                    # continue

            # for-else: only runs when the loop completed without
            # raising, i.e. every value was sent successfully.
            else:
                print(
                    f'{uid}: finished sending all values\n'
                    'Should be exiting stream block!\n'
                )

        print(f'{uid}: exited streaming block!')
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@cm
def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None:
    '''
    Conditionally install `codec` as the active IPC msg-codec for
    the enclosed scope; a `None` input is a no-op which yields
    `None`.

    '''
    if codec is not None:
        # delegate to the real ctx-mngr which swaps the run-var.
        with apply_codec(codec) as applied:
            yield applied
    else:
        yield None
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    'pld_spec',
    [
        Any,
        NamespacePath,
        NamespacePath|None,  # the "maybe" spec Bo
    ],
    ids=[
        'any_type',
        'only_nsp_ext',
        'maybe_nsp_ext',
    ]
)
@pytest.mark.parametrize(
    'add_hooks',
    [
        True,
        False,
    ],
    ids=[
        'use_codec_hooks',
        'no_codec_hooks',
    ],
)
def test_ext_types_over_ipc(
    debug_mode: bool,
    pld_spec: Union[Type],
    add_hooks: bool,
):
    '''
    Ensure we can support extension types converted using
    `enc/dec_hook()`s passed to the `.msg.limit_plds()` API
    and that sane errors happen when we try to do the same without
    the codec hooks.

    Only the `(NamespacePath in spec) and add_hooks` combos are
    expected to succeed; all others must box a `TypeError` from the
    child's `.started()` attempt (see tail of this test).

    '''
    pld_types: set[Type] = _codec.unpack_spec_types(pld_spec)

    async def main():

        # sanity check the default pld-spec beforehand
        chk_codec_applied(
            expect_codec=_codec._def_tractor_codec,
        )

        # extension type we want to send as msg payload
        nsp = NamespacePath.from_ref(ex_func)

        # ^NOTE, 2 cases:
        # - codec hooks not added -> decode nsp as `str`
        # - codec with hooks -> decode nsp as `NamespacePath`
        nsp_codec: MsgCodec|None = None
        if (
            NamespacePath in pld_types
            and
            add_hooks
        ):
            nsp_codec = mk_codec(
                enc_hook=enc_nsp,
                ext_types=[NamespacePath],
            )

        async with tractor.open_nursery(
            debug_mode=debug_mode,
        ) as an:
            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )
            # install the (maybe-)custom codec for the parent side
            # for the full ctx + stream lifetime below.
            with (
                maybe_apply_codec(nsp_codec) as codec,
            ):
                chk_codec_applied(
                    expect_codec=nsp_codec,
                    enter_value=codec,
                )
                # serialize the pld-spec as type-name strs so the
                # child can reconstruct it on its side.
                rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec)

                # XXX should raise an mte (`MsgTypeError`)
                # when `add_hooks == False` bc the input
                # `expect_ipc_send` kwarg has a nsp which can't be
                # serialized!
                #
                # TODO:can we ensure this happens from the
                # `Return`-side (aka the sub) as well?
                try:
                    ctx: tractor.Context
                    ipc: tractor.MsgStream
                    async with (

                        # XXX should raise an mte (`MsgTypeError`)
                        # when `add_hooks == False`..
                        p.open_context(
                            send_back_values,
                            # expect_debug=debug_mode,
                            rent_pld_spec_type_strs=rent_pld_spec_type_strs,
                            add_hooks=add_hooks,
                            # expect_ipc_send=expect_ipc_send,
                        ) as (ctx, first),

                        ctx.open_stream() as ipc,
                    ):
                        # mirror the child: limit this ctx's pld-spec
                        # (with hooks only when requested).
                        with (
                            limit_plds(
                                pld_spec,
                                dec_hook=dec_nsp if add_hooks else None,
                                ext_types=[NamespacePath]  if add_hooks else None,
                            ) as pld_dec,
                        ):
                            # limiter must have swapped in its dec on
                            # the ctx's payload-receiver.
                            ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
                            assert pld_dec is ctx_pld_dec

                            # if (
                            #     not add_hooks
                            #     and
                            #     NamespacePath in
                            # ):
                            #     pytest.fail('ctx should fail to open without custom enc_hook!?')

                            # round-trip the extension value through
                            # the child's echo stream.
                            await ipc.send(nsp)
                            nsp_rt = await ipc.receive()

                            assert nsp_rt == nsp
                            assert nsp_rt.load_ref() is ex_func

                # this test passes bc we can go no further!
                except MsgTypeError as mte:
                    # if not add_hooks:
                    #     # teardown nursery
                    #     await p.cancel_actor()
                        # return

                    raise mte

            await p.cancel_actor()

    # only the hook-enabled NSP combos can fully round-trip; all
    # other param combos must error out of the runtime with the
    # child's boxed `TypeError` from its `.started(nsp)` attempt.
    if (
        NamespacePath in pld_types
        and
        add_hooks
    ):
        trio.run(main)

    else:
        with pytest.raises(
            expected_exception=tractor.RemoteActorError,
        ) as excinfo:
            trio.run(main)

        exc = excinfo.value
        # bc `.started(nsp: NamespacePath)` will raise
        assert exc.boxed_type is TypeError
| 
 |  | ||||||
| 
 |  | ||||||
| # def chk_pld_type( |  | ||||||
| #     payload_spec: Type[Struct]|Any, |  | ||||||
| #     pld: Any, |  | ||||||
| 
 |  | ||||||
| #     expect_roundtrip: bool|None = None, |  | ||||||
| 
 |  | ||||||
| # ) -> bool: |  | ||||||
| 
 |  | ||||||
| #     pld_val_type: Type = type(pld) |  | ||||||
| 
 |  | ||||||
| #     # TODO: verify that the overridden subtypes |  | ||||||
| #     # DO NOT have modified type-annots from original! |  | ||||||
| #     # 'Start',  .pld: FuncSpec |  | ||||||
| #     # 'StartAck',  .pld: IpcCtxSpec |  | ||||||
| #     # 'Stop',  .pld: UNSEt |  | ||||||
| #     # 'Error',  .pld: ErrorData |  | ||||||
| 
 |  | ||||||
| #     codec: MsgCodec = mk_codec( |  | ||||||
| #         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified |  | ||||||
| #         # type union. |  | ||||||
| #         ipc_pld_spec=payload_spec, |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     # make a one-off dec to compare with our `MsgCodec` instance |  | ||||||
| #     # which does the below `mk_msg_spec()` call internally |  | ||||||
| #     ipc_msg_spec: Union[Type[Struct]] |  | ||||||
| #     msg_types: list[PayloadMsg[payload_spec]] |  | ||||||
| #     ( |  | ||||||
| #         ipc_msg_spec, |  | ||||||
| #         msg_types, |  | ||||||
| #     ) = mk_msg_spec( |  | ||||||
| #         payload_type_union=payload_spec, |  | ||||||
| #     ) |  | ||||||
| #     _enc = msgpack.Encoder() |  | ||||||
| #     _dec = msgpack.Decoder( |  | ||||||
| #         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     assert ( |  | ||||||
| #         payload_spec |  | ||||||
| #         == |  | ||||||
| #         codec.pld_spec |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     # assert codec.dec == dec |  | ||||||
| #     # |  | ||||||
| #     # ^-XXX-^ not sure why these aren't "equal" but when cast |  | ||||||
| #     # to `str` they seem to match ?? .. kk |  | ||||||
| 
 |  | ||||||
| #     assert ( |  | ||||||
| #         str(ipc_msg_spec) |  | ||||||
| #         == |  | ||||||
| #         str(codec.msg_spec) |  | ||||||
| #         == |  | ||||||
| #         str(_dec.type) |  | ||||||
| #         == |  | ||||||
| #         str(codec.dec.type) |  | ||||||
| #     ) |  | ||||||
| 
 |  | ||||||
| #     # verify the boxed-type for all variable payload-type msgs. |  | ||||||
| #     if not msg_types: |  | ||||||
| #         breakpoint() |  | ||||||
| 
 |  | ||||||
| #     roundtrip: bool|None = None |  | ||||||
| #     pld_spec_msg_names: list[str] = [ |  | ||||||
| #         td.__name__ for td in _payload_msgs |  | ||||||
| #     ] |  | ||||||
| #     for typedef in msg_types: |  | ||||||
| 
 |  | ||||||
| #         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names |  | ||||||
| #         if skip_runtime_msg: |  | ||||||
| #             continue |  | ||||||
| 
 |  | ||||||
| #         pld_field = structs.fields(typedef)[1] |  | ||||||
| #         assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere? |  | ||||||
| 
 |  | ||||||
| #         kwargs: dict[str, Any] = { |  | ||||||
| #             'cid': '666', |  | ||||||
| #             'pld': pld, |  | ||||||
| #         } |  | ||||||
| #         enc_msg: PayloadMsg = typedef(**kwargs) |  | ||||||
| 
 |  | ||||||
| #         _wire_bytes: bytes = _enc.encode(enc_msg) |  | ||||||
| #         wire_bytes: bytes = codec.enc.encode(enc_msg) |  | ||||||
| #         assert _wire_bytes == wire_bytes |  | ||||||
| 
 |  | ||||||
| #         ve: ValidationError|None = None |  | ||||||
| #         try: |  | ||||||
| #             dec_msg = codec.dec.decode(wire_bytes) |  | ||||||
| #             _dec_msg = _dec.decode(wire_bytes) |  | ||||||
| 
 |  | ||||||
| #             # decoded msg and thus payload should be exactly same! |  | ||||||
| #             assert (roundtrip := ( |  | ||||||
| #                 _dec_msg |  | ||||||
| #                 == |  | ||||||
| #                 dec_msg |  | ||||||
| #                 == |  | ||||||
| #                 enc_msg |  | ||||||
| #             )) |  | ||||||
| 
 |  | ||||||
| #             if ( |  | ||||||
| #                 expect_roundtrip is not None |  | ||||||
| #                 and expect_roundtrip != roundtrip |  | ||||||
| #             ): |  | ||||||
| #                 breakpoint() |  | ||||||
| 
 |  | ||||||
| #             assert ( |  | ||||||
| #                 pld |  | ||||||
| #                 == |  | ||||||
| #                 dec_msg.pld |  | ||||||
| #                 == |  | ||||||
| #                 enc_msg.pld |  | ||||||
| #             ) |  | ||||||
| #             # assert (roundtrip := (_dec_msg == enc_msg)) |  | ||||||
| 
 |  | ||||||
| #         except ValidationError as _ve: |  | ||||||
| #             ve = _ve |  | ||||||
| #             roundtrip: bool = False |  | ||||||
| #             if pld_val_type is payload_spec: |  | ||||||
| #                 raise ValueError( |  | ||||||
| #                    'Got `ValidationError` despite type-var match!?\n' |  | ||||||
| #                     f'pld_val_type: {pld_val_type}\n' |  | ||||||
| #                     f'payload_type: {payload_spec}\n' |  | ||||||
| #                 ) from ve |  | ||||||
| 
 |  | ||||||
| #             else: |  | ||||||
| #                 # ow we good cuz the pld spec mismatched. |  | ||||||
| #                 print( |  | ||||||
| #                     'Got expected `ValidationError` since,\n' |  | ||||||
| #                     f'{pld_val_type} is not {payload_spec}\n' |  | ||||||
| #                 ) |  | ||||||
| #         else: |  | ||||||
| #             if ( |  | ||||||
| #                 payload_spec is not Any |  | ||||||
| #                 and |  | ||||||
| #                 pld_val_type is not payload_spec |  | ||||||
| #             ): |  | ||||||
| #                 raise ValueError( |  | ||||||
| #                    'DID NOT `ValidationError` despite expected type match!?\n' |  | ||||||
| #                     f'pld_val_type: {pld_val_type}\n' |  | ||||||
| #                     f'payload_type: {payload_spec}\n' |  | ||||||
| #                 ) |  | ||||||
| 
 |  | ||||||
| #     # full code decode should always be attempted! |  | ||||||
| #     if roundtrip is None: |  | ||||||
| #         breakpoint() |  | ||||||
| 
 |  | ||||||
| #     return roundtrip |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # ?TODO? maybe remove since covered in the newer `test_pldrx_limiting` |  | ||||||
| # via end-2-end testing of all this? |  | ||||||
| # -[ ] IOW do we really NEED this lowlevel unit testing? |  | ||||||
| # |  | ||||||
| # def test_limit_msgspec( |  | ||||||
| #     debug_mode: bool, |  | ||||||
| # ): |  | ||||||
| #     ''' |  | ||||||
| #     Internals unit testing to verify that type-limiting an IPC ctx's |  | ||||||
| #     msg spec with `Pldrx.limit_plds()` results in various |  | ||||||
| #     encapsulated `msgspec` object settings and state. |  | ||||||
| 
 |  | ||||||
| #     ''' |  | ||||||
| #     async def main(): |  | ||||||
| #         async with tractor.open_root_actor( |  | ||||||
| #             debug_mode=debug_mode, |  | ||||||
| #         ): |  | ||||||
| #             # ensure we can round-trip a boxing `PayloadMsg` |  | ||||||
| #             assert chk_pld_type( |  | ||||||
| #                 payload_spec=Any, |  | ||||||
| #                 pld=None, |  | ||||||
| #                 expect_roundtrip=True, |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             # verify that a mis-typed payload value won't decode |  | ||||||
| #             assert not chk_pld_type( |  | ||||||
| #                 payload_spec=int, |  | ||||||
| #                 pld='doggy', |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             # parametrize the boxed `.pld` type as a custom-struct |  | ||||||
| #             # and ensure that parametrization propagates |  | ||||||
| #             # to all payload-msg-spec-able subtypes! |  | ||||||
| #             class CustomPayload(Struct): |  | ||||||
| #                 name: str |  | ||||||
| #                 value: Any |  | ||||||
| 
 |  | ||||||
| #             assert not chk_pld_type( |  | ||||||
| #                 payload_spec=CustomPayload, |  | ||||||
| #                 pld='doggy', |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             assert chk_pld_type( |  | ||||||
| #                 payload_spec=CustomPayload, |  | ||||||
| #                 pld=CustomPayload(name='doggy', value='urmom') |  | ||||||
| #             ) |  | ||||||
| 
 |  | ||||||
| #             # yah, we can `.pause_from_sync()` now! |  | ||||||
| #             # breakpoint() |  | ||||||
| 
 |  | ||||||
| #     trio.run(main) |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -7,24 +7,31 @@ import pytest | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| from tractor._testing import tractor_test | from conftest import tractor_test | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.trio | @pytest.mark.trio | ||||||
| async def test_no_runtime(): | async def test_no_arbitter(): | ||||||
|     """An arbitter must be established before any nurseries |     """An arbitter must be established before any nurseries | ||||||
|     can be created. |     can be created. | ||||||
| 
 | 
 | ||||||
|     (In other words ``tractor.open_root_actor()`` must be engaged at |     (In other words ``tractor.open_root_actor()`` must be engaged at | ||||||
|     some point?) |     some point?) | ||||||
|     """ |     """ | ||||||
|     with pytest.raises(RuntimeError) : |     with pytest.raises(RuntimeError): | ||||||
|         async with tractor.find_actor('doggy'): |         with tractor.open_nursery(): | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | def test_no_main(): | ||||||
|  |     """An async function **must** be passed to ``tractor.run()``. | ||||||
|  |     """ | ||||||
|  |     with pytest.raises(TypeError): | ||||||
|  |         tractor.run(None) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_self_is_registered(reg_addr): | async def test_self_is_registered(arb_addr): | ||||||
|     "Verify waiting on the arbiter to register itself using the standard api." |     "Verify waiting on the arbiter to register itself using the standard api." | ||||||
|     actor = tractor.current_actor() |     actor = tractor.current_actor() | ||||||
|     assert actor.is_arbiter |     assert actor.is_arbiter | ||||||
|  | @ -34,20 +41,20 @@ async def test_self_is_registered(reg_addr): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_self_is_registered_localportal(reg_addr): | async def test_self_is_registered_localportal(arb_addr): | ||||||
|     "Verify waiting on the arbiter to register itself using a local portal." |     "Verify waiting on the arbiter to register itself using a local portal." | ||||||
|     actor = tractor.current_actor() |     actor = tractor.current_actor() | ||||||
|     assert actor.is_arbiter |     assert actor.is_arbiter | ||||||
|     async with tractor.get_registry(reg_addr) as portal: |     async with tractor.get_arbiter(*arb_addr) as portal: | ||||||
|         assert isinstance(portal, tractor._portal.LocalPortal) |         assert isinstance(portal, tractor._portal.LocalPortal) | ||||||
| 
 | 
 | ||||||
|         with trio.fail_after(0.2): |         with trio.fail_after(0.2): | ||||||
|             sockaddr = await portal.run_from_ns( |             sockaddr = await portal.run_from_ns( | ||||||
|                     'self', 'wait_for_actor', name='root') |                     'self', 'wait_for_actor', name='root') | ||||||
|             assert sockaddr[0] == reg_addr |             assert sockaddr[0] == arb_addr | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_local_actor_async_func(reg_addr): | def test_local_actor_async_func(arb_addr): | ||||||
|     """Verify a simple async function in-process. |     """Verify a simple async function in-process. | ||||||
|     """ |     """ | ||||||
|     nums = [] |     nums = [] | ||||||
|  | @ -55,7 +62,7 @@ def test_local_actor_async_func(reg_addr): | ||||||
|     async def print_loop(): |     async def print_loop(): | ||||||
| 
 | 
 | ||||||
|         async with tractor.open_root_actor( |         async with tractor.open_root_actor( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|         ): |         ): | ||||||
|             # arbiter is started in-proc if dne |             # arbiter is started in-proc if dne | ||||||
|             assert tractor.current_actor().is_arbiter |             assert tractor.current_actor().is_arbiter | ||||||
|  |  | ||||||
|  | @ -7,10 +7,8 @@ import time | ||||||
| import pytest | import pytest | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor._testing import ( | from conftest import ( | ||||||
|     tractor_test, |     tractor_test, | ||||||
| ) |  | ||||||
| from .conftest import ( |  | ||||||
|     sig_prog, |     sig_prog, | ||||||
|     _INT_SIGNAL, |     _INT_SIGNAL, | ||||||
|     _INT_RETURN_CODE, |     _INT_RETURN_CODE, | ||||||
|  | @ -30,9 +28,9 @@ def test_abort_on_sigint(daemon): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_cancel_remote_arbiter(daemon, reg_addr): | async def test_cancel_remote_arbiter(daemon, arb_addr): | ||||||
|     assert not tractor.current_actor().is_arbiter |     assert not tractor.current_actor().is_arbiter | ||||||
|     async with tractor.get_registry(reg_addr) as portal: |     async with tractor.get_arbiter(*arb_addr) as portal: | ||||||
|         await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
| 
 | 
 | ||||||
|     time.sleep(0.1) |     time.sleep(0.1) | ||||||
|  | @ -41,16 +39,16 @@ async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||||
| 
 | 
 | ||||||
|     # no arbiter socket should exist |     # no arbiter socket should exist | ||||||
|     with pytest.raises(OSError): |     with pytest.raises(OSError): | ||||||
|         async with tractor.get_registry(reg_addr) as portal: |         async with tractor.get_arbiter(*arb_addr) as portal: | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_register_duplicate_name(daemon, reg_addr): | def test_register_duplicate_name(daemon, arb_addr): | ||||||
| 
 | 
 | ||||||
|     async def main(): |     async def main(): | ||||||
| 
 | 
 | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|         ) as n: |         ) as n: | ||||||
| 
 | 
 | ||||||
|             assert not tractor.current_actor().is_arbiter |             assert not tractor.current_actor().is_arbiter | ||||||
|  |  | ||||||
|  | @ -1,239 +0,0 @@ | ||||||
| ''' |  | ||||||
| Define the details of inter-actor "out-of-band" (OoB) cancel |  | ||||||
| semantics, that is how cancellation works when a cancel request comes |  | ||||||
| from the different concurrency (primitive's) "layer" then where the |  | ||||||
| eventual `trio.Task` actually raises a signal. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from functools import partial |  | ||||||
| # from contextlib import asynccontextmanager as acm |  | ||||||
| # import itertools |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import (  # typing |  | ||||||
|     ActorNursery, |  | ||||||
|     Portal, |  | ||||||
|     Context, |  | ||||||
|     # ContextCancelled, |  | ||||||
|     # RemoteActorError, |  | ||||||
| ) |  | ||||||
| # from tractor._testing import ( |  | ||||||
| #     tractor_test, |  | ||||||
| #     expect_ctxc, |  | ||||||
| # ) |  | ||||||
| 
 |  | ||||||
| # XXX TODO cases: |  | ||||||
| # - [ ] peer cancelled itself - so other peers should |  | ||||||
| #   get errors reflecting that the peer was itself the .canceller? |  | ||||||
| 
 |  | ||||||
| # def test_self_cancel(): |  | ||||||
| #     ''' |  | ||||||
| #     2 cases: |  | ||||||
| #     - calls `Actor.cancel()` locally in some task |  | ||||||
| #     - calls LocalPortal.cancel_actor()` ? |  | ||||||
| # |  | ||||||
| # things to ensure! |  | ||||||
| # -[ ] the ctxc raised in a child should ideally show the tb of the |  | ||||||
| #     underlying `Cancelled` checkpoint, i.e. |  | ||||||
| #     `raise scope_error from ctxc`? |  | ||||||
| # |  | ||||||
| # -[ ] a self-cancelled context, if not allowed to block on |  | ||||||
| #     `ctx.result()` at some point will hang since the `ctx._scope` |  | ||||||
| #     is never `.cancel_called`; cases for this include, |  | ||||||
| #     - an `open_ctx()` which never starteds before being OoB actor |  | ||||||
| #       cancelled. |  | ||||||
| #       |_ parent task will be blocked in `.open_context()` for the |  | ||||||
| #         `Started` msg, and when the OoB ctxc arrives `ctx._scope` |  | ||||||
| #         will never have been signalled.. |  | ||||||
| 
 |  | ||||||
| #     ''' |  | ||||||
| #     ... |  | ||||||
| 
 |  | ||||||
| # TODO, sanity test against the case in `/examples/trio/lockacquire_not_unmasked.py` |  | ||||||
| # but with the `Lock.acquire()` from a `@context` to ensure the |  | ||||||
| # implicit ignore-case-non-unmasking. |  | ||||||
| # |  | ||||||
| # @tractor.context |  | ||||||
| # async def acquire_actor_global_lock( |  | ||||||
| #     ctx: tractor.Context, |  | ||||||
| #     ignore_special_cases: bool, |  | ||||||
| # ): |  | ||||||
| 
 |  | ||||||
| #     async with maybe_unmask_excs( |  | ||||||
| #         ignore_special_cases=ignore_special_cases, |  | ||||||
| #     ): |  | ||||||
| #         await ctx.started('locked') |  | ||||||
| 
 |  | ||||||
| #     # block til cancelled |  | ||||||
| #     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def sleep_forever( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     # ignore_special_cases: bool, |  | ||||||
|     do_started: bool, |  | ||||||
| ): |  | ||||||
| 
 |  | ||||||
|     # async with maybe_unmask_excs( |  | ||||||
|     #     ignore_special_cases=ignore_special_cases, |  | ||||||
|     # ): |  | ||||||
|     #     await ctx.started('locked') |  | ||||||
|     if do_started: |  | ||||||
|         await ctx.started() |  | ||||||
| 
 |  | ||||||
|     # block til cancelled |  | ||||||
|     print('sleepin on child-side..') |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'cancel_ctx', |  | ||||||
|     [True, False], |  | ||||||
| ) |  | ||||||
| def test_cancel_ctx_with_parent_side_entered_in_bg_task( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |  | ||||||
|     cancel_ctx: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     The most "basic" out-of-band-task self-cancellation case where |  | ||||||
|     `Portal.open_context()` is entered in a bg task and the |  | ||||||
|     parent-task (of the containing nursery) calls `Context.cancel()` |  | ||||||
|     without the child knowing; the `Context._scope` should be |  | ||||||
|     `.cancel_called` when the IPC ctx's child-side relays |  | ||||||
|     a `ContextCancelled` with a `.canceller` set to the parent |  | ||||||
|     actor('s task). |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     async def main(): |  | ||||||
|         with trio.fail_after( |  | ||||||
|             2 if not debug_mode else 999, |  | ||||||
|         ): |  | ||||||
|             an: ActorNursery |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_nursery( |  | ||||||
|                     debug_mode=debug_mode, |  | ||||||
|                     loglevel='devx', |  | ||||||
|                     enable_stack_on_sig=True, |  | ||||||
|                 ) as an, |  | ||||||
|                 trio.open_nursery() as tn, |  | ||||||
|             ): |  | ||||||
|                 ptl: Portal = await an.start_actor( |  | ||||||
|                     'sub', |  | ||||||
|                     enable_modules=[__name__], |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 async def _open_ctx_async( |  | ||||||
|                     do_started: bool = True, |  | ||||||
|                     task_status=trio.TASK_STATUS_IGNORED, |  | ||||||
|                 ): |  | ||||||
|                     # do we expect to never enter the |  | ||||||
|                     # `.open_context()` below. |  | ||||||
|                     if not do_started: |  | ||||||
|                         task_status.started() |  | ||||||
| 
 |  | ||||||
|                     async with ptl.open_context( |  | ||||||
|                         sleep_forever, |  | ||||||
|                         do_started=do_started, |  | ||||||
|                     ) as (ctx, first): |  | ||||||
|                         task_status.started(ctx) |  | ||||||
|                         await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|                 # XXX, this is the key OoB part! |  | ||||||
|                 # |  | ||||||
|                 # - start the `.open_context()` in a bg task which |  | ||||||
|                 #   blocks inside the embedded scope-body, |  | ||||||
|                 # |  | ||||||
|                 # -  when we call `Context.cancel()` it **is |  | ||||||
|                 #   not** from the same task which eventually runs |  | ||||||
|                 #   `.__aexit__()`, |  | ||||||
|                 # |  | ||||||
|                 # - since the bg "opener" task will be in |  | ||||||
|                 #   a `trio.sleep_forever()`, it must be interrupted |  | ||||||
|                 #   by the `ContextCancelled` delivered from the |  | ||||||
|                 #   child-side; `Context._scope: CancelScope` MUST |  | ||||||
|                 #   be `.cancel_called`! |  | ||||||
|                 # |  | ||||||
|                 print('ASYNC opening IPC context in subtask..') |  | ||||||
|                 maybe_ctx: Context|None = await tn.start(partial( |  | ||||||
|                     _open_ctx_async, |  | ||||||
|                 )) |  | ||||||
| 
 |  | ||||||
|                 if ( |  | ||||||
|                     maybe_ctx |  | ||||||
|                     and |  | ||||||
|                     cancel_ctx |  | ||||||
|                 ): |  | ||||||
|                     print('cancelling first IPC ctx!') |  | ||||||
|                     await maybe_ctx.cancel() |  | ||||||
| 
 |  | ||||||
|                 # XXX, note that despite `maybe_context.cancel()` |  | ||||||
|                 # being called above, it's the parent (bg) task |  | ||||||
|                 # which was originally never interrupted in |  | ||||||
|                 # the `ctx._scope` body due to missing case logic in |  | ||||||
|                 # `ctx._maybe_cancel_and_set_remote_error()`. |  | ||||||
|                 # |  | ||||||
|                 # It didn't matter that the subactor process was |  | ||||||
|                 # already terminated and reaped, nothing was |  | ||||||
|                 # cancelling the ctx-parent task's scope! |  | ||||||
|                 # |  | ||||||
|                 print('cancelling subactor!') |  | ||||||
|                 await ptl.cancel_actor() |  | ||||||
| 
 |  | ||||||
|                 if maybe_ctx: |  | ||||||
|                     try: |  | ||||||
|                         await maybe_ctx.wait_for_result() |  | ||||||
|                     except tractor.ContextCancelled as ctxc: |  | ||||||
|                         assert not cancel_ctx |  | ||||||
|                         assert ( |  | ||||||
|                             ctxc.canceller |  | ||||||
|                             == |  | ||||||
|                             tractor.current_actor().aid.uid |  | ||||||
|                         ) |  | ||||||
|                         # don't re-raise since it'll trigger |  | ||||||
|                         # an EG from the above tn. |  | ||||||
| 
 |  | ||||||
|     if cancel_ctx: |  | ||||||
|         # graceful self-cancel |  | ||||||
|         trio.run(main) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         # ctx parent task should see OoB ctxc due to |  | ||||||
|         # `ptl.cancel_actor()`. |  | ||||||
|         with pytest.raises(tractor.ContextCancelled) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|         assert 'root' in excinfo.value.canceller[0] |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # def test_parent_actor_cancels_subactor_with_gt1_ctxs_open_to_it( |  | ||||||
| #     debug_mode: bool, |  | ||||||
| #     loglevel: str, |  | ||||||
| # ): |  | ||||||
| #     ''' |  | ||||||
| #     Demos OoB cancellation from the perspective of a ctx opened with |  | ||||||
| #     a child subactor where the parent cancels the child at the "actor |  | ||||||
| #     layer" using `Portal.cancel_actor()` and thus the |  | ||||||
| #     `ContextCancelled.canceller` received by the ctx's parent-side |  | ||||||
| #     task will appear to be a "self cancellation" even though that |  | ||||||
| #     specific task itself was not cancelled and thus |  | ||||||
| #     `Context.cancel_called ==False`. |  | ||||||
| #     ''' |  | ||||||
|                 # TODO, do we have an existing implied ctx |  | ||||||
|                 # cancel test like this? |  | ||||||
|                 # with trio.move_on_after(0.5):# as cs: |  | ||||||
|                 #     await _open_ctx_async( |  | ||||||
|                 #         do_started=False, |  | ||||||
|                 #     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|                 # in-line ctx scope should definitely raise |  | ||||||
|                 # a ctxc with `.canceller = 'root'` |  | ||||||
|                 # async with ptl.open_context( |  | ||||||
|                 #     sleep_forever, |  | ||||||
|                 #     do_started=True, |  | ||||||
|                 # ) as pair: |  | ||||||
| 
 |  | ||||||
|  | @ -1,364 +0,0 @@ | ||||||
| ''' |  | ||||||
| Audit sub-sys APIs from `.msg._ops` |  | ||||||
| mostly for ensuring correct `contextvars` |  | ||||||
| related settings around IPC contexts. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import ( |  | ||||||
|     asynccontextmanager as acm, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from msgspec import ( |  | ||||||
|     Struct, |  | ||||||
| ) |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     Context, |  | ||||||
|     MsgTypeError, |  | ||||||
|     current_ipc_ctx, |  | ||||||
|     Portal, |  | ||||||
| ) |  | ||||||
| from tractor.msg import ( |  | ||||||
|     _ops as msgops, |  | ||||||
|     Return, |  | ||||||
| ) |  | ||||||
| from tractor.msg import ( |  | ||||||
|     _codec, |  | ||||||
| ) |  | ||||||
| from tractor.msg.types import ( |  | ||||||
|     log, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| class PldMsg( |  | ||||||
|     Struct, |  | ||||||
| 
 |  | ||||||
|     # TODO: with multiple structs in-spec we need to tag them! |  | ||||||
|     # -[ ] offer a built-in `PldMsg` type to inherit from which takes |  | ||||||
|     #      case of these details? |  | ||||||
|     # |  | ||||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions |  | ||||||
|     # tag=True, |  | ||||||
|     # tag_field='msg_type', |  | ||||||
| ): |  | ||||||
|     field: str |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| maybe_msg_spec = PldMsg|None |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def maybe_expect_raises( |  | ||||||
|     raises: BaseException|None = None, |  | ||||||
|     ensure_in_message: list[str]|None = None, |  | ||||||
|     post_mortem: bool = False, |  | ||||||
|     timeout: int = 3, |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Async wrapper for ensuring errors propagate from the inner scope. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     if tractor._state.debug_mode(): |  | ||||||
|         timeout += 999 |  | ||||||
| 
 |  | ||||||
|     with trio.fail_after(timeout): |  | ||||||
|         try: |  | ||||||
|             yield |  | ||||||
|         except BaseException as _inner_err: |  | ||||||
|             inner_err = _inner_err |  | ||||||
|             # wasn't-expected to error.. |  | ||||||
|             if raises is None: |  | ||||||
|                 raise |  | ||||||
| 
 |  | ||||||
|             else: |  | ||||||
|                 assert type(inner_err) is raises |  | ||||||
| 
 |  | ||||||
|                 # maybe check for error txt content |  | ||||||
|                 if ensure_in_message: |  | ||||||
|                     part: str |  | ||||||
|                     err_repr: str = repr(inner_err) |  | ||||||
|                     for part in ensure_in_message: |  | ||||||
|                         for i, arg in enumerate(inner_err.args): |  | ||||||
|                             if part in err_repr: |  | ||||||
|                                 break |  | ||||||
|                         # if part never matches an arg, then we're |  | ||||||
|                         # missing a match. |  | ||||||
|                         else: |  | ||||||
|                             raise ValueError( |  | ||||||
|                                 'Failed to find error message content?\n\n' |  | ||||||
|                                 f'expected: {ensure_in_message!r}\n' |  | ||||||
|                                 f'part: {part!r}\n\n' |  | ||||||
|                                 f'{inner_err.args}' |  | ||||||
|                         ) |  | ||||||
| 
 |  | ||||||
|                 if post_mortem: |  | ||||||
|                     await tractor.post_mortem() |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             if raises: |  | ||||||
|                 raise RuntimeError( |  | ||||||
|                     f'Expected a {raises.__name__!r} to be raised?' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context( |  | ||||||
|     pld_spec=maybe_msg_spec, |  | ||||||
| ) |  | ||||||
| async def child( |  | ||||||
|     ctx: Context, |  | ||||||
|     started_value: int|PldMsg|None, |  | ||||||
|     return_value: str|None, |  | ||||||
|     validate_pld_spec: bool, |  | ||||||
|     raise_on_started_mte: bool = True, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Call ``Context.started()`` more then once (an error). |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     expect_started_mte: bool = started_value == 10 |  | ||||||
| 
 |  | ||||||
|     # sanaity check that child RPC context is the current one |  | ||||||
|     curr_ctx: Context = current_ipc_ctx() |  | ||||||
|     assert ctx is curr_ctx |  | ||||||
| 
 |  | ||||||
|     rx: msgops.PldRx = ctx._pld_rx |  | ||||||
|     curr_pldec: _codec.MsgDec = rx.pld_dec |  | ||||||
| 
 |  | ||||||
|     ctx_meta: dict = getattr( |  | ||||||
|         child, |  | ||||||
|         '_tractor_context_meta', |  | ||||||
|         None, |  | ||||||
|     ) |  | ||||||
|     if ctx_meta: |  | ||||||
|         assert ( |  | ||||||
|             ctx_meta['pld_spec'] |  | ||||||
|             is curr_pldec.spec |  | ||||||
|             is curr_pldec.pld_spec |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     # 2 cases: hdndle send-side and recv-only validation |  | ||||||
|     # - when `raise_on_started_mte == True`, send validate |  | ||||||
|     # - else, parent-recv-side only validation |  | ||||||
|     mte: MsgTypeError|None = None |  | ||||||
|     try: |  | ||||||
|         await ctx.started( |  | ||||||
|             value=started_value, |  | ||||||
|             validate_pld_spec=validate_pld_spec, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     except MsgTypeError as _mte: |  | ||||||
|         mte = _mte |  | ||||||
|         log.exception('started()` raised an MTE!\n') |  | ||||||
|         if not expect_started_mte: |  | ||||||
|             raise RuntimeError( |  | ||||||
|                 'Child-ctx-task SHOULD NOT HAVE raised an MTE for\n\n' |  | ||||||
|                 f'{started_value!r}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         boxed_div: str = '------ - ------' |  | ||||||
|         assert boxed_div not in mte._message |  | ||||||
|         assert boxed_div not in mte.tb_str |  | ||||||
|         assert boxed_div not in repr(mte) |  | ||||||
|         assert boxed_div not in str(mte) |  | ||||||
|         mte_repr: str = repr(mte) |  | ||||||
|         for line in mte.message.splitlines(): |  | ||||||
|             assert line in mte_repr |  | ||||||
| 
 |  | ||||||
|         # since this is a *local error* there should be no |  | ||||||
|         # boxed traceback content! |  | ||||||
|         assert not mte.tb_str |  | ||||||
| 
 |  | ||||||
|         # propagate to parent? |  | ||||||
|         if raise_on_started_mte: |  | ||||||
|             raise |  | ||||||
| 
 |  | ||||||
|     # no-send-side-error fallthrough |  | ||||||
|     if ( |  | ||||||
|         validate_pld_spec |  | ||||||
|         and |  | ||||||
|         expect_started_mte |  | ||||||
|     ): |  | ||||||
|         raise RuntimeError( |  | ||||||
|             'Child-ctx-task SHOULD HAVE raised an MTE for\n\n' |  | ||||||
|             f'{started_value!r}\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     assert ( |  | ||||||
|         not expect_started_mte |  | ||||||
|         or |  | ||||||
|         not validate_pld_spec |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # if wait_for_parent_to_cancel: |  | ||||||
|     #     ... |  | ||||||
|     # |  | ||||||
|     # ^-TODO-^ logic for diff validation policies on each side: |  | ||||||
|     # |  | ||||||
|     # -[ ] ensure that if we don't validate on the send |  | ||||||
|     #   side, that we are eventually error-cancelled by our |  | ||||||
|     #   parent due to the bad `Started` payload! |  | ||||||
|     # -[ ] the boxed error should be srced from the parent's |  | ||||||
|     #   runtime NOT ours! |  | ||||||
|     # -[ ] we should still error on bad `return_value`s |  | ||||||
|     #   despite the parent not yet error-cancelling us? |  | ||||||
|     #   |_ how do we want the parent side to look in that |  | ||||||
|     #     case? |  | ||||||
|     #     -[ ] maybe the equiv of "during handling of the |  | ||||||
|     #       above error another occurred" for the case where |  | ||||||
|     #       the parent sends a MTE to this child and while |  | ||||||
|     #       waiting for the child to terminate it gets back |  | ||||||
|     #       the MTE for this case? |  | ||||||
|     # |  | ||||||
| 
 |  | ||||||
|     # XXX should always fail on recv side since we can't |  | ||||||
|     # really do much else beside terminate and relay the |  | ||||||
|     # msg-type-error from this RPC task ;) |  | ||||||
|     return return_value |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'return_value', |  | ||||||
|     [ |  | ||||||
|         'yo', |  | ||||||
|         None, |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'return[invalid-"yo"]', |  | ||||||
|         'return[valid-None]', |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'started_value', |  | ||||||
|     [ |  | ||||||
|         10, |  | ||||||
|         PldMsg(field='yo'), |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'Started[invalid-10]', |  | ||||||
|         'Started[valid-PldMsg]', |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'pld_check_started_value', |  | ||||||
|     [ |  | ||||||
|         True, |  | ||||||
|         False, |  | ||||||
|     ], |  | ||||||
|     ids=[ |  | ||||||
|         'check-started-pld', |  | ||||||
|         'no-started-pld-validate', |  | ||||||
|     ], |  | ||||||
| ) |  | ||||||
| def test_basic_payload_spec( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |  | ||||||
|     return_value: str|None, |  | ||||||
|     started_value: int|PldMsg, |  | ||||||
|     pld_check_started_value: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Validate the most basic `PldRx` msg-type-spec semantics around |  | ||||||
|     a IPC `Context` endpoint start, started-sync, and final return |  | ||||||
|     value depending on set payload types and the currently applied |  | ||||||
|     pld-spec. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     invalid_return: bool = return_value == 'yo' |  | ||||||
|     invalid_started: bool = started_value == 10 |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|             loglevel=loglevel, |  | ||||||
|         ) as an: |  | ||||||
|             p: Portal = await an.start_actor( |  | ||||||
|                 'child', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             # since not opened yet. |  | ||||||
|             assert current_ipc_ctx() is None |  | ||||||
| 
 |  | ||||||
|             if invalid_started: |  | ||||||
|                 msg_type_str: str = 'Started' |  | ||||||
|                 bad_value: int = 10 |  | ||||||
|             elif invalid_return: |  | ||||||
|                 msg_type_str: str = 'Return' |  | ||||||
|                 bad_value: str = 'yo' |  | ||||||
|             else: |  | ||||||
|                 # XXX but should never be used below then.. |  | ||||||
|                 msg_type_str: str = '' |  | ||||||
|                 bad_value: str = '' |  | ||||||
| 
 |  | ||||||
|             maybe_mte: MsgTypeError|None = None |  | ||||||
|             should_raise: Exception|None = ( |  | ||||||
|                 MsgTypeError if ( |  | ||||||
|                     invalid_return |  | ||||||
|                     or |  | ||||||
|                     invalid_started |  | ||||||
|                 ) else None |  | ||||||
|             ) |  | ||||||
|             async with ( |  | ||||||
|                 maybe_expect_raises( |  | ||||||
|                     raises=should_raise, |  | ||||||
|                     ensure_in_message=[ |  | ||||||
|                         f"invalid `{msg_type_str}` msg payload", |  | ||||||
|                         f'{bad_value}', |  | ||||||
|                         f'has type {type(bad_value)!r}', |  | ||||||
|                         'not match type-spec', |  | ||||||
|                         f'`{msg_type_str}.pld: PldMsg|NoneType`', |  | ||||||
|                     ], |  | ||||||
|                     # only for debug |  | ||||||
|                     # post_mortem=True, |  | ||||||
|                 ), |  | ||||||
|                 p.open_context( |  | ||||||
|                     child, |  | ||||||
|                     return_value=return_value, |  | ||||||
|                     started_value=started_value, |  | ||||||
|                     validate_pld_spec=pld_check_started_value, |  | ||||||
|                 ) as (ctx, first), |  | ||||||
|             ): |  | ||||||
|                 # now opened with 'child' sub |  | ||||||
|                 assert current_ipc_ctx() is ctx |  | ||||||
| 
 |  | ||||||
|                 assert type(first) is PldMsg |  | ||||||
|                 assert first.field == 'yo' |  | ||||||
| 
 |  | ||||||
|                 try: |  | ||||||
|                     res: None|PldMsg = await ctx.result(hide_tb=False) |  | ||||||
|                     assert res is None |  | ||||||
|                 except MsgTypeError as mte: |  | ||||||
|                     maybe_mte = mte |  | ||||||
|                     if not invalid_return: |  | ||||||
|                         raise |  | ||||||
| 
 |  | ||||||
|                     # expected this invalid `Return.pld` so audit |  | ||||||
|                     # the error state + meta-data |  | ||||||
|                     assert mte.expected_msg_type is Return |  | ||||||
|                     assert mte.cid == ctx.cid |  | ||||||
|                     mte_repr: str = repr(mte) |  | ||||||
|                     for line in mte.message.splitlines(): |  | ||||||
|                         assert line in mte_repr |  | ||||||
| 
 |  | ||||||
|                     assert mte.tb_str |  | ||||||
|                     # await tractor.pause(shield=True) |  | ||||||
| 
 |  | ||||||
|                     # verify expected remote mte deats |  | ||||||
|                     assert ctx._local_error is None |  | ||||||
|                     assert ( |  | ||||||
|                         mte is |  | ||||||
|                         ctx._remote_error is |  | ||||||
|                         ctx.maybe_error is |  | ||||||
|                         ctx.outcome |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|             if should_raise is None: |  | ||||||
|                 assert maybe_mte is None |  | ||||||
| 
 |  | ||||||
|             await p.cancel_actor() |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -4,21 +4,20 @@ from itertools import cycle | ||||||
| import pytest | import pytest | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor.experimental import msgpub | from tractor.testing import tractor_test | ||||||
| from tractor._testing import tractor_test |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_type_checks(): | def test_type_checks(): | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(TypeError) as err: |     with pytest.raises(TypeError) as err: | ||||||
|         @msgpub |         @tractor.msg.pub | ||||||
|         async def no_get_topics(yo): |         async def no_get_topics(yo): | ||||||
|             yield |             yield | ||||||
| 
 | 
 | ||||||
|     assert "must define a `get_topics`" in str(err.value) |     assert "must define a `get_topics`" in str(err.value) | ||||||
| 
 | 
 | ||||||
|     with pytest.raises(TypeError) as err: |     with pytest.raises(TypeError) as err: | ||||||
|         @msgpub |         @tractor.msg.pub | ||||||
|         def not_async_gen(yo): |         def not_async_gen(yo): | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
|  | @ -33,7 +32,7 @@ def is_even(i): | ||||||
| _get_topics = None | _get_topics = None | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @msgpub | @tractor.msg.pub | ||||||
| async def pubber(get_topics, seed=10): | async def pubber(get_topics, seed=10): | ||||||
| 
 | 
 | ||||||
|     # ensure topic subscriptions are as expected |     # ensure topic subscriptions are as expected | ||||||
|  | @ -104,7 +103,7 @@ async def subs( | ||||||
|                 await stream.aclose() |                 await stream.aclose() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @msgpub(tasks=['one', 'two']) | @tractor.msg.pub(tasks=['one', 'two']) | ||||||
| async def multilock_pubber(get_topics): | async def multilock_pubber(get_topics): | ||||||
|     yield {'doggy': 10} |     yield {'doggy': 10} | ||||||
| 
 | 
 | ||||||
|  | @ -159,7 +158,7 @@ async def test_required_args(callwith_expecterror): | ||||||
| ) | ) | ||||||
| def test_multi_actor_subs_arbiter_pub( | def test_multi_actor_subs_arbiter_pub( | ||||||
|     loglevel, |     loglevel, | ||||||
|     reg_addr, |     arb_addr, | ||||||
|     pub_actor, |     pub_actor, | ||||||
| ): | ): | ||||||
|     """Try out the neato @pub decorator system. |     """Try out the neato @pub decorator system. | ||||||
|  | @ -169,7 +168,7 @@ def test_multi_actor_subs_arbiter_pub( | ||||||
|     async def main(): |     async def main(): | ||||||
| 
 | 
 | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) as n: |         ) as n: | ||||||
| 
 | 
 | ||||||
|  | @ -181,7 +180,6 @@ def test_multi_actor_subs_arbiter_pub( | ||||||
|                     'streamer', |                     'streamer', | ||||||
|                     enable_modules=[__name__], |                     enable_modules=[__name__], | ||||||
|                 ) |                 ) | ||||||
|                 name = 'streamer' |  | ||||||
| 
 | 
 | ||||||
|             even_portal = await n.run_in_actor( |             even_portal = await n.run_in_actor( | ||||||
|                 subs, |                 subs, | ||||||
|  | @ -254,12 +252,12 @@ def test_multi_actor_subs_arbiter_pub( | ||||||
| 
 | 
 | ||||||
| def test_single_subactor_pub_multitask_subs( | def test_single_subactor_pub_multitask_subs( | ||||||
|     loglevel, |     loglevel, | ||||||
|     reg_addr, |     arb_addr, | ||||||
| ): | ): | ||||||
|     async def main(): |     async def main(): | ||||||
| 
 | 
 | ||||||
|         async with tractor.open_nursery( |         async with tractor.open_nursery( | ||||||
|             registry_addrs=[reg_addr], |             arbiter_addr=arb_addr, | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) as n: |         ) as n: | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -1,237 +0,0 @@ | ||||||
| ''' |  | ||||||
| Special case testing for issues not (dis)covered in the primary |  | ||||||
| `Context` related functional/scenario suites. |  | ||||||
| 
 |  | ||||||
| **NOTE: this mod is a WIP** space for handling |  | ||||||
| odd/rare/undiscovered/not-yet-revealed faults which either |  | ||||||
| loudly (ideal case) breakl our supervision protocol |  | ||||||
| or (worst case) result in distributed sys hangs. |  | ||||||
| 
 |  | ||||||
| Suites here further try to clarify (if [partially] ill-defined) and |  | ||||||
| verify our edge case semantics for inter-actor-relayed-exceptions |  | ||||||
| including, |  | ||||||
| 
 |  | ||||||
| - lowlevel: what remote obj-data is interchanged for IPC and what is |  | ||||||
|   native-obj form is expected from unpacking in the the new |  | ||||||
|   mem-domain. |  | ||||||
| 
 |  | ||||||
| - which kinds of `RemoteActorError` (and its derivs) are expected by which |  | ||||||
|   (types of) peers (parent, child, sibling, etc) with what |  | ||||||
|   particular meta-data set such as, |  | ||||||
| 
 |  | ||||||
|   - `.src_uid`: the original (maybe) peer who raised. |  | ||||||
|   - `.relay_uid`: the next-hop-peer who sent it. |  | ||||||
|   - `.relay_path`: the sequence of peer actor hops. |  | ||||||
|   - `.is_inception`: a predicate that denotes multi-hop remote errors. |  | ||||||
| 
 |  | ||||||
| - when should `ExceptionGroup`s be relayed from a particular |  | ||||||
|   remote endpoint, they should never be caused by implicit `._rpc` |  | ||||||
|   nursery machinery! |  | ||||||
| 
 |  | ||||||
| - various special `trio` edge cases around its cancellation semantics |  | ||||||
|   and how we (currently) leverage `trio.Cancelled` as a signal for |  | ||||||
|   whether a `Context` task should raise `ContextCancelled` (ctx). |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import (  # typing |  | ||||||
|     ActorNursery, |  | ||||||
|     Portal, |  | ||||||
|     Context, |  | ||||||
|     ContextCancelled, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def sleep_n_chkpt_in_finally( |  | ||||||
|     ctx: Context, |  | ||||||
|     sleep_n_raise: bool, |  | ||||||
| 
 |  | ||||||
|     chld_raise_delay: float, |  | ||||||
|     chld_finally_delay: float, |  | ||||||
| 
 |  | ||||||
|     rent_cancels: bool, |  | ||||||
|     rent_ctxc_delay: float, |  | ||||||
| 
 |  | ||||||
|     expect_exc: str|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Sync, open a tn, then wait for cancel, run a chkpt inside |  | ||||||
|     the user's `finally:` teardown. |  | ||||||
| 
 |  | ||||||
|     This covers a footgun case that `trio` core doesn't seem to care about |  | ||||||
|     wherein an exc can be masked by a `trio.Cancelled` raised inside a tn emedded |  | ||||||
|     `finally:`. |  | ||||||
| 
 |  | ||||||
|     Also see `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` |  | ||||||
|     for the down and gritty details. |  | ||||||
| 
 |  | ||||||
|     Since a `@context` endpoint fn can also contain code like this, |  | ||||||
|     **and** bc we currently have no easy way other then |  | ||||||
|     `trio.Cancelled` to signal cancellation on each side of an IPC `Context`, |  | ||||||
|     the footgun issue can compound itself as demonstrated in this suite.. |  | ||||||
| 
 |  | ||||||
|     Here are some edge cases codified with our WIP "sclang" syntax |  | ||||||
|     (note the parent(rent)/child(chld) naming here is just |  | ||||||
|     pragmatism, generally these most of these cases can occurr |  | ||||||
|     regardless of the distributed-task's supervision hiearchy), |  | ||||||
| 
 |  | ||||||
|     - rent c)=> chld.raises-then-taskc-in-finally |  | ||||||
|      |_ chld's body raises an `exc: BaseException`. |  | ||||||
|       _ in its `finally:` block it runs a chkpoint |  | ||||||
|         which raises a taskc (`trio.Cancelled`) which |  | ||||||
|         masks `exc` instead raising taskc up to the first tn. |  | ||||||
|       _ the embedded/chld tn captures the masking taskc and then |  | ||||||
|         raises it up to the ._rpc-ep-tn instead of `exc`. |  | ||||||
|       _ the rent thinks the child ctxc-ed instead of errored.. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     await ctx.started() |  | ||||||
| 
 |  | ||||||
|     if expect_exc: |  | ||||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( |  | ||||||
|             type_name=expect_exc, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     berr: BaseException|None = None |  | ||||||
|     try: |  | ||||||
|         if not sleep_n_raise: |  | ||||||
|             await trio.sleep_forever() |  | ||||||
|         elif sleep_n_raise: |  | ||||||
| 
 |  | ||||||
|             # XXX this sleep is less then the sleep the parent |  | ||||||
|             # does before calling `ctx.cancel()` |  | ||||||
|             await trio.sleep(chld_raise_delay) |  | ||||||
| 
 |  | ||||||
|             # XXX this will be masked by a taskc raised in |  | ||||||
|             # the `finally:` if this fn doesn't terminate |  | ||||||
|             # before any ctxc-req arrives AND a checkpoint is hit |  | ||||||
|             # in that `finally:`. |  | ||||||
|             raise RuntimeError('my app krurshed..') |  | ||||||
| 
 |  | ||||||
|     except BaseException as _berr: |  | ||||||
|         berr = _berr |  | ||||||
| 
 |  | ||||||
|         # TODO: it'd sure be nice to be able to inject our own |  | ||||||
|         # `ContextCancelled` here instead of of `trio.Cancelled` |  | ||||||
|         # so that our runtime can expect it and this "user code" |  | ||||||
|         # would be able to tell the diff between a generic trio |  | ||||||
|         # cancel and a tractor runtime-IPC cancel. |  | ||||||
|         if expect_exc: |  | ||||||
|             if not isinstance( |  | ||||||
|                 berr, |  | ||||||
|                 expect_exc, |  | ||||||
|             ): |  | ||||||
|                 raise ValueError( |  | ||||||
|                     f'Unexpected exc type ??\n' |  | ||||||
|                     f'{berr!r}\n' |  | ||||||
|                     f'\n' |  | ||||||
|                     f'Expected a {expect_exc!r}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|         raise berr |  | ||||||
| 
 |  | ||||||
|     # simulate what user code might try even though |  | ||||||
|     # it's a known boo-boo.. |  | ||||||
|     finally: |  | ||||||
|         # maybe wait for rent ctxc to arrive |  | ||||||
|         with trio.CancelScope(shield=True): |  | ||||||
|             await trio.sleep(chld_finally_delay) |  | ||||||
| 
 |  | ||||||
|         # !!XXX this will raise `trio.Cancelled` which |  | ||||||
|         # will mask the RTE from above!!! |  | ||||||
|         # |  | ||||||
|         # YES, it's the same case as our extant |  | ||||||
|         # `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` |  | ||||||
|         try: |  | ||||||
|             await trio.lowlevel.checkpoint() |  | ||||||
|         except trio.Cancelled as taskc: |  | ||||||
|             if (scope_err := taskc.__context__): |  | ||||||
|                 print( |  | ||||||
|                     f'XXX MASKED REMOTE ERROR XXX\n' |  | ||||||
|                     f'ENDPOINT exception -> {scope_err!r}\n' |  | ||||||
|                     f'will be masked by -> {taskc!r}\n' |  | ||||||
|                 ) |  | ||||||
|                 # await tractor.pause(shield=True) |  | ||||||
| 
 |  | ||||||
|             raise taskc |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'chld_callspec', |  | ||||||
|     [ |  | ||||||
|         dict( |  | ||||||
|             sleep_n_raise=None, |  | ||||||
|             chld_raise_delay=0.1, |  | ||||||
|             chld_finally_delay=0.1, |  | ||||||
|             expect_exc='Cancelled', |  | ||||||
|             rent_cancels=True, |  | ||||||
|             rent_ctxc_delay=0.1, |  | ||||||
|         ), |  | ||||||
|         dict( |  | ||||||
|             sleep_n_raise='RuntimeError', |  | ||||||
|             chld_raise_delay=0.1, |  | ||||||
|             chld_finally_delay=1, |  | ||||||
|             expect_exc='RuntimeError', |  | ||||||
|             rent_cancels=False, |  | ||||||
|             rent_ctxc_delay=0.1, |  | ||||||
|         ), |  | ||||||
|     ], |  | ||||||
|     ids=lambda item: f'chld_callspec={item!r}' |  | ||||||
| ) |  | ||||||
| def test_unmasked_remote_exc( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     chld_callspec: dict, |  | ||||||
|     tpt_proto: str, |  | ||||||
| ): |  | ||||||
|     expect_exc_str: str|None = chld_callspec['sleep_n_raise'] |  | ||||||
|     rent_ctxc_delay: float|None = chld_callspec['rent_ctxc_delay'] |  | ||||||
|     async def main(): |  | ||||||
|         an: ActorNursery |  | ||||||
|         async with tractor.open_nursery( |  | ||||||
|             debug_mode=debug_mode, |  | ||||||
|             enable_transports=[tpt_proto], |  | ||||||
|         ) as an: |  | ||||||
|             ptl: Portal = await an.start_actor( |  | ||||||
|                 'cancellee', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             ctx: Context |  | ||||||
|             async with ( |  | ||||||
|                 ptl.open_context( |  | ||||||
|                     sleep_n_chkpt_in_finally, |  | ||||||
|                     **chld_callspec, |  | ||||||
|                 ) as (ctx, sent), |  | ||||||
|             ): |  | ||||||
|                 assert not sent |  | ||||||
|                 await trio.sleep(rent_ctxc_delay) |  | ||||||
|                 await ctx.cancel() |  | ||||||
| 
 |  | ||||||
|                 # recv error or result from chld |  | ||||||
|                 ctxc: ContextCancelled = await ctx.wait_for_result() |  | ||||||
|                 assert ( |  | ||||||
|                     ctxc is ctx.outcome |  | ||||||
|                     and |  | ||||||
|                     isinstance(ctxc, ContextCancelled) |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             # always graceful terminate the sub in non-error cases |  | ||||||
|             await an.cancel() |  | ||||||
| 
 |  | ||||||
|     if expect_exc_str: |  | ||||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( |  | ||||||
|             type_name=expect_exc_str, |  | ||||||
|         ) |  | ||||||
|         with pytest.raises( |  | ||||||
|             expected_exception=tractor.RemoteActorError, |  | ||||||
|         ) as excinfo: |  | ||||||
|             trio.run(main) |  | ||||||
| 
 |  | ||||||
|         rae = excinfo.value |  | ||||||
|         assert expect_exc == rae.boxed_type |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         trio.run(main) |  | ||||||
|  | @ -1,406 +0,0 @@ | ||||||
| ''' |  | ||||||
| Suites for our `.trionics.maybe_open_context()` multi-task |  | ||||||
| shared-cached `@acm` API. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import asynccontextmanager as acm |  | ||||||
| import platform |  | ||||||
| from typing import Awaitable |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor.trionics import ( |  | ||||||
|     maybe_open_context, |  | ||||||
| ) |  | ||||||
| from tractor.log import ( |  | ||||||
|     get_console_log, |  | ||||||
|     get_logger, |  | ||||||
| ) |  | ||||||
| log = get_logger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| _resource: int = 0 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def maybe_increment_counter(task_name: str): |  | ||||||
|     global _resource |  | ||||||
| 
 |  | ||||||
|     _resource += 1 |  | ||||||
|     await trio.lowlevel.checkpoint() |  | ||||||
|     yield _resource |  | ||||||
|     await trio.lowlevel.checkpoint() |  | ||||||
|     _resource -= 1 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @pytest.mark.parametrize( |  | ||||||
|     'key_on', |  | ||||||
|     ['key_value', 'kwargs'], |  | ||||||
|     ids="key_on={}".format, |  | ||||||
| ) |  | ||||||
| def test_resource_only_entered_once(key_on): |  | ||||||
|     global _resource |  | ||||||
|     _resource = 0 |  | ||||||
| 
 |  | ||||||
|     key = None |  | ||||||
|     if key_on == 'key_value': |  | ||||||
|         key = 'some_common_key' |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         cache_active: bool = False |  | ||||||
| 
 |  | ||||||
|         async def enter_cached_mngr(name: str): |  | ||||||
|             nonlocal cache_active |  | ||||||
| 
 |  | ||||||
|             if key_on == 'kwargs': |  | ||||||
|                 # make a common kwargs input to key on it |  | ||||||
|                 kwargs = {'task_name': 'same_task_name'} |  | ||||||
|                 assert key is None |  | ||||||
|             else: |  | ||||||
|                 # different task names per task will be used |  | ||||||
|                 kwargs = {'task_name': name} |  | ||||||
| 
 |  | ||||||
|             async with maybe_open_context( |  | ||||||
|                 maybe_increment_counter, |  | ||||||
|                 kwargs=kwargs, |  | ||||||
|                 key=key, |  | ||||||
| 
 |  | ||||||
|             ) as (cache_hit, resource): |  | ||||||
|                 if cache_hit: |  | ||||||
|                     try: |  | ||||||
|                         cache_active = True |  | ||||||
|                         assert resource == 1 |  | ||||||
|                         await trio.sleep_forever() |  | ||||||
|                     finally: |  | ||||||
|                         cache_active = False |  | ||||||
|                 else: |  | ||||||
|                     assert resource == 1 |  | ||||||
|                     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|         with trio.move_on_after(0.5): |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_root_actor(), |  | ||||||
|                 trio.open_nursery() as tn, |  | ||||||
|             ): |  | ||||||
|                 for i in range(10): |  | ||||||
|                     tn.start_soon( |  | ||||||
|                         enter_cached_mngr, |  | ||||||
|                         f'task_{i}', |  | ||||||
|                     ) |  | ||||||
|                     await trio.sleep(0.001) |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def streamer( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     seq: list[int] = list(range(1000)), |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     await ctx.started() |  | ||||||
|     async with ctx.open_stream() as stream: |  | ||||||
|         for val in seq: |  | ||||||
|             await stream.send(val) |  | ||||||
|             await trio.sleep(0.001) |  | ||||||
| 
 |  | ||||||
|     print('producer finished') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def open_stream() -> Awaitable[ |  | ||||||
|     tuple[ |  | ||||||
|         tractor.ActorNursery, |  | ||||||
|         tractor.MsgStream, |  | ||||||
|     ] |  | ||||||
| ]: |  | ||||||
|     try: |  | ||||||
|         async with tractor.open_nursery() as an: |  | ||||||
|             portal = await an.start_actor( |  | ||||||
|                 'streamer', |  | ||||||
|                 enable_modules=[__name__], |  | ||||||
|             ) |  | ||||||
|             try: |  | ||||||
|                 async with ( |  | ||||||
|                     portal.open_context(streamer) as (ctx, first), |  | ||||||
|                     ctx.open_stream() as stream, |  | ||||||
|                 ): |  | ||||||
|                     print('Entered open_stream() caller') |  | ||||||
|                     yield an, stream |  | ||||||
|                     print('Exited open_stream() caller') |  | ||||||
| 
 |  | ||||||
|             finally: |  | ||||||
|                 print( |  | ||||||
|                     'Cancelling streamer with,\n' |  | ||||||
|                     '=> `Portal.cancel_actor()`' |  | ||||||
|                 ) |  | ||||||
|                 await portal.cancel_actor() |  | ||||||
|                 print('Cancelled streamer') |  | ||||||
| 
 |  | ||||||
|     except Exception as err: |  | ||||||
|         print( |  | ||||||
|             f'`open_stream()` errored?\n' |  | ||||||
|             f'{err!r}\n' |  | ||||||
|         ) |  | ||||||
|         await tractor.pause(shield=True) |  | ||||||
|         raise err |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def maybe_open_stream(taskname: str): |  | ||||||
|     async with maybe_open_context( |  | ||||||
|         # NOTE: all secondary tasks should cache hit on the same key |  | ||||||
|         acm_func=open_stream, |  | ||||||
|     ) as ( |  | ||||||
|         cache_hit, |  | ||||||
|         (an, stream) |  | ||||||
|     ): |  | ||||||
|         # when the actor + portal + ctx + stream has already been |  | ||||||
|         # allocated we want to just bcast to this task. |  | ||||||
|         if cache_hit: |  | ||||||
|             print(f'{taskname} loaded from cache') |  | ||||||
| 
 |  | ||||||
|             # add a new broadcast subscription for the quote stream |  | ||||||
|             # if this feed is already allocated by the first |  | ||||||
|             # task that entereed |  | ||||||
|             async with stream.subscribe() as bstream: |  | ||||||
|                 yield an, bstream |  | ||||||
|                 print( |  | ||||||
|                     f'cached task exited\n' |  | ||||||
|                     f')>\n' |  | ||||||
|                     f' |_{taskname}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|             # we should always unreg the "cloned" bcrc for this |  | ||||||
|             # consumer-task |  | ||||||
|             assert id(bstream) not in bstream._state.subs |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             # yield the actual stream |  | ||||||
|             try: |  | ||||||
|                 yield an, stream |  | ||||||
|             finally: |  | ||||||
|                 print( |  | ||||||
|                     f'NON-cached task exited\n' |  | ||||||
|                     f')>\n' |  | ||||||
|                     f' |_{taskname}\n' |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|         first_bstream = stream._broadcaster |  | ||||||
|         bcrx_state = first_bstream._state |  | ||||||
|         subs: dict[int, int] = bcrx_state.subs |  | ||||||
|         if len(subs) == 1: |  | ||||||
|             assert id(first_bstream) in subs |  | ||||||
|             # ^^TODO! the bcrx should always de-allocate all subs, |  | ||||||
|             # including the implicit first one allocated on entry |  | ||||||
|             # by the first subscribing peer task, no? |  | ||||||
|             # |  | ||||||
|             # -[ ] adjust `MsgStream.subscribe()` to do this mgmt! |  | ||||||
|             #  |_ allows reverting `MsgStream.receive()` to the |  | ||||||
|             #    non-bcaster method. |  | ||||||
|             #  |_ we can decide whether to reset `._broadcaster`? |  | ||||||
|             # |  | ||||||
|             # await tractor.pause(shield=True) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_open_local_sub_to_stream( |  | ||||||
|     debug_mode: bool, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify a single inter-actor stream can can be fanned-out shared to |  | ||||||
|     N local tasks using `trionics.maybe_open_context()`. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     timeout: float = 3.6 |  | ||||||
|     if platform.system() == "Windows": |  | ||||||
|         timeout: float = 10 |  | ||||||
| 
 |  | ||||||
|     if debug_mode: |  | ||||||
|         timeout = 999 |  | ||||||
|         print(f'IN debug_mode, setting large timeout={timeout!r}..') |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
| 
 |  | ||||||
|         full = list(range(1000)) |  | ||||||
|         an: tractor.ActorNursery|None = None |  | ||||||
|         num_tasks: int = 10 |  | ||||||
| 
 |  | ||||||
|         async def get_sub_and_pull(taskname: str): |  | ||||||
| 
 |  | ||||||
|             nonlocal an |  | ||||||
| 
 |  | ||||||
|             stream: tractor.MsgStream |  | ||||||
|             async with ( |  | ||||||
|                 maybe_open_stream(taskname) as ( |  | ||||||
|                     an, |  | ||||||
|                     stream, |  | ||||||
|                 ), |  | ||||||
|             ): |  | ||||||
|                 if '0' in taskname: |  | ||||||
|                     assert isinstance(stream, tractor.MsgStream) |  | ||||||
|                 else: |  | ||||||
|                     assert isinstance( |  | ||||||
|                         stream, |  | ||||||
|                         tractor.trionics.BroadcastReceiver |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
|                 first = await stream.receive() |  | ||||||
|                 print(f'{taskname} started with value {first}') |  | ||||||
|                 seq: list[int] = [] |  | ||||||
|                 async for msg in stream: |  | ||||||
|                     seq.append(msg) |  | ||||||
| 
 |  | ||||||
|                 assert set(seq).issubset(set(full)) |  | ||||||
| 
 |  | ||||||
|             # end of @acm block |  | ||||||
|             print(f'{taskname} finished') |  | ||||||
| 
 |  | ||||||
|         root: tractor.Actor |  | ||||||
|         with trio.fail_after(timeout) as cs: |  | ||||||
|             # TODO: turns out this isn't multi-task entrant XD |  | ||||||
|             # We probably need an indepotent entry semantic? |  | ||||||
|             async with tractor.open_root_actor( |  | ||||||
|                 debug_mode=debug_mode, |  | ||||||
|                 # maybe_enable_greenback=True, |  | ||||||
|                 # |  | ||||||
|                 # ^TODO? doesn't seem to mk breakpoint() usage work |  | ||||||
|                 # bc each bg task needs to open a portal?? |  | ||||||
|                 # - [ ] we should consider making this part of |  | ||||||
|                 #      our taskman defaults? |  | ||||||
|                 #   |_see https://github.com/goodboy/tractor/pull/363 |  | ||||||
|                 # |  | ||||||
|             ) as root: |  | ||||||
|                 assert root.is_registrar |  | ||||||
| 
 |  | ||||||
|                 async with ( |  | ||||||
|                     trio.open_nursery() as tn, |  | ||||||
|                 ): |  | ||||||
|                     for i in range(num_tasks): |  | ||||||
|                         tn.start_soon( |  | ||||||
|                             get_sub_and_pull, |  | ||||||
|                             f'task_{i}', |  | ||||||
|                         ) |  | ||||||
|                         await trio.sleep(0.001) |  | ||||||
| 
 |  | ||||||
|                 print('all consumer tasks finished!') |  | ||||||
| 
 |  | ||||||
|                 # ?XXX, ensure actor-nursery is shutdown or we might |  | ||||||
|                 # hang here due to a minor task deadlock/race-condition? |  | ||||||
|                 # |  | ||||||
|                 # - seems that all we need is a checkpoint to ensure |  | ||||||
|                 #   the last suspended task, which is inside |  | ||||||
|                 #   `.maybe_open_context()`, can do the |  | ||||||
|                 #   `Portal.cancel_actor()` call? |  | ||||||
|                 # |  | ||||||
|                 # - if that bg task isn't resumed, then this blocks |  | ||||||
|                 #   timeout might hit before that? |  | ||||||
|                 # |  | ||||||
|                 if root.ipc_server.has_peers(): |  | ||||||
|                     await trio.lowlevel.checkpoint() |  | ||||||
| 
 |  | ||||||
|                     # alt approach, cancel the entire `an` |  | ||||||
|                     # await tractor.pause() |  | ||||||
|                     # await an.cancel() |  | ||||||
| 
 |  | ||||||
|             # end of runtime scope |  | ||||||
|             print('root actor terminated.') |  | ||||||
| 
 |  | ||||||
|         if cs.cancelled_caught: |  | ||||||
|             pytest.fail( |  | ||||||
|                 'Should NOT time out in `open_root_actor()` ?' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         print('exiting main.') |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm |  | ||||||
| async def cancel_outer_cs( |  | ||||||
|     cs: trio.CancelScope|None = None, |  | ||||||
|     delay: float = 0, |  | ||||||
| ): |  | ||||||
|     # on first task delay this enough to block |  | ||||||
|     # the 2nd task but then cancel it mid sleep |  | ||||||
|     # so that the tn.start() inside the key-err handler block |  | ||||||
|     # is cancelled and would previously corrupt the |  | ||||||
|     # mutext state. |  | ||||||
|     log.info(f'task entering sleep({delay})') |  | ||||||
|     await trio.sleep(delay) |  | ||||||
|     if cs: |  | ||||||
|         log.info('task calling cs.cancel()') |  | ||||||
|         cs.cancel() |  | ||||||
|     trio.lowlevel.checkpoint() |  | ||||||
|     yield |  | ||||||
|     await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def test_lock_not_corrupted_on_fast_cancel( |  | ||||||
|     debug_mode: bool, |  | ||||||
|     loglevel: str, |  | ||||||
| ): |  | ||||||
|     ''' |  | ||||||
|     Verify that if the caching-task (the first to enter |  | ||||||
|     `maybe_open_context()`) is cancelled mid-cache-miss, the embedded |  | ||||||
|     mutex can never be left in a corrupted state. |  | ||||||
| 
 |  | ||||||
|     That is, the lock is always eventually released ensuring a peer |  | ||||||
|     (cache-hitting) task will never, |  | ||||||
| 
 |  | ||||||
|     - be left to inf-block/hang on the `lock.acquire()`. |  | ||||||
|     - try to release the lock when still owned by the caching-task |  | ||||||
|       due to it having erronously exited without calling |  | ||||||
|       `lock.release()`. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     delay: float = 1. |  | ||||||
| 
 |  | ||||||
|     async def use_moc( |  | ||||||
|         cs: trio.CancelScope|None, |  | ||||||
|         delay: float, |  | ||||||
|     ): |  | ||||||
|         log.info('task entering moc') |  | ||||||
|         async with maybe_open_context( |  | ||||||
|             cancel_outer_cs, |  | ||||||
|             kwargs={ |  | ||||||
|                 'cs': cs, |  | ||||||
|                 'delay': delay, |  | ||||||
|             }, |  | ||||||
|         ) as (cache_hit, _null): |  | ||||||
|             if cache_hit: |  | ||||||
|                 log.info('2nd task entered') |  | ||||||
|             else: |  | ||||||
|                 log.info('1st task entered') |  | ||||||
| 
 |  | ||||||
|             await trio.sleep_forever() |  | ||||||
| 
 |  | ||||||
|     async def main(): |  | ||||||
|         with trio.fail_after(delay + 2): |  | ||||||
|             async with ( |  | ||||||
|                 tractor.open_root_actor( |  | ||||||
|                     debug_mode=debug_mode, |  | ||||||
|                     loglevel=loglevel, |  | ||||||
|                 ), |  | ||||||
|                 trio.open_nursery() as tn, |  | ||||||
|             ): |  | ||||||
|                 get_console_log('info') |  | ||||||
|                 log.info('yo starting') |  | ||||||
|                 cs = tn.cancel_scope |  | ||||||
|                 tn.start_soon( |  | ||||||
|                     use_moc, |  | ||||||
|                     cs, |  | ||||||
|                     delay, |  | ||||||
|                     name='child', |  | ||||||
|                 ) |  | ||||||
|                 with trio.CancelScope() as rent_cs: |  | ||||||
|                     await use_moc( |  | ||||||
|                         cs=rent_cs, |  | ||||||
|                         delay=delay, |  | ||||||
|                     ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     trio.run(main) |  | ||||||
|  | @ -1,211 +0,0 @@ | ||||||
| import time |  | ||||||
| 
 |  | ||||||
| import trio |  | ||||||
| import pytest |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor.ipc._ringbuf import ( |  | ||||||
|     open_ringbuf, |  | ||||||
|     RBToken, |  | ||||||
|     RingBuffSender, |  | ||||||
|     RingBuffReceiver |  | ||||||
| ) |  | ||||||
| from tractor._testing.samples import ( |  | ||||||
|     generate_sample_messages, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| # in case you don't want to melt your cores, uncomment dis! |  | ||||||
| pytestmark = pytest.mark.skip |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_read_shm( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     msg_amount: int, |  | ||||||
|     token: RBToken, |  | ||||||
|     total_bytes: int, |  | ||||||
| ) -> None: |  | ||||||
|     recvd_bytes = 0 |  | ||||||
|     await ctx.started() |  | ||||||
|     start_ts = time.time() |  | ||||||
|     async with RingBuffReceiver(token) as receiver: |  | ||||||
|         while recvd_bytes < total_bytes: |  | ||||||
|             msg = await receiver.receive_some() |  | ||||||
|             recvd_bytes += len(msg) |  | ||||||
| 
 |  | ||||||
|         # make sure we dont hold any memoryviews |  | ||||||
|         # before the ctx manager aclose() |  | ||||||
|         msg = None |  | ||||||
| 
 |  | ||||||
|     end_ts = time.time() |  | ||||||
|     elapsed = end_ts - start_ts |  | ||||||
|     elapsed_ms = int(elapsed * 1000) |  | ||||||
| 
 |  | ||||||
|     print(f'\n\telapsed ms: {elapsed_ms}') |  | ||||||
|     print(f'\tmsg/sec: {int(msg_amount / elapsed):,}') |  | ||||||
|     print(f'\tbytes/sec: {int(recvd_bytes / elapsed):,}') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @tractor.context |  | ||||||
| async def child_write_shm( |  | ||||||
|     ctx: tractor.Context, |  | ||||||
|     msg_amount: int, |  | ||||||
|     rand_min: int, |  | ||||||
|     rand_max: int, |  | ||||||
|     token: RBToken, |  | ||||||
| ) -> None: |  | ||||||
|     msgs, total_bytes = generate_sample_messages( |  | ||||||
|         msg_amount, |  | ||||||
|         rand_min=rand_min, |  | ||||||
|         rand_max=rand_max, |  | ||||||
|     ) |  | ||||||
|     await ctx.started(total_bytes) |  | ||||||
|     async with RingBuffSender(token) as sender: |  | ||||||
|         for msg in msgs: |  | ||||||
|             await sender.send_all(msg) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    'msg_amount,rand_min,rand_max,buf_size',
    [
        # simple case, fixed payloads, large buffer
        (100_000, 0, 0, 10 * 1024),

        # guaranteed wrap around on every write
        (100, 10 * 1024, 20 * 1024, 10 * 1024),

        # large payload size, but large buffer
        (10_000, 256 * 1024, 512 * 1024, 10 * 1024 * 1024)
    ],
    ids=[
        'fixed_payloads_large_buffer',
        'wrap_around_every_write',
        'large_payloads_large_buffer',
    ]
)
def test_ringbuf(
    msg_amount: int,
    rand_min: int,
    rand_max: int,
    buf_size: int
):
    '''
    Spawn a sender and a receiver sub-actor wired to the same
    shared-memory ring buffer and wait until the receiver side
    context completes (it consumes all bytes the sender produced).

    '''
    async def main():
        with open_ringbuf(
            'test_ringbuf',
            buf_size=buf_size
        ) as token:
            # both children must inherit the eventfd descriptors
            proc_kwargs = {
                'pass_fds': (token.write_eventfd, token.wrap_eventfd)
            }
            common_kwargs = {
                'msg_amount': msg_amount,
                'token': token,
            }
            async with tractor.open_nursery() as an:
                send_p = await an.start_actor(
                    'ring_sender',
                    enable_modules=[__name__],
                    proc_kwargs=proc_kwargs
                )
                recv_p = await an.start_actor(
                    'ring_receiver',
                    enable_modules=[__name__],
                    proc_kwargs=proc_kwargs
                )
                async with (
                    send_p.open_context(
                        child_write_shm,
                        rand_min=rand_min,
                        rand_max=rand_max,
                        **common_kwargs
                    ) as (_sctx, total_bytes),

                    recv_p.open_context(
                        child_read_shm,
                        **common_kwargs,
                        total_bytes=total_bytes,
                    ) as (_rctx, _sent),
                ):
                    # block until the reader context finishes
                    await recv_p.result()

                await send_p.cancel_actor()
                await recv_p.cancel_actor()

    trio.run(main)
| 
 |  | ||||||
| 
 |  | ||||||
@tractor.context
async def child_blocked_receiver(
    ctx: tractor.Context,
    token: RBToken
):
    '''
    Sub-actor that opens the ring buffer for reading, signals
    readiness to the parent, then blocks on a read that no sender
    ever satisfies (used to exercise cancellation of a blocked read).

    '''
    async with RingBuffReceiver(token) as rx:
        await ctx.started()
        # nothing is ever written -> blocks until cancelled
        await rx.receive_some()
| 
 |  | ||||||
| 
 |  | ||||||
def test_ring_reader_cancel():
    '''
    Cancel the actor nursery while a child is blocked reading from
    the ring buffer; the blocked reader must unwind and the run must
    raise `ContextCancelled`.

    '''
    async def main():
        with open_ringbuf('test_ring_cancel_reader') as token:
            async with (
                tractor.open_nursery() as an,
                # hold the sender side open (but idle) so the
                # child's read genuinely blocks
                RingBuffSender(token) as _sender,
            ):
                recv_p = await an.start_actor(
                    'ring_blocked_receiver',
                    enable_modules=[__name__],
                    proc_kwargs={
                        'pass_fds': (token.write_eventfd, token.wrap_eventfd)
                    }
                )
                async with (
                    recv_p.open_context(
                        child_blocked_receiver,
                        token=token
                    ) as (_sctx, _sent),
                ):
                    # give the child time to block on its read..
                    await trio.sleep(1)
                    # ..then tear the whole actor tree down
                    await an.cancel()

    with pytest.raises(tractor._exceptions.ContextCancelled):
        trio.run(main)
| 
 |  | ||||||
| 
 |  | ||||||
@tractor.context
async def child_blocked_sender(
    ctx: tractor.Context,
    token: RBToken
):
    '''
    Sub-actor that signals readiness then attempts to write a payload
    larger than the (tiny) ring buffer, blocking on the wrap wait
    (used to exercise cancellation of a blocked write).

    '''
    async with RingBuffSender(token) as tx:
        await ctx.started()
        # with buf_size=1 on the parent side this write can never
        # complete -> blocks until cancelled
        await tx.send_all(b'this will wrap')
| 
 |  | ||||||
| 
 |  | ||||||
def test_ring_sender_cancel():
    '''
    Cancel the actor nursery while a child is blocked mid-write on a
    1-byte ring buffer; the run must raise `ContextCancelled`.

    '''
    async def main():
        with open_ringbuf(
            'test_ring_cancel_sender',
            buf_size=1
        ) as token:
            async with tractor.open_nursery() as an:
                send_p = await an.start_actor(
                    'ring_blocked_sender',
                    enable_modules=[__name__],
                    proc_kwargs={
                        'pass_fds': (token.write_eventfd, token.wrap_eventfd)
                    }
                )
                async with (
                    send_p.open_context(
                        child_blocked_sender,
                        token=token
                    ) as (_sctx, _sent),
                ):
                    # let the child block on its oversized write..
                    await trio.sleep(1)
                    # ..then cancel the whole actor tree
                    await an.cancel()

    with pytest.raises(tractor._exceptions.ContextCancelled):
        trio.run(main)
|  | @ -1,240 +0,0 @@ | ||||||
| ''' |  | ||||||
| Special attention cases for using "infect `asyncio`" mode from a root |  | ||||||
| actor; i.e. not using a std `trio.run()` bootstrap. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import asyncio |  | ||||||
| from functools import partial |  | ||||||
| 
 |  | ||||||
| import pytest |  | ||||||
| import trio |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     to_asyncio, |  | ||||||
| ) |  | ||||||
| from tests.test_infected_asyncio import ( |  | ||||||
|     aio_echo_server, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    'raise_error_mid_stream',
    [
        False,
        Exception,
        KeyboardInterrupt,
    ],
    ids='raise_error={}'.format,
)
def test_infected_root_actor(
    raise_error_mid_stream: bool|Exception,

    # conftest wide
    loglevel: str,
    debug_mode: bool,
):
    '''
    Verify you can run the `tractor` runtime with `Actor.is_infected_aio() == True`
    in the root actor.

    Ping-pongs 1000 msgs through an `asyncio` echo server task and,
    when parametrized with an exception type, raises it mid-stream to
    check it surfaces from the guest-run bootstrap call.

    '''
    async def _trio_main():
        # generous timeout only when parked in a debugger REPL
        with trio.fail_after(2 if not debug_mode else 999):
            first: str
            chan: to_asyncio.LinkedTaskChannel
            async with (
                tractor.open_root_actor(
                    debug_mode=debug_mode,
                    loglevel=loglevel,
                ),
                to_asyncio.open_channel_from(
                    aio_echo_server,
                ) as (first, chan),
            ):
                assert first == 'start'

                # round-trip each int through the aio echo server
                for i in range(1000):
                    await chan.send(i)
                    out = await chan.receive()
                    assert out == i
                    print(f'asyncio echoing {i}')

                    # mid-stream error injection case: the raised exc
                    # must bubble out of the guest run to the caller
                    if (
                        raise_error_mid_stream
                        and
                        i == 500
                    ):
                        raise raise_error_mid_stream

                    # NOTE(review): `out` can never be `None` here given
                    # `assert out == i` above with `i` an int — presumably
                    # a guard for an echo-server shutdown path; confirm.
                    if out is None:
                        try:
                            out = await chan.receive()
                        except trio.EndOfChannel:
                            break
                        else:
                            raise RuntimeError(
                                'aio channel never stopped?'
                            )

    # run trio as an `asyncio` guest; when an error was injected it
    # must propagate out of the guest-run entrypoint.
    if raise_error_mid_stream:
        with pytest.raises(raise_error_mid_stream):
            tractor.to_asyncio.run_as_asyncio_guest(
                trio_main=_trio_main,
            )
    else:
        tractor.to_asyncio.run_as_asyncio_guest(
            trio_main=_trio_main,
        )
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
async def sync_and_err(
    # just signature placeholders for compat with
    # ``to_asyncio.open_channel_from()``
    to_trio: trio.MemorySendChannel,
    from_trio: asyncio.Queue,
    ev: asyncio.Event,

):
    '''
    `asyncio`-side task: hand the 'start' sync msg to the `trio`
    parent (when a send-channel was provided), park on `ev`, then
    raise once it is set.

    '''
    if to_trio:
        to_trio.send_nowait('start')

    # wait for the trio side to trigger our demise
    await ev.wait()
    raise RuntimeError('asyncio-side')
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    'aio_err_trigger',
    [
        'before_start_point',
        'after_trio_task_starts',
        'after_start_point',
    ],
    ids='aio_err_triggered={}'.format
)
def test_trio_prestarted_task_bubbles(
    aio_err_trigger: str,

    # conftest wide
    loglevel: str,
    debug_mode: bool,
):
    '''
    Verify that a `trio` task which errors BEFORE calling
    `task_status.started()` still bubbles its error out of the
    `asyncio` guest run — whether a linked aio-side task errors
    before, just after, or well after the trio task is scheduled
    (per the `aio_err_trigger` parametrization).

    '''
    async def pre_started_err(
        raise_err: bool = False,
        pre_sleep: float|None = None,
        aio_trigger: asyncio.Event|None = None,
        task_status=trio.TASK_STATUS_IGNORED,
    ):
        '''
        Maybe pre-started error then sleep.

        '''
        if pre_sleep is not None:
            print(f'Sleeping from trio for {pre_sleep!r}s !')
            await trio.sleep(pre_sleep)

        # signal aio-task to raise JUST AFTER this task
        # starts but has not yet `.started()`
        if aio_trigger:
            print('Signalling aio-task to raise from `trio`!!')
            aio_trigger.set()

        if raise_err:
            print('Raising from trio!')
            raise TypeError('trio-side')

        task_status.started()
        await trio.sleep_forever()

    async def _trio_main():
        # hard timeout unless parked in a debugger
        with trio.fail_after(2 if not debug_mode else 999):
            first: str
            chan: to_asyncio.LinkedTaskChannel
            aio_ev = asyncio.Event()

            # NOTE(review): `debug_mode=False` is hardcoded despite the
            # conftest `debug_mode` fixture being taken above — presumably
            # deliberate (a REPL would interfere with the timing); confirm.
            async with (
                tractor.open_root_actor(
                    debug_mode=False,
                    loglevel=loglevel,
                ),
            ):
                # TODO, tests for this with 3.13 egs?
                # from tractor.devx import open_crash_handler
                # with open_crash_handler():
                async with (
                    # where we'll start a sub-task that errors BEFORE
                    # calling `.started()` such that the error should
                    # bubble before the guest run terminates!
                    trio.open_nursery() as tn,

                    # THEN start an infect task which should error just
                    # after the trio-side's task does.
                    to_asyncio.open_channel_from(
                        partial(
                            sync_and_err,
                            ev=aio_ev,
                        )
                    ) as (first, chan),
                ):

                    # spawn 5 trio sub-tasks; only the last one
                    # raises (and maybe triggers the aio error)
                    for i in range(5):
                        pre_sleep: float|None = None
                        last_iter: bool = (i == 4)

                        # TODO, missing cases?
                        # -[ ] error as well on
                        #    'after_start_point' case as well for
                        #    another case?
                        raise_err: bool = False

                        if last_iter:
                            raise_err: bool = True

                            # trigger aio task to error on next loop
                            # tick/checkpoint
                            if aio_err_trigger == 'before_start_point':
                                aio_ev.set()

                            # sleep-0 checkpoint so a pre-set aio error
                            # gets a chance to land before we raise
                            pre_sleep: float = 0

                        # pass the aio event only on the final task and
                        # only for the 'after_trio_task_starts' case
                        await tn.start(
                            pre_started_err,
                            raise_err,
                            pre_sleep,
                            (aio_ev if (
                                    aio_err_trigger == 'after_trio_task_starts'
                                    and
                                    last_iter
                                ) else None
                            ),
                        )

                        if (
                            aio_err_trigger == 'after_start_point'
                            and
                            last_iter
                        ):
                            aio_ev.set()

    # ensure the trio-task's error bubbled despite the aio-side
    # having (maybe) errored first.
    if aio_err_trigger in (
        'after_trio_task_starts',
        'after_start_point',
    ):
        patt: str = 'trio-side'
        expect_exc = TypeError

    # when aio errors BEFORE (last) trio task is scheduled, we should
    # never see anything but the aio-side.
    else:
        patt: str = 'asyncio-side'
        expect_exc = RuntimeError

    with pytest.raises(expect_exc) as excinfo:
        tractor.to_asyncio.run_as_asyncio_guest(
            trio_main=_trio_main,
        )

    caught_exc = excinfo.value
    assert patt in caught_exc.args
Some files were not shown because too many files have changed in this diff Show More
		Loading…
	
		Reference in New Issue