Compare commits
	
		
			455 Commits 
		
	
	
		
			main
			...
			uv_migrati
		
	
	| Author | SHA1 | Date | 
|---|---|---|
|  | 63d5e65100 | |
|  | e9d7f9cff4 | |
|  | 2a4f0b829b | |
|  | 1d5521bcbc | |
|  | f9709e5aa2 | |
|  | a1d75625e4 | |
|  | 85c60095ba | |
|  | e2b9c3e769 | |
|  | ae18ceb633 | |
|  | 917699417f | |
|  | 71a29d0106 | |
|  | 095bf28f5d | |
|  | 129dff575f | |
|  | 9167fbb0a8 | |
|  | b6608e1c46 | |
|  | 33e5e2c06f | |
|  | 52238ade28 | |
|  | f7cd8739a5 | |
|  | 7537c6f053 | |
|  | 9c83f02568 | |
|  | 441cf0962d | |
|  | fb04f74605 | |
|  | aa1f6fa4b5 | |
|  | 9002f608ee | |
|  | 8ebc022535 | |
|  | e26fa8330f | |
|  | a2659069c5 | |
|  | 54699d7a0b | |
|  | b91ab9e3a8 | |
|  | cd14c4fe72 | |
|  | ad40fcd2bc | |
|  | 508ba510a5 | |
|  | b875b35b98 | |
|  | 46ddc214cd | |
|  | b3ee20d3b9 | |
|  | cf3e6c1218 | |
|  | 8af9b0201d | |
|  | 5c2e972315 | |
|  | 59f4024242 | |
|  | 7859e743cc | |
|  | f7f738638d | |
|  | ae95e0c83e | |
|  | 3b39cce741 | |
|  | 110a023a03 | |
|  | 89127614d5 | |
|  | 1f1a3f19d5 | |
|  | f7469442e3 | |
|  | 8363317e11 | |
|  | a628eabb30 | |
|  | d216068713 | |
|  | 131e3e8157 | |
|  | fc95c6719f | |
|  | bef3dd9e97 | |
|  | e6ccfce751 | |
|  | 31207f92ee | |
|  | 5f8f8e98ba | |
|  | b56352b0e4 | |
|  | 9be821a5cf | |
|  | b46400a86f | |
|  | 02812b9f51 | |
|  | 3c5816c977 | |
|  | af3745684c | |
|  | 3907cba68e | |
|  | e3d59964af | |
|  | ba83bab776 | |
|  | 18d440c207 | |
|  | edac717613 | |
|  | 7e93b81a83 | |
|  | 4fbd469c33 | |
|  | cb90f3e6ba | |
|  | 5e009a8229 | |
|  | b72a025d0f | |
|  | 5739e79645 | |
|  | 2ac999cc3c | |
|  | 9f9b0b17dc | |
|  | 9133f42b07 | |
|  | 268bd0d8ec | |
|  | 4f1db1ff52 | |
|  | a870df68c0 | |
|  | 3d12a7e005 | |
|  | 9292d73b40 | |
|  | 83d69fe395 | |
|  | 72df312e71 | |
|  | 711f639fc5 | |
|  | 8477919fc9 | |
|  | 872feef24b | |
|  | affc210033 | |
|  | 04bd111037 | |
|  | a0ee0cc713 | |
|  | 5449bd5673 | |
|  | e6d4ec43b9 | |
|  | 418c6907fd | |
|  | d528e7ab4d | |
|  | 7a89b59a3f | |
|  | 7d4cd8944c | |
|  | a6058d14ae | |
|  | 43a8cf4be1 | |
|  | 6534a363a5 | |
|  | 30d60379c1 | |
|  | 408a74784e | |
|  | f0342d6ae3 | |
|  | 21f633a900 | |
|  | 4a270f85ca | |
|  | d802c8aa90 | |
|  | 8ea0f08386 | |
|  | 13ea500a44 | |
|  | 2f854a3e86 | |
|  | cdb1311e40 | |
|  | fcd089c08f | |
|  | 993281882b | |
|  | bbb4d4e52c | |
|  | 0e8c60ee4a | |
|  | 1db5d4def2 | |
|  | 6e54abc56d | |
|  | 28af4749cc | |
|  | 02a7c7c276 | |
|  | 4fa71cc01c | |
|  | 6a4ee461f5 | |
|  | 2db03444f7 | |
|  | a1b124b62b | |
|  | 59ca256183 | |
|  | 6c2efc96dc | |
|  | f7fd8278af | |
|  | 7ac730e326 | |
|  | 582144830f | |
|  | 8b860f4245 | |
|  | 27fd96729a | |
|  | eee4c61b51 | |
|  | 42ba855d1b | |
|  | c2cc12e14f | |
|  | e4ec6b7b0c | |
|  | 9ce958cb4a | |
|  | ce4d64ed2f | |
|  | c6f599b1be | |
|  | 9eb74560ad | |
|  | 702dfe47d5 | |
|  | d15e73557a | |
|  | 74d4b5280a | |
|  | 3538ccd799 | |
|  | b22f7dcae0 | |
|  | fde62c72be | |
|  | 4ef77bb64f | |
|  | e78fdf2f69 | |
|  | 13bc3c308d | |
|  | 60fc43e530 | |
|  | 30afcd2b6b | |
|  | c80f020ebc | |
|  | 262a0e36c6 | |
|  | d93135acd8 | |
|  | b23780c102 | |
|  | 31de5f6648 | |
|  | 236083b6e4 | |
|  | d2dee87b36 | |
|  | 5cb0cc0f0b | |
|  | fc075e96c6 | |
|  | d6ca4771ce | |
|  | c5a0cfc639 | |
|  | f85314ecab | |
|  | c929bc15c9 | |
|  | 6690968236 | |
|  | 343b7c9712 | |
|  | 45f37870af | |
|  | 4d528b76a0 | |
|  | 05b143d9ef | |
|  | a354732a9e | |
|  | fbc21a1dec | |
|  | b278164f83 | |
|  | 8ffa6a5e68 | |
|  | 7707e0e75a | |
|  | 523c24eb72 | |
|  | 544ff5ab4c | |
|  | 63c23d6b82 | |
|  | cca3206fd6 | |
|  | 54530dcf94 | |
|  | 338395346d | |
|  | 30c5896d26 | |
|  | 88a0e90f82 | |
|  | 40c972f0ec | |
|  | f139adddca | |
|  | 979af79588 | |
|  | a3429268ea | |
|  | d285a3479a | |
|  | 61db040702 | |
|  | a5a0e6854b | |
|  | c383978402 | |
|  | 08fcd3fb03 | |
|  | adba454d1d | |
|  | 4bab998ff9 | |
|  | c25c77c573 | |
|  | 188ff0e0e5 | |
|  | 6b30c86eca | |
|  | 6aa52417ef | |
|  | 18e97a8f9a | |
|  | 5eb9144921 | |
|  | a51632ffa6 | |
|  | 0df7d557db | |
|  | 7b020c42cc | |
|  | d18cf32e28 | |
|  | dd6a4d49d8 | |
|  | d51be2a36a | |
|  | 3018187228 | |
|  | e5f0b450cf | |
|  | 4aa24f8518 | |
|  | d2f6428e46 | |
|  | 5439060cd3 | |
|  | 7372404d76 | |
|  | 77a15ebf19 | |
|  | d0e7610073 | |
|  | a73b24cf4a | |
|  | 5dfff3f75a | |
|  | d4155396bf | |
|  | 3869e91b19 | |
|  | 829dfa7520 | |
|  | b209990d04 | |
|  | 60aa16adf6 | |
|  | eca2c02f8b | |
|  | 921f72f7fe | |
|  | 38a6483859 | |
|  | f72b972348 | |
|  | 2edfed75eb | |
|  | 2d22713806 | |
|  | df548257ad | |
|  | 3fb3608879 | |
|  | faa7194daf | |
|  | eec240a70a | |
|  | 322e015d32 | |
|  | dbc445ff9d | |
|  | 7aaa2a61ec | |
|  | 0dcaf5f3b2 | |
|  | af013912ac | |
|  | 8839bb06a3 | |
|  | a35c1d40ab | |
|  | 15549f7c26 | |
|  | cf48fdecfe | |
|  | b341146bd1 | |
|  | 2f451ab9a3 | |
|  | 8e83455a78 | |
|  | 38111e8d53 | |
|  | aea5abdd70 | |
|  | aca6503fcd | |
|  | b9a61ded0a | |
|  | 4cfe4979ff | |
|  | 97bfbdbc1c | |
|  | b1fd8b2ec3 | |
|  | 5c1401bf81 | |
|  | 7f1c2b8ecf | |
|  | 10c98946bd | |
|  | 5b551dd9fa | |
|  | 0fcd424d57 | |
|  | 70ab60ce7c | |
|  | a65e1e7a88 | |
|  | 40cba51909 | |
|  | e153cc0187 | |
|  | f2ce4a3469 | |
|  | 3aa964315a | |
|  | f3ca8608d5 | |
|  | 25ffdedc06 | |
|  | 3ba46362a9 | |
|  | fb8196e354 | |
|  | b6ed26589a | |
|  | 8ff18739be | |
|  | 456979dd12 | |
|  | 995af130cf | |
|  | d55266f4a2 | |
|  | 79211eab9a | |
|  | 336db8425e | |
|  | 2eaef26547 | |
|  | 0a69829ec5 | |
|  | 496dce57a8 | |
|  | 72b4dc1461 | |
|  | 90bfdaf58c | |
|  | 507cd96904 | |
|  | 2588e54867 | |
|  | 0055c1d954 | |
|  | 4f863a6989 | |
|  | c04d77a3c9 | |
|  | 8e66f45e23 | |
|  | 290b0a86b1 | |
|  | d5e5174d97 | |
|  | 8ab5e08830 | |
|  | 668016d37b | |
|  | 9221c57234 | |
|  | 78434f6317 | |
|  | 5fb5682269 | |
|  | 71de56b09a | |
|  | e5cb39804c | |
|  | d28c7e17c6 | |
|  | d23d8c1779 | |
|  | 58cc57a422 | |
|  | da913ef2bb | |
|  | 96992bcbb9 | |
|  | 6533285d7d | |
|  | 8c39b8b124 | |
|  | ededa2e88f | |
|  | dd168184c3 | |
|  | 37ee477aee | |
|  | f067cf48a7 | |
|  | c56d4b0a79 | |
|  | 7cafb59ab7 | |
|  | 7458f99733 | |
|  | 4c3c3e4b56 | |
|  | b29d33d603 | |
|  | 1617e0ff2c | |
|  | c025761f15 | |
|  | 2e797ef7ee | |
|  | c36deb1f4d | |
|  | fa7e37d6ed | |
|  | 364ea91983 | |
|  | 7ae9b5319b | |
|  | 6156ff95f8 | |
|  | 9e3f41a5b1 | |
|  | 7c22f76274 | |
|  | 04c99c2749 | |
|  | e536057fea | |
|  | c6b4da5788 | |
|  | 1f7f84fdfa | |
|  | a5bdc6db66 | |
|  | 9a18b57d38 | |
|  | ed10632d97 | |
|  | 299429a278 | |
|  | 28fefe4ffe | |
|  | 08a6a51cb8 | |
|  | 50465d4b34 | |
|  | 4f69af872c | |
|  | 9bc6a61c93 | |
|  | 23aa97692e | |
|  | 1e5810e56c | |
|  | b54cb6682c | |
|  | 3ed309f019 | |
|  | d08aeaeafe | |
|  | c6ee4e5dc1 | |
|  | ad5eee5666 | |
|  | fc72d75061 | |
|  | de1843dc84 | |
|  | 930d498841 | |
|  | 5ea112699d | |
|  | e244747bc3 | |
|  | 5a09ccf459 | |
|  | ce1bcf6d36 | |
|  | 28ba5e5435 | |
|  | 10adf34be5 | |
|  | 82dcaff8db | |
|  | 621b252b0c | |
|  | 20a089c331 | |
|  | df50d78042 | |
|  | 114ec36436 | |
|  | 179d7d2b04 | |
|  | f568fca98f | |
|  | 6c9bc627d8 | |
|  | 1d7cf7d1dd | |
|  | 54a0a0000d | |
|  | 0268b2ce91 | |
|  | 81f8e2d4ac | |
|  | bf0739c194 | |
|  | 5fe3f58ea9 | |
|  | 3e1d033708 | |
|  | c35576e196 | |
|  | 8ce26d692f | |
|  | 7f29fd8dcf | |
|  | 7fbada8a15 | |
|  | 286e75d342 | |
|  | df641d9d31 | |
|  | 35b0c4bef0 | |
|  | c4496f21fc | |
|  | 7e0e627921 | |
|  | 28ea8e787a | |
|  | 0294455c5e | |
|  | 734bc09b67 | |
|  | 0bcdea28a0 | |
|  | fdf3a1b01b | |
|  | ce7b8a5e18 | |
|  | 00024181cd | |
|  | 814384848d | |
|  | bea31f6d19 | |
|  | 250275d98d | |
|  | f415fc43ce | |
|  | 3f15923537 | |
|  | 87cd725adb | |
|  | 48accbd28f | |
|  | 227c9ea173 | |
|  | d651f3d8e9 | |
|  | ef0cfc4b20 | |
|  | ecb525a2bc | |
|  | b77d123edd | |
|  | f4e63465de | |
|  | df31047ecb | |
|  | 131674eabd | |
|  | 5a94e8fb5b | |
|  | 0518b3ab04 | |
|  | 2f0bed3018 | |
|  | 9da3b63644 | |
|  | 1d6f55543d | |
|  | a3ed30e62b | |
|  | 42d621bba7 | |
|  | 2e81ccf5b4 | |
|  | 022bf8ce75 | |
|  | 0e9457299c | |
|  | 6b1ceee19f | |
|  | 1e689ee701 | |
|  | 190845ce1d | |
|  | 0c74b04c83 | |
|  | 215fec1d41 | |
|  | fcc8cee9d3 | |
|  | ca3f7a1b6b | |
|  | 87c1113de4 | |
|  | 43b659dbe4 | |
|  | 63b1488ab6 | |
|  | 7eb31f3fea | |
|  | 534e5d150d | |
|  | e4a6223256 | |
|  | ab2664da70 | |
|  | ae326cbb9a | |
|  | 07cec02303 | |
|  | 2fdb8fc25a | |
|  | 6d951c526a | |
|  | 575a24adf1 | |
|  | 919e462f88 | |
|  | a09b8560bb | |
|  | c4cd573b26 | |
|  | d24a9e158f | |
|  | 18a1634025 | |
|  | 78c0d2b234 | |
|  | 4314a59327 | |
|  | e94f1261b5 | |
|  | 86da79a854 | |
|  | de89e3a9c4 | |
|  | 7bed470f5c | |
|  | fa9a9cfb1d | |
|  | 3d0e95513c | |
|  | ee151b00af | |
|  | 22c14e235e | |
|  | 1102843087 | |
|  | e03bec5efc | |
|  | bee2c36072 | |
|  | b36b3d522f | |
|  | 4ace8f6037 | |
|  | 98a7326c85 | |
|  | 46972df041 | |
|  | 565d7c3ee5 | |
|  | ac695a05bf | |
|  | fc56971a2d | |
|  | ee87cf0e29 | |
|  | ebcb275cd8 | |
|  | f745da9fb2 | |
|  | 4f442efbd7 | |
|  | f9a84f0732 | |
|  | e0bf964ff0 | |
|  | a9fc4c1b91 | |
|  | b52ff270c5 | |
|  | 1713ecd9f8 | |
|  | edb82fdd78 | |
|  | 339d787cf8 | |
|  | c32b21b4b1 | |
|  | 71477290fc | |
|  | 9716d86825 | 
|  | @ -8,70 +8,46 @@ on: | |||
|   workflow_dispatch: | ||||
| 
 | ||||
| jobs: | ||||
|   # ------ sdist ------ | ||||
| 
 | ||||
|   mypy: | ||||
|     name: 'MyPy' | ||||
|     runs-on: ubuntu-latest | ||||
| 
 | ||||
|     steps: | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v2 | ||||
| 
 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.10' | ||||
| 
 | ||||
|       - name: Install dependencies | ||||
|         run: pip install -U . --upgrade-strategy eager -r requirements-test.txt | ||||
| 
 | ||||
|       - name: Run MyPy check | ||||
|         run: mypy tractor/ --ignore-missing-imports --show-traceback | ||||
| 
 | ||||
|   # test that we can generate a software distribution and install it | ||||
|   # thus avoid missing file issues after packaging. | ||||
|   # | ||||
|   # -[x] produce sdist with uv | ||||
|   # ------ - ------ | ||||
|   sdist-linux: | ||||
|     name: 'sdist' | ||||
|     runs-on: ubuntu-latest | ||||
| 
 | ||||
|     steps: | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v4 | ||||
|         uses: actions/checkout@v2 | ||||
| 
 | ||||
|       - name: Install latest uv | ||||
|         uses: astral-sh/setup-uv@v6 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.10' | ||||
| 
 | ||||
|       - name: Build sdist as tar.gz | ||||
|         run: uv build --sdist --python=3.13 | ||||
|       - name: Build sdist | ||||
|         run: python setup.py sdist --formats=zip | ||||
| 
 | ||||
|       - name: Install sdist from .tar.gz | ||||
|         run: python -m pip install dist/*.tar.gz | ||||
| 
 | ||||
|   # ------ type-check ------ | ||||
|   # mypy: | ||||
|   #   name: 'MyPy' | ||||
|   #   runs-on: ubuntu-latest | ||||
| 
 | ||||
|   #   steps: | ||||
|   #     - name: Checkout | ||||
|   #       uses: actions/checkout@v4 | ||||
| 
 | ||||
|   #     - name: Install latest uv | ||||
|   #       uses: astral-sh/setup-uv@v6 | ||||
| 
 | ||||
|   #     # faster due to server caching? | ||||
|   #     # https://docs.astral.sh/uv/guides/integration/github/#setting-up-python | ||||
|   #     - name: "Set up Python" | ||||
|   #       uses: actions/setup-python@v6 | ||||
|   #       with: | ||||
|   #         python-version-file: "pyproject.toml" | ||||
| 
 | ||||
|   #     # w uv | ||||
|   #     # - name: Set up Python | ||||
|   #     #   run: uv python install | ||||
| 
 | ||||
|   #     - name: Setup uv venv | ||||
|   #       run: uv venv .venv --python=3.13 | ||||
| 
 | ||||
|   #     - name: Install | ||||
|   #       run: uv sync --dev | ||||
| 
 | ||||
|   #     # TODO, ty cmd over repo | ||||
|   #     # - name: type check with ty | ||||
|   #     #   run: ty ./tractor/ | ||||
| 
 | ||||
|   #     # - uses: actions/cache@v3 | ||||
|   #     #     name: Cache uv virtenv as default .venv | ||||
|   #     #     with: | ||||
|   #     #       path: ./.venv | ||||
|   #     #       key: venv-${{ hashFiles('uv.lock') }} | ||||
| 
 | ||||
|   #     - name: Run MyPy check | ||||
|   #       run: mypy tractor/ --ignore-missing-imports --show-traceback | ||||
|       - name: Install sdist from .zips | ||||
|         run: python -m pip install dist/*.zip | ||||
| 
 | ||||
| 
 | ||||
|   testing-linux: | ||||
|  | @ -83,45 +59,32 @@ jobs: | |||
|       fail-fast: false | ||||
|       matrix: | ||||
|         os: [ubuntu-latest] | ||||
|         python-version: ['3.13'] | ||||
|         python: ['3.10'] | ||||
|         spawn_backend: [ | ||||
|           'trio', | ||||
|           # 'mp_spawn', | ||||
|           # 'mp_forkserver', | ||||
|           'mp_spawn', | ||||
|           'mp_forkserver', | ||||
|         ] | ||||
| 
 | ||||
|     steps: | ||||
| 
 | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v2 | ||||
| 
 | ||||
|       - name: 'Install uv + py-${{ matrix.python-version }}' | ||||
|         uses: astral-sh/setup-uv@v6 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: ${{ matrix.python-version }} | ||||
|           python-version: '${{ matrix.python }}' | ||||
| 
 | ||||
|       # GH way.. faster? | ||||
|       # - name: setup-python@v6 | ||||
|       #   uses: actions/setup-python@v6 | ||||
|       #   with: | ||||
|       #     python-version: '${{ matrix.python-version }}' | ||||
|       - name: Install dependencies | ||||
|         run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||
| 
 | ||||
|       # consider caching for speedups? | ||||
|       # https://docs.astral.sh/uv/guides/integration/github/#caching | ||||
| 
 | ||||
|       - name: Install the project w uv | ||||
|         run: uv sync --all-extras --dev | ||||
| 
 | ||||
|       # - name: Install dependencies | ||||
|       #   run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager | ||||
| 
 | ||||
|       - name: List deps tree | ||||
|         run: uv tree | ||||
|       - name: List dependencies | ||||
|         run: pip list | ||||
| 
 | ||||
|       - name: Run tests | ||||
|         run: uv run pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx | ||||
|         run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx | ||||
| 
 | ||||
|   # XXX legacy NOTE XXX | ||||
|   # | ||||
|   # We skip 3.10 on windows for now due to not having any collabs to | ||||
|   # debug the CI failures. Anyone wanting to hack and solve them is very | ||||
|   # welcome, but our primary user base is not using that OS. | ||||
|  |  | |||
							
								
								
									
										19
									
								
								default.nix
								
								
								
								
							
							
						
						
									
										19
									
								
								default.nix
								
								
								
								
							|  | @ -1,19 +0,0 @@ | |||
| { pkgs ? import <nixpkgs> {} }: | ||||
| let | ||||
|   nativeBuildInputs = with pkgs; [ | ||||
|     stdenv.cc.cc.lib | ||||
|     uv | ||||
|   ]; | ||||
| 
 | ||||
| in | ||||
| pkgs.mkShell { | ||||
|   inherit nativeBuildInputs; | ||||
| 
 | ||||
|   LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath nativeBuildInputs; | ||||
|   TMPDIR = "/tmp"; | ||||
| 
 | ||||
|   shellHook = '' | ||||
|     set -e | ||||
|     uv venv .venv --python=3.12 | ||||
|   ''; | ||||
| } | ||||
							
								
								
									
										167
									
								
								docs/README.rst
								
								
								
								
							
							
						
						
									
										167
									
								
								docs/README.rst
								
								
								
								
							|  | @ -1,20 +1,20 @@ | |||
| |logo| ``tractor``: distributed structurred concurrency | ||||
| |logo| ``tractor``: next-gen Python parallelism | ||||
| 
 | ||||
| ``tractor`` is a `structured concurrency`_ (SC), multi-processing_ runtime built on trio_. | ||||
| |gh_actions| | ||||
| |docs| | ||||
| 
 | ||||
| Fundamentally, ``tractor`` provides parallelism via | ||||
| ``trio``-"*actors*": independent Python **processes** (i.e. | ||||
| *non-shared-memory threads*) which can schedule ``trio`` tasks whilst | ||||
| maintaining *end-to-end SC* inside a *distributed supervision tree*. | ||||
| ``tractor`` is a `structured concurrent`_, (optionally | ||||
| distributed_) multi-processing_ runtime built on trio_. | ||||
| 
 | ||||
| Fundamentally, ``tractor`` gives you parallelism via | ||||
| ``trio``-"*actors*": independent Python processes (aka | ||||
| non-shared-memory threads) which maintain structured | ||||
| concurrency (SC) *end-to-end* inside a *supervision tree*. | ||||
| 
 | ||||
| Cross-process (and thus cross-host) SC is accomplished through the | ||||
| combined use of our, | ||||
| 
 | ||||
| - "actor nurseries_" which provide for spawning multiple, and | ||||
|   possibly nested, Python processes each running a ``trio`` scheduled | ||||
|   runtime - a call to ``trio.run()``, | ||||
| - an "SC-transitive supervision protocol" enforced as an | ||||
|   IPC-message-spec encapsulating all RPC-dialogs. | ||||
| combined use of our "actor nurseries_" and an "SC-transitive IPC | ||||
| protocol" constructed on top of multiple Pythons each running a ``trio`` | ||||
| scheduled runtime - a call to ``trio.run()``. | ||||
| 
 | ||||
| We believe the system adheres to the `3 axioms`_ of an "`actor model`_" | ||||
| but likely **does not** look like what **you** probably *think* an "actor | ||||
|  | @ -27,7 +27,6 @@ The first step to grok ``tractor`` is to get an intermediate | |||
| knowledge of ``trio`` and **structured concurrency** B) | ||||
| 
 | ||||
| Some great places to start are, | ||||
| 
 | ||||
| - the seminal `blog post`_ | ||||
| - obviously the `trio docs`_ | ||||
| - wikipedia's nascent SC_ page | ||||
|  | @ -36,91 +35,22 @@ Some great places to start are, | |||
| 
 | ||||
| Features | ||||
| -------- | ||||
| - **It's just** a ``trio`` API! | ||||
| - *Infinitely nesteable* process trees running embedded ``trio`` tasks. | ||||
| - Swappable, OS-specific, process spawning via multiple backends. | ||||
| - Modular IPC stack, allowing for custom interchange formats (eg. | ||||
|   as offered from `msgspec`_), varied transport protocols (TCP, RUDP, | ||||
|   QUIC, wireguard), and OS-env specific higher-perf primitives (UDS, | ||||
|   shm-ring-buffers). | ||||
| - Optionally distributed_: all IPC and RPC APIs work over multi-host | ||||
|   transports the same as local. | ||||
| - Builtin high-level streaming API that enables your app to easily | ||||
|   leverage the benefits of a "`cheap or nasty`_" `(un)protocol`_. | ||||
| - A "native UX" around a multi-process safe debugger REPL using | ||||
|   `pdbp`_ (a fork & fix of `pdb++`_) | ||||
| - "Infected ``asyncio``" mode: support for starting an actor's | ||||
|   runtime as a `guest`_ on the ``asyncio`` loop allowing us to | ||||
|   provide stringent SC-style ``trio.Task``-supervision around any | ||||
|   ``asyncio.Task`` spawned via our ``tractor.to_asyncio`` APIs. | ||||
| - A **very naive** and still very much work-in-progress inter-actor | ||||
|   `discovery`_ sys with plans to support multiple `modern protocol`_ | ||||
|   approaches. | ||||
| - Various ``trio`` extension APIs via ``tractor.trionics`` such as, | ||||
|   - task fan-out `broadcasting`_, | ||||
|   - multi-task-single-resource-caching and fan-out-to-multi | ||||
|     ``__aenter__()`` APIs for ``@acm`` functions, | ||||
|   - (WIP) a ``TaskMngr``: one-cancels-one style nursery supervisor. | ||||
| 
 | ||||
| 
 | ||||
| Status of `main` / infra | ||||
| ------------------------ | ||||
| 
 | ||||
| - |gh_actions| | ||||
| - |docs| | ||||
| 
 | ||||
| 
 | ||||
| Install | ||||
| ------- | ||||
| ``tractor`` is still in a *alpha-near-beta-stage* for many | ||||
| of its subsystems, however we are very close to having a stable | ||||
| lowlevel runtime and API. | ||||
| 
 | ||||
| As such, it's currently recommended that you clone and install the | ||||
| repo from source:: | ||||
| 
 | ||||
|     pip install git+git://github.com/goodboy/tractor.git | ||||
| 
 | ||||
| 
 | ||||
| We use the very hip `uv`_ for project mgmt:: | ||||
| 
 | ||||
|     git clone https://github.com/goodboy/tractor.git | ||||
|     cd tractor | ||||
|     uv sync --dev | ||||
|     uv run python examples/rpc_bidir_streaming.py | ||||
| 
 | ||||
| Consider activating a virtual/project-env before starting to hack on | ||||
| the code base:: | ||||
| 
 | ||||
|     # you could use plain ol' venvs | ||||
|     # https://docs.astral.sh/uv/pip/environments/ | ||||
|     uv venv tractor_py313 --python 3.13 | ||||
| 
 | ||||
|     # but @goodboy prefers the more explicit (and shell agnostic) | ||||
|     # https://docs.astral.sh/uv/configuration/environment/#uv_project_environment | ||||
|     UV_PROJECT_ENVIRONMENT="tractor_py313 | ||||
| 
 | ||||
|     # hint hint, enter @goodboy's fave shell B) | ||||
|     uv run --dev xonsh | ||||
| 
 | ||||
| Alongside all this we ofc offer "releases" on PyPi:: | ||||
| 
 | ||||
|     pip install tractor | ||||
| 
 | ||||
| Just note that YMMV since the main git branch is often much further | ||||
| ahead then any latest release. | ||||
| 
 | ||||
| 
 | ||||
| Example codez | ||||
| ------------- | ||||
| In ``tractor``'s (very lacking) documention we prefer to point to | ||||
| example scripts in the repo over duplicating them in docs, but with | ||||
| that in mind here are some definitive snippets to try and hook you | ||||
| into digging deeper. | ||||
| - **It's just** a ``trio`` API | ||||
| - *Infinitely nesteable* process trees | ||||
| - Builtin IPC streaming APIs with task fan-out broadcasting | ||||
| - A "native" multi-core debugger REPL using `pdbp`_ (a fork & fix of | ||||
|   `pdb++`_ thanks to @mdmintz!) | ||||
| - Support for a swappable, OS specific, process spawning layer | ||||
| - A modular transport stack, allowing for custom serialization (eg. with | ||||
|   `msgspec`_), communications protocols, and environment specific IPC | ||||
|   primitives | ||||
| - Support for spawning process-level-SC, inter-loop one-to-one-task oriented | ||||
|   ``asyncio`` actors via "infected ``asyncio``" mode | ||||
| - `structured chadcurrency`_ from the ground up | ||||
| 
 | ||||
| 
 | ||||
| Run a func in a process | ||||
| *********************** | ||||
| ----------------------- | ||||
| Use ``trio``'s style of focussing on *tasks as functions*: | ||||
| 
 | ||||
| .. code:: python | ||||
|  | @ -178,7 +108,7 @@ might want to check out `trio-parallel`_. | |||
| 
 | ||||
| 
 | ||||
| Zombie safe: self-destruct a process tree | ||||
| ***************************************** | ||||
| ----------------------------------------- | ||||
| ``tractor`` tries to protect you from zombies, no matter what. | ||||
| 
 | ||||
| .. code:: python | ||||
|  | @ -234,7 +164,7 @@ it **is a bug**. | |||
| 
 | ||||
| 
 | ||||
| "Native" multi-process debugging | ||||
| ******************************** | ||||
| -------------------------------- | ||||
| Using the magic of `pdbp`_ and our internal IPC, we've | ||||
| been able to create a native feeling debugging experience for | ||||
| any (sub-)process in your ``tractor`` tree. | ||||
|  | @ -289,7 +219,7 @@ We're hoping to add a respawn-from-repl system soon! | |||
| 
 | ||||
| 
 | ||||
| SC compatible bi-directional streaming | ||||
| ************************************** | ||||
| -------------------------------------- | ||||
| Yes, you saw it here first; we provide 2-way streams | ||||
| with reliable, transitive setup/teardown semantics. | ||||
| 
 | ||||
|  | @ -381,7 +311,7 @@ hear your thoughts on! | |||
| 
 | ||||
| 
 | ||||
| Worker poolz are easy peasy | ||||
| *************************** | ||||
| --------------------------- | ||||
| The initial ask from most new users is *"how do I make a worker | ||||
| pool thing?"*. | ||||
| 
 | ||||
|  | @ -403,10 +333,10 @@ This uses no extra threads, fancy semaphores or futures; all we need | |||
| is ``tractor``'s IPC! | ||||
| 
 | ||||
| "Infected ``asyncio``" mode | ||||
| *************************** | ||||
| --------------------------- | ||||
| Have a bunch of ``asyncio`` code you want to force to be SC at the process level? | ||||
| 
 | ||||
| Check out our experimental system for `guest`_-mode controlled | ||||
| Check out our experimental system for `guest-mode`_ controlled | ||||
| ``asyncio`` actors: | ||||
| 
 | ||||
| .. code:: python | ||||
|  | @ -512,7 +442,7 @@ We need help refining the `asyncio`-side channel API to be more | |||
| 
 | ||||
| 
 | ||||
| Higher level "cluster" APIs | ||||
| *************************** | ||||
| --------------------------- | ||||
| To be extra terse the ``tractor`` devs have started hacking some "higher | ||||
| level" APIs for managing actor trees/clusters. These interfaces should | ||||
| generally be condsidered provisional for now but we encourage you to try | ||||
|  | @ -569,6 +499,18 @@ spawn a flat cluster: | |||
| .. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py | ||||
| 
 | ||||
| 
 | ||||
| Install | ||||
| ------- | ||||
| From PyPi:: | ||||
| 
 | ||||
|     pip install tractor | ||||
| 
 | ||||
| 
 | ||||
| From git:: | ||||
| 
 | ||||
|     pip install git+git://github.com/goodboy/tractor.git | ||||
| 
 | ||||
| 
 | ||||
| Under the hood | ||||
| -------------- | ||||
| ``tractor`` is an attempt to pair trionic_ `structured concurrency`_ with | ||||
|  | @ -672,32 +614,25 @@ channel`_! | |||
| .. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s | ||||
| .. _trio gitter channel: https://gitter.im/python-trio/general | ||||
| .. _matrix channel: https://matrix.to/#/!tractor:matrix.org | ||||
| .. _broadcasting: https://github.com/goodboy/tractor/pull/229 | ||||
| .. _modern procotol: https://en.wikipedia.org/wiki/Rendezvous_protocol | ||||
| .. _pdbp: https://github.com/mdmintz/pdbp | ||||
| .. _pdb++: https://github.com/pdbpp/pdbpp | ||||
| .. _cheap or nasty: https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern | ||||
| .. _(un)protocol: https://zguide.zeromq.org/docs/chapter7/#Unprotocols | ||||
| .. _discovery: https://zguide.zeromq.org/docs/chapter8/#Discovery | ||||
| .. _modern protocol: https://en.wikipedia.org/wiki/Rendezvous_protocol | ||||
| .. _guest mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||
| .. _messages: https://en.wikipedia.org/wiki/Message_passing | ||||
| .. _trio docs: https://trio.readthedocs.io/en/latest/ | ||||
| .. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/ | ||||
| .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||
| .. _SC: https://en.wikipedia.org/wiki/Structured_concurrency | ||||
| .. _libdill-docs: https://sustrik.github.io/libdill/structured-concurrency.html | ||||
| .. _structured chadcurrency: https://en.wikipedia.org/wiki/Structured_concurrency | ||||
| .. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony | ||||
| .. _async generators: https://www.python.org/dev/peps/pep-0525/ | ||||
| .. _trio-parallel: https://github.com/richardsheridan/trio-parallel | ||||
| .. _uv: https://docs.astral.sh/uv/ | ||||
| .. _msgspec: https://jcristharif.com/msgspec/ | ||||
| .. _guest: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||
| .. _guest-mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops | ||||
| 
 | ||||
| .. | ||||
|    NOTE, on generating badge links from the UI | ||||
|    https://docs.github.com/en/actions/how-tos/monitoring-and-troubleshooting-workflows/monitoring-workflows/adding-a-workflow-status-badge?ref=gitguardian-blog-automated-secrets-detection#using-the-ui | ||||
| .. |gh_actions| image:: https://github.com/goodboy/tractor/actions/workflows/ci.yml/badge.svg?branch=main | ||||
|     :target: https://github.com/goodboy/tractor/actions/workflows/ci.yml | ||||
| 
 | ||||
| .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fgoodboy%2Ftractor%2Fbadge&style=popout-square | ||||
|     :target: https://actions-badge.atrox.dev/goodboy/tractor/goto | ||||
| 
 | ||||
| .. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest | ||||
|     :target: https://tractor.readthedocs.io/en/latest/?badge=latest | ||||
|  |  | |||
|  | @ -16,7 +16,6 @@ from tractor import ( | |||
|     ContextCancelled, | ||||
|     MsgStream, | ||||
|     _testing, | ||||
|     trionics, | ||||
| ) | ||||
| import trio | ||||
| import pytest | ||||
|  | @ -63,8 +62,7 @@ async def recv_and_spawn_net_killers( | |||
|     await ctx.started() | ||||
|     async with ( | ||||
|         ctx.open_stream() as stream, | ||||
|         trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         trio.open_nursery() as n, | ||||
|     ): | ||||
|         async for i in stream: | ||||
|             print(f'child echoing {i}') | ||||
|  | @ -79,11 +77,11 @@ async def recv_and_spawn_net_killers( | |||
|                 i >= break_ipc_after | ||||
|             ): | ||||
|                 broke_ipc = True | ||||
|                 tn.start_soon( | ||||
|                 n.start_soon( | ||||
|                     iter_ipc_stream, | ||||
|                     stream, | ||||
|                 ) | ||||
|                 tn.start_soon( | ||||
|                 n.start_soon( | ||||
|                     partial( | ||||
|                         break_ipc_then_error, | ||||
|                         stream=stream, | ||||
|  | @ -120,7 +118,6 @@ async def main( | |||
|     break_parent_ipc_after: int|bool = False, | ||||
|     break_child_ipc_after: int|bool = False, | ||||
|     pre_close: bool = False, | ||||
|     tpt_proto: str = 'tcp', | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -132,7 +129,6 @@ async def main( | |||
|             # a hang since it never engages due to broken IPC | ||||
|             debug_mode=debug_mode, | ||||
|             loglevel=loglevel, | ||||
|             enable_transports=[tpt_proto], | ||||
| 
 | ||||
|         ) as an, | ||||
|     ): | ||||
|  | @ -147,8 +143,7 @@ async def main( | |||
|             _testing.expect_ctxc( | ||||
|                 yay=( | ||||
|                     break_parent_ipc_after | ||||
|                     or | ||||
|                     break_child_ipc_after | ||||
|                     or break_child_ipc_after | ||||
|                 ), | ||||
|                 # TODO: we CAN'T remove this right? | ||||
|                 # since we need the ctxc to bubble up from either | ||||
|  |  | |||
|  | @ -25,15 +25,16 @@ async def bp_then_error( | |||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # sync with `trio`-side (caller) task | ||||
|     # sync with ``trio``-side (caller) task | ||||
|     to_trio.send_nowait('start') | ||||
| 
 | ||||
|     # NOTE: what happens here inside the hook needs some refinement.. | ||||
|     # => seems like it's still `.debug._set_trace()` but | ||||
|     # => seems like it's still `._debug._set_trace()` but | ||||
|     #    we set `Lock.local_task_in_debug = 'sync'`, we probably want | ||||
|     #    some further, at least, meta-data about the task/actor in debug | ||||
|     #    in terms of making it clear it's `asyncio` mucking about. | ||||
|     breakpoint()  # asyncio-side | ||||
|     breakpoint() | ||||
| 
 | ||||
| 
 | ||||
|     # short checkpoint / delay | ||||
|     await asyncio.sleep(0.5)  # asyncio-side | ||||
|  | @ -57,6 +58,7 @@ async def trio_ctx( | |||
|     # this will block until the ``asyncio`` task sends a "first" | ||||
|     # message, see first line in above func. | ||||
|     async with ( | ||||
| 
 | ||||
|         to_asyncio.open_channel_from( | ||||
|             bp_then_error, | ||||
|             # raise_after_bp=not bp_before_started, | ||||
|  | @ -67,7 +69,7 @@ async def trio_ctx( | |||
|         assert first == 'start' | ||||
| 
 | ||||
|         if bp_before_started: | ||||
|             await tractor.pause()  # trio-side | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|         await ctx.started(first)  # trio-side | ||||
| 
 | ||||
|  | @ -109,7 +111,7 @@ async def main( | |||
| 
 | ||||
|             # pause in parent to ensure no cross-actor | ||||
|             # locking problems exist! | ||||
|             await tractor.pause()  # trio-root | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|             if cancel_from_root: | ||||
|                 await ctx.cancel() | ||||
|  |  | |||
|  | @ -21,13 +21,11 @@ async def name_error(): | |||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     Test breakpoint in a streaming actor. | ||||
| 
 | ||||
|     ''' | ||||
|     """Test breakpoint in a streaming actor. | ||||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='cancel', | ||||
|         # loglevel='cancel', | ||||
|         # loglevel='devx', | ||||
|     ) as n: | ||||
| 
 | ||||
|  |  | |||
|  | @ -40,7 +40,7 @@ async def main(): | |||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='devx', | ||||
|         # loglevel='cancel', | ||||
|     ) as n: | ||||
| 
 | ||||
|         # spawn both actors | ||||
|  |  | |||
|  | @ -4,11 +4,6 @@ import sys | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| # ensure mod-path is correct! | ||||
| from tractor.devx.debug import ( | ||||
|     _sync_pause_from_builtin as _sync_pause_from_builtin, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| async def main() -> None: | ||||
| 
 | ||||
|  | @ -18,30 +13,27 @@ async def main() -> None: | |||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='devx', | ||||
|         maybe_enable_greenback=True, | ||||
|         # ^XXX REQUIRED to enable `breakpoint()` support (from sync | ||||
|         # fns) and thus required here to avoid an assertion err | ||||
|         # on the next line | ||||
|     ): | ||||
|     ) as an: | ||||
|         assert an | ||||
|         assert ( | ||||
|             (pybp_var := os.environ['PYTHONBREAKPOINT']) | ||||
|             == | ||||
|             'tractor.devx.debug._sync_pause_from_builtin' | ||||
|             'tractor.devx._debug._sync_pause_from_builtin' | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: an assert that verifies the hook has indeed been, hooked | ||||
|         # XD | ||||
|         assert ( | ||||
|             (pybp_hook := sys.breakpointhook) | ||||
|             is not tractor.devx.debug._set_trace | ||||
|             is not tractor.devx._debug._set_trace | ||||
|         ) | ||||
| 
 | ||||
|         print( | ||||
|             f'$PYTHONOBREAKPOINT: {pybp_var!r}\n' | ||||
|             f'`sys.breakpointhook`: {pybp_hook!r}\n' | ||||
|         ) | ||||
|         breakpoint()  # first bp, tractor hook set. | ||||
|         breakpoint() | ||||
|         pass  # first bp, tractor hook set. | ||||
| 
 | ||||
|     # XXX AFTER EXIT (of actor-runtime) verify the hook is unset.. | ||||
|     # | ||||
|  | @ -51,7 +43,8 @@ async def main() -> None: | |||
|     assert sys.breakpointhook | ||||
| 
 | ||||
|     # now ensure a regular builtin pause still works | ||||
|     breakpoint()  # last bp, stdlib hook restored | ||||
|     breakpoint() | ||||
|     pass  # last bp, stdlib hook restored | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|  |  | |||
|  | @ -24,9 +24,10 @@ async def spawn_until(depth=0): | |||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     The process tree should look as approximately as follows when the | ||||
|     debugger first engages: | ||||
|     """The main ``tractor`` routine. | ||||
| 
 | ||||
|     The process tree should look as approximately as follows when the debugger | ||||
|     first engages: | ||||
| 
 | ||||
|     python examples/debugging/multi_nested_subactors_bp_forever.py | ||||
|     ├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...) | ||||
|  | @ -36,11 +37,10 @@ async def main(): | |||
|     └─ python -m tractor._child --uid ('spawner0', '1d42012b ...) | ||||
|        └─ python -m tractor._child --uid ('name_error', '6c2733b8 ...) | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='devx', | ||||
|         enable_transports=['uds'], | ||||
|         loglevel='warning' | ||||
|     ) as n: | ||||
| 
 | ||||
|         # spawn both actors | ||||
|  |  | |||
|  | @ -1,35 +0,0 @@ | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     async with tractor.open_root_actor( | ||||
|         debug_mode=True, | ||||
|         loglevel='cancel', | ||||
|     ) as _root: | ||||
| 
 | ||||
|         # manually trigger self-cancellation and wait | ||||
|         # for it to fully trigger. | ||||
|         _root.cancel_soon() | ||||
|         await _root._cancel_complete.wait() | ||||
|         print('root cancelled') | ||||
| 
 | ||||
|         # now ensure we can still use the REPL | ||||
|         try: | ||||
|             await tractor.pause() | ||||
|         except trio.Cancelled as _taskc: | ||||
|             assert (root_cs := _root._root_tn.cancel_scope).cancel_called | ||||
|             # NOTE^^ above logic but inside `open_root_actor()` and | ||||
|             # passed to the `shield=` expression is effectively what | ||||
|             # we're testing here! | ||||
|             await tractor.pause(shield=root_cs.cancel_called) | ||||
| 
 | ||||
|         # XXX, if shield logic *is wrong* inside `open_root_actor()`'s | ||||
|         # crash-handler block this should never be interacted, | ||||
|         # instead `trio.Cancelled` would be bubbled up: the original | ||||
|         # BUG. | ||||
|         assert 0 | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -37,9 +37,9 @@ async def main( | |||
|             enable_stack_on_sig=True, | ||||
|             # maybe_enable_greenback=False, | ||||
|             loglevel='devx', | ||||
|             enable_transports=['uds'], | ||||
|         ) as an, | ||||
|     ): | ||||
| 
 | ||||
|         ptl: tractor.Portal  = await an.start_actor( | ||||
|             'hanger', | ||||
|             enable_modules=[__name__], | ||||
|  | @ -54,16 +54,13 @@ async def main( | |||
| 
 | ||||
|             print( | ||||
|                 'Yo my child hanging..?\n' | ||||
|                 # "i'm a user who wants to see a `stackscope` tree!\n" | ||||
|                 'Sending SIGUSR1 to see a tree-trace!\n' | ||||
|             ) | ||||
| 
 | ||||
|             # XXX simulate the wrapping test's "user actions" | ||||
|             # (i.e. if a human didn't run this manually but wants to | ||||
|             # know what they should do to reproduce test behaviour) | ||||
|             if from_test: | ||||
|                 print( | ||||
|                     f'Sending SIGUSR1 to {cpid!r}!\n' | ||||
|                 ) | ||||
|                 os.kill( | ||||
|                     cpid, | ||||
|                     signal.SIGUSR1, | ||||
|  |  | |||
|  | @ -33,11 +33,8 @@ async def just_bp( | |||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         enable_transports=['uds'], | ||||
|         loglevel='devx', | ||||
|     ) as n: | ||||
|         p = await n.start_actor( | ||||
|             'bp_boi', | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ import tractor | |||
| 
 | ||||
| # TODO: only import these when not running from test harness? | ||||
| # can we detect `pexpect` usage maybe? | ||||
| # from tractor.devx.debug import ( | ||||
| # from tractor.devx._debug import ( | ||||
| #     get_lock, | ||||
| #     get_debug_req, | ||||
| # ) | ||||
|  |  | |||
|  | @ -91,7 +91,7 @@ async def main() -> list[int]: | |||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery( | ||||
|         loglevel='cancel', | ||||
|         # debug_mode=True, | ||||
|         debug_mode=True, | ||||
|     ) as an: | ||||
| 
 | ||||
|         seed = int(1e3) | ||||
|  |  | |||
|  | @ -3,18 +3,20 @@ import trio | |||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def sleepy_jane() -> None: | ||||
|     uid: tuple = tractor.current_actor().uid | ||||
| async def sleepy_jane(): | ||||
|     uid = tractor.current_actor().uid | ||||
|     print(f'Yo i am actor {uid}') | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     Spawn a flat actor cluster, with one process per detected core. | ||||
|     Spawn a flat actor cluster, with one process per | ||||
|     detected core. | ||||
| 
 | ||||
|     ''' | ||||
|     portal_map: dict[str, tractor.Portal] | ||||
|     results: dict[str, str] | ||||
| 
 | ||||
|     # look at this hip new syntax! | ||||
|     async with ( | ||||
|  | @ -23,15 +25,11 @@ async def main(): | |||
|             modules=[__name__] | ||||
|         ) as portal_map, | ||||
| 
 | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         trio.open_nursery() as n, | ||||
|     ): | ||||
| 
 | ||||
|         for (name, portal) in portal_map.items(): | ||||
|             tn.start_soon( | ||||
|                 portal.run, | ||||
|                 sleepy_jane, | ||||
|             ) | ||||
|             n.start_soon(portal.run, sleepy_jane) | ||||
| 
 | ||||
|         await trio.sleep(0.5) | ||||
| 
 | ||||
|  | @ -43,4 +41,4 @@ if __name__ == '__main__': | |||
|     try: | ||||
|         trio.run(main) | ||||
|     except KeyboardInterrupt: | ||||
|         print('trio cancelled by KBI') | ||||
|         pass | ||||
|  |  | |||
|  | @ -9,7 +9,7 @@ async def main(service_name): | |||
|     async with tractor.open_nursery() as an: | ||||
|         await an.start_actor(service_name) | ||||
| 
 | ||||
|         async with tractor.get_registry() as portal: | ||||
|         async with tractor.get_registry('127.0.0.1', 1616) as portal: | ||||
|             print(f"Arbiter is listening on {portal.channel}") | ||||
| 
 | ||||
|         async with tractor.wait_for_actor(service_name) as sockaddr: | ||||
|  |  | |||
|  | @ -1,85 +0,0 @@ | |||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from functools import partial | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| log = tractor.log.get_logger( | ||||
|     name=__name__ | ||||
| ) | ||||
| 
 | ||||
| _lock: trio.Lock|None = None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def acquire_singleton_lock( | ||||
| ) -> None: | ||||
|     global _lock | ||||
|     if _lock is None: | ||||
|         log.info('Allocating LOCK') | ||||
|         _lock = trio.Lock() | ||||
| 
 | ||||
|     log.info('TRYING TO LOCK ACQUIRE') | ||||
|     async with _lock: | ||||
|         log.info('ACQUIRED') | ||||
|         yield _lock | ||||
| 
 | ||||
|     log.info('RELEASED') | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| async def hold_lock_forever( | ||||
|     task_status=trio.TASK_STATUS_IGNORED | ||||
| ): | ||||
|     async with ( | ||||
|         tractor.trionics.maybe_raise_from_masking_exc(), | ||||
|         acquire_singleton_lock() as lock, | ||||
|     ): | ||||
|         task_status.started(lock) | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     ignore_special_cases: bool, | ||||
|     loglevel: str = 'info', | ||||
|     debug_mode: bool = True, | ||||
| ): | ||||
|     async with ( | ||||
|         trio.open_nursery() as tn, | ||||
| 
 | ||||
|         # tractor.trionics.maybe_raise_from_masking_exc() | ||||
|         # ^^^ XXX NOTE, interestingly putting the unmasker | ||||
|         # here does not exhibit the same behaviour ?? | ||||
|     ): | ||||
|         if not ignore_special_cases: | ||||
|             from tractor.trionics import _taskc | ||||
|             _taskc._mask_cases.clear() | ||||
| 
 | ||||
|         _lock = await tn.start( | ||||
|             hold_lock_forever, | ||||
|         ) | ||||
|         with trio.move_on_after(0.2): | ||||
|             await tn.start( | ||||
|                 hold_lock_forever, | ||||
|             ) | ||||
| 
 | ||||
|         tn.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| # XXX, manual test as script | ||||
| if __name__ == '__main__': | ||||
|     tractor.log.get_console_log(level='info') | ||||
|     for case in [True, False]: | ||||
|         log.info( | ||||
|             f'\n' | ||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' | ||||
|             f'ignore_special_cases: {case!r}\n' | ||||
|         ) | ||||
|         trio.run(partial( | ||||
|             main, | ||||
|             ignore_special_cases=case, | ||||
|             loglevel='info', | ||||
|         )) | ||||
|  | @ -1,195 +0,0 @@ | |||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
|     # TODO, any diff in async case(s)?? | ||||
|     # asynccontextmanager as acm, | ||||
| ) | ||||
| from functools import partial | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| log = tractor.log.get_logger( | ||||
|     name=__name__ | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def teardown_on_exc( | ||||
|     raise_from_handler: bool = False, | ||||
| ): | ||||
|     ''' | ||||
|     You could also have a teardown handler which catches any exc and | ||||
|     does some required teardown. In this case the problem is | ||||
|     compounded UNLESS you ensure the handler's scope is OUTSIDE the | ||||
|     `ux.aclose()`.. that is in the caller's enclosing scope. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         yield | ||||
|     except BaseException as _berr: | ||||
|         berr = _berr | ||||
|         log.exception( | ||||
|             f'Handling termination teardown in child due to,\n' | ||||
|             f'{berr!r}\n' | ||||
|         ) | ||||
|         if raise_from_handler: | ||||
|             # XXX teardown ops XXX | ||||
|             # on termination these steps say need to be run to | ||||
|             # ensure wider system consistency (like the state of | ||||
|             # remote connections/services). | ||||
|             # | ||||
|             # HOWEVER, any bug in this teardown code is also | ||||
|             # masked by the `tx.aclose()`! | ||||
|             # this is also true if `_tn.cancel_scope` is | ||||
|             # `.cancel_called` by the parent in a graceful | ||||
|             # request case.. | ||||
| 
 | ||||
|             # simulate a bug in teardown handler. | ||||
|             raise RuntimeError( | ||||
|                 'woopsie teardown bug!' | ||||
|             ) | ||||
| 
 | ||||
|         raise  # no teardown bug. | ||||
| 
 | ||||
| 
 | ||||
| async def finite_stream_to_rent( | ||||
|     tx: trio.abc.SendChannel, | ||||
|     child_errors_mid_stream: bool, | ||||
|     raise_unmasked: bool, | ||||
| 
 | ||||
|     task_status: trio.TaskStatus[ | ||||
|         trio.CancelScope, | ||||
|     ] = trio.TASK_STATUS_IGNORED, | ||||
| ): | ||||
|     async with ( | ||||
|         # XXX without this unmasker the mid-streaming RTE is never | ||||
|         # reported since it is masked by the `tx.aclose()` | ||||
|         # call which in turn raises `Cancelled`! | ||||
|         # | ||||
|         # NOTE, this is WITHOUT doing any exception handling | ||||
|         # inside the child  task! | ||||
|         # | ||||
|         # TODO, uncomment next LoC to see the supprsessed beg[RTE]! | ||||
|         tractor.trionics.maybe_raise_from_masking_exc( | ||||
|             raise_unmasked=raise_unmasked, | ||||
|         ), | ||||
| 
 | ||||
|         tx as tx,  # .aclose() is the guilty masker chkpt! | ||||
| 
 | ||||
|         # XXX, this ONLY matters in the | ||||
|         # `child_errors_mid_stream=False` case oddly!? | ||||
|         # THAT IS, if no tn is opened in that case then the | ||||
|         # test will not fail; it raises the RTE correctly? | ||||
|         # | ||||
|         # -> so it seems this new scope somehow affects the form of | ||||
|         #    eventual in the parent EG? | ||||
|         tractor.trionics.maybe_open_nursery( | ||||
|             nursery=( | ||||
|                 None | ||||
|                 if not child_errors_mid_stream | ||||
|                 else True | ||||
|             ), | ||||
|         ) as _tn, | ||||
|     ): | ||||
|         # pass our scope back to parent for supervision\ | ||||
|         # control. | ||||
|         cs: trio.CancelScope|None = ( | ||||
|             None | ||||
|             if _tn is True | ||||
|             else _tn.cancel_scope | ||||
|         ) | ||||
|         task_status.started(cs) | ||||
| 
 | ||||
|         with teardown_on_exc( | ||||
|             raise_from_handler=not child_errors_mid_stream, | ||||
|         ): | ||||
|             for i in range(100): | ||||
|                 log.debug( | ||||
|                     f'Child tx {i!r}\n' | ||||
|                 ) | ||||
|                 if ( | ||||
|                     child_errors_mid_stream | ||||
|                     and | ||||
|                     i == 66 | ||||
|                 ): | ||||
|                     # oh wait but WOOPS there's a bug | ||||
|                     # in that teardown code!? | ||||
|                     raise RuntimeError( | ||||
|                         'woopsie, a mid-streaming bug!?' | ||||
|                     ) | ||||
| 
 | ||||
|                 await tx.send(i) | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     # TODO! toggle this for the 2 cases! | ||||
|     # 1. child errors mid-stream while parent is also requesting | ||||
|     #   (graceful) cancel of that child streamer. | ||||
|     # | ||||
|     # 2. child contains a teardown handler which contains a | ||||
|     #   bug and raises. | ||||
|     # | ||||
|     child_errors_mid_stream: bool, | ||||
| 
 | ||||
|     raise_unmasked: bool = False, | ||||
|     loglevel: str = 'info', | ||||
| ): | ||||
|     tractor.log.get_console_log(level=loglevel) | ||||
| 
 | ||||
|     # the `.aclose()` being checkpoints on these | ||||
|     # is the source of the problem.. | ||||
|     tx, rx = trio.open_memory_channel(1) | ||||
| 
 | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         rx as rx, | ||||
|     ): | ||||
|         _child_cs = await tn.start( | ||||
|             partial( | ||||
|                 finite_stream_to_rent, | ||||
|                 child_errors_mid_stream=child_errors_mid_stream, | ||||
|                 raise_unmasked=raise_unmasked, | ||||
|                 tx=tx, | ||||
|             ) | ||||
|         ) | ||||
|         async for msg in rx: | ||||
|             log.debug( | ||||
|                 f'Rent rx {msg!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # simulate some external cancellation | ||||
|             # request **JUST BEFORE** the child errors. | ||||
|             if msg == 65: | ||||
|                 log.cancel( | ||||
|                     f'Cancelling parent on,\n' | ||||
|                     f'msg={msg}\n' | ||||
|                     f'\n' | ||||
|                     f'Simulates OOB cancel request!\n' | ||||
|                 ) | ||||
|                 tn.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| # XXX, manual test as script | ||||
| if __name__ == '__main__': | ||||
|     tractor.log.get_console_log(level='info') | ||||
|     for case in [True, False]: | ||||
|         log.info( | ||||
|             f'\n' | ||||
|             f'------ RUNNING SCRIPT TRIAL ------\n' | ||||
|             f'child_errors_midstream: {case!r}\n' | ||||
|         ) | ||||
|         try: | ||||
|             trio.run(partial( | ||||
|                 main, | ||||
|                 child_errors_mid_stream=case, | ||||
|                 # raise_unmasked=True, | ||||
|                 loglevel='info', | ||||
|             )) | ||||
|         except Exception as _exc: | ||||
|             exc = _exc | ||||
|             log.exception( | ||||
|                 'Should have raised an RTE or Cancelled?\n' | ||||
|             ) | ||||
|             breakpoint() | ||||
|  | @ -32,21 +32,25 @@ classifiers = [ | |||
|   "Topic :: System :: Distributed Computing", | ||||
| ] | ||||
| dependencies = [ | ||||
|   # trio runtime and friends | ||||
| # trio runtime and friends | ||||
|   # (poetry) proper range specs, | ||||
|   # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||
|   # TODO, for 3.13 we must go go `0.27` which means we have to | ||||
|   # disable strict egs or port to handling them internally! | ||||
|   "trio>0.27", | ||||
|   # trio='^0.27' | ||||
|   "trio>=0.24,<0.25", | ||||
|   "tricycle>=0.4.1,<0.5", | ||||
|   "trio-typing>=0.10.0,<0.11", | ||||
| 
 | ||||
|   "wrapt>=1.16.0,<2", | ||||
|   "colorlog>=6.8.2,<7", | ||||
|   # built-in multi-actor `pdb` REPL | ||||
|   "pdbp>=1.6,<2", # windows only (from `pdbp`) | ||||
|   # typed IPC msging | ||||
|   "msgspec>=0.19.0", | ||||
|   "cffi>=1.17.1", | ||||
|   "bidict>=0.23.1", | ||||
| 
 | ||||
| # built-in multi-actor `pdb` REPL | ||||
|   "pdbp>=1.5.0,<2", | ||||
| 
 | ||||
| # typed IPC msging | ||||
| # TODO, get back on release once 3.13 support is out! | ||||
|   "msgspec", | ||||
| ] | ||||
| 
 | ||||
| # ------ project ------ | ||||
|  | @ -56,61 +60,31 @@ dev = [ | |||
|   # test suite | ||||
|   # TODO: maybe some of these layout choices? | ||||
|   # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||
|   "pytest>=8.3.5", | ||||
|   "pytest>=8.2.0,<9", | ||||
|   "pexpect>=4.9.0,<5", | ||||
|   # `tractor.devx` tooling | ||||
|   "greenback>=1.2.1,<2", | ||||
|   "stackscope>=0.2.2,<0.3", | ||||
|   # ^ requires this? | ||||
|   "typing-extensions>=4.14.1", | ||||
| 
 | ||||
|   # xonsh usage/integration (namely as @goodboy's sh of choice Bp) | ||||
|   "xonsh>=0.19.1", | ||||
|   "xontrib-vox>=0.0.1,<0.0.2", | ||||
|   "prompt-toolkit>=3.0.43,<4", | ||||
|   "xonsh-vox-tabcomplete>=0.5,<0.6", | ||||
|   "pyperclip>=1.9.0", | ||||
|   "prompt-toolkit>=3.0.50", | ||||
|   "xonsh>=0.19.2", | ||||
|   "psutil>=7.0.0", | ||||
| ] | ||||
| # TODO, add these with sane versions; were originally in | ||||
| # `requirements-docs.txt`.. | ||||
| # docs = [ | ||||
| #   "sphinx>=" | ||||
| #   "sphinx_book_theme>=" | ||||
| # ] | ||||
| 
 | ||||
| # ------ dependency-groups ------ | ||||
| 
 | ||||
| # ------ dependency-groups ------ | ||||
| 
 | ||||
| [tool.uv.sources] | ||||
| # XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)` | ||||
| # for the `pp` alias.. | ||||
| # pdbp = { path = "../pdbp", editable = true } | ||||
| 
 | ||||
| # ------ tool.uv.sources ------ | ||||
| # TODO, distributed (multi-host) extensions | ||||
| # linux kernel networking | ||||
| # 'pyroute2 | ||||
| 
 | ||||
| # ------ tool.uv.sources ------ | ||||
| 
 | ||||
| [tool.uv] | ||||
| # XXX NOTE, prefer the sys python bc apparently the distis from | ||||
| # `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s | ||||
| # likely due to linking against `libedit` over `readline`.. | ||||
| # |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions | ||||
| # |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux | ||||
| # | ||||
| # https://docs.astral.sh/uv/reference/settings/#python-preference | ||||
| python-preference = 'system' | ||||
| 
 | ||||
| # ------ tool.uv ------ | ||||
| 
 | ||||
| [tool.hatch.build.targets.sdist] | ||||
| include = ["tractor"] | ||||
| 
 | ||||
| [tool.hatch.build.targets.wheel] | ||||
| include = ["tractor"] | ||||
| 
 | ||||
| # ------ tool.hatch ------ | ||||
| # ------ dependency-groups ------ | ||||
| 
 | ||||
| [tool.towncrier] | ||||
| package = "tractor" | ||||
|  | @ -141,8 +115,6 @@ all_bullets = true | |||
|   name = "Trivial/Internal Changes" | ||||
|   showcontent = true | ||||
| 
 | ||||
| # ------ tool.towncrier ------ | ||||
| 
 | ||||
| [tool.pytest.ini_options] | ||||
| minversion = '6.0' | ||||
| testpaths = [ | ||||
|  | @ -157,8 +129,10 @@ addopts = [ | |||
|   '--show-capture=no', | ||||
| ] | ||||
| log_cli = false | ||||
| # TODO: maybe some of these layout choices? | ||||
| # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||
| # pythonpath = "src" | ||||
| 
 | ||||
| # ------ tool.pytest ------ | ||||
| # ------ tool.towncrier ------ | ||||
| 
 | ||||
| [tool.uv.sources] | ||||
| msgspec = { git = "https://github.com/jcrist/msgspec.git" } | ||||
| 
 | ||||
| # ------ tool.uv.sources ------ | ||||
|  |  | |||
|  | @ -1,8 +0,0 @@ | |||
| # vim: ft=ini | ||||
| # pytest.ini for tractor | ||||
| 
 | ||||
| [pytest] | ||||
| # don't show frickin captured logs AGAIN in the report.. | ||||
| addopts = --show-capture='no' | ||||
| log_cli = false | ||||
| ; minversion = 6.0 | ||||
|  | @ -0,0 +1,2 @@ | |||
| sphinx | ||||
| sphinx_book_theme | ||||
|  | @ -0,0 +1,9 @@ | |||
| pytest | ||||
| pytest-trio | ||||
| pytest-timeout | ||||
| pdbp | ||||
| mypy | ||||
| trio_typing | ||||
| pexpect | ||||
| towncrier | ||||
| numpy | ||||
|  | @ -0,0 +1,102 @@ | |||
| #!/usr/bin/env python | ||||
| # | ||||
| # tractor: structured concurrent "actors". | ||||
| # | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| from setuptools import setup | ||||
| 
 | ||||
| with open('docs/README.rst', encoding='utf-8') as f: | ||||
|     readme = f.read() | ||||
| 
 | ||||
| 
 | ||||
| setup( | ||||
|     name="tractor", | ||||
|     version='0.1.0a6dev0',  # alpha zone | ||||
|     description='structured concurrent `trio`-"actors"', | ||||
|     long_description=readme, | ||||
|     license='AGPLv3', | ||||
|     author='Tyler Goodlet', | ||||
|     maintainer='Tyler Goodlet', | ||||
|     maintainer_email='goodboy_foss@protonmail.com', | ||||
|     url='https://github.com/goodboy/tractor', | ||||
|     platforms=['linux', 'windows'], | ||||
|     packages=[ | ||||
|         'tractor', | ||||
|         'tractor.experimental',  # wacky ideas | ||||
|         'tractor.trionics',  # trio extensions | ||||
|         'tractor.msg',  # lowlevel data types | ||||
|         'tractor.devx',  # "dev-experience" | ||||
|     ], | ||||
|     install_requires=[ | ||||
| 
 | ||||
|         # trio related | ||||
|         # proper range spec: | ||||
|         # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||
|         'trio >= 0.24', | ||||
| 
 | ||||
|         # 'async_generator',  # in stdlib mostly! | ||||
|         # 'trio_typing',  # trio==0.23.0 has type hints! | ||||
|         # 'exceptiongroup',  # in stdlib as of 3.11! | ||||
| 
 | ||||
|         # tooling | ||||
|         'stackscope', | ||||
|         'tricycle', | ||||
|         'trio_typing', | ||||
|         'colorlog', | ||||
|         'wrapt', | ||||
| 
 | ||||
|         # IPC serialization | ||||
|         'msgspec>=0.18.5', | ||||
| 
 | ||||
|         # debug mode REPL | ||||
|         'pdbp', | ||||
| 
 | ||||
|         # TODO: distributed transport using | ||||
|         # linux kernel networking | ||||
|         # 'pyroute2', | ||||
| 
 | ||||
|         # pip ref docs on these specs: | ||||
|         # https://pip.pypa.io/en/stable/reference/requirement-specifiers/#examples | ||||
|         # and pep: | ||||
|         # https://peps.python.org/pep-0440/#version-specifiers | ||||
| 
 | ||||
|     ], | ||||
|     tests_require=['pytest'], | ||||
|     python_requires=">=3.10", | ||||
|     keywords=[ | ||||
|         'trio', | ||||
|         'async', | ||||
|         'concurrency', | ||||
|         'structured concurrency', | ||||
|         'actor model', | ||||
|         'distributed', | ||||
|         'multiprocessing' | ||||
|     ], | ||||
|     classifiers=[ | ||||
|         "Development Status :: 3 - Alpha", | ||||
|         "Operating System :: POSIX :: Linux", | ||||
|         "Operating System :: Microsoft :: Windows", | ||||
|         "Framework :: Trio", | ||||
|         "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||
|         "Programming Language :: Python :: Implementation :: CPython", | ||||
|         "Programming Language :: Python :: 3 :: Only", | ||||
|         "Programming Language :: Python :: 3.10", | ||||
|         "Intended Audience :: Science/Research", | ||||
|         "Intended Audience :: Developers", | ||||
|         "Topic :: System :: Distributed Computing", | ||||
|     ], | ||||
| ) | ||||
|  | @ -1,27 +1,24 @@ | |||
| """ | ||||
| Top level of the testing suites! | ||||
| 
 | ||||
| ``tractor`` testing!! | ||||
| """ | ||||
| from __future__ import annotations | ||||
| import sys | ||||
| import subprocess | ||||
| import os | ||||
| import random | ||||
| import signal | ||||
| import platform | ||||
| import time | ||||
| 
 | ||||
| import pytest | ||||
| import tractor | ||||
| from tractor._testing import ( | ||||
|     examples_dir as examples_dir, | ||||
|     tractor_test as tractor_test, | ||||
|     expect_ctxc as expect_ctxc, | ||||
| ) | ||||
| 
 | ||||
| pytest_plugins: list[str] = [ | ||||
|     'pytester', | ||||
|     'tractor._testing.pytest', | ||||
| ] | ||||
| 
 | ||||
| # TODO: include wtv plugin(s) we build in `._testing.pytest`? | ||||
| pytest_plugins = ['pytester'] | ||||
| 
 | ||||
| # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | ||||
| if platform.system() == 'Windows': | ||||
|  | @ -33,11 +30,7 @@ else: | |||
|     _KILL_SIGNAL = signal.SIGKILL | ||||
|     _INT_SIGNAL = signal.SIGINT | ||||
|     _INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value | ||||
|     _PROC_SPAWN_WAIT = ( | ||||
|         0.6 | ||||
|         if sys.version_info < (3, 7) | ||||
|         else 0.4 | ||||
|     ) | ||||
|     _PROC_SPAWN_WAIT = 0.6 if sys.version_info < (3, 7) else 0.4 | ||||
| 
 | ||||
| 
 | ||||
| no_windows = pytest.mark.skipif( | ||||
|  | @ -46,12 +39,7 @@ no_windows = pytest.mark.skipif( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_addoption( | ||||
|     parser: pytest.Parser, | ||||
| ): | ||||
|     # ?TODO? should this be exposed from our `._testing.pytest` | ||||
|     # plugin or should we make it more explicit with `--tl` for | ||||
|     # tractor logging like we do in other client projects? | ||||
| def pytest_addoption(parser): | ||||
|     parser.addoption( | ||||
|         "--ll", | ||||
|         action="store", | ||||
|  | @ -59,10 +47,39 @@ def pytest_addoption( | |||
|         default='ERROR', help="logging level to set when testing" | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--spawn-backend", | ||||
|         action="store", | ||||
|         dest='spawn_backend', | ||||
|         default='trio', | ||||
|         help="Processing spawning backend to use for test run", | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--tpdb", "--debug-mode", | ||||
|         action="store_true", | ||||
|         dest='tractor_debug_mode', | ||||
|         # default=False, | ||||
|         help=( | ||||
|             'Enable a flag that can be used by tests to to set the ' | ||||
|             '`debug_mode: bool` for engaging the internal ' | ||||
|             'multi-proc debugger sys.' | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_configure(config): | ||||
|     backend = config.option.spawn_backend | ||||
|     tractor._spawn.try_set_start_method(backend) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def debug_mode(request): | ||||
|     return request.config.option.tractor_debug_mode | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session', autouse=True) | ||||
| def loglevel(request): | ||||
|     import tractor | ||||
|     orig = tractor.log._default_loglevel | ||||
|     level = tractor.log._default_loglevel = request.config.option.loglevel | ||||
|     tractor.log.get_console_log(level) | ||||
|  | @ -70,44 +87,100 @@ def loglevel(request): | |||
|     tractor.log._default_loglevel = orig | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def spawn_backend(request) -> str: | ||||
|     return request.config.option.spawn_backend | ||||
| 
 | ||||
| 
 | ||||
| _ci_env: bool = os.environ.get('CI', False) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def ci_env() -> bool: | ||||
|     ''' | ||||
|     Detect CI environment. | ||||
|     Detect CI envoirment. | ||||
| 
 | ||||
|     ''' | ||||
|     return _ci_env | ||||
| 
 | ||||
| 
 | ||||
| def sig_prog( | ||||
|     proc: subprocess.Popen, | ||||
|     sig: int, | ||||
|     canc_timeout: float = 0.1, | ||||
| ) -> int: | ||||
| # TODO: also move this to `._testing` for now? | ||||
| # -[ ] possibly generalize and re-use for multi-tree spawning | ||||
| #    along with the new stuff for multi-addrs in distribute_dis | ||||
| #    branch? | ||||
| # | ||||
| # choose randomly at import time | ||||
| _reg_addr: tuple[str, int] = ( | ||||
|     '127.0.0.1', | ||||
|     random.randint(1000, 9999), | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def reg_addr() -> tuple[str, int]: | ||||
| 
 | ||||
|     # globally override the runtime to the per-test-session-dynamic | ||||
|     # addr so that all tests never conflict with any other actor | ||||
|     # tree using the default. | ||||
|     from tractor import _root | ||||
|     _root._default_lo_addrs = [_reg_addr] | ||||
| 
 | ||||
|     return _reg_addr | ||||
| 
 | ||||
| 
 | ||||
| def pytest_generate_tests(metafunc): | ||||
|     spawn_backend = metafunc.config.option.spawn_backend | ||||
| 
 | ||||
|     if not spawn_backend: | ||||
|         # XXX some weird windows bug with `pytest`? | ||||
|         spawn_backend = 'trio' | ||||
| 
 | ||||
|     # TODO: maybe just use the literal `._spawn.SpawnMethodKey`? | ||||
|     assert spawn_backend in ( | ||||
|         'mp_spawn', | ||||
|         'mp_forkserver', | ||||
|         'trio', | ||||
|     ) | ||||
| 
 | ||||
|     # NOTE: used to be used to dyanmically parametrize tests for when | ||||
|     # you just passed --spawn-backend=`mp` on the cli, but now we expect | ||||
|     # that cli input to be manually specified, BUT, maybe we'll do | ||||
|     # something like this again in the future? | ||||
|     if 'start_method' in metafunc.fixturenames: | ||||
|         metafunc.parametrize("start_method", [spawn_backend], scope='module') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: a way to let test scripts (like from `examples/`) | ||||
| # guarantee they won't registry addr collide! | ||||
| # @pytest.fixture | ||||
| # def open_test_runtime( | ||||
| #     reg_addr: tuple, | ||||
| # ) -> AsyncContextManager: | ||||
| #     return partial( | ||||
| #         tractor.open_nursery, | ||||
| #         registry_addrs=[reg_addr], | ||||
| #     ) | ||||
| 
 | ||||
| 
 | ||||
| def sig_prog(proc, sig): | ||||
|     "Kill the actor-process with ``sig``." | ||||
|     proc.send_signal(sig) | ||||
|     time.sleep(canc_timeout) | ||||
|     time.sleep(0.1) | ||||
|     if not proc.poll(): | ||||
|         # TODO: why sometimes does SIGINT not work on teardown? | ||||
|         # seems to happen only when trace logging enabled? | ||||
|         proc.send_signal(_KILL_SIGNAL) | ||||
|     ret: int = proc.wait() | ||||
|     ret = proc.wait() | ||||
|     assert ret | ||||
| 
 | ||||
| 
 | ||||
| # TODO: factor into @cm and move to `._testing`? | ||||
| @pytest.fixture | ||||
| def daemon( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
|     tpt_proto: str, | ||||
| 
 | ||||
| ) -> subprocess.Popen: | ||||
| ): | ||||
|     ''' | ||||
|     Run a daemon root actor as a separate actor-process tree and | ||||
|     "remote registrar" for discovery-protocol related tests. | ||||
|  | @ -118,100 +191,28 @@ def daemon( | |||
|         loglevel: str = 'info' | ||||
| 
 | ||||
|     code: str = ( | ||||
|         "import tractor; " | ||||
|         "tractor.run_daemon([], " | ||||
|         "registry_addrs={reg_addrs}, " | ||||
|         "debug_mode={debug_mode}, " | ||||
|         "loglevel={ll})" | ||||
|             "import tractor; " | ||||
|             "tractor.run_daemon([], registry_addrs={reg_addrs}, loglevel={ll})" | ||||
|     ).format( | ||||
|         reg_addrs=str([reg_addr]), | ||||
|         ll="'{}'".format(loglevel) if loglevel else None, | ||||
|         debug_mode=debug_mode, | ||||
|     ) | ||||
|     cmd: list[str] = [ | ||||
|         sys.executable, | ||||
|         '-c', code, | ||||
|     ] | ||||
|     # breakpoint() | ||||
|     kwargs = {} | ||||
|     if platform.system() == 'Windows': | ||||
|         # without this, tests hang on windows forever | ||||
|         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP | ||||
| 
 | ||||
|     proc: subprocess.Popen = testdir.popen( | ||||
|     proc = testdir.popen( | ||||
|         cmd, | ||||
|         stdout=subprocess.PIPE, | ||||
|         stderr=subprocess.PIPE, | ||||
|         **kwargs, | ||||
|     ) | ||||
| 
 | ||||
|     # UDS sockets are **really** fast to bind()/listen()/connect() | ||||
|     # so it's often required that we delay a bit more starting | ||||
|     # the first actor-tree.. | ||||
|     if tpt_proto == 'uds': | ||||
|         global _PROC_SPAWN_WAIT | ||||
|         _PROC_SPAWN_WAIT = 0.6 | ||||
| 
 | ||||
|     time.sleep(_PROC_SPAWN_WAIT) | ||||
| 
 | ||||
|     assert not proc.returncode | ||||
|     time.sleep(_PROC_SPAWN_WAIT) | ||||
|     yield proc | ||||
|     sig_prog(proc, _INT_SIGNAL) | ||||
| 
 | ||||
|     # XXX! yeah.. just be reaaal careful with this bc sometimes it | ||||
|     # can lock up on the `_io.BufferedReader` and hang.. | ||||
|     stderr: str = proc.stderr.read().decode() | ||||
|     if stderr: | ||||
|         print( | ||||
|             f'Daemon actor tree produced STDERR:\n' | ||||
|             f'{proc.args}\n' | ||||
|             f'\n' | ||||
|             f'{stderr}\n' | ||||
|         ) | ||||
|     if proc.returncode != -2: | ||||
|         raise RuntimeError( | ||||
|             'Daemon actor tree failed !?\n' | ||||
|             f'{proc.args}\n' | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| # @pytest.fixture(autouse=True) | ||||
| # def shared_last_failed(pytestconfig): | ||||
| #     val = pytestconfig.cache.get("example/value", None) | ||||
| #     breakpoint() | ||||
| #     if val is None: | ||||
| #         pytestconfig.cache.set("example/value", val) | ||||
| #     return val | ||||
| 
 | ||||
| 
 | ||||
| # TODO: a way to let test scripts (like from `examples/`) | ||||
| # guarantee they won't `registry_addrs` collide! | ||||
| # -[ ] maybe use some kinda standard `def main()` arg-spec that | ||||
| #     we can introspect from a fixture that is called from the test | ||||
| #     body? | ||||
| # -[ ] test and figure out typing for below prototype! Bp | ||||
| # | ||||
| # @pytest.fixture | ||||
| # def set_script_runtime_args( | ||||
| #     reg_addr: tuple, | ||||
| # ) -> Callable[[...], None]: | ||||
| 
 | ||||
| #     def import_n_partial_in_args_n_triorun( | ||||
| #         script: Path,  # under examples? | ||||
| #         **runtime_args, | ||||
| #     ) -> Callable[[], Any]:  # a `partial`-ed equiv of `trio.run()` | ||||
| 
 | ||||
| #         # NOTE, below is taken from | ||||
| #         # `.test_advanced_faults.test_ipc_channel_break_during_stream` | ||||
| #         mod: ModuleType = import_path( | ||||
| #             examples_dir() / 'advanced_faults' | ||||
| #             / 'ipc_failure_during_stream.py', | ||||
| #             root=examples_dir(), | ||||
| #             consider_namespace_packages=False, | ||||
| #         ) | ||||
| #         return partial( | ||||
| #             trio.run, | ||||
| #             partial( | ||||
| #                 mod.main, | ||||
| #                 **runtime_args, | ||||
| #             ) | ||||
| #         ) | ||||
| #     return import_n_partial_in_args_n_triorun | ||||
|  |  | |||
|  | @ -2,11 +2,9 @@ | |||
| `tractor.devx.*` tooling sub-pkg test space. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import time | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
|  | @ -18,32 +16,24 @@ from pexpect.spawnbase import SpawnBase | |||
| from tractor._testing import ( | ||||
|     mk_cmd, | ||||
| ) | ||||
| from tractor.devx.debug import ( | ||||
| from tractor.devx._debug import ( | ||||
|     _pause_msg as _pause_msg, | ||||
|     _crash_msg as _crash_msg, | ||||
|     _repl_fail_msg as _repl_fail_msg, | ||||
|     _ctlc_ignore_header as _ctlc_ignore_header, | ||||
| ) | ||||
| from ..conftest import ( | ||||
| from conftest import ( | ||||
|     _ci_env, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from pexpect import pty_spawn | ||||
| 
 | ||||
| 
 | ||||
| # a fn that sub-instantiates a `pexpect.spawn()` | ||||
| # and returns it. | ||||
| type PexpectSpawner = Callable[[str], pty_spawn.spawn] | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     start_method, | ||||
|     testdir: pytest.Testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
| ) -> PexpectSpawner: | ||||
| ) -> Callable[[str], None]: | ||||
|     ''' | ||||
|     Use the `pexpect` module shipped via `testdir.spawn()` to | ||||
|     run an `./examples/..` script by name. | ||||
|  | @ -54,36 +44,20 @@ def spawn( | |||
|             '`pexpect` based tests only supported on `trio` backend' | ||||
|         ) | ||||
| 
 | ||||
|     def unset_colors(): | ||||
|         ''' | ||||
|         Python 3.13 introduced colored tracebacks that break patt | ||||
|         matching, | ||||
| 
 | ||||
|         https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS | ||||
|         https://docs.python.org/3/using/cmdline.html#using-on-controlling-color | ||||
| 
 | ||||
|         ''' | ||||
|         import os | ||||
|         os.environ['PYTHON_COLORS'] = '0' | ||||
| 
 | ||||
|     def _spawn( | ||||
|         cmd: str, | ||||
|         **mkcmd_kwargs, | ||||
|     ) -> pty_spawn.spawn: | ||||
|         unset_colors() | ||||
|     ): | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd( | ||||
|                 cmd, | ||||
|                 **mkcmd_kwargs, | ||||
|             ), | ||||
|             expect_timeout=3, | ||||
|             # preexec_fn=unset_colors, | ||||
|             # ^TODO? get `pytest` core to expose underlying | ||||
|             # `pexpect.spawn()` stuff? | ||||
|         ) | ||||
| 
 | ||||
|     # such that test-dep can pass input script name. | ||||
|     return _spawn  # the `PexpectSpawner`, type alias. | ||||
|     return _spawn | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|  | @ -109,19 +83,11 @@ def ctlc( | |||
|                 'https://github.com/goodboy/tractor/issues/320' | ||||
|             ) | ||||
| 
 | ||||
|         if mark.name == 'ctlcs_bish': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n' | ||||
|                 f'The test and/or underlying example script can *sometimes* run fine ' | ||||
|                 f'locally but more then likely until the cpython peeps get their sh#$ together, ' | ||||
|                 f'this test will definitely not behave like `trio` under SIGINT..\n' | ||||
|             ) | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # the the added color-char encoding.. | ||||
|         from tractor.devx.debug import TractorConfig | ||||
|         from tractor.devx._debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
|  |  | |||
|  | @ -1,23 +1,19 @@ | |||
| """ | ||||
| That "native" debug mode better work! | ||||
| 
 | ||||
| All these tests can be understood (somewhat) by running the | ||||
| equivalent `examples/debugging/` scripts manually. | ||||
| All these tests can be understood (somewhat) by running the equivalent | ||||
| `examples/debugging/` scripts manually. | ||||
| 
 | ||||
| TODO: | ||||
|   - none of these tests have been run successfully on windows yet but | ||||
|     there's been manual testing that verified it works. | ||||
|   - wonder if any of it'll work on OS X? | ||||
|     - none of these tests have been run successfully on windows yet but | ||||
|       there's been manual testing that verified it works. | ||||
|     - wonder if any of it'll work on OS X? | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| import itertools | ||||
| import platform | ||||
| import time | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|  | @ -38,9 +34,6 @@ from .conftest import ( | |||
|     assert_before, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..conftest import PexpectSpawner | ||||
| 
 | ||||
| # TODO: The next great debugger audit could be done by you! | ||||
| # - recurrent entry to breakpoint() from single actor *after* and an | ||||
| #   error in another task? | ||||
|  | @ -316,12 +309,10 @@ def test_subactor_breakpoint( | |||
|     child.expect(EOF) | ||||
| 
 | ||||
|     assert in_prompt_msg( | ||||
|         child, [ | ||||
|         'RemoteActorError:', | ||||
|         child, | ||||
|         ['RemoteActorError:', | ||||
|          "('breakpoint_forever'", | ||||
|          'bdb.BdbQuit', | ||||
|         ], | ||||
|         pause_on_false=True, | ||||
|          'bdb.BdbQuit',] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -534,7 +525,7 @@ def test_multi_daemon_subactors( | |||
|     # now the root actor won't clobber the bp_forever child | ||||
|     # during it's first access to the debug lock, but will instead | ||||
|     # wait for the lock to release, by the edge triggered | ||||
|     # ``devx.debug.Lock.no_remote_has_tty`` event before sending cancel messages | ||||
|     # ``devx._debug.Lock.no_remote_has_tty`` event before sending cancel messages | ||||
|     # (via portals) to its underlings B) | ||||
| 
 | ||||
|     # at some point here there should have been some warning msg from | ||||
|  | @ -925,7 +916,6 @@ def test_post_mortem_api( | |||
|             "<Task 'name_error'", | ||||
|             "NameError", | ||||
|             "('child'", | ||||
|             'getattr(doggypants)',  # exc-LoC | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|  | @ -942,8 +932,8 @@ def test_post_mortem_api( | |||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "NameError", | ||||
|             "tractor.post_mortem()", | ||||
|             "src_uid=('child'", | ||||
|             "tractor.post_mortem()",  # in `main()`-LoC | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|  | @ -961,10 +951,6 @@ def test_post_mortem_api( | |||
|             "('root'", | ||||
|             "NameError", | ||||
|             "src_uid=('child'", | ||||
| 
 | ||||
|             # raising line in `main()` but from crash-handling | ||||
|             # in `tractor.open_nursery()`. | ||||
|             'async with p.open_context(name_error) as (ctx, first):', | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|  | @ -1074,136 +1060,6 @@ def test_shield_pause( | |||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'quit_early', [False, True] | ||||
| ) | ||||
| def test_ctxep_pauses_n_maybe_ipc_breaks( | ||||
|     spawn: PexpectSpawner, | ||||
|     quit_early: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Audit generator embedded `.pause()`es from within a `@context` | ||||
|     endpoint with a chan close at the end, requiring that ctl-c is | ||||
|     mashed and zombie reaper kills sub with no hangs. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('subactor_bp_in_ctx') | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # 3 iters for the `gen()` pause-points | ||||
|     for i in range(3): | ||||
|         assert_before( | ||||
|             child, | ||||
|             [ | ||||
|                 _pause_msg, | ||||
|                 "('bp_boi'",  # actor name | ||||
|                 "<Task 'just_bp'",  # task name | ||||
|             ] | ||||
|         ) | ||||
|         if ( | ||||
|             i == 1 | ||||
|             and | ||||
|             quit_early | ||||
|         ): | ||||
|             child.sendline('q') | ||||
|             child.expect(PROMPT) | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 ["tractor._exceptions.RemoteActorError: remote task raised a 'BdbQuit'", | ||||
|                  "bdb.BdbQuit", | ||||
|                  "('bp_boi'", | ||||
|                 ] | ||||
|             ) | ||||
|             child.sendline('c') | ||||
|             child.expect(EOF) | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 ["tractor._exceptions.RemoteActorError: remote task raised a 'BdbQuit'", | ||||
|                  "bdb.BdbQuit", | ||||
|                  "('bp_boi'", | ||||
|                 ] | ||||
|             ) | ||||
|             break  # end-of-test | ||||
| 
 | ||||
|         child.sendline('c') | ||||
|         try: | ||||
|             child.expect(PROMPT) | ||||
|         except TIMEOUT: | ||||
|             # no prompt since we hang due to IPC chan purposely | ||||
|             # closed so verify we see error reporting as well as | ||||
|             # a failed crash-REPL request msg and can CTL-c our way | ||||
|             # out. | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 ['peer IPC channel closed abruptly?', | ||||
|                  'another task closed this fd', | ||||
|                  'Debug lock request was CANCELLED?', | ||||
|                  "TransportClosed: 'MsgpackUDSStream' was already closed locally ?",] | ||||
| 
 | ||||
|                 # XXX races on whether these show/hit? | ||||
|                  # 'Failed to REPl via `_pause()` You called `tractor.pause()` from an already cancelled scope!', | ||||
|                  # 'AssertionError', | ||||
|             ) | ||||
|             # OSc(ancel) the hanging tree | ||||
|             do_ctlc( | ||||
|                 child=child, | ||||
|                 expect_prompt=False, | ||||
|             ) | ||||
|             child.expect(EOF) | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 ['KeyboardInterrupt'], | ||||
|             ) | ||||
| 
 | ||||
| 
 | ||||
| def test_crash_handling_within_cancelled_root_actor( | ||||
|     spawn: PexpectSpawner, | ||||
| ): | ||||
|     ''' | ||||
|     Ensure that when only a root-actor is started via `open_root_actor()` | ||||
|     we can crash-handle in debug-mode despite self-cancellation. | ||||
| 
 | ||||
|     More-or-less ensures we conditionally shield the pause in | ||||
|     `._root.open_root_actor()`'s `await debug._maybe_enter_pm()` | ||||
|     call. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('root_self_cancelled_w_error') | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "Actor.cancel_soon()` was called!", | ||||
|             "root cancelled", | ||||
|             _pause_msg, | ||||
|             "('root'",  # actor name | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('root'",  # actor name | ||||
|             "AssertionError", | ||||
|             "assert 0", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "AssertionError", | ||||
|             "assert 0", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: better error for "non-ideal" usage from the root actor. | ||||
| # -[ ] if called from an async scope emit a message that suggests | ||||
| #    using `await tractor.pause()` instead since it's less overhead | ||||
|  |  | |||
|  | @ -6,9 +6,6 @@ All these tests can be understood (somewhat) by running the | |||
| equivalent `examples/debugging/` scripts manually. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| # from functools import partial | ||||
| # import itertools | ||||
| import time | ||||
|  | @ -18,7 +15,7 @@ import time | |||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
|     # TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
|  | @ -35,23 +32,7 @@ from .conftest import ( | |||
|     # _repl_fail_msg, | ||||
| ) | ||||
| 
 | ||||
| @cm | ||||
| def maybe_expect_timeout( | ||||
|     ctlc: bool = False, | ||||
| ) -> None: | ||||
|     try: | ||||
|         yield | ||||
|     except TIMEOUT: | ||||
|         # breakpoint() | ||||
|         if ctlc: | ||||
|             pytest.xfail( | ||||
|                 'Some kinda redic threading SIGINT bug i think?\n' | ||||
|                 'See the notes in `examples/debugging/sync_bp.py`..\n' | ||||
|             ) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_pause_from_sync( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
|  | @ -86,10 +67,10 @@ def test_pause_from_sync( | |||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # XXX shouldn't see gb loaded message with PDB loglevel! | ||||
|     # assert not in_prompt_msg( | ||||
|     #     child, | ||||
|     #     ['`greenback` portal opened!'], | ||||
|     # ) | ||||
|     assert not in_prompt_msg( | ||||
|         child, | ||||
|         ['`greenback` portal opened!'], | ||||
|     ) | ||||
|     # should be same root task | ||||
|     assert_before( | ||||
|         child, | ||||
|  | @ -181,14 +162,7 @@ def test_pause_from_sync( | |||
|             ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # XXX TODO, weird threading bug it seems despite the | ||||
|     # `abandon_on_cancel: bool` setting to | ||||
|     # `trio.to_thread.run_sync()`.. | ||||
|     with maybe_expect_timeout( | ||||
|         ctlc=ctlc, | ||||
|     ): | ||||
|         child.expect(EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def expect_any_of( | ||||
|  | @ -246,10 +220,8 @@ def expect_any_of( | |||
|     return expected_patts | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_sync_pause_from_aio_task( | ||||
|     spawn, | ||||
| 
 | ||||
|     ctlc: bool | ||||
|     # ^TODO, fix for `asyncio`!! | ||||
| ): | ||||
|  | @ -298,12 +270,10 @@ def test_sync_pause_from_aio_task( | |||
|         # error raised in `asyncio.Task` | ||||
|         "raise ValueError('asyncio side error!')": [ | ||||
|             _crash_msg, | ||||
|             'return await chan.receive()',  # `.to_asyncio` impl internals in tb | ||||
|             "<Task 'trio_ctx'", | ||||
|             "@ ('aio_daemon'", | ||||
|             "ValueError: asyncio side error!", | ||||
| 
 | ||||
|             # XXX, we no longer show this frame by default! | ||||
|             # 'return await chan.receive()',  # `.to_asyncio` impl internals in tb | ||||
|         ], | ||||
| 
 | ||||
|         # parent-side propagation via actor-nursery/portal | ||||
|  | @ -355,7 +325,6 @@ def test_sync_pause_from_aio_task( | |||
|         ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     # with maybe_expect_timeout(): | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -13,37 +13,18 @@ TODO: | |||
|   when debugging a problem inside the stack vs. in their app. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| import os | ||||
| import signal | ||||
| import time | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     expect, | ||||
|     assert_before, | ||||
|     in_prompt_msg, | ||||
|     PROMPT, | ||||
|     _pause_msg, | ||||
|     # in_prompt_msg, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     # TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..conftest import PexpectSpawner | ||||
| 
 | ||||
| 
 | ||||
| def test_shield_pause( | ||||
|     spawn: PexpectSpawner, | ||||
|     spawn, | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `tractor.pause()/.post_mortem()` API works inside an | ||||
|  | @ -66,39 +47,41 @@ def test_shield_pause( | |||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     script_pid: int = child.pid | ||||
|     print( | ||||
|         f'Sending SIGUSR1 to {script_pid}\n' | ||||
|         f'(kill -s SIGUSR1 {script_pid})\n' | ||||
|         'Sending SIGUSR1 to see a tree-trace!', | ||||
|     ) | ||||
|     os.kill( | ||||
|         script_pid, | ||||
|         child.pid, | ||||
|         signal.SIGUSR1, | ||||
|     ) | ||||
|     time.sleep(0.2) | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "end-of-\('root'", | ||||
|         "------ \('root', ", | ||||
|     ) | ||||
| 
 | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # 'Srying to dump `stackscope` tree..', | ||||
|             # 'Dumping `stackscope` tree for actor', | ||||
|             'Trying to dump `stackscope` tree..', | ||||
|             'Dumping `stackscope` tree for actor', | ||||
|             "('root'",  # uid line | ||||
| 
 | ||||
|             # TODO!? this used to show? | ||||
|             # -[ ] mk reproducable for @oremanj? | ||||
|             # | ||||
|             # parent block point (non-shielded) | ||||
|             # 'await trio.sleep_forever()  # in root', | ||||
|             'await trio.sleep_forever()  # in root', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # expect( | ||||
|     #     child, | ||||
|     #     # relay to the sub should be reported | ||||
|     #     'Relaying `SIGUSR1`[10] to sub-actor', | ||||
|     # ) | ||||
| 
 | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "end-of-\('hanger'", | ||||
|         "------ \('hanger', ", | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|  | @ -108,11 +91,11 @@ def test_shield_pause( | |||
| 
 | ||||
|             "('hanger'",  # uid line | ||||
| 
 | ||||
|             # TODO!? SEE ABOVE | ||||
|             # hanger LOC where it's shield-halted | ||||
|             # 'await trio.sleep_forever()  # in subactor', | ||||
|             'await trio.sleep_forever()  # in subactor', | ||||
|         ] | ||||
|     ) | ||||
|     # breakpoint() | ||||
| 
 | ||||
|     # simulate the user sending a ctl-c to the hanging program. | ||||
|     # this should result in the terminator kicking in since | ||||
|  | @ -121,11 +104,9 @@ def test_shield_pause( | |||
|         child.pid, | ||||
|         signal.SIGINT, | ||||
|     ) | ||||
|     from tractor._supervise import _shutdown_msg | ||||
|     expect( | ||||
|         child, | ||||
|         # 'Shutting down actor runtime', | ||||
|         _shutdown_msg, | ||||
|         'Shutting down actor runtime', | ||||
|         timeout=6, | ||||
|     ) | ||||
|     assert_before( | ||||
|  | @ -137,170 +118,3 @@ def test_shield_pause( | |||
|             "'--uid', \"('hanger',", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def test_breakpoint_hook_restored( | ||||
|     spawn: PexpectSpawner, | ||||
| ): | ||||
|     ''' | ||||
|     Ensures our actor runtime sets a custom `breakpoint()` hook | ||||
|     on open then restores the stdlib's default on close. | ||||
| 
 | ||||
|     The hook state validation is done via `assert`s inside the | ||||
|     invoked script with only `breakpoint()` (not `tractor.pause()`) | ||||
|     calls used. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('restore_builtin_breakpoint') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     try: | ||||
|         assert_before( | ||||
|             child, | ||||
|             [ | ||||
|                 _pause_msg, | ||||
|                 "<Task '__main__.main'", | ||||
|                 "('root'", | ||||
|                 "first bp, tractor hook set", | ||||
|             ] | ||||
|         ) | ||||
|     # XXX if the above raises `AssertionError`, without sending | ||||
|     # the final 'continue' cmd to the REPL-active sub-process, | ||||
|     # we'll hang waiting for that pexpect instance to terminate.. | ||||
|     finally: | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "last bp, stdlib hook restored", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # since the stdlib hook was already restored there should be NO | ||||
|     # `tractor` `log.pdb()` content from console! | ||||
|     assert not in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ], | ||||
|     ) | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| _to_raise = Exception('Triggering a crash') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'to_raise', | ||||
|     [ | ||||
|         None, | ||||
|         _to_raise, | ||||
|         RuntimeError('Never crash handle this!'), | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_on_exit', | ||||
|     [ | ||||
|         True, | ||||
|         [type(_to_raise)], | ||||
|         False, | ||||
|     ] | ||||
| ) | ||||
| def test_crash_handler_cms( | ||||
|     debug_mode: bool, | ||||
|     to_raise: Exception, | ||||
|     raise_on_exit: bool|list[Exception], | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `.devx.open_crash_handler()` API(s) by also | ||||
|     (conveniently enough) tesing its `repl_fixture: ContextManager` | ||||
|     param support which for this suite allows use to avoid use of | ||||
|     a `pexpect`-style-test since we use the fixture to avoid actually | ||||
|     entering `PdbpREPL.iteract()` :smirk: | ||||
| 
 | ||||
|     ''' | ||||
|     import tractor | ||||
|     # import trio | ||||
| 
 | ||||
|     # state flags | ||||
|     repl_acquired: bool = False | ||||
|     repl_released: bool = False | ||||
| 
 | ||||
|     @cm | ||||
|     def block_repl_ux( | ||||
|         repl: tractor.devx.debug.PdbREPL, | ||||
|         maybe_bxerr: ( | ||||
|             tractor.devx._debug.BoxedMaybeException | ||||
|             |None | ||||
|         ) = None, | ||||
|         enter_repl: bool = True, | ||||
| 
 | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Set pre/post-REPL state vars and bypass actual conole | ||||
|         interaction. | ||||
| 
 | ||||
|         ''' | ||||
|         nonlocal repl_acquired, repl_released | ||||
| 
 | ||||
|         # task: trio.Task = trio.lowlevel.current_task() | ||||
|         # print(f'pre-REPL active_task={task.name}') | ||||
| 
 | ||||
|         print('pre-REPL') | ||||
|         repl_acquired = True | ||||
|         yield False  # never actually .interact() | ||||
|         print('post-REPL') | ||||
|         repl_released = True | ||||
| 
 | ||||
|     try: | ||||
|         # TODO, with runtime's `debug_mode` setting | ||||
|         # -[ ] need to open runtime tho obvi.. | ||||
|         # | ||||
|         # with tractor.devx.maybe_open_crash_handler( | ||||
|         #     pdb=True, | ||||
| 
 | ||||
|         with tractor.devx.open_crash_handler( | ||||
|             raise_on_exit=raise_on_exit, | ||||
|             repl_fixture=block_repl_ux | ||||
|         ) as bxerr: | ||||
|             if to_raise is not None: | ||||
|                 raise to_raise | ||||
| 
 | ||||
|     except Exception as _exc: | ||||
|         exc = _exc | ||||
|         if ( | ||||
|             raise_on_exit is True | ||||
|             or | ||||
|             type(to_raise) in raise_on_exit | ||||
|         ): | ||||
|             assert ( | ||||
|                 exc | ||||
|                 is | ||||
|                 to_raise | ||||
|                 is | ||||
|                 bxerr.value | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|             raise | ||||
|     else: | ||||
|         assert ( | ||||
|             to_raise is None | ||||
|             or | ||||
|             not raise_on_exit | ||||
|             or | ||||
|             type(to_raise) not in raise_on_exit | ||||
|         ) | ||||
|         assert bxerr.value is to_raise | ||||
| 
 | ||||
|     assert bxerr.raise_on_exit == raise_on_exit | ||||
| 
 | ||||
|     if to_raise is not None: | ||||
|         assert repl_acquired | ||||
|         assert repl_released | ||||
|  |  | |||
|  | @ -1,4 +0,0 @@ | |||
| ''' | ||||
| `tractor.ipc` subsystem(s)/unit testing suites. | ||||
| 
 | ||||
| ''' | ||||
|  | @ -1,114 +0,0 @@ | |||
| ''' | ||||
| Unit-ish tests for specific IPC transport protocol backends. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from pathlib import Path | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     _state, | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def bindspace_dir_str() -> str: | ||||
| 
 | ||||
|     rt_dir: Path = tractor._state.get_rt_dir() | ||||
|     bs_dir: Path = rt_dir / 'doggy' | ||||
|     bs_dir_str: str = str(bs_dir) | ||||
|     assert not bs_dir.is_dir() | ||||
| 
 | ||||
|     yield bs_dir_str | ||||
| 
 | ||||
|     # delete it on suite teardown. | ||||
|     # ?TODO? should we support this internally | ||||
|     # or is leaking it ok? | ||||
|     if bs_dir.is_dir(): | ||||
|         bs_dir.rmdir() | ||||
| 
 | ||||
| 
 | ||||
| def test_uds_bindspace_created_implicitly( | ||||
|     debug_mode: bool, | ||||
|     bindspace_dir_str: str, | ||||
| ): | ||||
|     registry_addr: tuple = ( | ||||
|         f'{bindspace_dir_str}', | ||||
|         'registry@doggy.sock', | ||||
|     ) | ||||
|     bs_dir_str: str = registry_addr[0] | ||||
| 
 | ||||
|     # XXX, ensure bindspace-dir DNE beforehand! | ||||
|     assert not Path(bs_dir_str).is_dir() | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=['uds'], | ||||
|             registry_addrs=[registry_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as _an: | ||||
| 
 | ||||
|             # XXX MUST be created implicitly by | ||||
|             # `.ipc._uds.start_listener()`! | ||||
|             assert Path(bs_dir_str).is_dir() | ||||
| 
 | ||||
|             root: Actor = tractor.current_actor() | ||||
|             assert root.is_registrar | ||||
| 
 | ||||
|             assert registry_addr in root.reg_addrs | ||||
|             assert ( | ||||
|                 registry_addr | ||||
|                 in | ||||
|                 _state._runtime_vars['_registry_addrs'] | ||||
|             ) | ||||
|             assert ( | ||||
|                 _addr.wrap_address(registry_addr) | ||||
|                 in | ||||
|                 root.registry_addrs | ||||
|             ) | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_uds_double_listen_raises_connerr( | ||||
|     debug_mode: bool, | ||||
|     bindspace_dir_str: str, | ||||
| ): | ||||
|     registry_addr: tuple = ( | ||||
|         f'{bindspace_dir_str}', | ||||
|         'registry@doggy.sock', | ||||
|     ) | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=['uds'], | ||||
|             registry_addrs=[registry_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as _an: | ||||
| 
 | ||||
|             # runtime up | ||||
|             root: Actor = tractor.current_actor() | ||||
| 
 | ||||
|             from tractor.ipc._uds import ( | ||||
|                 start_listener, | ||||
|                 UDSAddress, | ||||
|             ) | ||||
|             ya_bound_addr: UDSAddress = root.registry_addrs[0] | ||||
|             try: | ||||
|                 await start_listener( | ||||
|                     addr=ya_bound_addr, | ||||
|                 ) | ||||
|             except ConnectionError as connerr: | ||||
|                 assert type(src_exc := connerr.__context__) is OSError | ||||
|                 assert 'Address already in use' in src_exc.args | ||||
|                 # complete, exit test. | ||||
| 
 | ||||
|             else: | ||||
|                 pytest.fail('It dint raise a connerr !?') | ||||
| 
 | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,95 +0,0 @@ | |||
| ''' | ||||
| Verify the `enable_transports` param drives various | ||||
| per-root/sub-actor IPC endpoint/server settings. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     Portal, | ||||
|     ipc, | ||||
|     msg, | ||||
|     _state, | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| @tractor.context | ||||
| async def chk_tpts( | ||||
|     ctx: tractor.Context, | ||||
|     tpt_proto_key: str, | ||||
| ): | ||||
|     rtvars = _state._runtime_vars | ||||
|     assert ( | ||||
|         tpt_proto_key | ||||
|         in | ||||
|         rtvars['_enable_tpts'] | ||||
|     ) | ||||
|     actor: Actor = tractor.current_actor() | ||||
|     spec: msg.types.SpawnSpec = actor._spawn_spec | ||||
|     assert spec._runtime_vars == rtvars | ||||
| 
 | ||||
|     # ensure individual IPC ep-addr types | ||||
|     serv: ipc._server.Server = actor.ipc_server | ||||
|     addr: ipc._types.Address | ||||
|     for addr in serv.addrs: | ||||
|         assert addr.proto_key == tpt_proto_key | ||||
| 
 | ||||
|     # Actor delegate-props enforcement | ||||
|     assert ( | ||||
|         actor.accept_addrs | ||||
|         == | ||||
|         serv.accept_addrs | ||||
|     ) | ||||
| 
 | ||||
|     await ctx.started(serv.accept_addrs) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, parametrize over mis-matched-proto-typed `registry_addrs` | ||||
| # since i seems to work in `piker` but not exactly sure if both tcp | ||||
| # & uds are being deployed then? | ||||
| # | ||||
| @pytest.mark.parametrize( | ||||
|     'tpt_proto_key', | ||||
|     ['tcp', 'uds'], | ||||
|     ids=lambda item: f'ipc_tpt={item!r}' | ||||
| ) | ||||
| def test_root_passes_tpt_to_sub( | ||||
|     tpt_proto_key: str, | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             enable_transports=[tpt_proto_key], | ||||
|             registry_addrs=[reg_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
| 
 | ||||
|             assert ( | ||||
|                 tpt_proto_key | ||||
|                 in | ||||
|                 _state._runtime_vars['_enable_tpts'] | ||||
|             ) | ||||
| 
 | ||||
|             ptl: Portal = await an.start_actor( | ||||
|                 name='sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ptl.open_context( | ||||
|                 chk_tpts, | ||||
|                 tpt_proto_key=tpt_proto_key, | ||||
|             ) as (ctx, accept_addrs): | ||||
| 
 | ||||
|                 uw_addr: tuple | ||||
|                 for uw_addr in accept_addrs: | ||||
|                     addr = _addr.wrap_address(uw_addr) | ||||
|                     assert addr.is_valid | ||||
| 
 | ||||
|             # shudown sub-actor(s) | ||||
|             await an.cancel() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -1,72 +0,0 @@ | |||
| ''' | ||||
| High-level `.ipc._server` unit tests. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| from tractor import ( | ||||
|     devx, | ||||
|     ipc, | ||||
|     log, | ||||
| ) | ||||
| from tractor._testing.addr import ( | ||||
|     get_rando_addr, | ||||
| ) | ||||
| # TODO, use/check-roundtripping with some of these wrapper types? | ||||
| # | ||||
| # from .._addr import Address | ||||
| # from ._chan import Channel | ||||
| # from ._transport import MsgTransport | ||||
| # from ._uds import UDSAddress | ||||
| # from ._tcp import TCPAddress | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     '_tpt_proto', | ||||
|     ['uds', 'tcp'] | ||||
| ) | ||||
| def test_basic_ipc_server( | ||||
|     _tpt_proto: str, | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
| ): | ||||
| 
 | ||||
|     # so we see the socket-listener reporting on console | ||||
|     log.get_console_log("INFO") | ||||
| 
 | ||||
|     rando_addr: tuple = get_rando_addr( | ||||
|         tpt_proto=_tpt_proto, | ||||
|     ) | ||||
|     async def main(): | ||||
|         async with ipc._server.open_ipc_server() as server: | ||||
| 
 | ||||
|             assert ( | ||||
|                 server._parent_tn | ||||
|                 and | ||||
|                 server._parent_tn is server._stream_handler_tn | ||||
|             ) | ||||
|             assert server._no_more_peers.is_set() | ||||
| 
 | ||||
|             eps: list[ipc._server.Endpoint] = await server.listen_on( | ||||
|                 accept_addrs=[rando_addr], | ||||
|                 stream_handler_nursery=None, | ||||
|             ) | ||||
|             assert ( | ||||
|                 len(eps) == 1 | ||||
|                 and | ||||
|                 (ep := eps[0])._listener | ||||
|                 and | ||||
|                 not ep.peer_tpts | ||||
|             ) | ||||
| 
 | ||||
|             server._parent_tn.cancel_scope.cancel() | ||||
| 
 | ||||
|         # !TODO! actually make a bg-task connection from a client | ||||
|         # using `ipc._chan._connect_chan()` | ||||
| 
 | ||||
|     with devx.maybe_open_crash_handler( | ||||
|         pdb=debug_mode, | ||||
|     ): | ||||
|         trio.run(main) | ||||
|  | @ -3,6 +3,7 @@ Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la | |||
| cancelacion?.. | ||||
| 
 | ||||
| ''' | ||||
| import itertools | ||||
| from functools import partial | ||||
| from types import ModuleType | ||||
| 
 | ||||
|  | @ -10,9 +11,6 @@ import pytest | |||
| from _pytest.pathlib import import_path | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     TransportClosed, | ||||
| ) | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
|     break_ipc, | ||||
|  | @ -77,7 +75,6 @@ def test_ipc_channel_break_during_stream( | |||
|     spawn_backend: str, | ||||
|     ipc_break: dict|None, | ||||
|     pre_aclose_msgstream: bool, | ||||
|     tpt_proto: str, | ||||
| ): | ||||
|     ''' | ||||
|     Ensure we can have an IPC channel break its connection during | ||||
|  | @ -95,7 +92,7 @@ def test_ipc_channel_break_during_stream( | |||
|         # non-`trio` spawners should never hit the hang condition that | ||||
|         # requires the user to do ctl-c to cancel the actor tree. | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = TransportClosed | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
| 
 | ||||
|     mod: ModuleType = import_path( | ||||
|         examples_dir() / 'advanced_faults' | ||||
|  | @ -108,8 +105,6 @@ def test_ipc_channel_break_during_stream( | |||
|     # period" wherein the user eventually hits ctl-c to kill the | ||||
|     # root-actor tree. | ||||
|     expect_final_exc: BaseException = KeyboardInterrupt | ||||
|     expect_final_cause: BaseException|None = None | ||||
| 
 | ||||
|     if ( | ||||
|         # only expect EoC if trans is broken on the child side, | ||||
|         ipc_break['break_child_ipc_after'] is not False | ||||
|  | @ -144,9 +139,6 @@ def test_ipc_channel_break_during_stream( | |||
|         # a user sending ctl-c by raising a KBI. | ||||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
|             if tpt_proto == 'uds': | ||||
|                 expect_final_exc = TransportClosed | ||||
|                 expect_final_cause = trio.BrokenResourceError | ||||
| 
 | ||||
|             # XXX OLD XXX | ||||
|             # if child calls `MsgStream.aclose()` then expect EoC. | ||||
|  | @ -166,10 +158,6 @@ def test_ipc_channel_break_during_stream( | |||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
| 
 | ||||
|             if tpt_proto == 'uds': | ||||
|                 expect_final_exc = TransportClosed | ||||
|                 expect_final_cause = trio.BrokenResourceError | ||||
| 
 | ||||
|     # NOTE when the parent IPC side dies (even if the child does as well | ||||
|     # but the child fails BEFORE the parent) we always expect the | ||||
|     # IPC layer to raise a closed-resource, NEVER do we expect | ||||
|  | @ -182,8 +170,8 @@ def test_ipc_channel_break_during_stream( | |||
|         and | ||||
|         ipc_break['break_child_ipc_after'] is False | ||||
|     ): | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
|         expect_final_cause = trio.ClosedResourceError | ||||
| 
 | ||||
|     # BOTH but, PARENT breaks FIRST | ||||
|     elif ( | ||||
|  | @ -194,8 +182,8 @@ def test_ipc_channel_break_during_stream( | |||
|             ipc_break['break_parent_ipc_after'] | ||||
|         ) | ||||
|     ): | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
|         expect_final_cause = trio.ClosedResourceError | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=( | ||||
|  | @ -211,7 +199,6 @@ def test_ipc_channel_break_during_stream( | |||
|                     start_method=spawn_backend, | ||||
|                     loglevel=loglevel, | ||||
|                     pre_close=pre_aclose_msgstream, | ||||
|                     tpt_proto=tpt_proto, | ||||
|                     **ipc_break, | ||||
|                 ) | ||||
|             ) | ||||
|  | @ -234,24 +221,22 @@ def test_ipc_channel_break_during_stream( | |||
|                 ) | ||||
|             cause: Exception = tc.__cause__ | ||||
|             assert ( | ||||
|                 # type(cause) is trio.ClosedResourceError | ||||
|                 type(cause) is expect_final_cause | ||||
| 
 | ||||
|                 # TODO, should we expect a certain exc-message (per | ||||
|                 # tpt) as well?? | ||||
|                 # and | ||||
|                 # cause.args[0] == 'another task closed this fd' | ||||
|                 type(cause) is trio.ClosedResourceError | ||||
|                 and | ||||
|                 cause.args[0] == 'another task closed this fd' | ||||
|             ) | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|     # get raw instance from pytest wrapper | ||||
|     value = excinfo.value | ||||
|     if isinstance(value, ExceptionGroup): | ||||
|         excs = value.exceptions | ||||
|         assert len(excs) == 1 | ||||
|         final_exc = excs[0] | ||||
|         assert isinstance(final_exc, expect_final_exc) | ||||
|         value = next( | ||||
|             itertools.dropwhile( | ||||
|                 lambda exc: not isinstance(exc, expect_final_exc), | ||||
|                 value.exceptions, | ||||
|             ) | ||||
|         ) | ||||
|         assert value | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -274,16 +259,15 @@ async def break_ipc_after_started( | |||
| 
 | ||||
| def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | ||||
|     ''' | ||||
|     Verify that is a subactor's IPC goes down just after bringing up | ||||
|     a stream the parent can trigger a SIGINT and the child will be | ||||
|     reaped out-of-IPC by the localhost process supervision machinery: | ||||
|     aka "zombie lord". | ||||
|     Verify that is a subactor's IPC goes down just after bringing up a stream | ||||
|     the parent can trigger a SIGINT and the child will be reaped out-of-IPC by | ||||
|     the localhost process supervision machinery: aka "zombie lord". | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 portal = await an.start_actor( | ||||
|             async with tractor.open_nursery() as n: | ||||
|                 portal = await n.start_actor( | ||||
|                     'ipc_breaker', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
|  |  | |||
|  | @ -307,13 +307,6 @@ async def inf_streamer( | |||
| 
 | ||||
|     async with ( | ||||
|         ctx.open_stream() as stream, | ||||
| 
 | ||||
|         # XXX TODO, INTERESTING CASE!! | ||||
|         # - if we don't collapse the eg then the embedded | ||||
|         # `trio.EndOfChannel` doesn't propagate directly to the above | ||||
|         # .open_stream() parent, resulting in it also raising instead | ||||
|         # of gracefully absorbing as normal.. so how to handle? | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         async def close_stream_on_sentinel(): | ||||
|  |  | |||
|  | @ -14,7 +14,7 @@ import tractor | |||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from .conftest import no_windows | ||||
| from conftest import no_windows | ||||
| 
 | ||||
| 
 | ||||
| def is_win(): | ||||
|  | @ -130,7 +130,7 @@ def test_multierror( | |||
|             try: | ||||
|                 await portal2.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.boxed_type is AssertionError | ||||
|                 assert err.boxed_type == AssertionError | ||||
|                 print("Look Maa that first actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|  | @ -182,7 +182,7 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | |||
| 
 | ||||
|     for exc in exceptions: | ||||
|         assert isinstance(exc, tractor.RemoteActorError) | ||||
|         assert exc.boxed_type is AssertionError | ||||
|         assert exc.boxed_type == AssertionError | ||||
| 
 | ||||
| 
 | ||||
| async def do_nothing(): | ||||
|  | @ -236,10 +236,7 @@ async def stream_forever(): | |||
| async def test_cancel_infinite_streamer(start_method): | ||||
| 
 | ||||
|     # stream for at most 1 seconds | ||||
|     with ( | ||||
|         trio.fail_after(4), | ||||
|         trio.move_on_after(1) as cancel_scope | ||||
|     ): | ||||
|     with trio.move_on_after(1) as cancel_scope: | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|                 'donny', | ||||
|  | @ -287,32 +284,20 @@ async def test_cancel_infinite_streamer(start_method): | |||
|     ], | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_some_cancels_all( | ||||
|     num_actors_and_errs: tuple, | ||||
|     start_method: str, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Verify a subset of failed subactors causes all others in | ||||
| async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel): | ||||
|     """Verify a subset of failed subactors causes all others in | ||||
|     the nursery to be cancelled just like the strategy in trio. | ||||
| 
 | ||||
|     This is the first and only supervisory strategy at the moment. | ||||
| 
 | ||||
|     ''' | ||||
|     ( | ||||
|         num_actors, | ||||
|         first_err, | ||||
|         err_type, | ||||
|         ria_func, | ||||
|         da_func, | ||||
|     ) = num_actors_and_errs | ||||
|     """ | ||||
|     num_actors, first_err, err_type, ria_func, da_func = num_actors_and_errs | ||||
|     try: | ||||
|         async with tractor.open_nursery() as an: | ||||
|         async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|             # spawn the same number of deamon actors which should be cancelled | ||||
|             dactor_portals = [] | ||||
|             for i in range(num_actors): | ||||
|                 dactor_portals.append(await an.start_actor( | ||||
|                 dactor_portals.append(await n.start_actor( | ||||
|                     f'deamon_{i}', | ||||
|                     enable_modules=[__name__], | ||||
|                 )) | ||||
|  | @ -322,7 +307,7 @@ async def test_some_cancels_all( | |||
|             for i in range(num_actors): | ||||
|                 # start actor(s) that will fail immediately | ||||
|                 riactor_portals.append( | ||||
|                     await an.run_in_actor( | ||||
|                     await n.run_in_actor( | ||||
|                         func, | ||||
|                         name=f'actor_{i}', | ||||
|                         **kwargs | ||||
|  | @ -352,8 +337,7 @@ async def test_some_cancels_all( | |||
| 
 | ||||
|         # should error here with a ``RemoteActorError`` or ``MultiError`` | ||||
| 
 | ||||
|     except first_err as _err: | ||||
|         err = _err | ||||
|     except first_err as err: | ||||
|         if isinstance(err, BaseExceptionGroup): | ||||
|             assert len(err.exceptions) == num_actors | ||||
|             for exc in err.exceptions: | ||||
|  | @ -364,8 +348,8 @@ async def test_some_cancels_all( | |||
|         elif isinstance(err, tractor.RemoteActorError): | ||||
|             assert err.boxed_type == err_type | ||||
| 
 | ||||
|         assert an.cancelled is True | ||||
|         assert not an._children | ||||
|         assert n.cancelled is True | ||||
|         assert not n._children | ||||
|     else: | ||||
|         pytest.fail("Should have gotten a remote assertion error?") | ||||
| 
 | ||||
|  | @ -520,9 +504,7 @@ def test_cancel_via_SIGINT_other_task( | |||
|     if is_win():  # smh | ||||
|         timeout += 1 | ||||
| 
 | ||||
|     async def spawn_and_sleep_forever( | ||||
|         task_status=trio.TASK_STATUS_IGNORED | ||||
|     ): | ||||
|     async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED): | ||||
|         async with tractor.open_nursery() as tn: | ||||
|             for i in range(3): | ||||
|                 await tn.run_in_actor( | ||||
|  | @ -535,15 +517,8 @@ def test_cancel_via_SIGINT_other_task( | |||
|     async def main(): | ||||
|         # should never timeout since SIGINT should cancel the current program | ||||
|         with trio.fail_after(timeout): | ||||
|             async with ( | ||||
| 
 | ||||
|                 # XXX ?TODO? why no work!? | ||||
|                 # tractor.trionics.collapse_eg(), | ||||
|                 trio.open_nursery( | ||||
|                     strict_exception_groups=False, | ||||
|                 ) as tn, | ||||
|             ): | ||||
|                 await tn.start(spawn_and_sleep_forever) | ||||
|             async with trio.open_nursery() as n: | ||||
|                 await n.start(spawn_and_sleep_forever) | ||||
|                 if 'mp' in spawn_backend: | ||||
|                     time.sleep(0.1) | ||||
|                 os.kill(pid, signal.SIGINT) | ||||
|  | @ -554,123 +529,38 @@ def test_cancel_via_SIGINT_other_task( | |||
| 
 | ||||
| async def spin_for(period=3): | ||||
|     "Sync sleep." | ||||
|     print(f'sync sleeping in sub-sub for {period}\n') | ||||
|     time.sleep(period) | ||||
| 
 | ||||
| 
 | ||||
| async def spawn_sub_with_sync_blocking_task(): | ||||
|     async with tractor.open_nursery() as an: | ||||
|         print('starting sync blocking subactor..\n') | ||||
|         await an.run_in_actor( | ||||
| async def spawn(): | ||||
|     async with tractor.open_nursery() as tn: | ||||
|         await tn.run_in_actor( | ||||
|             spin_for, | ||||
|             name='sleeper', | ||||
|         ) | ||||
|         print('exiting first subactor layer..\n') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'man_cancel_outer', | ||||
|     [ | ||||
|         False,  # passes if delay != 2 | ||||
| 
 | ||||
|         # always causes an unexpected eg-w-embedded-assert-err? | ||||
|         pytest.param(True, | ||||
|              marks=pytest.mark.xfail( | ||||
|                  reason=( | ||||
|                     'always causes an unexpected eg-w-embedded-assert-err?' | ||||
|                 ) | ||||
|             ), | ||||
|         ), | ||||
|     ], | ||||
| ) | ||||
| @no_windows | ||||
| def test_cancel_while_childs_child_in_sync_sleep( | ||||
|     loglevel: str, | ||||
|     start_method: str, | ||||
|     spawn_backend: str, | ||||
|     debug_mode: bool, | ||||
|     reg_addr: tuple, | ||||
|     man_cancel_outer: bool, | ||||
|     loglevel, | ||||
|     start_method, | ||||
|     spawn_backend, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a child cancelled while executing sync code is torn | ||||
|     """Verify that a child cancelled while executing sync code is torn | ||||
|     down even when that cancellation is triggered by the parent | ||||
|     2 nurseries "up". | ||||
| 
 | ||||
|     Though the grandchild should stay blocking its actor runtime, its | ||||
|     parent should issue a "zombie reaper" to hard kill it after | ||||
|     sufficient timeout. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     if start_method == 'forkserver': | ||||
|         pytest.skip("Forksever sux hard at resuming from sync sleep...") | ||||
| 
 | ||||
|     async def main(): | ||||
|         # | ||||
|         # XXX BIG TODO NOTE XXX | ||||
|         # | ||||
|         # it seems there's a strange race that can happen | ||||
|         # where where the fail-after will trigger outer scope | ||||
|         # .cancel() which then causes the inner scope to raise, | ||||
|         # | ||||
|         # BaseExceptionGroup('Exceptions from Trio nursery', [ | ||||
|         #   BaseExceptionGroup('Exceptions from Trio nursery', | ||||
|         #   [ | ||||
|         #       Cancelled(), | ||||
|         #       Cancelled(), | ||||
|         #   ] | ||||
|         #   ), | ||||
|         #   AssertionError('assert 0') | ||||
|         # ]) | ||||
|         # | ||||
|         # WHY THIS DOESN'T MAKE SENSE: | ||||
|         # --------------------------- | ||||
|         # - it should raise too-slow-error when too slow.. | ||||
|         #  * verified that using simple-cs and manually cancelling | ||||
|         #    you get same outcome -> indicates that the fail-after | ||||
|         #    can have its TooSlowError overriden! | ||||
|         #  |_ to check this it's easy, simplly decrease the timeout | ||||
|         #     as per the var below. | ||||
|         # | ||||
|         # - when using the manual simple-cs the outcome is different | ||||
|         #   DESPITE the `assert 0` which means regardless of the | ||||
|         #   inner scope effectively failing in the same way, the | ||||
|         #   bubbling up **is NOT the same**. | ||||
|         # | ||||
|         # delays trigger diff outcomes.. | ||||
|         # --------------------------- | ||||
|         # as seen by uncommenting various lines below there is from | ||||
|         # my POV an unexpected outcome due to the delay=2 case. | ||||
|         # | ||||
|         # delay = 1  # no AssertionError in eg, TooSlowError raised. | ||||
|         # delay = 2  # is AssertionError in eg AND no TooSlowError !? | ||||
|         delay = 4  # is AssertionError in eg AND no _cs cancellation. | ||||
| 
 | ||||
|         with trio.fail_after(delay) as _cs: | ||||
|         # with trio.CancelScope() as cs: | ||||
|         # ^XXX^ can be used instead to see same outcome. | ||||
| 
 | ||||
|             async with ( | ||||
|                 # tractor.trionics.collapse_eg(),  # doesn't help | ||||
|                 tractor.open_nursery( | ||||
|                     hide_tb=False, | ||||
|                     debug_mode=debug_mode, | ||||
|                     registry_addrs=[reg_addr], | ||||
|                 ) as an, | ||||
|             ): | ||||
|                 await an.run_in_actor( | ||||
|                     spawn_sub_with_sync_blocking_task, | ||||
|                     name='sync_blocking_sub', | ||||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery() as tn: | ||||
|                 await tn.run_in_actor( | ||||
|                     spawn, | ||||
|                     name='spawn', | ||||
|                 ) | ||||
|                 await trio.sleep(1) | ||||
| 
 | ||||
|                 if man_cancel_outer: | ||||
|                     print('Cancelling manually in root') | ||||
|                     _cs.cancel() | ||||
| 
 | ||||
|                 # trigger exc-srced taskc down | ||||
|                 # the actor tree. | ||||
|                 print('RAISING IN ROOT') | ||||
|                 assert 0 | ||||
| 
 | ||||
|     with pytest.raises(AssertionError): | ||||
|  | @ -720,12 +610,6 @@ def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon( | |||
|                     nurse.start_soon(delayed_kbi) | ||||
| 
 | ||||
|                     await p.run(do_nuthin) | ||||
| 
 | ||||
|         # need to explicitly re-raise the lone kbi..now | ||||
|         except* KeyboardInterrupt as kbi_eg: | ||||
|             assert (len(excs := kbi_eg.exceptions) == 1) | ||||
|             raise excs[0] | ||||
| 
 | ||||
|         finally: | ||||
|             duration = time.time() - start | ||||
|             if duration > timeout: | ||||
|  |  | |||
|  | @ -0,0 +1,917 @@ | |||
| ''' | ||||
| Low-level functional audits for our | ||||
| "capability based messaging"-spec feats. | ||||
| 
 | ||||
| B~) | ||||
| 
 | ||||
| ''' | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     structs, | ||||
|     msgpack, | ||||
|     Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import pytest | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     _state, | ||||
|     MsgTypeError, | ||||
|     Context, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
|     _ctxvar_MsgCodec, | ||||
| 
 | ||||
|     NamespacePath, | ||||
|     MsgCodec, | ||||
|     mk_codec, | ||||
|     apply_codec, | ||||
|     current_codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     _payload_msgs, | ||||
|     log, | ||||
|     PayloadMsg, | ||||
|     Started, | ||||
|     mk_msg_spec, | ||||
| ) | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
def mk_custom_codec(
    pld_spec: Union[Type]|Any,
    add_hooks: bool,

) -> MsgCodec:
    '''
    Create custom `msgpack` enc/dec-hooks and set a `Decoder`
    which only loads `pld_spec` (like `NamespacePath`) types.

    Parameters:
      - pld_spec: the payload type (or type-union) limited on the
        codec's decoder side for `PayloadMsg.pld` fields.
      - add_hooks: when `True` attach the custom `enc_nsp()` and
        `dec_nsp()` hooks below; when `False` the codec gets no
        hooks at all (so a `NamespacePath` can't be serialized).

    Returns the newly built `MsgCodec` instance.

    '''
    # uid of the calling actor, used purely for print/log tracing.
    uid: tuple[str, str] = tractor.current_actor().uid

    # XXX NOTE XXX: despite defining `NamespacePath` as a type
    # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair
    # to cast to/from that type on the wire. See the docs:
    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types

    def enc_nsp(obj: Any) -> Any:
        # encode-hook: only `NamespacePath` is supported; it is
        # cast to its `str` form for the wire.
        print(f'{uid} ENC HOOK')
        match obj:
            case NamespacePath():
                print(
                    f'{uid}: `NamespacePath`-Only ENCODE?\n'
                    f'obj-> `{obj}`: {type(obj)}\n'
                )
                # if type(obj) != NamespacePath:
                #     breakpoint()
                return str(obj)

        print(
            f'{uid}\n'
            'CUSTOM ENCODE\n'
            f'obj-arg-> `{obj}`: {type(obj)}\n'
        )
        logmsg: str = (
            f'{uid}\n'
            'FAILED ENCODE\n'
            f'obj-> `{obj}: {type(obj)}`\n'
        )
        # any other (non-native) type is a hard error since the
        # hook is the only way `msgspec` could encode it.
        raise NotImplementedError(logmsg)

    def dec_nsp(
        obj_type: Type,
        obj: Any,

    ) -> Any:
        # decode-hook: re-hydrate a `NamespacePath` from its
        # `str` wire-form; any other input is passed through so
        # `msgspec` raises its own validation error.
        print(
            f'{uid}\n'
            'CUSTOM DECODE\n'
            f'type-arg-> {obj_type}\n'
            f'obj-arg-> `{obj}`: {type(obj)}\n'
        )
        nsp = None

        # a nsp wire-value is a `str` of the form 'mod.path:fn'
        # so the ':' check is the cheap discriminator.
        if (
            obj_type is NamespacePath
            and isinstance(obj, str)
            and ':' in obj
        ):
            nsp = NamespacePath(obj)
            # TODO: we could build a generic handler using
            # JUST matching the obj_type part?
            # nsp = obj_type(obj)

        if nsp:
            print(f'Returning NSP instance: {nsp}')
            return nsp

        logmsg: str = (
            f'{uid}\n'
            'FAILED DECODE\n'
            f'type-> {obj_type}\n'
            f'obj-arg-> `{obj}`: {type(obj)}\n\n'
            f'current codec:\n'
            f'{current_codec()}\n'
        )
        # TODO: figure out the ignore subsys for this!
        # -[ ] option whether to defense-relay backc the msg
        #   inside an `Invalid`/`Ignore`
        # -[ ] how to make this handling pluggable such that a
        #   `Channel`/`MsgTransport` can intercept and process
        #   back msgs either via exception handling or some other
        #   signal?
        log.warning(logmsg)
        # NOTE: this delivers the invalid
        # value up to `msgspec`'s decoding
        # machinery for error raising.
        return obj
        # raise NotImplementedError(logmsg)

    nsp_codec: MsgCodec = mk_codec(
        ipc_pld_spec=pld_spec,

        # NOTE XXX: the encode hook MUST be used no matter what since
        # our `NamespacePath` is not any of a `Any` native type nor
        # a `msgspec.Struct` subtype - so `msgspec` has no way to know
        # how to encode it unless we provide the custom hook.
        #
        # AGAIN that is, regardless of whether we spec an
        # `Any`-decoded-pld the enc has no knowledge (by default)
        # how to enc `NamespacePath` (nsp), so we add a custom
        # hook to do that ALWAYS.
        enc_hook=enc_nsp if add_hooks else None,

        # XXX NOTE: pretty sure this is mutex with the `type=` to
        # `Decoder`? so it won't work in tandem with the
        # `ipc_pld_spec` passed above?
        dec_hook=dec_nsp if add_hooks else None,
    )
    return nsp_codec
| 
 | ||||
| 
 | ||||
def chk_codec_applied(
    expect_codec: MsgCodec,
    enter_value: MsgCodec|None = None,

) -> None:
    '''
    Buncha sanity checks ensuring that the IPC channel's
    context-vars are set to the expected codec and that the
    ctx-var wrapper APIs match the same.

    Parameters:
      - expect_codec: the codec instance we expect to be the
        currently applied one for this task.
      - enter_value: optionally, the value yielded on entering
        `apply_codec()`; asserted to be the exact same object
        as `expect_codec`.

    Raises `AssertionError` on any mismatch.

    '''
    # TODO: play with tricycle again, bc this is supposed to work
    # the way we want?
    #
    # TreeVar
    # task: trio.Task = trio.lowlevel.current_task()
    # curr_codec = _ctxvar_MsgCodec.get_in(task)

    # ContextVar
    # task_ctx: Context = task.context
    # assert _ctxvar_MsgCodec in task_ctx
    # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec]

    # NOTE: currently we use this!
    # RunVar
    curr_codec: MsgCodec = current_codec()
    last_read_codec = _ctxvar_MsgCodec.get()
    # assert curr_codec is last_read_codec

    # identity-chain: the expected codec (returned from
    # `mk_codec()`) must be the exact object read back from both
    # wrapper APIs, and must NOT be either of the defaults.
    assert (
        (same_codec := expect_codec) is

        # read from current task's `contextvars.Context`
        curr_codec is
        last_read_codec

        # the default `msgspec` settings
        is not _codec._def_msgspec_codec
        is not _codec._def_tractor_codec
    )

    if enter_value:
        # XXX: was previously a bare (no-op) expression,
        # `enter_value is same_codec`, which verified nothing;
        # actually assert the ctx-mngr-entered codec matches.
        assert enter_value is same_codec
| 
 | ||||
| 
 | ||||
def iter_maybe_sends(
    send_items: dict[Union[Type], Any] | list[tuple],
    ipc_pld_spec: Union[Type] | Any,
    add_codec_hooks: bool,

    codec: MsgCodec|None = None,

) -> typing.Generator[tuple[str, Any, bool], None, None]:
    '''
    For each `(type-spec, value)` pair in `send_items`, compute
    whether the value should roundtrip through a codec built with
    `ipc_pld_spec` (optionally verifying it in-memory when `codec`
    is provided) and yield
    `(str(value-type), value, expect_roundtrip)` triples.

    '''
    if isinstance(send_items, dict):
        send_items = send_items.items()

    for (
        send_type_spec,
        send_value,
    ) in send_items:

        expect_roundtrip: bool = False

        # values-to-typespec sanity
        send_type = type(send_value)
        assert send_type == send_type_spec or (
            (subtypes := getattr(send_type_spec, '__args__', None))
            and send_type in subtypes
        )

        # a non-union spec has no `__args__`; fall back to a
        # single-member set of just the spec itself.
        spec_subtypes: set[Union[Type]] = (
             getattr(
                 ipc_pld_spec,
                 '__args__',
                 {ipc_pld_spec,},
             )
        )
        send_in_spec: bool = (
            send_type == ipc_pld_spec
            or (
                ipc_pld_spec != Any
                and  # presume `Union` of types
                send_type in spec_subtypes
            )
            or (
                ipc_pld_spec == Any
                and
                send_type != NamespacePath
            )
        )
        expect_roundtrip = (
            send_in_spec
            # any spec should support all other
            # builtin py values that we send
            # except our custom nsp type which
            # we should be able to send as long
            # as we provide the custom codec hooks.
            or (
                ipc_pld_spec == Any
                and
                send_type == NamespacePath
                and
                add_codec_hooks
            )
        )

        if codec is not None:
            # XXX FIRST XXX ensure roundtripping works
            # before touching any IPC primitives/APIs.
            wire_bytes: bytes = codec.encode(
                Started(
                    cid='blahblah',
                    pld=send_value,
                )
            )
            # NOTE: demonstrates the decoder loading
            # via our native SCIPP msg-spec
            # (structured-conc-inter-proc-protocol)
            # implemented as per,
            try:
                msg: Started = codec.decode(wire_bytes)
                if not expect_roundtrip:
                    pytest.fail(
                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {send_type}\n'
                    )

                pld = msg.pld
                assert pld == send_value

            except ValidationError:
                if expect_roundtrip:
                    pytest.fail(
                        f'EXPECTED to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {send_type}\n'
                    )

        yield (
            str(send_type),
            send_value,
            expect_roundtrip,
        )
| 
 | ||||
| 
 | ||||
| def dec_type_union( | ||||
|     type_names: list[str], | ||||
| ) -> Type: | ||||
|     ''' | ||||
|     Look up types by name, compile into a list and then create and | ||||
|     return a `typing.Union` from the full set. | ||||
| 
 | ||||
|     ''' | ||||
|     import importlib | ||||
|     types: list[Type] = [] | ||||
|     for type_name in type_names: | ||||
|         for mod in [ | ||||
|             typing, | ||||
|             importlib.import_module(__name__), | ||||
|         ]: | ||||
|             if type_ref := getattr( | ||||
|                 mod, | ||||
|                 type_name, | ||||
|                 False, | ||||
|             ): | ||||
|                 types.append(type_ref) | ||||
| 
 | ||||
|     # special case handling only.. | ||||
|     # ipc_pld_spec: Union[Type] = eval( | ||||
|     #     pld_spec_str, | ||||
|     #     {},  # globals | ||||
|     #     {'typing': typing},  # locals | ||||
|     # ) | ||||
| 
 | ||||
|     return Union[*types] | ||||
| 
 | ||||
| 
 | ||||
| def enc_type_union( | ||||
|     union_or_type: Union[Type]|Type, | ||||
| ) -> list[str]: | ||||
|     ''' | ||||
|     Encode a type-union or single type to a list of type-name-strings | ||||
|     ready for IPC interchange. | ||||
| 
 | ||||
|     ''' | ||||
|     type_strs: list[str] = [] | ||||
|     for typ in getattr( | ||||
|         union_or_type, | ||||
|         '__args__', | ||||
|         {union_or_type,}, | ||||
|     ): | ||||
|         type_strs.append(typ.__qualname__) | ||||
| 
 | ||||
|     return type_strs | ||||
| 
 | ||||
| 
 | ||||
@tractor.context
async def send_back_values(
    ctx: Context,
    expect_debug: bool,
    pld_spec_type_strs: list[str],
    add_hooks: bool,
    started_msg_bytes: bytes,
    expect_ipc_send: dict[str, tuple[Any, bool]],

) -> None:
    '''
    Setup up a custom codec to load instances of `NamespacePath`
    and ensure we can round trip a func ref with our parent.

    Parameters:
      - ctx: the IPC context delivered by `@tractor.context`.
      - expect_debug: expected `_state.debug_mode()` value.
      - pld_spec_type_strs: type-name strings (from
        `enc_type_union()`) to rebuild the pld-spec locally.
      - add_hooks: whether to install the custom enc/dec hooks.
      - started_msg_bytes: parent-side pre-encoded `Started` msg
        used to audit decoding under the rebuilt codec.
      - expect_ipc_send: map of value-type-str ->
        `(value, expect_send)` pairs to attempt sending back.

    '''
    uid: tuple = tractor.current_actor().uid

    # debug mode sanity check (prolly superfluous but, meh)
    assert expect_debug == _state.debug_mode()

    # init state in sub-actor should be default
    chk_codec_applied(
        expect_codec=_codec._def_tractor_codec,
    )

    # load pld spec from input str
    ipc_pld_spec = dec_type_union(
        pld_spec_type_strs,
    )
    pld_spec_str = str(ipc_pld_spec)

    # same as on parent side config.
    nsp_codec: MsgCodec = mk_custom_codec(
        pld_spec=ipc_pld_spec,
        add_hooks=add_hooks,
    )
    with (
        apply_codec(nsp_codec) as codec,
    ):
        chk_codec_applied(
            expect_codec=nsp_codec,
            enter_value=codec,
        )

        # audit decoding of the parent's pre-encoded `Started`
        # whose `.pld` is the stringified pld-spec itself.
        print(
            f'{uid}: attempting `Started`-bytes DECODE..\n'
        )
        try:
            msg: Started = nsp_codec.decode(started_msg_bytes)
            expected_pld_spec_str: str = msg.pld
            assert pld_spec_str == expected_pld_spec_str

        # TODO: maybe we should add our own wrapper error so as to
        # be interchange-lib agnostic?
        # -[ ] the error type is wtv is raised from the hook so we
        #   could also require a type-class of errors for
        #   indicating whether the hook-failure can be handled by
        #   a nasty-dialog-unprot sub-sys?
        except ValidationError:

            # NOTE: only in the `Any` spec case do we expect this to
            # work since otherwise no spec covers a plain-ol'
            # `.pld: str`
            if pld_spec_str == 'Any':
                raise
            else:
                print(
                    f'{uid}: (correctly) unable to DECODE `Started`-bytes\n'
                    f'{started_msg_bytes}\n'
                )

        # NOTE: a single shared iterator so the streaming loop
        # below resumes from wherever this `.started()` loop
        # `break`s.
        iter_send_val_items = iter(expect_ipc_send.values())
        sent: list[Any] = []
        for send_value, expect_send in iter_send_val_items:
            try:
                print(
                    f'{uid}: attempting to `.started({send_value})`\n'
                    f'=> expect_send: {expect_send}\n'
                    f'SINCE, ipc_pld_spec: {ipc_pld_spec}\n'
                    f'AND, codec: {codec}\n'
                )
                await ctx.started(send_value)
                sent.append(send_value)
                if not expect_send:

                    # XXX NOTE XXX THIS WON'T WORK WITHOUT SPECIAL
                    # `str` handling! or special debug mode IPC
                    # msgs!
                    await tractor.pause()

                    raise RuntimeError(
                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {type(send_value)}\n'
                    )

                break  # move on to streaming block..

            except tractor.MsgTypeError:
                await tractor.pause()

                if expect_send:
                    raise RuntimeError(
                        f'EXPECTED to `.started()` value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {type(send_value)}\n'
                    )

        async with ctx.open_stream() as ipc:
            print(
                f'{uid}: Entering streaming block to send remaining values..'
            )

            # continue consuming the SAME iterator started above.
            for send_value, expect_send in iter_send_val_items:
                send_type: Type = type(send_value)
                print(
                    '------ - ------\n'
                    f'{uid}: SENDING NEXT VALUE\n'
                    f'ipc_pld_spec: {ipc_pld_spec}\n'
                    f'expect_send: {expect_send}\n'
                    f'val: {send_value}\n'
                    '------ - ------\n'
                )
                try:
                    await ipc.send(send_value)
                    print(f'***\n{uid}-CHILD sent {send_value!r}\n***\n')
                    sent.append(send_value)

                    # NOTE: should only raise above on
                    # `.started()` or a `Return`
                    # if not expect_send:
                    #     raise RuntimeError(
                    #         f'NOT-EXPECTED able to roundtrip value given spec:\n'
                    #         f'ipc_pld_spec -> {ipc_pld_spec}\n'
                    #         f'value -> {send_value}: {send_type}\n'
                    #     )

                except ValidationError:
                    print(f'{uid} FAILED TO SEND {send_value}!')

                    # await tractor.pause()
                    if expect_send:
                        raise RuntimeError(
                            f'EXPECTED to roundtrip value given spec:\n'
                            f'ipc_pld_spec -> {ipc_pld_spec}\n'
                            f'value -> {send_value}: {send_type}\n'
                        )
                    # continue

            else:
                # for-else: only runs when the loop was NOT broken.
                print(
                    f'{uid}: finished sending all values\n'
                    'Should be exiting stream block!\n'
                )

        print(f'{uid}: exited streaming block!')

        # TODO: this won't be true bc in streaming phase we DO NOT
        # msgspec check outbound msgs!
        # -[ ] once we implement the receiver side `InvalidMsg`
        #   then we can expect it here?
        # assert (
        #     len(sent)
        #     ==
        #     len([val
        #          for val, expect in
        #          expect_ipc_send.values()
        #          if expect is True])
        # )
| 
 | ||||
| 
 | ||||
def ex_func(*args):
    '''
    Tiny module-level example fn used as the `NamespacePath`
    reference target in the codec audits.

    '''
    report: str = f'ex_func({args})'
    print(report)
| 
 | ||||
| 
 | ||||
@pytest.mark.parametrize(
    'ipc_pld_spec',
    [
        Any,
        NamespacePath,
        NamespacePath|None,  # the "maybe" spec Bo
    ],
    ids=[
        'any_type',
        'nsp_type',
        'maybe_nsp_type',
    ]
)
@pytest.mark.parametrize(
    'add_codec_hooks',
    [
        True,
        False,
    ],
    ids=['use_codec_hooks', 'no_codec_hooks'],
)
def test_codec_hooks_mod(
    # NOTE(review): presumably a conftest-provided fixture — confirm.
    debug_mode: bool,
    ipc_pld_spec: Union[Type]|Any,
    # send_value: None|str|NamespacePath,
    add_codec_hooks: bool,
):
    '''
    Audit the `.msg.MsgCodec` override apis details given our impl
    uses `contextvars` to accomplish per `trio` task codec
    application around an inter-proc-task-comms context.

    '''
    async def main():
        nsp = NamespacePath.from_ref(ex_func)
        # map each single-member type-spec to its test value.
        send_items: dict[Union, Any] = {
            Union[None]: None,
            Union[NamespacePath]: nsp,
            Union[str]: str(nsp),
        }

        # init default state for actor
        chk_codec_applied(
            expect_codec=_codec._def_tractor_codec,
        )

        async with tractor.open_nursery(
            debug_mode=debug_mode,
        ) as an:
            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )

            # TODO: 2 cases:
            # - codec not modified -> decode nsp as `str`
            # - codec modified with hooks -> decode nsp as
            #   `NamespacePath`
            nsp_codec: MsgCodec = mk_custom_codec(
                pld_spec=ipc_pld_spec,
                add_hooks=add_codec_hooks,
            )
            with apply_codec(nsp_codec) as codec:
                chk_codec_applied(
                    expect_codec=nsp_codec,
                    enter_value=codec,
                )

                expect_ipc_send: dict[str, tuple[Any, bool]] = {}

                report: str = (
                    'Parent report on send values with\n'
                    f'ipc_pld_spec: {ipc_pld_spec}\n'
                    '       ------ - ------\n'
                )
                # in-memory roundtrip audit (no `codec=` passed so
                # only the expectation flags are computed here).
                for val_type_str, val, expect_send in iter_maybe_sends(
                    send_items,
                    ipc_pld_spec,
                    add_codec_hooks=add_codec_hooks,
                ):
                    report += (
                        f'send_value: {val}: {type(val)} '
                        f'=> expect_send: {expect_send}\n'
                    )
                    expect_ipc_send[val_type_str] = (val, expect_send)

                print(
                    report +
                    '       ------ - ------\n'
                )
                assert len(expect_ipc_send) == len(send_items)
                # now try over real IPC with the subactor
                # expect_ipc_rountrip: bool = True
                expected_started = Started(
                    cid='cid',
                    pld=str(ipc_pld_spec),
                )
                # build list of values we expect to receive from
                # the subactor.
                expect_to_send: list[Any] = [
                    val
                    for val, expect_send in expect_ipc_send.values()
                    if expect_send
                ]

                pld_spec_type_strs: list[str] = enc_type_union(ipc_pld_spec)

                # XXX should raise an mte (`MsgTypeError`)
                # when `add_codec_hooks == False` bc the input
                # `expect_ipc_send` kwarg has a nsp which can't be
                # serialized!
                #
                # TODO:can we ensure this happens from the
                # `Return`-side (aka the sub) as well?
                if not add_codec_hooks:
                    try:
                        async with p.open_context(
                            send_back_values,
                            expect_debug=debug_mode,
                            pld_spec_type_strs=pld_spec_type_strs,
                            add_hooks=add_codec_hooks,
                            started_msg_bytes=nsp_codec.encode(expected_started),

                            # XXX NOTE bc we send a `NamespacePath` in this kwarg
                            expect_ipc_send=expect_ipc_send,

                        ) as (ctx, first):
                            pytest.fail('ctx should fail to open without custom enc_hook!?')

                    # this test passes bc we can go no further!
                    except MsgTypeError:
                        # teardown nursery
                        await p.cancel_actor()
                        return

                # TODO: send the original nsp here and
                # test with `limit_msg_spec()` above?
                # await tractor.pause()
                print('PARENT opening IPC ctx!\n')
                async with (

                    # XXX should raise an mte (`MsgTypeError`)
                    # when `add_codec_hooks == False`..
                    p.open_context(
                        send_back_values,
                        expect_debug=debug_mode,
                        pld_spec_type_strs=pld_spec_type_strs,
                        add_hooks=add_codec_hooks,
                        started_msg_bytes=nsp_codec.encode(expected_started),
                        expect_ipc_send=expect_ipc_send,
                    ) as (ctx, first),

                    ctx.open_stream() as ipc,
                ):
                    # ensure codec is still applied across
                    # `tractor.Context` + its embedded nursery.
                    chk_codec_applied(
                        expect_codec=nsp_codec,
                        enter_value=codec,
                    )
                    print(
                        'root: ENTERING CONTEXT BLOCK\n'
                        f'type(first): {type(first)}\n'
                        f'first: {first}\n'
                    )
                    # the `.started()` value is the first expected.
                    expect_to_send.remove(first)

                    # TODO: explicit values we expect depending on
                    # codec config!
                    # assert first == first_val
                    # assert first == f'{__name__}:ex_func'

                    async for next_sent in ipc:
                        print(
                            'Parent: child sent next value\n'
                            f'{next_sent}: {type(next_sent)}\n'
                        )
                        if expect_to_send:
                            expect_to_send.remove(next_sent)
                        else:
                            print('PARENT should terminate stream loop + block!')

                    # all sent values should have arrived!
                    assert not expect_to_send

            await p.cancel_actor()

    trio.run(main)
| 
 | ||||
| 
 | ||||
def chk_pld_type(
    payload_spec: Type[Struct]|Any,
    pld: Any,

    expect_roundtrip: bool|None = None,

) -> bool:
    '''
    Encode a boxed payload-msg set limited by `payload_spec` and verify
    decode symmetry: return `True` when every spec'ed runtime msg type
    round-trips `pld` through both a one-off `msgpack` enc/dec pair and
    the library's own `MsgCodec` instance.

    '''
    pld_val_type: Type = type(pld)

    # TODO: verify that the overridden subtypes
    # DO NOT have modified type-annots from original!
    # 'Start',  .pld: FuncSpec
    # 'StartAck',  .pld: IpcCtxSpec
    # 'Stop',  .pld: UNSET
    # 'Error',  .pld: ErrorData
    codec: MsgCodec = mk_codec(
        # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified
        # type union.
        ipc_pld_spec=payload_spec,
    )

    # build a stand-alone enc/dec pair to cross-check against our
    # `MsgCodec` instance, which runs the below `mk_msg_spec()` call
    # internally.
    ipc_msg_spec: Union[Type[Struct]]
    msg_types: list[PayloadMsg[payload_spec]]
    ipc_msg_spec, msg_types = mk_msg_spec(
        payload_type_union=payload_spec,
    )
    _enc = msgpack.Encoder()
    _dec = msgpack.Decoder(
        type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]`
    )

    assert payload_spec == codec.pld_spec

    # assert codec.dec == dec
    #
    # ^-XXX-^ not sure why these aren't "equal" but when cast
    # to `str` they seem to match ?? .. kk
    assert (
        str(ipc_msg_spec)
        == str(codec.msg_spec)
        == str(_dec.type)
        == str(codec.dec.type)
    )

    # verify the boxed-type for all variable payload-type msgs.
    if not msg_types:
        breakpoint()

    roundtrip: bool|None = None
    pld_spec_msg_names: list[str] = [
        msg_type.__name__ for msg_type in _payload_msgs
    ]
    for typedef in msg_types:
        # runtime-internal msg types are not part of the pld-spec set
        # and thus are skipped for round-trip checking.
        if typedef.__name__ not in pld_spec_msg_names:
            continue

        # TODO-v does this need to work to get all subtypes to adhere?
        pld_field = structs.fields(typedef)[1]
        assert pld_field.type is payload_spec

        enc_msg: PayloadMsg = typedef(
            cid='666',
            pld=pld,
        )

        # both encoders must emit identical wire-bytes.
        _wire_bytes: bytes = _enc.encode(enc_msg)
        wire_bytes: bytes = codec.enc.encode(enc_msg)
        assert _wire_bytes == wire_bytes

        ve: ValidationError|None = None
        try:
            dec_msg = codec.dec.decode(wire_bytes)
            _dec_msg = _dec.decode(wire_bytes)

            # decoded msg and thus payload should be exactly same!
            roundtrip = (
                _dec_msg == dec_msg == enc_msg
            )
            assert roundtrip

            if (
                expect_roundtrip is not None
                and expect_roundtrip != roundtrip
            ):
                breakpoint()

            assert (
                pld == dec_msg.pld == enc_msg.pld
            )
            # assert (roundtrip := (_dec_msg == enc_msg))

        except ValidationError as _ve:
            ve = _ve
            roundtrip = False
            if pld_val_type is payload_spec:
                # a spec-matching payload should never fail validation!
                raise ValueError(
                    'Got `ValidationError` despite type-var match!?\n'
                    f'pld_val_type: {pld_val_type}\n'
                    f'payload_type: {payload_spec}\n'
                ) from ve

            else:
                # ow we good cuz the pld spec mismatched.
                print(
                    'Got expected `ValidationError` since,\n'
                    f'{pld_val_type} is not {payload_spec}\n'
                )
        else:
            # conversely, a mismatched payload should never decode
            # cleanly under a non-`Any` spec!
            if (
                payload_spec is not Any
                and pld_val_type is not payload_spec
            ):
                raise ValueError(
                    'DID NOT `ValidationError` despite expected type match!?\n'
                    f'pld_val_type: {pld_val_type}\n'
                    f'payload_type: {payload_spec}\n'
                )

    # full code decode should always be attempted!
    if roundtrip is None:
        breakpoint()

    return roundtrip
| 
 | ||||
| 
 | ||||
def test_limit_msgspec():
    '''
    Verify `.pld`-spec limiting via `chk_pld_type()` across the
    universal `Any` spec, a builtin (`int`) spec and a custom
    `Struct`-subtype spec.

    '''
    async def check_pld_limiting():
        async with tractor.open_root_actor(
            debug_mode=True
        ):
            # the universal spec must round-trip a boxing `PayloadMsg`,
            # even one carrying a `None` payload.
            assert chk_pld_type(
                payload_spec=Any,
                pld=None,
                expect_roundtrip=True,
            )

            # a payload value which mismatches the spec must NOT decode.
            assert not chk_pld_type(
                payload_spec=int,
                pld='doggy',
            )

            # parametrize the boxed `.pld` type as a custom-struct
            # and ensure that parametrization propagates
            # to all payload-msg-spec-able subtypes!
            class CustomPayload(Struct):
                name: str
                value: Any

            # mismatched value type -> decode must fail..
            assert not chk_pld_type(
                payload_spec=CustomPayload,
                pld='doggy',
            )

            # ..but a spec-matching struct instance must round-trip.
            assert chk_pld_type(
                payload_spec=CustomPayload,
                pld=CustomPayload(name='doggy', value='urmom')
            )

            # yah, we can `.pause_from_sync()` now!
            # breakpoint()

    trio.run(check_pld_limiting)
|  | @ -95,8 +95,8 @@ async def trio_main( | |||
| 
 | ||||
|     # stash a "service nursery" as "actor local" (aka a Python global) | ||||
|     global _nursery | ||||
|     tn = _nursery | ||||
|     assert tn | ||||
|     n = _nursery | ||||
|     assert n | ||||
| 
 | ||||
|     async def consume_stream(): | ||||
|         async with wrapper_mngr() as stream: | ||||
|  | @ -104,10 +104,10 @@ async def trio_main( | |||
|                 print(msg) | ||||
| 
 | ||||
|     # run 2 tasks to ensure broadcaster chan use | ||||
|     tn.start_soon(consume_stream) | ||||
|     tn.start_soon(consume_stream) | ||||
|     n.start_soon(consume_stream) | ||||
|     n.start_soon(consume_stream) | ||||
| 
 | ||||
|     tn.start_soon(trio_sleep_and_err) | ||||
|     n.start_soon(trio_sleep_and_err) | ||||
| 
 | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
|  | @ -117,11 +117,8 @@ async def open_actor_local_nursery( | |||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     global _nursery | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn | ||||
|     ): | ||||
|         _nursery = tn | ||||
|     async with trio.open_nursery() as n: | ||||
|         _nursery = n | ||||
|         await ctx.started() | ||||
|         await trio.sleep(10) | ||||
|         # await trio.sleep(1) | ||||
|  | @ -135,7 +132,7 @@ async def open_actor_local_nursery( | |||
|         # never yields back.. aka a scenario where the | ||||
|         # ``tractor.context`` task IS NOT in the service n's cancel | ||||
|         # scope. | ||||
|         tn.cancel_scope.cancel() | ||||
|         n.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -160,7 +157,7 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | |||
|         async with tractor.open_nursery() as n: | ||||
|             p = await n.start_actor( | ||||
|                 'nursery_mngr', | ||||
|                 infect_asyncio=asyncio_mode,  # TODO, is this enabling debug mode? | ||||
|                 infect_asyncio=asyncio_mode, | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ( | ||||
|  |  | |||
|  | @ -13,24 +13,26 @@ MESSAGE = 'tractoring at full speed' | |||
| def test_empty_mngrs_input_raises() -> None: | ||||
| 
 | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|         with trio.fail_after(1): | ||||
|             async with ( | ||||
|                 open_actor_cluster( | ||||
|                     modules=[__name__], | ||||
| 
 | ||||
|                     # NOTE: ensure we can passthrough runtime opts | ||||
|                     loglevel='cancel', | ||||
|                     debug_mode=False, | ||||
|                     loglevel='info', | ||||
|                     # debug_mode=True, | ||||
| 
 | ||||
|                 ) as portals, | ||||
| 
 | ||||
|                 gather_contexts(mngrs=()), | ||||
|                 gather_contexts( | ||||
|                     # NOTE: it's the use of inline-generator syntax | ||||
|                     # here that causes the empty input. | ||||
|                     mngrs=( | ||||
|                         p.open_context(worker) for p in portals.values() | ||||
|                     ), | ||||
|                 ), | ||||
|             ): | ||||
|                 # should fail before this? | ||||
|                 assert portals | ||||
| 
 | ||||
|                 # test should fail if we mk it here! | ||||
|                 assert 0, 'Should have raised val-err !?' | ||||
|                 assert 0 | ||||
| 
 | ||||
|     with pytest.raises(ValueError): | ||||
|         trio.run(main) | ||||
|  |  | |||
|  | @ -38,9 +38,9 @@ from tractor._testing import ( | |||
| # - standard setup/teardown: | ||||
| #   ``Portal.open_context()`` starts a new | ||||
| #   remote task context in another actor. The target actor's task must | ||||
| #   call ``Context.started()`` to unblock this entry on the parent side. | ||||
| #   the child task executes until complete and returns a final value | ||||
|   #   which is delivered to the parent side and retrieved via | ||||
| #   call ``Context.started()`` to unblock this entry on the caller side. | ||||
| #   the callee task executes until complete and returns a final value | ||||
|   #   which is delivered to the caller side and retrieved via | ||||
| #   ``Context.result()``. | ||||
| 
 | ||||
| # - cancel termination: | ||||
|  | @ -170,9 +170,9 @@ async def assert_state(value: bool): | |||
|     [False, ValueError, KeyboardInterrupt], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'child_blocks_forever', | ||||
|     'callee_blocks_forever', | ||||
|     [False, True], | ||||
|     ids=lambda item: f'child_blocks_forever={item}' | ||||
|     ids=lambda item: f'callee_blocks_forever={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'pointlessly_open_stream', | ||||
|  | @ -181,7 +181,7 @@ async def assert_state(value: bool): | |||
| ) | ||||
| def test_simple_context( | ||||
|     error_parent, | ||||
|     child_blocks_forever, | ||||
|     callee_blocks_forever, | ||||
|     pointlessly_open_stream, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|  | @ -204,13 +204,13 @@ def test_simple_context( | |||
|                         portal.open_context( | ||||
|                             simple_setup_teardown, | ||||
|                             data=10, | ||||
|                             block_forever=child_blocks_forever, | ||||
|                             block_forever=callee_blocks_forever, | ||||
|                         ) as (ctx, sent), | ||||
|                     ): | ||||
|                         assert current_ipc_ctx() is ctx | ||||
|                         assert sent == 11 | ||||
| 
 | ||||
|                         if child_blocks_forever: | ||||
|                         if callee_blocks_forever: | ||||
|                             await portal.run(assert_state, value=True) | ||||
|                         else: | ||||
|                             assert await ctx.result() == 'yo' | ||||
|  | @ -220,7 +220,7 @@ def test_simple_context( | |||
|                                 if error_parent: | ||||
|                                     raise error_parent | ||||
| 
 | ||||
|                                 if child_blocks_forever: | ||||
|                                 if callee_blocks_forever: | ||||
|                                     await ctx.cancel() | ||||
|                                 else: | ||||
|                                     # in this case the stream will send a | ||||
|  | @ -252,16 +252,16 @@ def test_simple_context( | |||
|             pass | ||||
|         except BaseExceptionGroup as beg: | ||||
|             # XXX: on windows it seems we may have to expect the group error | ||||
|             from tractor.trionics import is_multi_cancelled | ||||
|             from tractor._exceptions import is_multi_cancelled | ||||
|             assert is_multi_cancelled(beg) | ||||
|     else: | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'child_returns_early', | ||||
|     'callee_returns_early', | ||||
|     [True, False], | ||||
|     ids=lambda item: f'child_returns_early={item}' | ||||
|     ids=lambda item: f'callee_returns_early={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'cancel_method', | ||||
|  | @ -273,14 +273,14 @@ def test_simple_context( | |||
|     [True, False], | ||||
|     ids=lambda item: f'chk_ctx_result_before_exit={item}' | ||||
| ) | ||||
| def test_parent_cancels( | ||||
| def test_caller_cancels( | ||||
|     cancel_method: str, | ||||
|     chk_ctx_result_before_exit: bool, | ||||
|     child_returns_early: bool, | ||||
|     callee_returns_early: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that when the opening side of a context (aka the parent) | ||||
|     Verify that when the opening side of a context (aka the caller) | ||||
|     cancels that context, the ctx does not raise a cancelled when | ||||
|     either calling `.result()` or on context exit. | ||||
| 
 | ||||
|  | @ -294,7 +294,7 @@ def test_parent_cancels( | |||
| 
 | ||||
|         if ( | ||||
|             cancel_method == 'portal' | ||||
|             and not child_returns_early | ||||
|             and not callee_returns_early | ||||
|         ): | ||||
|             try: | ||||
|                 res = await ctx.result() | ||||
|  | @ -318,7 +318,7 @@ def test_parent_cancels( | |||
|                 pytest.fail(f'should not have raised ctxc\n{ctxc}') | ||||
| 
 | ||||
|         # we actually get a result | ||||
|         if child_returns_early: | ||||
|         if callee_returns_early: | ||||
|             assert res == 'yo' | ||||
|             assert ctx.outcome is res | ||||
|             assert ctx.maybe_error is None | ||||
|  | @ -362,14 +362,14 @@ def test_parent_cancels( | |||
|             ) | ||||
|             timeout: float = ( | ||||
|                 0.5 | ||||
|                 if not child_returns_early | ||||
|                 if not callee_returns_early | ||||
|                 else 2 | ||||
|             ) | ||||
|             with trio.fail_after(timeout): | ||||
|                 async with ( | ||||
|                     expect_ctxc( | ||||
|                         yay=( | ||||
|                             not child_returns_early | ||||
|                             not callee_returns_early | ||||
|                             and cancel_method == 'portal' | ||||
|                         ) | ||||
|                     ), | ||||
|  | @ -377,13 +377,13 @@ def test_parent_cancels( | |||
|                     portal.open_context( | ||||
|                         simple_setup_teardown, | ||||
|                         data=10, | ||||
|                         block_forever=not child_returns_early, | ||||
|                         block_forever=not callee_returns_early, | ||||
|                     ) as (ctx, sent), | ||||
|                 ): | ||||
| 
 | ||||
|                     if child_returns_early: | ||||
|                     if callee_returns_early: | ||||
|                         # ensure we block long enough before sending | ||||
|                         # a cancel such that the child has already | ||||
|                         # a cancel such that the callee has already | ||||
|                         # returned it's result. | ||||
|                         await trio.sleep(0.5) | ||||
| 
 | ||||
|  | @ -421,7 +421,7 @@ def test_parent_cancels( | |||
|             #   which should in turn cause `ctx._scope` to | ||||
|             # catch any cancellation? | ||||
|             if ( | ||||
|                 not child_returns_early | ||||
|                 not callee_returns_early | ||||
|                 and cancel_method != 'portal' | ||||
|             ): | ||||
|                 assert not ctx._scope.cancelled_caught | ||||
|  | @ -430,11 +430,11 @@ def test_parent_cancels( | |||
| 
 | ||||
| 
 | ||||
| # basic stream terminations: | ||||
| # - child context closes without using stream | ||||
| # - parent context closes without using stream | ||||
| # - parent context calls `Context.cancel()` while streaming | ||||
| #   is ongoing resulting in child being cancelled | ||||
| # - child calls `Context.cancel()` while streaming and parent | ||||
| # - callee context closes without using stream | ||||
| # - caller context closes without using stream | ||||
| # - caller context calls `Context.cancel()` while streaming | ||||
| #   is ongoing resulting in callee being cancelled | ||||
| # - callee calls `Context.cancel()` while streaming and caller | ||||
| #   sees stream terminated in `RemoteActorError` | ||||
| 
 | ||||
| # TODO: future possible features | ||||
|  | @ -443,6 +443,7 @@ def test_parent_cancels( | |||
| 
 | ||||
| @tractor.context | ||||
| async def close_ctx_immediately( | ||||
| 
 | ||||
|     ctx: Context, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -453,24 +454,13 @@ async def close_ctx_immediately( | |||
|     async with ctx.open_stream(): | ||||
|         pass | ||||
| 
 | ||||
|     print('child returning!') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'parent_send_before_receive', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=lambda item: f'child_send_before_receive={item}' | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_child_exits_ctx_after_stream_open( | ||||
| async def test_callee_closes_ctx_after_stream_open( | ||||
|     debug_mode: bool, | ||||
|     parent_send_before_receive: bool, | ||||
| ): | ||||
|     ''' | ||||
|     child context closes without using stream. | ||||
|     callee context closes without using stream. | ||||
| 
 | ||||
|     This should result in a msg sequence | ||||
|     |_<root>_ | ||||
|  | @ -484,9 +474,6 @@ async def test_child_exits_ctx_after_stream_open( | |||
|     => {'stop': True, 'cid': <str>} | ||||
| 
 | ||||
|     ''' | ||||
|     timeout: float = ( | ||||
|         0.5 if not debug_mode else 999 | ||||
|     ) | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
|  | @ -495,7 +482,7 @@ async def test_child_exits_ctx_after_stream_open( | |||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         with trio.fail_after(timeout): | ||||
|         with trio.fail_after(0.5): | ||||
|             async with portal.open_context( | ||||
|                 close_ctx_immediately, | ||||
| 
 | ||||
|  | @ -507,56 +494,41 @@ async def test_child_exits_ctx_after_stream_open( | |||
| 
 | ||||
|                 with trio.fail_after(0.4): | ||||
|                     async with ctx.open_stream() as stream: | ||||
|                         if parent_send_before_receive: | ||||
|                             print('sending first msg from parent!') | ||||
|                             await stream.send('yo') | ||||
| 
 | ||||
|                         # should fall through since ``StopAsyncIteration`` | ||||
|                         # should be raised through translation of | ||||
|                         # a ``trio.EndOfChannel`` by | ||||
|                         # ``trio.abc.ReceiveChannel.__anext__()`` | ||||
|                         msg = 10 | ||||
|                         async for msg in stream: | ||||
|                         async for _ in stream: | ||||
|                             # trigger failure if we DO NOT | ||||
|                             # get an EOC! | ||||
|                             assert 0 | ||||
|                         else: | ||||
|                             # never should get anythinig new from | ||||
|                             # the underlying stream | ||||
|                             assert msg == 10 | ||||
| 
 | ||||
|                             # verify stream is now closed | ||||
|                             try: | ||||
|                                 with trio.fail_after(0.3): | ||||
|                                     print('parent trying to `.receive()` on EoC stream!') | ||||
|                                     await stream.receive() | ||||
|                                     assert 0, 'should have raised eoc!?' | ||||
|                             except trio.EndOfChannel: | ||||
|                                 print('parent got EoC as expected!') | ||||
|                                 pass | ||||
|                                 # raise | ||||
| 
 | ||||
|                 # TODO: should be just raise the closed resource err | ||||
|                 # directly here to enforce not allowing a re-open | ||||
|                 # of a stream to the context (at least until a time of | ||||
|                 # if/when we decide that's a good idea?) | ||||
|                 try: | ||||
|                     with trio.fail_after(timeout): | ||||
|                     with trio.fail_after(0.5): | ||||
|                         async with ctx.open_stream() as stream: | ||||
|                             pass | ||||
|                 except trio.ClosedResourceError: | ||||
|                     pass | ||||
| 
 | ||||
|                 # if ctx._rx_chan._state.data: | ||||
|                 #     await tractor.pause() | ||||
| 
 | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def expect_cancelled( | ||||
|     ctx: Context, | ||||
|     send_before_receive: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|     global _state | ||||
|  | @ -566,10 +538,6 @@ async def expect_cancelled( | |||
| 
 | ||||
|     try: | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             if send_before_receive: | ||||
|                 await stream.send('yo') | ||||
| 
 | ||||
|             async for msg in stream: | ||||
|                 await stream.send(msg)  # echo server | ||||
| 
 | ||||
|  | @ -596,49 +564,26 @@ async def expect_cancelled( | |||
|         raise | ||||
| 
 | ||||
|     else: | ||||
|         assert 0, "child wasn't cancelled !?" | ||||
|         assert 0, "callee wasn't cancelled !?" | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'child_send_before_receive', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=lambda item: f'child_send_before_receive={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'rent_wait_for_msg', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=lambda item: f'rent_wait_for_msg={item}' | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'use_ctx_cancel_method', | ||||
|     [ | ||||
|         False, | ||||
|         'pre_stream', | ||||
|         'post_stream_open', | ||||
|         'post_stream_close', | ||||
|     ], | ||||
|     ids=lambda item: f'use_ctx_cancel_method={item}' | ||||
|     [False, True], | ||||
| ) | ||||
| @tractor_test | ||||
| async def test_parent_exits_ctx_after_child_enters_stream( | ||||
|     use_ctx_cancel_method: bool|str, | ||||
| async def test_caller_closes_ctx_after_callee_opens_stream( | ||||
|     use_ctx_cancel_method: bool, | ||||
|     debug_mode: bool, | ||||
|     rent_wait_for_msg: bool, | ||||
|     child_send_before_receive: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Parent-side of IPC context closes without sending on `MsgStream`. | ||||
|     caller context closes without using/opening stream | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
| 
 | ||||
|         root: Actor = current_actor() | ||||
|         portal = await an.start_actor( | ||||
|             'ctx_cancelled', | ||||
|  | @ -647,52 +592,41 @@ async def test_parent_exits_ctx_after_child_enters_stream( | |||
| 
 | ||||
|         async with portal.open_context( | ||||
|             expect_cancelled, | ||||
|             send_before_receive=child_send_before_receive, | ||||
|         ) as (ctx, sent): | ||||
|             assert sent is None | ||||
| 
 | ||||
|             await portal.run(assert_state, value=True) | ||||
| 
 | ||||
|             # call `ctx.cancel()` explicitly | ||||
|             if use_ctx_cancel_method == 'pre_stream': | ||||
|             if use_ctx_cancel_method: | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|                 # NOTE: means the local side `ctx._scope` will | ||||
|                 # have been cancelled by an ctxc ack and thus | ||||
|                 # `._scope.cancelled_caught` should be set. | ||||
|                 async with ( | ||||
|                     expect_ctxc( | ||||
|                         # XXX: the cause is US since we call | ||||
|                         # `Context.cancel()` just above! | ||||
|                         yay=True, | ||||
| 
 | ||||
|                         # XXX: must be propagated to __aexit__ | ||||
|                         # and should be silently absorbed there | ||||
|                         # since we called `.cancel()` just above ;) | ||||
|                         reraise=True, | ||||
|                     ) as maybe_ctxc, | ||||
|                 ): | ||||
|                 try: | ||||
|                     async with ctx.open_stream() as stream: | ||||
|                         async for msg in stream: | ||||
|                             pass | ||||
| 
 | ||||
|                         if rent_wait_for_msg: | ||||
|                             async for msg in stream: | ||||
|                                 print(f'PARENT rx: {msg!r}\n') | ||||
|                                 break | ||||
|                 except tractor.ContextCancelled as ctxc: | ||||
|                     # XXX: the cause is US since we call | ||||
|                     # `Context.cancel()` just above! | ||||
|                     assert ( | ||||
|                         ctxc.canceller | ||||
|                         == | ||||
|                         current_actor().uid | ||||
|                         == | ||||
|                         root.uid | ||||
|                     ) | ||||
| 
 | ||||
|                         if use_ctx_cancel_method == 'post_stream_open': | ||||
|                             await ctx.cancel() | ||||
|                     # XXX: must be propagated to __aexit__ | ||||
|                     # and should be silently absorbed there | ||||
|                     # since we called `.cancel()` just above ;) | ||||
|                     raise | ||||
| 
 | ||||
|                     if use_ctx_cancel_method == 'post_stream_close': | ||||
|                         await ctx.cancel() | ||||
| 
 | ||||
|                 ctxc: tractor.ContextCancelled = maybe_ctxc.value | ||||
|                 assert ( | ||||
|                     ctxc.canceller | ||||
|                     == | ||||
|                     current_actor().uid | ||||
|                     == | ||||
|                     root.uid | ||||
|                 ) | ||||
|                 else: | ||||
|                     assert 0, "Should have context cancelled?" | ||||
| 
 | ||||
|                 # channel should still be up | ||||
|                 assert portal.channel.connected() | ||||
|  | @ -703,20 +637,13 @@ async def test_parent_exits_ctx_after_child_enters_stream( | |||
|                     value=False, | ||||
|                 ) | ||||
| 
 | ||||
|             # XXX CHILD-BLOCKS case, we SHOULD NOT exit from the | ||||
|             # `.open_context()` before the child has returned, | ||||
|             # errored or been cancelled! | ||||
|             else: | ||||
|                 try: | ||||
|                     with trio.fail_after( | ||||
|                         0.5  # if not debug_mode else 999 | ||||
|                     ): | ||||
|                         res = await ctx.wait_for_result() | ||||
|                         assert res is not tractor._context.Unresolved | ||||
|                     with trio.fail_after(0.2): | ||||
|                         await ctx.result() | ||||
|                         assert 0, "Callee should have blocked!?" | ||||
|                 except trio.TooSlowError: | ||||
|                     # NO-OP -> since already triggered by | ||||
|                     # `trio.fail_after()` above! | ||||
|                     # NO-OP -> since already called above | ||||
|                     await ctx.cancel() | ||||
| 
 | ||||
|         # NOTE: local scope should have absorbed the cancellation since | ||||
|  | @ -756,7 +683,7 @@ async def test_parent_exits_ctx_after_child_enters_stream( | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_multitask_parent_cancels_from_nonroot_task( | ||||
| async def test_multitask_caller_cancels_from_nonroot_task( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async with tractor.open_nursery( | ||||
|  | @ -808,6 +735,7 @@ async def test_multitask_parent_cancels_from_nonroot_task( | |||
| 
 | ||||
| @tractor.context | ||||
| async def cancel_self( | ||||
| 
 | ||||
|     ctx: Context, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -847,11 +775,11 @@ async def cancel_self( | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_child_cancels_before_started( | ||||
| async def test_callee_cancels_before_started( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Callee calls `Context.cancel()` while streaming and parent | ||||
|     Callee calls `Context.cancel()` while streaming and caller | ||||
|     sees stream terminated in `ContextCancelled`. | ||||
| 
 | ||||
|     ''' | ||||
|  | @ -898,13 +826,14 @@ async def never_open_stream( | |||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def keep_sending_from_child( | ||||
| async def keep_sending_from_callee( | ||||
| 
 | ||||
|     ctx:  Context, | ||||
|     msg_buffer_size: int|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Send endlessly on the child stream. | ||||
|     Send endlessly on the callee stream. | ||||
| 
 | ||||
|     ''' | ||||
|     await ctx.started() | ||||
|  | @ -912,7 +841,7 @@ async def keep_sending_from_child( | |||
|         msg_buffer_size=msg_buffer_size, | ||||
|     ) as stream: | ||||
|         for msg in count(): | ||||
|             print(f'child sending {msg}') | ||||
|             print(f'callee sending {msg}') | ||||
|             await stream.send(msg) | ||||
|             await trio.sleep(0.01) | ||||
| 
 | ||||
|  | @ -920,12 +849,12 @@ async def keep_sending_from_child( | |||
| @pytest.mark.parametrize( | ||||
|     'overrun_by', | ||||
|     [ | ||||
|         ('parent', 1, never_open_stream), | ||||
|         ('child', 0, keep_sending_from_child), | ||||
|         ('caller', 1, never_open_stream), | ||||
|         ('callee', 0, keep_sending_from_callee), | ||||
|     ], | ||||
|     ids=[ | ||||
|          ('parent_1buf_never_open_stream'), | ||||
|          ('child_0buf_keep_sending_from_child'), | ||||
|          ('caller_1buf_never_open_stream'), | ||||
|          ('callee_0buf_keep_sending_from_callee'), | ||||
|     ] | ||||
| ) | ||||
| def test_one_end_stream_not_opened( | ||||
|  | @ -956,7 +885,8 @@ def test_one_end_stream_not_opened( | |||
|                 ) as (ctx, sent): | ||||
|                     assert sent is None | ||||
| 
 | ||||
|                     if 'parent' in overrunner: | ||||
|                     if 'caller' in overrunner: | ||||
| 
 | ||||
|                         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|                             # itersend +1 msg more then the buffer size | ||||
|  | @ -971,7 +901,7 @@ def test_one_end_stream_not_opened( | |||
|                                 await trio.sleep_forever() | ||||
| 
 | ||||
|                     else: | ||||
|                         # child overruns parent case so we do nothing here | ||||
|                         # callee overruns caller case so we do nothing here | ||||
|                         await trio.sleep_forever() | ||||
| 
 | ||||
|             await portal.cancel_actor() | ||||
|  | @ -979,19 +909,19 @@ def test_one_end_stream_not_opened( | |||
|     # 2 overrun cases and the no overrun case (which pushes right up to | ||||
|     # the msg limit) | ||||
|     if ( | ||||
|         overrunner == 'parent' | ||||
|         overrunner == 'caller' | ||||
|     ): | ||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type == StreamOverrun | ||||
| 
 | ||||
|     elif overrunner == 'child': | ||||
|     elif overrunner == 'callee': | ||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         # TODO: embedded remote errors so that we can verify the source | ||||
|         # error? the child delivers an error which is an overrun | ||||
|         # error? the callee delivers an error which is an overrun | ||||
|         # wrapped in a remote actor error. | ||||
|         assert excinfo.value.boxed_type == tractor.RemoteActorError | ||||
| 
 | ||||
|  | @ -1001,7 +931,8 @@ def test_one_end_stream_not_opened( | |||
| 
 | ||||
| @tractor.context | ||||
| async def echo_back_sequence( | ||||
|     ctx: Context, | ||||
| 
 | ||||
|     ctx:  Context, | ||||
|     seq: list[int], | ||||
|     wait_for_cancel: bool, | ||||
|     allow_overruns_side: str, | ||||
|  | @ -1010,12 +941,12 @@ async def echo_back_sequence( | |||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Send endlessly on the child stream using a small buffer size | ||||
|     Send endlessly on the calleee stream using a small buffer size | ||||
|     setting on the contex to simulate backlogging that would normally | ||||
|     cause overruns. | ||||
| 
 | ||||
|     ''' | ||||
|     # NOTE: ensure that if the parent is expecting to cancel this task | ||||
|     # NOTE: ensure that if the caller is expecting to cancel this task | ||||
|     # that we stay echoing much longer then they are so we don't | ||||
|     # return early instead of receive the cancel msg. | ||||
|     total_batches: int = ( | ||||
|  | @ -1065,18 +996,18 @@ async def echo_back_sequence( | |||
|                 if be_slow: | ||||
|                     await trio.sleep(0.05) | ||||
| 
 | ||||
|                 print('child waiting on next') | ||||
|                 print('callee waiting on next') | ||||
| 
 | ||||
|             print(f'child echoing back latest batch\n{batch}') | ||||
|             print(f'callee echoing back latest batch\n{batch}') | ||||
|             for msg in batch: | ||||
|                 print(f'child sending msg\n{msg}') | ||||
|                 print(f'callee sending msg\n{msg}') | ||||
|                 await stream.send(msg) | ||||
| 
 | ||||
|     try: | ||||
|         return 'yo' | ||||
|     finally: | ||||
|         print( | ||||
|             'exiting child with context:\n' | ||||
|             'exiting callee with context:\n' | ||||
|             f'{pformat(ctx)}\n' | ||||
|         ) | ||||
| 
 | ||||
|  | @ -1130,7 +1061,7 @@ def test_maybe_allow_overruns_stream( | |||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.start_actor( | ||||
|                 'child_sends_forever', | ||||
|                 'callee_sends_forever', | ||||
|                 enable_modules=[__name__], | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=debug_mode, | ||||
|  |  | |||
|  | @ -7,11 +7,8 @@ import platform | |||
| from functools import partial | ||||
| import itertools | ||||
| 
 | ||||
| import psutil | ||||
| import pytest | ||||
| import subprocess | ||||
| import tractor | ||||
| from tractor.trionics import collapse_eg | ||||
| from tractor._testing import tractor_test | ||||
| import trio | ||||
| 
 | ||||
|  | @ -29,7 +26,7 @@ async def test_reg_then_unreg(reg_addr): | |||
|         portal = await n.start_actor('actor', enable_modules=[__name__]) | ||||
|         uid = portal.channel.uid | ||||
| 
 | ||||
|         async with tractor.get_registry(reg_addr) as aportal: | ||||
|         async with tractor.get_registry(*reg_addr) as aportal: | ||||
|             # this local actor should be the arbiter | ||||
|             assert actor is aportal.actor | ||||
| 
 | ||||
|  | @ -155,25 +152,15 @@ async def unpack_reg(actor_or_portal): | |||
| async def spawn_and_check_registry( | ||||
|     reg_addr: tuple, | ||||
|     use_signal: bool, | ||||
|     debug_mode: bool = False, | ||||
|     remote_arbiter: bool = False, | ||||
|     with_streaming: bool = False, | ||||
|     maybe_daemon: tuple[ | ||||
|         subprocess.Popen, | ||||
|         psutil.Process, | ||||
|     ]|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     if maybe_daemon: | ||||
|         popen, proc = maybe_daemon | ||||
|         # breakpoint() | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|         debug_mode=debug_mode, | ||||
|     ): | ||||
|         async with tractor.get_registry(reg_addr) as portal: | ||||
|         async with tractor.get_registry(*reg_addr) as portal: | ||||
|             # runtime needs to be up to call this | ||||
|             actor = tractor.current_actor() | ||||
| 
 | ||||
|  | @ -189,30 +176,28 @@ async def spawn_and_check_registry( | |||
|                 extra = 2  # local root actor + remote arbiter | ||||
| 
 | ||||
|             # ensure current actor is registered | ||||
|             registry: dict = await get_reg() | ||||
|             registry = await get_reg() | ||||
|             assert actor.uid in registry | ||||
| 
 | ||||
|             try: | ||||
|                 async with tractor.open_nursery() as an: | ||||
|                     async with ( | ||||
|                         collapse_eg(), | ||||
|                         trio.open_nursery() as trion, | ||||
|                     ): | ||||
|                 async with tractor.open_nursery() as n: | ||||
|                     async with trio.open_nursery() as trion: | ||||
| 
 | ||||
|                         portals = {} | ||||
|                         for i in range(3): | ||||
|                             name = f'a{i}' | ||||
|                             if with_streaming: | ||||
|                                 portals[name] = await an.start_actor( | ||||
|                                 portals[name] = await n.start_actor( | ||||
|                                     name=name, enable_modules=[__name__]) | ||||
| 
 | ||||
|                             else:  # no streaming | ||||
|                                 portals[name] = await an.run_in_actor( | ||||
|                                 portals[name] = await n.run_in_actor( | ||||
|                                     trio.sleep_forever, name=name) | ||||
| 
 | ||||
|                         # wait on last actor to come up | ||||
|                         async with tractor.wait_for_actor(name): | ||||
|                             registry = await get_reg() | ||||
|                             for uid in an._children: | ||||
|                             for uid in n._children: | ||||
|                                 assert uid in registry | ||||
| 
 | ||||
|                         assert len(portals) + extra == len(registry) | ||||
|  | @ -245,7 +230,6 @@ async def spawn_and_check_registry( | |||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | ||||
| def test_subactors_unregister_on_cancel( | ||||
|     debug_mode: bool, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|  | @ -262,7 +246,6 @@ def test_subactors_unregister_on_cancel( | |||
|                 spawn_and_check_registry, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 debug_mode=debug_mode, | ||||
|                 remote_arbiter=False, | ||||
|                 with_streaming=with_streaming, | ||||
|             ), | ||||
|  | @ -272,8 +255,7 @@ def test_subactors_unregister_on_cancel( | |||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| @pytest.mark.parametrize('with_streaming', [False, True]) | ||||
| def test_subactors_unregister_on_cancel_remote_daemon( | ||||
|     daemon: subprocess.Popen, | ||||
|     debug_mode: bool, | ||||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|  | @ -289,13 +271,8 @@ def test_subactors_unregister_on_cancel_remote_daemon( | |||
|                 spawn_and_check_registry, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 debug_mode=debug_mode, | ||||
|                 remote_arbiter=True, | ||||
|                 with_streaming=with_streaming, | ||||
|                 maybe_daemon=( | ||||
|                     daemon, | ||||
|                     psutil.Process(daemon.pid) | ||||
|                 ), | ||||
|             ), | ||||
|         ) | ||||
| 
 | ||||
|  | @ -321,7 +298,7 @@ async def close_chans_before_nursery( | |||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|     ): | ||||
|         async with tractor.get_registry(reg_addr) as aportal: | ||||
|         async with tractor.get_registry(*reg_addr) as aportal: | ||||
|             try: | ||||
|                 get_reg = partial(unpack_reg, aportal) | ||||
| 
 | ||||
|  | @ -339,12 +316,9 @@ async def close_chans_before_nursery( | |||
|                         async with portal2.open_stream_from( | ||||
|                             stream_forever | ||||
|                         ) as agen2: | ||||
|                             async with ( | ||||
|                                 collapse_eg(), | ||||
|                                 trio.open_nursery() as tn, | ||||
|                             ): | ||||
|                                 tn.start_soon(streamer, agen1) | ||||
|                                 tn.start_soon(cancel, use_signal, .5) | ||||
|                             async with trio.open_nursery() as n: | ||||
|                                 n.start_soon(streamer, agen1) | ||||
|                                 n.start_soon(cancel, use_signal, .5) | ||||
|                                 try: | ||||
|                                     await streamer(agen2) | ||||
|                                 finally: | ||||
|  | @ -395,7 +369,7 @@ def test_close_channel_explicit( | |||
| 
 | ||||
| @pytest.mark.parametrize('use_signal', [False, True]) | ||||
| def test_close_channel_explicit_remote_arbiter( | ||||
|     daemon: subprocess.Popen, | ||||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     reg_addr, | ||||
|  |  | |||
|  | @ -19,7 +19,7 @@ from tractor._testing import ( | |||
| @pytest.fixture | ||||
| def run_example_in_subproc( | ||||
|     loglevel: str, | ||||
|     testdir: pytest.Pytester, | ||||
|     testdir: pytest.Testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| 
 | ||||
|  | @ -66,9 +66,6 @@ def run_example_in_subproc( | |||
|         # due to backpressure!!! | ||||
|         proc = testdir.popen( | ||||
|             cmdargs, | ||||
|             stdin=subprocess.PIPE, | ||||
|             stdout=subprocess.PIPE, | ||||
|             stderr=subprocess.PIPE, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         assert not proc.returncode | ||||
|  | @ -84,37 +81,27 @@ def run_example_in_subproc( | |||
| 
 | ||||
|     # walk yields: (dirpath, dirnames, filenames) | ||||
|     [ | ||||
|         (p[0], f) | ||||
|         for p in os.walk(examples_dir()) | ||||
|         for f in p[2] | ||||
|         (p[0], f) for p in os.walk(examples_dir()) for f in p[2] | ||||
| 
 | ||||
|         if ( | ||||
|             '__' not in f | ||||
|             and f[0] != '_' | ||||
|             and 'debugging' not in p[0] | ||||
|             and 'integration' not in p[0] | ||||
|             and 'advanced_faults' not in p[0] | ||||
|             and 'multihost' not in p[0] | ||||
|             and 'trio' not in p[0] | ||||
|         ) | ||||
|         if '__' not in f | ||||
|         and f[0] != '_' | ||||
|         and 'debugging' not in p[0] | ||||
|         and 'integration' not in p[0] | ||||
|         and 'advanced_faults' not in p[0] | ||||
|     ], | ||||
| 
 | ||||
|     ids=lambda t: t[1], | ||||
| ) | ||||
| def test_example( | ||||
|     run_example_in_subproc, | ||||
|     example_script, | ||||
| ): | ||||
|     ''' | ||||
|     Load and run scripts from this repo's ``examples/`` dir as a user | ||||
| def test_example(run_example_in_subproc, example_script): | ||||
|     """Load and run scripts from this repo's ``examples/`` dir as a user | ||||
|     would copy and pasing them into their editor. | ||||
| 
 | ||||
|     On windows a little more "finessing" is done to make | ||||
|     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the | ||||
|     test directory and invoke the script as a module with ``python -m | ||||
|     test_example``. | ||||
| 
 | ||||
|     ''' | ||||
|     ex_file: str = os.path.join(*example_script) | ||||
|     """ | ||||
|     ex_file = os.path.join(*example_script) | ||||
| 
 | ||||
|     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): | ||||
|         pytest.skip("2-way streaming example requires py3.9 async with syntax") | ||||
|  | @ -123,14 +110,10 @@ def test_example( | |||
|         code = ex.read() | ||||
| 
 | ||||
|         with run_example_in_subproc(code) as proc: | ||||
|             err = None | ||||
|             try: | ||||
|                 if not proc.poll(): | ||||
|                     _, err = proc.communicate(timeout=15) | ||||
| 
 | ||||
|             except subprocess.TimeoutExpired as e: | ||||
|                 proc.kill() | ||||
|                 err = e.stderr | ||||
|             proc.wait() | ||||
|             err, _ = proc.stderr.read(), proc.stdout.read() | ||||
|             # print(f'STDERR: {err}') | ||||
|             # print(f'STDOUT: {out}') | ||||
| 
 | ||||
|             # if we get some gnarly output let's aggregate and raise | ||||
|             if err: | ||||
|  | @ -144,8 +127,7 @@ def test_example( | |||
|                     # shouldn't eventually once we figure out what's | ||||
|                     # a better way to be explicit about aio side | ||||
|                     # cancels? | ||||
|                     and | ||||
|                     'asyncio.exceptions.CancelledError' not in last_error | ||||
|                     and 'asyncio.exceptions.CancelledError' not in last_error | ||||
|                 ): | ||||
|                     raise Exception(errmsg) | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,946 +0,0 @@ | |||
| ''' | ||||
| Low-level functional audits for our | ||||
| "capability based messaging"-spec feats. | ||||
| 
 | ||||
| B~) | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
|     # nullcontext, | ||||
| ) | ||||
| import importlib | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     # structs, | ||||
|     # msgpack, | ||||
|     Raw, | ||||
|     # Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     # _state, | ||||
|     MsgTypeError, | ||||
|     Context, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
|     _ctxvar_MsgCodec, | ||||
|     _exts, | ||||
| 
 | ||||
|     NamespacePath, | ||||
|     MsgCodec, | ||||
|     MsgDec, | ||||
|     mk_codec, | ||||
|     mk_dec, | ||||
|     apply_codec, | ||||
|     current_codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     log, | ||||
|     Started, | ||||
|     # _payload_msgs, | ||||
|     # PayloadMsg, | ||||
|     # mk_msg_spec, | ||||
| ) | ||||
| from tractor.msg._ops import ( | ||||
|     limit_plds, | ||||
| ) | ||||
| 
 | ||||
| def enc_nsp(obj: Any) -> Any: | ||||
|     actor: Actor = tractor.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     ) | ||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||
|     print(f'{uid} ENC HOOK') | ||||
| 
 | ||||
|     match obj: | ||||
|         # case NamespacePath()|str(): | ||||
|         case NamespacePath(): | ||||
|             encoded: str = str(obj) | ||||
|             print( | ||||
|                 f'----- ENCODING `NamespacePath` as `str` ------\n' | ||||
|                 f'|_obj:{type(obj)!r} = {obj!r}\n' | ||||
|                 f'|_encoded: str = {encoded!r}\n' | ||||
|             ) | ||||
|             # if type(obj) != NamespacePath: | ||||
|             #     breakpoint() | ||||
|             return encoded | ||||
|         case _: | ||||
|             logmsg: str = ( | ||||
|                 f'{uid}\n' | ||||
|                 'FAILED ENCODE\n' | ||||
|                 f'obj-> `{obj}: {type(obj)}`\n' | ||||
|             ) | ||||
|             raise NotImplementedError(logmsg) | ||||
| 
 | ||||
| 
 | ||||
| def dec_nsp( | ||||
|     obj_type: Type, | ||||
|     obj: Any, | ||||
| 
 | ||||
| ) -> Any: | ||||
|     # breakpoint() | ||||
|     actor: Actor = tractor.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     ) | ||||
|     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||
|     print( | ||||
|         f'{uid}\n' | ||||
|         'CUSTOM DECODE\n' | ||||
|         f'type-arg-> {obj_type}\n' | ||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n' | ||||
|     ) | ||||
|     nsp = None | ||||
|     # XXX, never happens right? | ||||
|     if obj_type is Raw: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     if ( | ||||
|         obj_type is NamespacePath | ||||
|         and isinstance(obj, str) | ||||
|         and ':' in obj | ||||
|     ): | ||||
|         nsp = NamespacePath(obj) | ||||
|         # TODO: we could built a generic handler using | ||||
|         # JUST matching the obj_type part? | ||||
|         # nsp = obj_type(obj) | ||||
| 
 | ||||
|     if nsp: | ||||
|         print(f'Returning NSP instance: {nsp}') | ||||
|         return nsp | ||||
| 
 | ||||
|     logmsg: str = ( | ||||
|         f'{uid}\n' | ||||
|         'FAILED DECODE\n' | ||||
|         f'type-> {obj_type}\n' | ||||
|         f'obj-arg-> `{obj}`: {type(obj)}\n\n' | ||||
|         f'current codec:\n' | ||||
|         f'{current_codec()}\n' | ||||
|     ) | ||||
|     # TODO: figure out the ignore subsys for this! | ||||
|     # -[ ] option whether to defense-relay backc the msg | ||||
|     #   inside an `Invalid`/`Ignore` | ||||
|     # -[ ] how to make this handling pluggable such that a | ||||
|     #   `Channel`/`MsgTransport` can intercept and process | ||||
|     #   back msgs either via exception handling or some other | ||||
|     #   signal? | ||||
|     log.warning(logmsg) | ||||
|     # NOTE: this delivers the invalid | ||||
|     # value up to `msgspec`'s decoding | ||||
|     # machinery for error raising. | ||||
|     return obj | ||||
|     # raise NotImplementedError(logmsg) | ||||
| 
 | ||||
| 
 | ||||
| def ex_func(*args): | ||||
|     ''' | ||||
|     A mod level func we can ref and load via our `NamespacePath` | ||||
|     python-object pointer `str` subtype. | ||||
| 
 | ||||
|     ''' | ||||
|     print(f'ex_func({args})') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'add_codec_hooks', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=['use_codec_hooks', 'no_codec_hooks'], | ||||
| ) | ||||
| def test_custom_extension_types( | ||||
|     debug_mode: bool, | ||||
|     add_codec_hooks: bool | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a `MsgCodec` (used for encoding all outbound IPC msgs | ||||
|     and decoding all inbound `PayloadMsg`s) and a paired `MsgDec` | ||||
|     (used for decoding the `PayloadMsg.pld: Raw` received within a given | ||||
|     task's ipc `Context` scope) can both send and receive "extension types" | ||||
|     as supported via custom converter hooks passed to `msgspec`. | ||||
| 
 | ||||
|     ''' | ||||
|     nsp_pld_dec: MsgDec = mk_dec( | ||||
|         spec=None,  # ONLY support the ext type | ||||
|         dec_hook=dec_nsp if add_codec_hooks else None, | ||||
|         ext_types=[NamespacePath], | ||||
|     ) | ||||
|     nsp_codec: MsgCodec = mk_codec( | ||||
|         # ipc_pld_spec=Raw,  # default! | ||||
| 
 | ||||
|         # NOTE XXX: the encode hook MUST be used no matter what since | ||||
|         # our `NamespacePath` is not any of a `Any` native type nor | ||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||
|         # how to encode it unless we provide the custom hook. | ||||
|         # | ||||
|         # AGAIN that is, regardless of whether we spec an | ||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||
|         # how to enc `NamespacePath` (nsp), so we add a custom | ||||
|         # hook to do that ALWAYS. | ||||
|         enc_hook=enc_nsp if add_codec_hooks else None, | ||||
| 
 | ||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||
|         # `Decoder`? so it won't work in tandem with the | ||||
|         # `ipc_pld_spec` passed above? | ||||
|         ext_types=[NamespacePath], | ||||
| 
 | ||||
|         # TODO? is it useful to have the `.pld` decoded *prior* to | ||||
|         # the `PldRx`?? like perf or mem related? | ||||
|         # ext_dec=nsp_pld_dec, | ||||
|     ) | ||||
|     if add_codec_hooks: | ||||
|         assert nsp_codec.dec.dec_hook is None | ||||
| 
 | ||||
|         # TODO? if we pass `ext_dec` above? | ||||
|         # assert nsp_codec.dec.dec_hook is dec_nsp | ||||
| 
 | ||||
|         assert nsp_codec.enc.enc_hook is enc_nsp | ||||
| 
 | ||||
|     nsp = NamespacePath.from_ref(ex_func) | ||||
| 
 | ||||
|     try: | ||||
|         nsp_bytes: bytes = nsp_codec.encode(nsp) | ||||
|         nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes) | ||||
|         nsp_rt_sin_msg.load_ref() is ex_func | ||||
|     except TypeError: | ||||
|         if not add_codec_hooks: | ||||
|             pass | ||||
| 
 | ||||
|     try: | ||||
|         msg_bytes: bytes = nsp_codec.encode( | ||||
|             Started( | ||||
|                 cid='cid', | ||||
|                 pld=nsp, | ||||
|             ) | ||||
|         ) | ||||
|         # since the ext-type obj should also be set as the msg.pld | ||||
|         assert nsp_bytes in msg_bytes | ||||
|         started_rt: Started = nsp_codec.decode(msg_bytes) | ||||
|         pld: Raw = started_rt.pld | ||||
|         assert isinstance(pld, Raw) | ||||
|         nsp_rt: NamespacePath = nsp_pld_dec.decode(pld) | ||||
|         assert isinstance(nsp_rt, NamespacePath) | ||||
|         # in obj comparison terms they should be the same | ||||
|         assert nsp_rt == nsp | ||||
|         # ensure we've decoded to ext type! | ||||
|         assert nsp_rt.load_ref() is ex_func | ||||
| 
 | ||||
|     except TypeError: | ||||
|         if not add_codec_hooks: | ||||
|             pass | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_forever_in_sub( | ||||
|     ctx: Context, | ||||
| ) -> None: | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| def mk_custom_codec( | ||||
|     add_hooks: bool, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     MsgCodec,  # encode to send | ||||
|     MsgDec,  # pld receive-n-decode | ||||
| ]: | ||||
|     ''' | ||||
|     Create custom `msgpack` enc/dec-hooks and set a `Decoder` | ||||
|     which only loads `pld_spec` (like `NamespacePath`) types. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
|     # XXX NOTE XXX: despite defining `NamespacePath` as a type | ||||
|     # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair | ||||
|     # to cast to/from that type on the wire. See the docs: | ||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
| 
 | ||||
|     # if pld_spec is Any: | ||||
|     #     pld_spec = Raw | ||||
| 
 | ||||
|     nsp_codec: MsgCodec = mk_codec( | ||||
|         # ipc_pld_spec=Raw,  # default! | ||||
| 
 | ||||
|         # NOTE XXX: the encode hook MUST be used no matter what since | ||||
|         # our `NamespacePath` is not any of a `Any` native type nor | ||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||
|         # how to encode it unless we provide the custom hook. | ||||
|         # | ||||
|         # AGAIN that is, regardless of whether we spec an | ||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||
|         # how to enc `NamespacePath` (nsp), so we add a custom | ||||
|         # hook to do that ALWAYS. | ||||
|         enc_hook=enc_nsp if add_hooks else None, | ||||
| 
 | ||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||
|         # `Decoder`? so it won't work in tandem with the | ||||
|         # `ipc_pld_spec` passed above? | ||||
|         ext_types=[NamespacePath], | ||||
|     ) | ||||
|     # dec_hook=dec_nsp if add_hooks else None, | ||||
|     return nsp_codec | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'limit_plds_args', | ||||
|     [ | ||||
|         ( | ||||
|             {'dec_hook': None, 'ext_types': None}, | ||||
|             None, | ||||
|         ), | ||||
|         ( | ||||
|             {'dec_hook': dec_nsp, 'ext_types': None}, | ||||
|             TypeError, | ||||
|         ), | ||||
|         ( | ||||
|             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath]}, | ||||
|             None, | ||||
|         ), | ||||
|         ( | ||||
|             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]}, | ||||
|             None, | ||||
|         ), | ||||
|     ], | ||||
|     ids=[ | ||||
|         'no_hook_no_ext_types', | ||||
|         'only_hook', | ||||
|         'hook_and_ext_types', | ||||
|         'hook_and_ext_types_w_null', | ||||
|     ] | ||||
| ) | ||||
| def test_pld_limiting_usage( | ||||
|     limit_plds_args: tuple[dict, Exception|None], | ||||
| ): | ||||
|     ''' | ||||
|     Verify `dec_hook()` and `ext_types` need to either both be | ||||
|     provided or we raise a explanator type-error. | ||||
| 
 | ||||
|     ''' | ||||
|     kwargs, maybe_err = limit_plds_args | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an:  # just to open runtime | ||||
| 
 | ||||
|             # XXX SHOULD NEVER WORK outside an ipc ctx scope! | ||||
|             try: | ||||
|                 with limit_plds(**kwargs): | ||||
|                     pass | ||||
|             except RuntimeError: | ||||
|                 pass | ||||
| 
 | ||||
|             p: tractor.Portal = await an.start_actor( | ||||
|                 'sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             async with ( | ||||
|                 p.open_context( | ||||
|                     sleep_forever_in_sub | ||||
|                 ) as (ctx, first), | ||||
|             ): | ||||
|                 try: | ||||
|                     with limit_plds(**kwargs): | ||||
|                         pass | ||||
|                 except maybe_err as exc: | ||||
|                     assert type(exc) is maybe_err | ||||
|                     pass | ||||
| 
 | ||||
| 
 | ||||
| def chk_codec_applied( | ||||
|     expect_codec: MsgCodec|None, | ||||
|     enter_value: MsgCodec|None = None, | ||||
| 
 | ||||
| ) -> MsgCodec: | ||||
|     ''' | ||||
|     buncha sanity checks ensuring that the IPC channel's | ||||
|     context-vars are set to the expected codec and that are | ||||
|     ctx-var wrapper APIs match the same. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: play with tricyle again, bc this is supposed to work | ||||
|     # the way we want? | ||||
|     # | ||||
|     # TreeVar | ||||
|     # task: trio.Task = trio.lowlevel.current_task() | ||||
|     # curr_codec = _ctxvar_MsgCodec.get_in(task) | ||||
| 
 | ||||
|     # ContextVar | ||||
|     # task_ctx: Context = task.context | ||||
|     # assert _ctxvar_MsgCodec in task_ctx | ||||
|     # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec] | ||||
|     if expect_codec is None: | ||||
|         assert enter_value is None | ||||
|         return | ||||
| 
 | ||||
|     # NOTE: currently we use this! | ||||
|     # RunVar | ||||
|     curr_codec: MsgCodec = current_codec() | ||||
|     last_read_codec = _ctxvar_MsgCodec.get() | ||||
|     # assert curr_codec is last_read_codec | ||||
| 
 | ||||
|     assert ( | ||||
|         (same_codec := expect_codec) is | ||||
|         # returned from `mk_codec()` | ||||
| 
 | ||||
|         # yielded value from `apply_codec()` | ||||
| 
 | ||||
|         # read from current task's `contextvars.Context` | ||||
|         curr_codec is | ||||
|         last_read_codec | ||||
| 
 | ||||
|         # the default `msgspec` settings | ||||
|         is not _codec._def_msgspec_codec | ||||
|         is not _codec._def_tractor_codec | ||||
|     ) | ||||
| 
 | ||||
|     if enter_value: | ||||
|         assert enter_value is same_codec | ||||
| 
 | ||||
| 
 | ||||
@tractor.context
async def send_back_values(
    ctx: Context,
    rent_pld_spec_type_strs: list[str],
    add_hooks: bool,

) -> None:
    '''
    Setup up a custom codec to load instances of `NamespacePath`
    and ensure we can round trip a func ref with our parent.

    Child-side task for `test_ext_types_over_ipc`: decode the
    parent's pld-spec from its type-str-serialized form, optionally
    install `NamespacePath` enc/dec hooks to mirror the parent's
    config, then attempt to `.started()` and stream a set of values
    back, asserting the expected pass/fail outcome for each case.

    '''
    uid: tuple = tractor.current_actor().uid

    # init state in sub-actor should be default
    chk_codec_applied(
        expect_codec=_codec._def_tractor_codec,
    )

    # load pld spec from input str
    # NOTE: this module is passed as a lookup ns so type names
    # defined here (eg. `NamespacePath` re-exports) can resolve.
    rent_pld_spec = _exts.dec_type_union(
        rent_pld_spec_type_strs,
        mods=[
            importlib.import_module(__name__),
        ],
    )
    rent_pld_spec_types: set[Type] = _codec.unpack_spec_types(
        rent_pld_spec,
    )

    # ONLY add ext-hooks if the rent specified a non-std type!
    add_hooks: bool = (
        NamespacePath in rent_pld_spec_types
        and
        add_hooks
    )

    # same as on parent side config.
    nsp_codec: MsgCodec|None = None
    if add_hooks:
        nsp_codec = mk_codec(
            enc_hook=enc_nsp,
            ext_types=[NamespacePath],
        )

    with (
        maybe_apply_codec(nsp_codec) as codec,
        limit_plds(
            rent_pld_spec,
            dec_hook=dec_nsp if add_hooks else None,
            ext_types=[NamespacePath]  if add_hooks else None,
        ) as pld_dec,
    ):
        # ?XXX? SHOULD WE NOT be swapping the global codec since it
        # breaks `Context.started()` roundtripping checks??
        chk_codec_applied(
            expect_codec=nsp_codec,
            enter_value=codec,
        )

        # ?TODO, mismatch case(s)?
        #
        # ensure pld spec matches on both sides
        ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
        assert pld_dec is ctx_pld_dec
        child_pld_spec: Type = pld_dec.spec
        child_pld_spec_types: set[Type] = _codec.unpack_spec_types(
            child_pld_spec,
        )
        # child spec must cover at least everything the rent expects
        assert (
            child_pld_spec_types.issuperset(
                rent_pld_spec_types
            )
        )

        # ?TODO, try loop for each of the types in pld-superset?
        #
        # for send_value in [
        #     nsp,
        #     str(nsp),
        #     None,
        # ]:
        nsp = NamespacePath.from_ref(ex_func)
        try:
            print(
                f'{uid}: attempting to `.started({nsp})`\n'
                f'\n'
                f'rent_pld_spec: {rent_pld_spec}\n'
                f'child_pld_spec: {child_pld_spec}\n'
                f'codec: {codec}\n'
            )
            # await tractor.pause()
            await ctx.started(nsp)

        except tractor.MsgTypeError as _mte:
            mte = _mte

            # false -ve case: with hooks installed the `.started()`
            # value should have round-tripped fine.
            if add_hooks:
                raise RuntimeError(
                    f'EXPECTED to `.started()` value given spec ??\n\n'
                    f'child_pld_spec -> {child_pld_spec}\n'
                    f'value = {nsp}: {type(nsp)}\n'
                )

            # true -ve case
            raise mte

        # TODO: maybe we should add our own wrapper error so as to
        # be interchange-lib agnostic?
        # -[ ] the error type is wtv is raised from the hook so we
        #   could also require a type-class of errors for
        #   indicating whether the hook-failure can be handled by
        #   a nasty-dialog-unprot sub-sys?
        except TypeError as typerr:
            # false -ve
            if add_hooks:
                raise RuntimeError('Should have been able to send `nsp`??')

            # true -ve: no enc-hook set, so the `NamespacePath`
            # value can't be serialized by the codec.
            print('Failed to send `nsp` due to no ext hooks set!')
            raise typerr

        # now try sending a set of valid and invalid plds to ensure
        # the pld spec is respected.
        sent: list[Any] = []
        async with ctx.open_stream() as ipc:
            print(
                f'{uid}: streaming all pld types to rent..'
            )

            # for send_value, expect_send in iter_send_val_items:
            for send_value in [
                nsp,
                str(nsp),
                None,
            ]:
                send_type: Type = type(send_value)
                print(
                    f'{uid}: SENDING NEXT pld\n'
                    f'send_type: {send_type}\n'
                    f'send_value: {send_value}\n'
                )
                try:
                    await ipc.send(send_value)
                    sent.append(send_value)

                except ValidationError as valerr:
                    print(f'{uid} FAILED TO SEND {send_value}!')

                    # false -ve
                    if add_hooks:
                        raise RuntimeError(
                            f'EXPECTED to roundtrip value given spec:\n'
                            f'rent_pld_spec -> {rent_pld_spec}\n'
                            f'child_pld_spec -> {child_pld_spec}\n'
                            f'value = {send_value}: {send_type}\n'
                        )

                    # true -ve
                    raise valerr
                    # continue

            # `for`-`else`: only runs when the loop completed without
            # `break`, ie. every value was sent successfully.
            else:
                print(
                    f'{uid}: finished sending all values\n'
                    'Should be exiting stream block!\n'
                )

        print(f'{uid}: exited streaming block!')
| 
 | ||||
| 
 | ||||
| 
 | ||||
@cm
def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None:
    '''
    Optionally apply `codec` for the duration of the `with` block.

    When a codec is provided it is installed via `apply_codec()` and
    the applied instance is yielded; when `codec` is `None` nothing
    is applied and `None` is yielded instead.

    '''
    if codec is not None:
        with apply_codec(codec) as applied:
            yield applied
    else:
        yield None
| 
 | ||||
| 
 | ||||
@pytest.mark.parametrize(
    'pld_spec',
    [
        Any,
        NamespacePath,
        NamespacePath|None,  # the "maybe" spec Bo
    ],
    ids=[
        'any_type',
        'only_nsp_ext',
        'maybe_nsp_ext',
    ]
)
@pytest.mark.parametrize(
    'add_hooks',
    [
        True,
        False,
    ],
    ids=[
        'use_codec_hooks',
        'no_codec_hooks',
    ],
)
def test_ext_types_over_ipc(
    debug_mode: bool,
    pld_spec: Union[Type],
    add_hooks: bool,
):
    '''
    Ensure we can support extension types converted using
    `enc/dec_hook()`s passed to the `.msg.limit_plds()` API
    and that sane errors happen when we try do the same without
    the codec hooks.

    Parent side of the round-trip with the `send_back_values`
    child task: only the (nsp-in-spec AND hooks-added) combos are
    expected to succeed; all others should box a `TypeError` from
    the remote actor.

    '''
    pld_types: set[Type] = _codec.unpack_spec_types(pld_spec)

    async def main():

        # sanity check the default pld-spec beforehand
        chk_codec_applied(
            expect_codec=_codec._def_tractor_codec,
        )

        # extension type we want to send as msg payload
        nsp = NamespacePath.from_ref(ex_func)

        # ^NOTE, 2 cases:
        # - codec hooks noto added -> decode nsp as `str`
        # - codec with hooks -> decode nsp as `NamespacePath`
        nsp_codec: MsgCodec|None = None
        if (
            NamespacePath in pld_types
            and
            add_hooks
        ):
            nsp_codec = mk_codec(
                enc_hook=enc_nsp,
                ext_types=[NamespacePath],
            )

        async with tractor.open_nursery(
            debug_mode=debug_mode,
        ) as an:
            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )
            with (
                maybe_apply_codec(nsp_codec) as codec,
            ):
                chk_codec_applied(
                    expect_codec=nsp_codec,
                    enter_value=codec,
                )
                # serialize the spec as type-name strs so it can be
                # passed over IPC and re-loaded by the child.
                rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec)

                # XXX should raise an mte (`MsgTypeError`)
                # when `add_hooks == False` bc the input
                # `expect_ipc_send` kwarg has a nsp which can't be
                # serialized!
                #
                # TODO:can we ensure this happens from the
                # `Return`-side (aka the sub) as well?
                try:
                    ctx: tractor.Context
                    ipc: tractor.MsgStream
                    async with (

                        # XXX should raise an mte (`MsgTypeError`)
                        # when `add_hooks == False`..
                        p.open_context(
                            send_back_values,
                            # expect_debug=debug_mode,
                            rent_pld_spec_type_strs=rent_pld_spec_type_strs,
                            add_hooks=add_hooks,
                            # expect_ipc_send=expect_ipc_send,
                        ) as (ctx, first),

                        ctx.open_stream() as ipc,
                    ):
                        # mirror the child-side pld-spec limiting
                        with (
                            limit_plds(
                                pld_spec,
                                dec_hook=dec_nsp if add_hooks else None,
                                ext_types=[NamespacePath]  if add_hooks else None,
                            ) as pld_dec,
                        ):
                            ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
                            assert pld_dec is ctx_pld_dec

                            # if (
                            #     not add_hooks
                            #     and
                            #     NamespacePath in 
                            # ):
                            #     pytest.fail('ctx should fail to open without custom enc_hook!?')

                            # round trip the nsp value through the child
                            await ipc.send(nsp)
                            nsp_rt = await ipc.receive()

                            assert nsp_rt == nsp
                            assert nsp_rt.load_ref() is ex_func

                # this test passes bc we can go no further!
                except MsgTypeError as mte:
                    # if not add_hooks:
                    #     # teardown nursery
                    #     await p.cancel_actor()
                        # return

                    raise mte

            await p.cancel_actor()

    if (
        NamespacePath in pld_types
        and
        add_hooks
    ):
        # happy path: both sides have hooks and the spec allows nsp
        trio.run(main)

    else:
        # all other combos: the child's `.started(nsp)` should fail
        # and be relayed as a boxed remote error.
        with pytest.raises(
            expected_exception=tractor.RemoteActorError,
        ) as excinfo:
            trio.run(main)

        exc = excinfo.value
        # bc `.started(nsp: NamespacePath)` will raise
        assert exc.boxed_type is TypeError
| 
 | ||||
| 
 | ||||
| # def chk_pld_type( | ||||
| #     payload_spec: Type[Struct]|Any, | ||||
| #     pld: Any, | ||||
| 
 | ||||
| #     expect_roundtrip: bool|None = None, | ||||
| 
 | ||||
| # ) -> bool: | ||||
| 
 | ||||
| #     pld_val_type: Type = type(pld) | ||||
| 
 | ||||
| #     # TODO: verify that the overridden subtypes | ||||
| #     # DO NOT have modified type-annots from original! | ||||
| #     # 'Start',  .pld: FuncSpec | ||||
| #     # 'StartAck',  .pld: IpcCtxSpec | ||||
| #     # 'Stop',  .pld: UNSEt | ||||
| #     # 'Error',  .pld: ErrorData | ||||
| 
 | ||||
| #     codec: MsgCodec = mk_codec( | ||||
| #         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified | ||||
| #         # type union. | ||||
| #         ipc_pld_spec=payload_spec, | ||||
| #     ) | ||||
| 
 | ||||
| #     # make a one-off dec to compare with our `MsgCodec` instance | ||||
| #     # which does the below `mk_msg_spec()` call internally | ||||
| #     ipc_msg_spec: Union[Type[Struct]] | ||||
| #     msg_types: list[PayloadMsg[payload_spec]] | ||||
| #     ( | ||||
| #         ipc_msg_spec, | ||||
| #         msg_types, | ||||
| #     ) = mk_msg_spec( | ||||
| #         payload_type_union=payload_spec, | ||||
| #     ) | ||||
| #     _enc = msgpack.Encoder() | ||||
| #     _dec = msgpack.Decoder( | ||||
| #         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` | ||||
| #     ) | ||||
| 
 | ||||
| #     assert ( | ||||
| #         payload_spec | ||||
| #         == | ||||
| #         codec.pld_spec | ||||
| #     ) | ||||
| 
 | ||||
| #     # assert codec.dec == dec | ||||
| #     # | ||||
| #     # ^-XXX-^ not sure why these aren't "equal" but when cast | ||||
| #     # to `str` they seem to match ?? .. kk | ||||
| 
 | ||||
| #     assert ( | ||||
| #         str(ipc_msg_spec) | ||||
| #         == | ||||
| #         str(codec.msg_spec) | ||||
| #         == | ||||
| #         str(_dec.type) | ||||
| #         == | ||||
| #         str(codec.dec.type) | ||||
| #     ) | ||||
| 
 | ||||
| #     # verify the boxed-type for all variable payload-type msgs. | ||||
| #     if not msg_types: | ||||
| #         breakpoint() | ||||
| 
 | ||||
| #     roundtrip: bool|None = None | ||||
| #     pld_spec_msg_names: list[str] = [ | ||||
| #         td.__name__ for td in _payload_msgs | ||||
| #     ] | ||||
| #     for typedef in msg_types: | ||||
| 
 | ||||
| #         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names | ||||
| #         if skip_runtime_msg: | ||||
| #             continue | ||||
| 
 | ||||
| #         pld_field = structs.fields(typedef)[1] | ||||
| #         assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere? | ||||
| 
 | ||||
| #         kwargs: dict[str, Any] = { | ||||
| #             'cid': '666', | ||||
| #             'pld': pld, | ||||
| #         } | ||||
| #         enc_msg: PayloadMsg = typedef(**kwargs) | ||||
| 
 | ||||
| #         _wire_bytes: bytes = _enc.encode(enc_msg) | ||||
| #         wire_bytes: bytes = codec.enc.encode(enc_msg) | ||||
| #         assert _wire_bytes == wire_bytes | ||||
| 
 | ||||
| #         ve: ValidationError|None = None | ||||
| #         try: | ||||
| #             dec_msg = codec.dec.decode(wire_bytes) | ||||
| #             _dec_msg = _dec.decode(wire_bytes) | ||||
| 
 | ||||
| #             # decoded msg and thus payload should be exactly same! | ||||
| #             assert (roundtrip := ( | ||||
| #                 _dec_msg | ||||
| #                 == | ||||
| #                 dec_msg | ||||
| #                 == | ||||
| #                 enc_msg | ||||
| #             )) | ||||
| 
 | ||||
| #             if ( | ||||
| #                 expect_roundtrip is not None | ||||
| #                 and expect_roundtrip != roundtrip | ||||
| #             ): | ||||
| #                 breakpoint() | ||||
| 
 | ||||
| #             assert ( | ||||
| #                 pld | ||||
| #                 == | ||||
| #                 dec_msg.pld | ||||
| #                 == | ||||
| #                 enc_msg.pld | ||||
| #             ) | ||||
| #             # assert (roundtrip := (_dec_msg == enc_msg)) | ||||
| 
 | ||||
| #         except ValidationError as _ve: | ||||
| #             ve = _ve | ||||
| #             roundtrip: bool = False | ||||
| #             if pld_val_type is payload_spec: | ||||
| #                 raise ValueError( | ||||
| #                    'Got `ValidationError` despite type-var match!?\n' | ||||
| #                     f'pld_val_type: {pld_val_type}\n' | ||||
| #                     f'payload_type: {payload_spec}\n' | ||||
| #                 ) from ve | ||||
| 
 | ||||
| #             else: | ||||
| #                 # ow we good cuz the pld spec mismatched. | ||||
| #                 print( | ||||
| #                     'Got expected `ValidationError` since,\n' | ||||
| #                     f'{pld_val_type} is not {payload_spec}\n' | ||||
| #                 ) | ||||
| #         else: | ||||
| #             if ( | ||||
| #                 payload_spec is not Any | ||||
| #                 and | ||||
| #                 pld_val_type is not payload_spec | ||||
| #             ): | ||||
| #                 raise ValueError( | ||||
| #                    'DID NOT `ValidationError` despite expected type match!?\n' | ||||
| #                     f'pld_val_type: {pld_val_type}\n' | ||||
| #                     f'payload_type: {payload_spec}\n' | ||||
| #                 ) | ||||
| 
 | ||||
| #     # full code decode should always be attempted! | ||||
| #     if roundtrip is None: | ||||
| #         breakpoint() | ||||
| 
 | ||||
| #     return roundtrip | ||||
| 
 | ||||
| 
 | ||||
| # ?TODO? maybe remove since covered in the newer `test_pldrx_limiting` | ||||
| # via end-2-end testing of all this? | ||||
| # -[ ] IOW do we really NEED this lowlevel unit testing? | ||||
| # | ||||
| # def test_limit_msgspec( | ||||
| #     debug_mode: bool, | ||||
| # ): | ||||
| #     ''' | ||||
| #     Internals unit testing to verify that type-limiting an IPC ctx's | ||||
| #     msg spec with `Pldrx.limit_plds()` results in various | ||||
| #     encapsulated `msgspec` object settings and state. | ||||
| 
 | ||||
| #     ''' | ||||
| #     async def main(): | ||||
| #         async with tractor.open_root_actor( | ||||
| #             debug_mode=debug_mode, | ||||
| #         ): | ||||
| #             # ensure we can round-trip a boxing `PayloadMsg` | ||||
| #             assert chk_pld_type( | ||||
| #                 payload_spec=Any, | ||||
| #                 pld=None, | ||||
| #                 expect_roundtrip=True, | ||||
| #             ) | ||||
| 
 | ||||
| #             # verify that a mis-typed payload value won't decode | ||||
| #             assert not chk_pld_type( | ||||
| #                 payload_spec=int, | ||||
| #                 pld='doggy', | ||||
| #             ) | ||||
| 
 | ||||
| #             # parametrize the boxed `.pld` type as a custom-struct | ||||
| #             # and ensure that parametrization propagates | ||||
| #             # to all payload-msg-spec-able subtypes! | ||||
| #             class CustomPayload(Struct): | ||||
| #                 name: str | ||||
| #                 value: Any | ||||
| 
 | ||||
| #             assert not chk_pld_type( | ||||
| #                 payload_spec=CustomPayload, | ||||
| #                 pld='doggy', | ||||
| #             ) | ||||
| 
 | ||||
| #             assert chk_pld_type( | ||||
| #                 payload_spec=CustomPayload, | ||||
| #                 pld=CustomPayload(name='doggy', value='urmom') | ||||
| #             ) | ||||
| 
 | ||||
| #             # yah, we can `.pause_from_sync()` now! | ||||
| #             # breakpoint() | ||||
| 
 | ||||
| #     trio.run(main) | ||||
|  | @ -32,16 +32,6 @@ from tractor.trionics import BroadcastReceiver | |||
| from tractor._testing import expect_ctxc | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     scope='module', | ||||
| ) | ||||
| def delay(debug_mode: bool) -> int: | ||||
|     if debug_mode: | ||||
|         return 999 | ||||
|     else: | ||||
|         return 1 | ||||
| 
 | ||||
| 
 | ||||
| async def sleep_and_err( | ||||
|     sleep_for: float = 0.1, | ||||
| 
 | ||||
|  | @ -69,26 +59,20 @@ async def trio_cancels_single_aio_task(): | |||
|         await tractor.to_asyncio.run_task(aio_sleep_forever) | ||||
| 
 | ||||
| 
 | ||||
| def test_trio_cancels_aio_on_actor_side( | ||||
|     reg_addr: tuple[str, int], | ||||
|     delay: int, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_trio_cancels_aio_on_actor_side(reg_addr): | ||||
|     ''' | ||||
|     Spawn an infected actor that is cancelled by the ``trio`` side | ||||
|     task using std cancel scope apis. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after(1 + delay): | ||||
|             async with tractor.open_nursery( | ||||
|                 registry_addrs=[reg_addr], | ||||
|                 debug_mode=debug_mode, | ||||
|             ) as an: | ||||
|                 await an.run_in_actor( | ||||
|                     trio_cancels_single_aio_task, | ||||
|                     infect_asyncio=True, | ||||
|                 ) | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr] | ||||
|         ) as n: | ||||
|             await n.run_in_actor( | ||||
|                 trio_cancels_single_aio_task, | ||||
|                 infect_asyncio=True, | ||||
|             ) | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
|  | @ -132,10 +116,7 @@ async def asyncio_actor( | |||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| def test_aio_simple_error( | ||||
|     reg_addr: tuple[str, int], | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_aio_simple_error(reg_addr): | ||||
|     ''' | ||||
|     Verify a simple remote asyncio error propagates back through trio | ||||
|     to the parent actor. | ||||
|  | @ -144,10 +125,9 @@ def test_aio_simple_error( | |||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             registry_addrs=[reg_addr], | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             await an.run_in_actor( | ||||
|             registry_addrs=[reg_addr] | ||||
|         ) as n: | ||||
|             await n.run_in_actor( | ||||
|                 asyncio_actor, | ||||
|                 target='sleep_and_err', | ||||
|                 expect_err='AssertionError', | ||||
|  | @ -173,19 +153,14 @@ def test_aio_simple_error( | |||
|     assert err.boxed_type is AssertionError | ||||
| 
 | ||||
| 
 | ||||
| def test_tractor_cancels_aio( | ||||
|     reg_addr: tuple[str, int], | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_tractor_cancels_aio(reg_addr): | ||||
|     ''' | ||||
|     Verify we can cancel a spawned asyncio task gracefully. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.run_in_actor( | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 asyncio_actor, | ||||
|                 target='aio_sleep_forever', | ||||
|                 expect_err='trio.Cancelled', | ||||
|  | @ -197,9 +172,7 @@ def test_tractor_cancels_aio( | |||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_trio_cancels_aio( | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| def test_trio_cancels_aio(reg_addr): | ||||
|     ''' | ||||
|     Much like the above test with ``tractor.Portal.cancel_actor()`` | ||||
|     except we just use a standard ``trio`` cancellation api. | ||||
|  | @ -230,12 +203,13 @@ async def trio_ctx( | |||
| 
 | ||||
|     # this will block until the ``asyncio`` task sends a "first" | ||||
|     # message. | ||||
|     delay: int = 999 if tractor.debug_mode() else 1 | ||||
|     with trio.fail_after(1 + delay): | ||||
|     with trio.fail_after(2): | ||||
|         try: | ||||
|             async with ( | ||||
|                 tractor.trionics.collapse_eg(), | ||||
|                 trio.open_nursery() as tn, | ||||
|                 trio.open_nursery( | ||||
|                     # TODO, for new `trio` / py3.13 | ||||
|                     # strict_exception_groups=False, | ||||
|                 ) as tn, | ||||
|                 tractor.to_asyncio.open_channel_from( | ||||
|                     sleep_and_err, | ||||
|                 ) as (first, chan), | ||||
|  | @ -265,10 +239,8 @@ async def trio_ctx( | |||
|     ids='parent_actor_cancels_child={}'.format | ||||
| ) | ||||
| def test_context_spawns_aio_task_that_errors( | ||||
|     reg_addr: tuple[str, int], | ||||
|     delay: int, | ||||
|     reg_addr, | ||||
|     parent_cancels: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that spawning a task via an intertask channel ctx mngr that | ||||
|  | @ -277,13 +249,13 @@ def test_context_spawns_aio_task_that_errors( | |||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after(1 + delay): | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 p = await an.start_actor( | ||||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery() as n: | ||||
|                 p = await n.start_actor( | ||||
|                     'aio_daemon', | ||||
|                     enable_modules=[__name__], | ||||
|                     infect_asyncio=True, | ||||
|                     debug_mode=debug_mode, | ||||
|                     # debug_mode=True, | ||||
|                     loglevel='cancel', | ||||
|                 ) | ||||
|                 async with ( | ||||
|  | @ -350,12 +322,11 @@ async def aio_cancel(): | |||
| 
 | ||||
| def test_aio_cancelled_from_aio_causes_trio_cancelled( | ||||
|     reg_addr: tuple, | ||||
|     delay: int, | ||||
| ): | ||||
|     ''' | ||||
|     When the `asyncio.Task` cancels itself the `trio` side should | ||||
|     When the `asyncio.Task` cancels itself the `trio` side cshould | ||||
|     also cancel and teardown and relay the cancellation cross-process | ||||
|     to the parent caller. | ||||
|     to the caller (parent). | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|  | @ -371,7 +342,7 @@ def test_aio_cancelled_from_aio_causes_trio_cancelled( | |||
|             # NOTE: normally the `an.__aexit__()` waits on the | ||||
|             # portal's result but we do it explicitly here | ||||
|             # to avoid indent levels. | ||||
|             with trio.fail_after(1 + delay): | ||||
|             with trio.fail_after(1): | ||||
|                 await p.wait_for_result() | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|  | @ -382,10 +353,11 @@ def test_aio_cancelled_from_aio_causes_trio_cancelled( | |||
|     # might get multiple `trio.Cancelled`s as well inside an inception | ||||
|     err: RemoteActorError|ExceptionGroup = excinfo.value | ||||
|     if isinstance(err, ExceptionGroup): | ||||
|         excs = err.exceptions | ||||
|         assert len(excs) == 1 | ||||
|         final_exc = excs[0] | ||||
|         assert isinstance(final_exc, tractor.RemoteActorError) | ||||
|         err = next(itertools.dropwhile( | ||||
|             lambda exc: not isinstance(exc, tractor.RemoteActorError), | ||||
|             err.exceptions | ||||
|         )) | ||||
|         assert err | ||||
| 
 | ||||
|     # relayed boxed error should be our `trio`-task's | ||||
|     # cancel-signal-proxy-equivalent of `asyncio.CancelledError`. | ||||
|  | @ -398,18 +370,15 @@ async def no_to_trio_in_args(): | |||
| 
 | ||||
| 
 | ||||
| async def push_from_aio_task( | ||||
| 
 | ||||
|     sequence: Iterable, | ||||
|     to_trio: trio.abc.SendChannel, | ||||
|     expect_cancel: False, | ||||
|     fail_early: bool, | ||||
|     exit_early: bool, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     try: | ||||
|         # print('trying breakpoint') | ||||
|         # breakpoint() | ||||
| 
 | ||||
|         # sync caller ctx manager | ||||
|         to_trio.send_nowait(True) | ||||
| 
 | ||||
|  | @ -418,27 +387,10 @@ async def push_from_aio_task( | |||
|             to_trio.send_nowait(i) | ||||
|             await asyncio.sleep(0.001) | ||||
| 
 | ||||
|             if ( | ||||
|                 i == 50 | ||||
|             ): | ||||
|                 if fail_early: | ||||
|                     print('Raising exc from aio side!') | ||||
|                     raise Exception | ||||
|             if i == 50 and fail_early: | ||||
|                 raise Exception | ||||
| 
 | ||||
|                 if exit_early: | ||||
|                     # TODO? really you could enforce the same | ||||
|                     # SC-proto we use for actors here with asyncio | ||||
|                     # such that a Return[None] msg would be | ||||
|                     # implicitly delivered to the trio side? | ||||
|                     # | ||||
|                     # XXX => this might be the end-all soln for | ||||
|                     # converting any-inter-task system (regardless | ||||
|                     # of maybe-remote runtime or language) to be | ||||
|                     # SC-compat no? | ||||
|                     print(f'asyncio breaking early @ {i!r}') | ||||
|                     break | ||||
| 
 | ||||
|         print('asyncio streaming complete!') | ||||
|         print('asyncio streamer complete!') | ||||
| 
 | ||||
|     except asyncio.CancelledError: | ||||
|         if not expect_cancel: | ||||
|  | @ -450,10 +402,9 @@ async def push_from_aio_task( | |||
| 
 | ||||
| 
 | ||||
| async def stream_from_aio( | ||||
|     trio_exit_early: bool = False, | ||||
|     trio_raise_err: bool = False, | ||||
|     exit_early: bool = False, | ||||
|     raise_err: bool = False, | ||||
|     aio_raise_err: bool = False, | ||||
|     aio_exit_early: bool = False, | ||||
|     fan_out: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -466,17 +417,8 @@ async def stream_from_aio( | |||
|         async with to_asyncio.open_channel_from( | ||||
|             push_from_aio_task, | ||||
|             sequence=seq, | ||||
|             expect_cancel=trio_raise_err or trio_exit_early, | ||||
|             expect_cancel=raise_err or exit_early, | ||||
|             fail_early=aio_raise_err, | ||||
|             exit_early=aio_exit_early, | ||||
| 
 | ||||
|             # such that we can test exit early cases | ||||
|             # for each side explicitly. | ||||
|             suppress_graceful_exits=(not( | ||||
|                 aio_exit_early | ||||
|                 or | ||||
|                 trio_exit_early | ||||
|             )) | ||||
| 
 | ||||
|         ) as (first, chan): | ||||
| 
 | ||||
|  | @ -489,19 +431,13 @@ async def stream_from_aio( | |||
|                 ], | ||||
|             ): | ||||
|                 async for value in chan: | ||||
|                     print(f'trio received: {value!r}') | ||||
| 
 | ||||
|                     # XXX, debugging EoC not being handled correctly | ||||
|                     # in `transate_aio_errors()`.. | ||||
|                     # if value is None: | ||||
|                     #     await tractor.pause(shield=True) | ||||
| 
 | ||||
|                     print(f'trio received {value}') | ||||
|                     pulled.append(value) | ||||
| 
 | ||||
|                     if value == 50: | ||||
|                         if trio_raise_err: | ||||
|                         if raise_err: | ||||
|                             raise Exception | ||||
|                         elif trio_exit_early: | ||||
|                         elif exit_early: | ||||
|                             print('`consume()` breaking early!\n') | ||||
|                             break | ||||
| 
 | ||||
|  | @ -518,11 +454,11 @@ async def stream_from_aio( | |||
|                     # tasks are joined.. | ||||
|                     chan.subscribe() as br, | ||||
| 
 | ||||
|                     trio.open_nursery() as tn, | ||||
|                     trio.open_nursery() as n, | ||||
|                 ): | ||||
|                     # start 2nd task that get's broadcast the same | ||||
|                     # value set. | ||||
|                     tn.start_soon(consume, br) | ||||
|                     n.start_soon(consume, br) | ||||
|                     await consume(chan) | ||||
| 
 | ||||
|             else: | ||||
|  | @ -535,14 +471,10 @@ async def stream_from_aio( | |||
| 
 | ||||
|     finally: | ||||
| 
 | ||||
|         if not ( | ||||
|             trio_raise_err | ||||
|             or | ||||
|             trio_exit_early | ||||
|             or | ||||
|             aio_raise_err | ||||
|             or | ||||
|             aio_exit_early | ||||
|         if ( | ||||
|             not raise_err and | ||||
|             not exit_early and | ||||
|             not aio_raise_err | ||||
|         ): | ||||
|             if fan_out: | ||||
|                 # we get double the pulled values in the | ||||
|  | @ -552,7 +484,6 @@ async def stream_from_aio( | |||
|                 assert list(sorted(pulled)) == expect | ||||
| 
 | ||||
|             else: | ||||
|                 # await tractor.pause() | ||||
|                 assert pulled == expect | ||||
|         else: | ||||
|             assert not fan_out | ||||
|  | @ -566,21 +497,16 @@ async def stream_from_aio( | |||
|     'fan_out', [False, True], | ||||
|     ids='fan_out_w_chan_subscribe={}'.format | ||||
| ) | ||||
| def test_basic_interloop_channel_stream( | ||||
|     reg_addr: tuple[str, int], | ||||
|     fan_out: bool, | ||||
| ): | ||||
| def test_basic_interloop_channel_stream(reg_addr, fan_out): | ||||
|     async def main(): | ||||
|         # TODO, figure out min timeout here! | ||||
|         with trio.fail_after(6): | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 portal = await an.run_in_actor( | ||||
|                     stream_from_aio, | ||||
|                     infect_asyncio=True, | ||||
|                     fan_out=fan_out, | ||||
|                 ) | ||||
|                 # should raise RAE diectly | ||||
|                 await portal.result() | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 stream_from_aio, | ||||
|                 infect_asyncio=True, | ||||
|                 fan_out=fan_out, | ||||
|             ) | ||||
|             # should raise RAE diectly | ||||
|             await portal.result() | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
|  | @ -588,10 +514,10 @@ def test_basic_interloop_channel_stream( | |||
| # TODO: parametrize the above test and avoid the duplication here? | ||||
| def test_trio_error_cancels_intertask_chan(reg_addr): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
|             portal = await an.run_in_actor( | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 stream_from_aio, | ||||
|                 trio_raise_err=True, | ||||
|                 raise_err=True, | ||||
|                 infect_asyncio=True, | ||||
|             ) | ||||
|             # should trigger remote actor error | ||||
|  | @ -604,116 +530,43 @@ def test_trio_error_cancels_intertask_chan(reg_addr): | |||
|     excinfo.value.boxed_type is Exception | ||||
| 
 | ||||
| 
 | ||||
| def test_trio_closes_early_causes_aio_checkpoint_raise( | ||||
| def test_trio_closes_early_and_channel_exits( | ||||
|     reg_addr: tuple[str, int], | ||||
|     delay: int, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Check that if the `trio`-task "exits early and silently" (in this | ||||
|     case during `async for`-ing the inter-task-channel via | ||||
|     a `break`-from-loop), we raise `TrioTaskExited` on the | ||||
|     `asyncio`-side which also then bubbles up through the | ||||
|     `open_channel_from()` block indicating that the `asyncio.Task` | ||||
|     hit a ran another checkpoint despite the `trio.Task` exit. | ||||
|     Check that if the `trio`-task "exits early" on `async for`ing the | ||||
|     inter-task-channel (via a `break`) we exit silently from the | ||||
|     `open_channel_from()` block and get a final `Return[None]` msg. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after(1 + delay): | ||||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery( | ||||
|                 debug_mode=debug_mode, | ||||
|                 # debug_mode=True, | ||||
|                 # enable_stack_on_sig=True, | ||||
|             ) as an: | ||||
|                 portal = await an.run_in_actor( | ||||
|             ) as n: | ||||
|                 portal = await n.run_in_actor( | ||||
|                     stream_from_aio, | ||||
|                     trio_exit_early=True, | ||||
|                     exit_early=True, | ||||
|                     infect_asyncio=True, | ||||
|                 ) | ||||
|                 # should raise RAE diectly | ||||
|                 print('waiting on final infected subactor result..') | ||||
|                 res: None = await portal.wait_for_result() | ||||
|                 assert res is None | ||||
|                 print(f'infected subactor returned result: {res!r}\n') | ||||
|                 print('infected subactor returned result: {res!r}\n') | ||||
| 
 | ||||
|     # should be a quiet exit on a simple channel exit | ||||
|     with pytest.raises(RemoteActorError) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     # ensure remote error is an explicit `AsyncioCancelled` sub-type | ||||
|     # which indicates to the aio task that the trio side exited | ||||
|     # silently WITHOUT raising a `trio.Cancelled` (which would | ||||
|     # normally be raised instead as a `AsyncioCancelled`). | ||||
|     excinfo.value.boxed_type is to_asyncio.TrioTaskExited | ||||
|     trio.run( | ||||
|         main, | ||||
|         # strict_exception_groups=False, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def test_aio_exits_early_relays_AsyncioTaskExited( | ||||
|     # TODO, parametrize the 3 possible trio side conditions: | ||||
|     # - trio blocking on receive, aio exits early | ||||
|     # - trio cancelled AND aio exits early on its next tick | ||||
|     # - trio errors AND aio exits early on its next tick | ||||
|     reg_addr: tuple[str, int], | ||||
|     debug_mode: bool, | ||||
|     delay: int, | ||||
| ): | ||||
|     ''' | ||||
|     Check that if the `asyncio`-task "exits early and silently" (in this | ||||
|     case during `push_from_aio_task()` pushing to the `InterLoopTaskChannel` | ||||
|     it `break`s from the loop), we raise `AsyncioTaskExited` on the | ||||
|     `trio`-side which then DOES NOT BUBBLE up through the | ||||
|     `open_channel_from()` block UNLESS, | ||||
| 
 | ||||
|     - the trio.Task also errored/cancelled, in which case we wrap | ||||
|       both errors in an eg | ||||
|     - the trio.Task was blocking on rxing a value from the | ||||
|       `InterLoopTaskChannel`. | ||||
| 
 | ||||
|     ''' | ||||
| def test_aio_errors_and_channel_propagates_and_closes(reg_addr): | ||||
|     async def main(): | ||||
|         with trio.fail_after(1 + delay): | ||||
|             async with tractor.open_nursery( | ||||
|                 debug_mode=debug_mode, | ||||
|                 # enable_stack_on_sig=True, | ||||
|             ) as an: | ||||
|                 portal = await an.run_in_actor( | ||||
|                     stream_from_aio, | ||||
|                     infect_asyncio=True, | ||||
|                     trio_exit_early=False, | ||||
|                     aio_exit_early=True, | ||||
|                 ) | ||||
|                 # should raise RAE diectly | ||||
|                 print('waiting on final infected subactor result..') | ||||
|                 res: None = await portal.wait_for_result() | ||||
|                 assert res is None | ||||
|                 print(f'infected subactor returned result: {res!r}\n') | ||||
| 
 | ||||
|     # should be a quiet exit on a simple channel exit | ||||
|     with pytest.raises(RemoteActorError) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     exc = excinfo.value | ||||
| 
 | ||||
|     # TODO, wow bug! | ||||
|     # -[ ] bp handler not replaced!?!? | ||||
|     # breakpoint() | ||||
| 
 | ||||
|     # import pdbp; pdbp.set_trace() | ||||
| 
 | ||||
|     # ensure remote error is an explicit `AsyncioCancelled` sub-type | ||||
|     # which indicates to the aio task that the trio side exited | ||||
|     # silently WITHOUT raising a `trio.Cancelled` (which would | ||||
|     # normally be raised instead as a `AsyncioCancelled`). | ||||
|     assert exc.boxed_type is to_asyncio.AsyncioTaskExited | ||||
| 
 | ||||
| 
 | ||||
| def test_aio_errors_and_channel_propagates_and_closes( | ||||
|     reg_addr: tuple[str, int], | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.run_in_actor( | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 stream_from_aio, | ||||
|                 aio_raise_err=True, | ||||
|                 infect_asyncio=True, | ||||
|  | @ -739,13 +592,7 @@ async def aio_echo_server( | |||
|     to_trio.send_nowait('start') | ||||
| 
 | ||||
|     while True: | ||||
|         try: | ||||
|             msg = await from_trio.get() | ||||
|         except to_asyncio.TrioTaskExited: | ||||
|             print( | ||||
|                 'breaking aio echo loop due to `trio` exit!' | ||||
|             ) | ||||
|             break | ||||
|         msg = await from_trio.get() | ||||
| 
 | ||||
|         # echo the msg back | ||||
|         to_trio.send_nowait(msg) | ||||
|  | @ -794,15 +641,13 @@ async def trio_to_aio_echo_server( | |||
|     ids='raise_error={}'.format, | ||||
| ) | ||||
| def test_echoserver_detailed_mechanics( | ||||
|     reg_addr: tuple[str, int], | ||||
|     debug_mode: bool, | ||||
|     reg_addr, | ||||
|     raise_error_mid_stream, | ||||
| ): | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             p = await an.start_actor( | ||||
|         async with tractor.open_nursery() as n: | ||||
|             p = await n.start_actor( | ||||
|                 'aio_server', | ||||
|                 enable_modules=[__name__], | ||||
|                 infect_asyncio=True, | ||||
|  | @ -889,7 +734,7 @@ async def manage_file( | |||
| 
 | ||||
|         # NOTE: turns out you don't even need to sched an aio task | ||||
|         # since the original issue, even though seemingly was due to | ||||
|         # the guest-run being abandoned + a `.debug.pause()` inside | ||||
|         # the guest-run being abandoned + a `._debug.pause()` inside | ||||
|         # `._runtime._async_main()` (which was originally trying to | ||||
|         # debug the `.lifetime_stack` not closing), IS NOT actually | ||||
|         # the core issue? | ||||
|  | @ -1007,8 +852,6 @@ def test_sigint_closes_lifetime_stack( | |||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
| 
 | ||||
|         delay = 999 if tractor.debug_mode() else 1 | ||||
|         try: | ||||
|             an: tractor.ActorNursery | ||||
|             async with tractor.open_nursery( | ||||
|  | @ -1059,7 +902,7 @@ def test_sigint_closes_lifetime_stack( | |||
|                     if wait_for_ctx: | ||||
|                         print('waiting for ctx outcome in parent..') | ||||
|                         try: | ||||
|                             with trio.fail_after(1 + delay): | ||||
|                             with trio.fail_after(1): | ||||
|                                 await ctx.wait_for_result() | ||||
|                         except tractor.ContextCancelled as ctxc: | ||||
|                             assert ctxc.canceller == ctx.chan.uid | ||||
|  | @ -1088,108 +931,6 @@ def test_sigint_closes_lifetime_stack( | |||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| # ?TODO asyncio.Task fn-deco? | ||||
| # -[ ] do sig checkingat import time like @context? | ||||
| # -[ ] maybe name it @aio_task ?? | ||||
| # -[ ] chan: to_asyncio.InterloopChannel ?? | ||||
| async def raise_before_started( | ||||
|     # from_trio: asyncio.Queue, | ||||
|     # to_trio: trio.abc.SendChannel, | ||||
|     chan: to_asyncio.LinkedTaskChannel, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     `asyncio.Task` entry point which RTEs before calling | ||||
|     `to_trio.send_nowait()`. | ||||
| 
 | ||||
|     ''' | ||||
|     await asyncio.sleep(0.2) | ||||
|     raise RuntimeError('Some shite went wrong before `.send_nowait()`!!') | ||||
| 
 | ||||
|     # to_trio.send_nowait('Uhh we shouldve RTE-d ^^ ??') | ||||
|     chan.started_nowait('Uhh we shouldve RTE-d ^^ ??') | ||||
|     await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def caching_ep( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
| 
 | ||||
|     log = tractor.log.get_logger('caching_ep') | ||||
|     log.info('syncing via `ctx.started()`') | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     # XXX, allocate the `open_channel_from()` inside | ||||
|     # a `.trionics.maybe_open_context()`. | ||||
|     chan: to_asyncio.LinkedTaskChannel | ||||
|     async with ( | ||||
|         tractor.trionics.maybe_open_context( | ||||
|             acm_func=tractor.to_asyncio.open_channel_from, | ||||
|             kwargs={ | ||||
|                 'target': raise_before_started, | ||||
|                 # ^XXX, kwarg to `open_channel_from()` | ||||
|             }, | ||||
| 
 | ||||
|             # lock around current actor task access | ||||
|             key=tractor.current_actor().uid, | ||||
| 
 | ||||
|         ) as (cache_hit, (clients, chan)), | ||||
|     ): | ||||
|         if cache_hit: | ||||
|             log.error( | ||||
|                 'Re-using cached `.open_from_channel()` call!\n' | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|             log.info( | ||||
|                 'Allocating SHOULD-FAIL `.open_from_channel()`\n' | ||||
|             ) | ||||
| 
 | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| def test_aio_side_raises_before_started( | ||||
|     reg_addr: tuple[str, int], | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Simulates connection-err from `piker.brokers.ib.api`.. | ||||
| 
 | ||||
|     Ensure any error raised by child-`asyncio.Task` BEFORE | ||||
|     `chan.started()` | ||||
| 
 | ||||
|     ''' | ||||
|     # delay = 999 if debug_mode else 1 | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|             an: tractor.ActorNursery | ||||
|             async with tractor.open_nursery( | ||||
|                 debug_mode=debug_mode, | ||||
|                 loglevel=loglevel, | ||||
|             ) as an: | ||||
|                 p: tractor.Portal = await an.start_actor( | ||||
|                     'lchan_cacher_that_raises_fast', | ||||
|                     enable_modules=[__name__], | ||||
|                     infect_asyncio=True, | ||||
|                 ) | ||||
|                 async with p.open_context( | ||||
|                     caching_ep, | ||||
|                 ) as (ctx, first): | ||||
|                     assert not first | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=(RemoteActorError), | ||||
|     ) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     # ensure `asyncio.Task` exception is bubbled | ||||
|     # allll the way erp!! | ||||
|     rae = excinfo.value | ||||
|     assert rae.boxed_type is RuntimeError | ||||
| 
 | ||||
| # TODO: debug_mode tests once we get support for `asyncio`! | ||||
| # | ||||
| # -[ ] need tests to wrap both scripts: | ||||
|  | @ -1203,7 +944,7 @@ def test_aio_side_raises_before_started( | |||
| #    => completed using `.bestow_portal(task)` inside | ||||
| #     `.to_asyncio._run_asyncio_task()` right? | ||||
| #   -[ ] translation func to get from `asyncio` task calling to  | ||||
| #     `.debug.wait_for_parent_stdin_hijack()` which does root | ||||
| #     `._debug.wait_for_parent_stdin_hijack()` which does root | ||||
| #     call to do TTY locking. | ||||
| # | ||||
| def test_sync_breakpoint(): | ||||
|  |  | |||
|  | @ -24,10 +24,14 @@ from tractor._testing import ( | |||
| ) | ||||
| 
 | ||||
| # XXX TODO cases: | ||||
| # - [ ] peer cancelled itself - so other peers should | ||||
| #   get errors reflecting that the peer was itself the .canceller? | ||||
| 
 | ||||
| # - [x] WE cancelled the peer and thus should not see any raised | ||||
| #   `ContextCancelled` as it should be reaped silently? | ||||
| #   => pretty sure `test_context_stream_semantics::test_caller_cancels()` | ||||
| #      already covers this case? | ||||
| 
 | ||||
| # - [x] INTER-PEER: some arbitrary remote peer cancels via | ||||
| #   Portal.cancel_actor(). | ||||
| #   => all other connected peers should get that cancel requesting peer's | ||||
|  | @ -40,6 +44,16 @@ from tractor._testing import ( | |||
| #   that also spawned a remote task task in that same peer-parent. | ||||
| 
 | ||||
| 
 | ||||
| # def test_self_cancel(): | ||||
| #     ''' | ||||
| #     2 cases: | ||||
| #     - calls `Actor.cancel()` locally in some task | ||||
| #     - calls LocalPortal.cancel_actor()` ? | ||||
| 
 | ||||
| #     ''' | ||||
| #     ... | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_stream_then_sleep_forever( | ||||
|     ctx: Context, | ||||
|  | @ -156,7 +170,7 @@ def test_do_not_swallow_error_before_started_by_remote_contextcancelled( | |||
|         trio.run(main) | ||||
| 
 | ||||
|     rae = excinfo.value | ||||
|     assert rae.boxed_type is TypeError | ||||
|     assert rae.boxed_type == TypeError | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -396,6 +410,7 @@ def test_peer_canceller( | |||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             # NOTE: to halt the peer tasks on ctxc, uncomment this. | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             canceller: Portal = await an.start_actor( | ||||
|  | @ -792,7 +807,7 @@ async def basic_echo_server( | |||
|     ctx: Context, | ||||
|     peer_name: str = 'wittle_bruv', | ||||
| 
 | ||||
|     err_after_imsg: int|None = None, | ||||
|     err_after: int|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -821,9 +836,8 @@ async def basic_echo_server( | |||
|             await ipc.send(resp) | ||||
| 
 | ||||
|             if ( | ||||
|                 err_after_imsg | ||||
|                 and | ||||
|                 i > err_after_imsg | ||||
|                 err_after | ||||
|                 and i > err_after | ||||
|             ): | ||||
|                 raise RuntimeError( | ||||
|                     f'Simulated error in `{peer_name}`' | ||||
|  | @ -857,7 +871,7 @@ async def serve_subactors( | |||
|                 ) | ||||
|                 await ipc.send(( | ||||
|                     peer.chan.uid, | ||||
|                     peer.chan.raddr.unwrap(), | ||||
|                     peer.chan.raddr, | ||||
|                 )) | ||||
| 
 | ||||
|         print('Spawner exiting spawn serve loop!') | ||||
|  | @ -965,8 +979,7 @@ async def tell_little_bro( | |||
|     actor_name: str, | ||||
| 
 | ||||
|     caller: str = '', | ||||
|     err_after: float|None = None, | ||||
|     rng_seed: int = 50, | ||||
|     err_after: int|None = None, | ||||
| ): | ||||
|     # contact target actor, do a stream dialog. | ||||
|     async with ( | ||||
|  | @ -977,18 +990,14 @@ async def tell_little_bro( | |||
|             basic_echo_server, | ||||
| 
 | ||||
|             # XXX proxy any delayed err condition | ||||
|             err_after_imsg=( | ||||
|                 err_after * rng_seed | ||||
|                 if err_after is not None | ||||
|                 else None | ||||
|             ), | ||||
|             err_after=err_after, | ||||
|         ) as (sub_ctx, first), | ||||
| 
 | ||||
|         sub_ctx.open_stream() as echo_ipc, | ||||
|     ): | ||||
|         actor: Actor = current_actor() | ||||
|         uid: tuple = actor.uid | ||||
|         for i in range(rng_seed): | ||||
|         for i in range(100): | ||||
|             msg: tuple = ( | ||||
|                 uid, | ||||
|                 i, | ||||
|  | @ -1013,13 +1022,13 @@ async def tell_little_bro( | |||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_sub_spawn_error_after', | ||||
|     [None, 0.5], | ||||
|     [None, 50], | ||||
| ) | ||||
| def test_peer_spawns_and_cancels_service_subactor( | ||||
|     debug_mode: bool, | ||||
|     raise_client_error: str, | ||||
|     reg_addr: tuple[str, int], | ||||
|     raise_sub_spawn_error_after: float|None, | ||||
|     raise_sub_spawn_error_after: int|None, | ||||
| ): | ||||
|     # NOTE: this tests for the modden `mod wks open piker` bug | ||||
|     # discovered as part of implementing workspace ctx | ||||
|  | @ -1033,7 +1042,6 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|     #   and the server's spawned child should cancel and terminate! | ||||
|     peer_name: str = 'little_bro' | ||||
| 
 | ||||
| 
 | ||||
|     def check_inner_rte(rae: RemoteActorError): | ||||
|         ''' | ||||
|         Validate the little_bro's relayed inception! | ||||
|  | @ -1127,7 +1135,8 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|                         ) | ||||
| 
 | ||||
|                     try: | ||||
|                         res = await client_ctx.wait_for_result(hide_tb=False) | ||||
|                         res = await client_ctx.result(hide_tb=False) | ||||
| 
 | ||||
|                         # in remote (relayed inception) error | ||||
|                         # case, we should error on the line above! | ||||
|                         if raise_sub_spawn_error_after: | ||||
|  | @ -1138,23 +1147,6 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|                         assert isinstance(res, ContextCancelled) | ||||
|                         assert client_ctx.cancel_acked | ||||
|                         assert res.canceller == root.uid | ||||
|                         assert not raise_sub_spawn_error_after | ||||
| 
 | ||||
|                         # cancelling the spawner sub should | ||||
|                         # transitively cancel it's sub, the little | ||||
|                         # bruv. | ||||
|                         print('root cancelling server/client sub-actors') | ||||
|                         await spawn_ctx.cancel() | ||||
|                         async with tractor.find_actor( | ||||
|                             name=peer_name, | ||||
|                         ) as sub: | ||||
|                             assert not sub | ||||
| 
 | ||||
|                     # XXX, only for tracing | ||||
|                     # except BaseException as _berr: | ||||
|                     #     berr = _berr | ||||
|                     #     await tractor.pause(shield=True) | ||||
|                     #     raise berr | ||||
| 
 | ||||
|                     except RemoteActorError as rae: | ||||
|                         _err = rae | ||||
|  | @ -1183,8 +1175,19 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|                         raise | ||||
|                         # await tractor.pause() | ||||
| 
 | ||||
|                     else: | ||||
|                         assert not raise_sub_spawn_error_after | ||||
| 
 | ||||
|                         # cancelling the spawner sub should | ||||
|                         # transitively cancel it's sub, the little | ||||
|                         # bruv. | ||||
|                         print('root cancelling server/client sub-actors') | ||||
|                         await spawn_ctx.cancel() | ||||
|                         async with tractor.find_actor( | ||||
|                             name=peer_name, | ||||
|                         ) as sub: | ||||
|                             assert not sub | ||||
| 
 | ||||
|                     # await tractor.pause() | ||||
|                     # await server.cancel_actor() | ||||
| 
 | ||||
|             except RemoteActorError as rae: | ||||
|  | @ -1197,7 +1200,7 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
| 
 | ||||
|             # since we called `.cancel_actor()`, `.cancel_ack` | ||||
|             # will not be set on the ctx bc `ctx.cancel()` was not | ||||
|             # called directly for this confext. | ||||
|             # called directly fot this confext. | ||||
|             except ContextCancelled as ctxc: | ||||
|                 _ctxc = ctxc | ||||
|                 print( | ||||
|  | @ -1237,19 +1240,12 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
| 
 | ||||
|                 # assert spawn_ctx.cancelled_caught | ||||
| 
 | ||||
|     async def _main(): | ||||
|         with trio.fail_after( | ||||
|             3 if not debug_mode | ||||
|             else 999 | ||||
|         ): | ||||
|             await main() | ||||
| 
 | ||||
|     if raise_sub_spawn_error_after: | ||||
|         with pytest.raises(RemoteActorError) as excinfo: | ||||
|             trio.run(_main) | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         rae: RemoteActorError = excinfo.value | ||||
|         check_inner_rte(rae) | ||||
| 
 | ||||
|     else: | ||||
|         trio.run(_main) | ||||
|         trio.run(main) | ||||
|  |  | |||
|  | @ -235,16 +235,10 @@ async def cancel_after(wait, reg_addr): | |||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='module') | ||||
| def time_quad_ex( | ||||
|     reg_addr: tuple, | ||||
|     ci_env: bool, | ||||
|     spawn_backend: str, | ||||
| ): | ||||
| def time_quad_ex(reg_addr, ci_env, spawn_backend): | ||||
|     if spawn_backend == 'mp': | ||||
|         ''' | ||||
|         no idea but the  mp *nix runs are flaking out here often... | ||||
| 
 | ||||
|         ''' | ||||
|         """no idea but the  mp *nix runs are flaking out here often... | ||||
|         """ | ||||
|         pytest.skip("Test is too flaky on mp in CI") | ||||
| 
 | ||||
|     timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4 | ||||
|  | @ -255,24 +249,12 @@ def time_quad_ex( | |||
|     return results, diff | ||||
| 
 | ||||
| 
 | ||||
| def test_a_quadruple_example( | ||||
|     time_quad_ex: tuple, | ||||
|     ci_env: bool, | ||||
|     spawn_backend: str, | ||||
| ): | ||||
|     ''' | ||||
|     This also serves as a kind of "we'd like to be this fast test". | ||||
| def test_a_quadruple_example(time_quad_ex, ci_env, spawn_backend): | ||||
|     """This also serves as a kind of "we'd like to be this fast test".""" | ||||
| 
 | ||||
|     ''' | ||||
|     results, diff = time_quad_ex | ||||
|     assert results | ||||
|     this_fast = ( | ||||
|         6 if platform.system() in ( | ||||
|             'Windows', | ||||
|             'Darwin', | ||||
|         ) | ||||
|         else 3 | ||||
|     ) | ||||
|     this_fast = 6 if platform.system() in ('Windows', 'Darwin') else 3 | ||||
|     assert diff < this_fast | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -38,7 +38,7 @@ async def test_self_is_registered_localportal(reg_addr): | |||
|     "Verify waiting on the arbiter to register itself using a local portal." | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|     async with tractor.get_registry(reg_addr) as portal: | ||||
|     async with tractor.get_registry(*reg_addr) as portal: | ||||
|         assert isinstance(portal, tractor._portal.LocalPortal) | ||||
| 
 | ||||
|         with trio.fail_after(0.2): | ||||
|  |  | |||
|  | @ -10,7 +10,7 @@ import tractor | |||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from .conftest import ( | ||||
| from conftest import ( | ||||
|     sig_prog, | ||||
|     _INT_SIGNAL, | ||||
|     _INT_RETURN_CODE, | ||||
|  | @ -32,7 +32,7 @@ def test_abort_on_sigint(daemon): | |||
| @tractor_test | ||||
| async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||
|     assert not tractor.current_actor().is_arbiter | ||||
|     async with tractor.get_registry(reg_addr) as portal: | ||||
|     async with tractor.get_registry(*reg_addr) as portal: | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
|     time.sleep(0.1) | ||||
|  | @ -41,7 +41,7 @@ async def test_cancel_remote_arbiter(daemon, reg_addr): | |||
| 
 | ||||
|     # no arbiter socket should exist | ||||
|     with pytest.raises(OSError): | ||||
|         async with tractor.get_registry(reg_addr) as portal: | ||||
|         async with tractor.get_registry(*reg_addr) as portal: | ||||
|             pass | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,239 +0,0 @@ | |||
| ''' | ||||
| Define the details of inter-actor "out-of-band" (OoB) cancel | ||||
| semantics, that is how cancellation works when a cancel request comes | ||||
| from the different concurrency (primitive's) "layer" then where the | ||||
| eventual `trio.Task` actually raises a signal. | ||||
| 
 | ||||
| ''' | ||||
| from functools import partial | ||||
| # from contextlib import asynccontextmanager as acm | ||||
| # import itertools | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import (  # typing | ||||
|     ActorNursery, | ||||
|     Portal, | ||||
|     Context, | ||||
|     # ContextCancelled, | ||||
|     # RemoteActorError, | ||||
| ) | ||||
| # from tractor._testing import ( | ||||
| #     tractor_test, | ||||
| #     expect_ctxc, | ||||
| # ) | ||||
| 
 | ||||
| # XXX TODO cases: | ||||
| # - [ ] peer cancelled itself - so other peers should | ||||
| #   get errors reflecting that the peer was itself the .canceller? | ||||
| 
 | ||||
| # def test_self_cancel(): | ||||
| #     ''' | ||||
| #     2 cases: | ||||
| #     - calls `Actor.cancel()` locally in some task | ||||
| #     - calls LocalPortal.cancel_actor()` ? | ||||
| # | ||||
| # things to ensure! | ||||
| # -[ ] the ctxc raised in a child should ideally show the tb of the | ||||
| #     underlying `Cancelled` checkpoint, i.e. | ||||
| #     `raise scope_error from ctxc`? | ||||
| # | ||||
| # -[ ] a self-cancelled context, if not allowed to block on | ||||
| #     `ctx.result()` at some point will hang since the `ctx._scope` | ||||
| #     is never `.cancel_called`; cases for this include, | ||||
| #     - an `open_ctx()` which never starteds before being OoB actor | ||||
| #       cancelled. | ||||
| #       |_ parent task will be blocked in `.open_context()` for the | ||||
| #         `Started` msg, and when the OoB ctxc arrives `ctx._scope` | ||||
| #         will never have been signalled.. | ||||
| 
 | ||||
| #     ''' | ||||
| #     ... | ||||
| 
 | ||||
| # TODO, sanity test against the case in `/examples/trio/lockacquire_not_unmasked.py` | ||||
| # but with the `Lock.acquire()` from a `@context` to ensure the | ||||
| # implicit ignore-case-non-unmasking. | ||||
| # | ||||
| # @tractor.context | ||||
| # async def acquire_actor_global_lock( | ||||
| #     ctx: tractor.Context, | ||||
| #     ignore_special_cases: bool, | ||||
| # ): | ||||
| 
 | ||||
| #     async with maybe_unmask_excs( | ||||
| #         ignore_special_cases=ignore_special_cases, | ||||
| #     ): | ||||
| #         await ctx.started('locked') | ||||
| 
 | ||||
| #     # block til cancelled | ||||
| #     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_forever( | ||||
|     ctx: tractor.Context, | ||||
|     # ignore_special_cases: bool, | ||||
|     do_started: bool, | ||||
| ): | ||||
| 
 | ||||
|     # async with maybe_unmask_excs( | ||||
|     #     ignore_special_cases=ignore_special_cases, | ||||
|     # ): | ||||
|     #     await ctx.started('locked') | ||||
|     if do_started: | ||||
|         await ctx.started() | ||||
| 
 | ||||
|     # block til cancelled | ||||
|     print('sleepin on child-side..') | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'cancel_ctx', | ||||
|     [True, False], | ||||
| ) | ||||
| def test_cancel_ctx_with_parent_side_entered_in_bg_task( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     cancel_ctx: bool, | ||||
| ): | ||||
|     ''' | ||||
|     The most "basic" out-of-band-task self-cancellation case where | ||||
|     `Portal.open_context()` is entered in a bg task and the | ||||
|     parent-task (of the containing nursery) calls `Context.cancel()` | ||||
|     without the child knowing; the `Context._scope` should be | ||||
|     `.cancel_called` when the IPC ctx's child-side relays | ||||
|     a `ContextCancelled` with a `.canceller` set to the parent | ||||
|     actor('s task). | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after( | ||||
|             2 if not debug_mode else 999, | ||||
|         ): | ||||
|             an: ActorNursery | ||||
|             async with ( | ||||
|                 tractor.open_nursery( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel='devx', | ||||
|                     enable_stack_on_sig=True, | ||||
|                 ) as an, | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 ptl: Portal = await an.start_actor( | ||||
|                     'sub', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
| 
 | ||||
|                 async def _open_ctx_async( | ||||
|                     do_started: bool = True, | ||||
|                     task_status=trio.TASK_STATUS_IGNORED, | ||||
|                 ): | ||||
|                     # do we expect to never enter the | ||||
|                     # `.open_context()` below. | ||||
|                     if not do_started: | ||||
|                         task_status.started() | ||||
| 
 | ||||
|                     async with ptl.open_context( | ||||
|                         sleep_forever, | ||||
|                         do_started=do_started, | ||||
|                     ) as (ctx, first): | ||||
|                         task_status.started(ctx) | ||||
|                         await trio.sleep_forever() | ||||
| 
 | ||||
|                 # XXX, this is the key OoB part! | ||||
|                 # | ||||
|                 # - start the `.open_context()` in a bg task which | ||||
|                 #   blocks inside the embedded scope-body, | ||||
|                 # | ||||
|                 # -  when we call `Context.cancel()` it **is | ||||
|                 #   not** from the same task which eventually runs | ||||
|                 #   `.__aexit__()`, | ||||
|                 # | ||||
|                 # - since the bg "opener" task will be in | ||||
|                 #   a `trio.sleep_forever()`, it must be interrupted | ||||
|                 #   by the `ContextCancelled` delivered from the | ||||
|                 #   child-side; `Context._scope: CancelScope` MUST | ||||
|                 #   be `.cancel_called`! | ||||
|                 # | ||||
|                 print('ASYNC opening IPC context in subtask..') | ||||
|                 maybe_ctx: Context|None = await tn.start(partial( | ||||
|                     _open_ctx_async, | ||||
|                 )) | ||||
| 
 | ||||
|                 if ( | ||||
|                     maybe_ctx | ||||
|                     and | ||||
|                     cancel_ctx | ||||
|                 ): | ||||
|                     print('cancelling first IPC ctx!') | ||||
|                     await maybe_ctx.cancel() | ||||
| 
 | ||||
|                 # XXX, note that despite `maybe_context.cancel()` | ||||
|                 # being called above, it's the parent (bg) task | ||||
|                 # which was originally never interrupted in | ||||
|                 # the `ctx._scope` body due to missing case logic in | ||||
|                 # `ctx._maybe_cancel_and_set_remote_error()`. | ||||
|                 # | ||||
|                 # It didn't matter that the subactor process was | ||||
|                 # already terminated and reaped, nothing was | ||||
|                 # cancelling the ctx-parent task's scope! | ||||
|                 # | ||||
|                 print('cancelling subactor!') | ||||
|                 await ptl.cancel_actor() | ||||
| 
 | ||||
|                 if maybe_ctx: | ||||
|                     try: | ||||
|                         await maybe_ctx.wait_for_result() | ||||
|                     except tractor.ContextCancelled as ctxc: | ||||
|                         assert not cancel_ctx | ||||
|                         assert ( | ||||
|                             ctxc.canceller | ||||
|                             == | ||||
|                             tractor.current_actor().aid.uid | ||||
|                         ) | ||||
|                         # don't re-raise since it'll trigger | ||||
|                         # an EG from the above tn. | ||||
| 
 | ||||
|     if cancel_ctx: | ||||
|         # graceful self-cancel | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     else: | ||||
|         # ctx parent task should see OoB ctxc due to | ||||
|         # `ptl.cancel_actor()`. | ||||
|         with pytest.raises(tractor.ContextCancelled) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert 'root' in excinfo.value.canceller[0] | ||||
| 
 | ||||
| 
 | ||||
| # def test_parent_actor_cancels_subactor_with_gt1_ctxs_open_to_it( | ||||
| #     debug_mode: bool, | ||||
| #     loglevel: str, | ||||
| # ): | ||||
| #     ''' | ||||
| #     Demos OoB cancellation from the perspective of a ctx opened with | ||||
| #     a child subactor where the parent cancels the child at the "actor | ||||
| #     layer" using `Portal.cancel_actor()` and thus the | ||||
| #     `ContextCancelled.canceller` received by the ctx's parent-side | ||||
| #     task will appear to be a "self cancellation" even though that | ||||
| #     specific task itself was not cancelled and thus | ||||
| #     `Context.cancel_called ==False`. | ||||
| #     ''' | ||||
|                 # TODO, do we have an existing implied ctx | ||||
|                 # cancel test like this? | ||||
|                 # with trio.move_on_after(0.5):# as cs: | ||||
|                 #     await _open_ctx_async( | ||||
|                 #         do_started=False, | ||||
|                 #     ) | ||||
| 
 | ||||
| 
 | ||||
|                 # in-line ctx scope should definitely raise | ||||
|                 # a ctxc with `.canceller = 'root'` | ||||
|                 # async with ptl.open_context( | ||||
|                 #     sleep_forever, | ||||
|                 #     do_started=True, | ||||
|                 # ) as pair: | ||||
| 
 | ||||
|  | @ -1,237 +0,0 @@ | |||
| ''' | ||||
| Special case testing for issues not (dis)covered in the primary | ||||
| `Context` related functional/scenario suites. | ||||
| 
 | ||||
| **NOTE: this mod is a WIP** space for handling | ||||
| odd/rare/undiscovered/not-yet-revealed faults which either | ||||
| loudly (ideal case) breakl our supervision protocol | ||||
| or (worst case) result in distributed sys hangs. | ||||
| 
 | ||||
| Suites here further try to clarify (if [partially] ill-defined) and | ||||
| verify our edge case semantics for inter-actor-relayed-exceptions | ||||
| including, | ||||
| 
 | ||||
| - lowlevel: what remote obj-data is interchanged for IPC and what is | ||||
|   native-obj form is expected from unpacking in the the new | ||||
|   mem-domain. | ||||
| 
 | ||||
| - which kinds of `RemoteActorError` (and its derivs) are expected by which | ||||
|   (types of) peers (parent, child, sibling, etc) with what | ||||
|   particular meta-data set such as, | ||||
| 
 | ||||
|   - `.src_uid`: the original (maybe) peer who raised. | ||||
|   - `.relay_uid`: the next-hop-peer who sent it. | ||||
|   - `.relay_path`: the sequence of peer actor hops. | ||||
|   - `.is_inception`: a predicate that denotes multi-hop remote errors. | ||||
| 
 | ||||
| - when should `ExceptionGroup`s be relayed from a particular | ||||
|   remote endpoint, they should never be caused by implicit `._rpc` | ||||
|   nursery machinery! | ||||
| 
 | ||||
| - various special `trio` edge cases around its cancellation semantics | ||||
|   and how we (currently) leverage `trio.Cancelled` as a signal for | ||||
|   whether a `Context` task should raise `ContextCancelled` (ctx). | ||||
| 
 | ||||
| ''' | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import (  # typing | ||||
|     ActorNursery, | ||||
|     Portal, | ||||
|     Context, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_n_chkpt_in_finally( | ||||
|     ctx: Context, | ||||
|     sleep_n_raise: bool, | ||||
| 
 | ||||
|     chld_raise_delay: float, | ||||
|     chld_finally_delay: float, | ||||
| 
 | ||||
|     rent_cancels: bool, | ||||
|     rent_ctxc_delay: float, | ||||
| 
 | ||||
|     expect_exc: str|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Sync, open a tn, then wait for cancel, run a chkpt inside | ||||
|     the user's `finally:` teardown. | ||||
| 
 | ||||
|     This covers a footgun case that `trio` core doesn't seem to care about | ||||
|     wherein an exc can be masked by a `trio.Cancelled` raised inside a tn emedded | ||||
|     `finally:`. | ||||
| 
 | ||||
|     Also see `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` | ||||
|     for the down and gritty details. | ||||
| 
 | ||||
|     Since a `@context` endpoint fn can also contain code like this, | ||||
|     **and** bc we currently have no easy way other then | ||||
|     `trio.Cancelled` to signal cancellation on each side of an IPC `Context`, | ||||
|     the footgun issue can compound itself as demonstrated in this suite.. | ||||
| 
 | ||||
|     Here are some edge cases codified with our WIP "sclang" syntax | ||||
|     (note the parent(rent)/child(chld) naming here is just | ||||
|     pragmatism, generally these most of these cases can occurr | ||||
|     regardless of the distributed-task's supervision hiearchy), | ||||
| 
 | ||||
|     - rent c)=> chld.raises-then-taskc-in-finally | ||||
|      |_ chld's body raises an `exc: BaseException`. | ||||
|       _ in its `finally:` block it runs a chkpoint | ||||
|         which raises a taskc (`trio.Cancelled`) which | ||||
|         masks `exc` instead raising taskc up to the first tn. | ||||
|       _ the embedded/chld tn captures the masking taskc and then | ||||
|         raises it up to the ._rpc-ep-tn instead of `exc`. | ||||
|       _ the rent thinks the child ctxc-ed instead of errored.. | ||||
| 
 | ||||
|     ''' | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     if expect_exc: | ||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( | ||||
|             type_name=expect_exc, | ||||
|         ) | ||||
| 
 | ||||
|     berr: BaseException|None = None | ||||
|     try: | ||||
|         if not sleep_n_raise: | ||||
|             await trio.sleep_forever() | ||||
|         elif sleep_n_raise: | ||||
| 
 | ||||
|             # XXX this sleep is less then the sleep the parent | ||||
|             # does before calling `ctx.cancel()` | ||||
|             await trio.sleep(chld_raise_delay) | ||||
| 
 | ||||
|             # XXX this will be masked by a taskc raised in | ||||
|             # the `finally:` if this fn doesn't terminate | ||||
|             # before any ctxc-req arrives AND a checkpoint is hit | ||||
|             # in that `finally:`. | ||||
|             raise RuntimeError('my app krurshed..') | ||||
| 
 | ||||
|     except BaseException as _berr: | ||||
|         berr = _berr | ||||
| 
 | ||||
|         # TODO: it'd sure be nice to be able to inject our own | ||||
|         # `ContextCancelled` here instead of of `trio.Cancelled` | ||||
|         # so that our runtime can expect it and this "user code" | ||||
|         # would be able to tell the diff between a generic trio | ||||
|         # cancel and a tractor runtime-IPC cancel. | ||||
|         if expect_exc: | ||||
|             if not isinstance( | ||||
|                 berr, | ||||
|                 expect_exc, | ||||
|             ): | ||||
|                 raise ValueError( | ||||
|                     f'Unexpected exc type ??\n' | ||||
|                     f'{berr!r}\n' | ||||
|                     f'\n' | ||||
|                     f'Expected a {expect_exc!r}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         raise berr | ||||
| 
 | ||||
|     # simulate what user code might try even though | ||||
|     # it's a known boo-boo.. | ||||
|     finally: | ||||
|         # maybe wait for rent ctxc to arrive | ||||
|         with trio.CancelScope(shield=True): | ||||
|             await trio.sleep(chld_finally_delay) | ||||
| 
 | ||||
|         # !!XXX this will raise `trio.Cancelled` which | ||||
|         # will mask the RTE from above!!! | ||||
|         # | ||||
|         # YES, it's the same case as our extant | ||||
|         # `test_trioisms::test_acm_embedded_nursery_propagates_enter_err` | ||||
|         try: | ||||
|             await trio.lowlevel.checkpoint() | ||||
|         except trio.Cancelled as taskc: | ||||
|             if (scope_err := taskc.__context__): | ||||
|                 print( | ||||
|                     f'XXX MASKED REMOTE ERROR XXX\n' | ||||
|                     f'ENDPOINT exception -> {scope_err!r}\n' | ||||
|                     f'will be masked by -> {taskc!r}\n' | ||||
|                 ) | ||||
|                 # await tractor.pause(shield=True) | ||||
| 
 | ||||
|             raise taskc | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'chld_callspec', | ||||
|     [ | ||||
|         dict( | ||||
|             sleep_n_raise=None, | ||||
|             chld_raise_delay=0.1, | ||||
|             chld_finally_delay=0.1, | ||||
|             expect_exc='Cancelled', | ||||
|             rent_cancels=True, | ||||
|             rent_ctxc_delay=0.1, | ||||
|         ), | ||||
|         dict( | ||||
|             sleep_n_raise='RuntimeError', | ||||
|             chld_raise_delay=0.1, | ||||
|             chld_finally_delay=1, | ||||
|             expect_exc='RuntimeError', | ||||
|             rent_cancels=False, | ||||
|             rent_ctxc_delay=0.1, | ||||
|         ), | ||||
|     ], | ||||
|     ids=lambda item: f'chld_callspec={item!r}' | ||||
| ) | ||||
| def test_unmasked_remote_exc( | ||||
|     debug_mode: bool, | ||||
|     chld_callspec: dict, | ||||
|     tpt_proto: str, | ||||
| ): | ||||
|     expect_exc_str: str|None = chld_callspec['sleep_n_raise'] | ||||
|     rent_ctxc_delay: float|None = chld_callspec['rent_ctxc_delay'] | ||||
|     async def main(): | ||||
|         an: ActorNursery | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             enable_transports=[tpt_proto], | ||||
|         ) as an: | ||||
|             ptl: Portal = await an.start_actor( | ||||
|                 'cancellee', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             ctx: Context | ||||
|             async with ( | ||||
|                 ptl.open_context( | ||||
|                     sleep_n_chkpt_in_finally, | ||||
|                     **chld_callspec, | ||||
|                 ) as (ctx, sent), | ||||
|             ): | ||||
|                 assert not sent | ||||
|                 await trio.sleep(rent_ctxc_delay) | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|                 # recv error or result from chld | ||||
|                 ctxc: ContextCancelled = await ctx.wait_for_result() | ||||
|                 assert ( | ||||
|                     ctxc is ctx.outcome | ||||
|                     and | ||||
|                     isinstance(ctxc, ContextCancelled) | ||||
|                 ) | ||||
| 
 | ||||
|             # always graceful terminate the sub in non-error cases | ||||
|             await an.cancel() | ||||
| 
 | ||||
|     if expect_exc_str: | ||||
|         expect_exc: BaseException = tractor._exceptions.get_err_type( | ||||
|             type_name=expect_exc_str, | ||||
|         ) | ||||
|         with pytest.raises( | ||||
|             expected_exception=tractor.RemoteActorError, | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         rae = excinfo.value | ||||
|         assert expect_exc == rae.boxed_type | ||||
| 
 | ||||
|     else: | ||||
|         trio.run(main) | ||||
|  | @ -1,6 +1,5 @@ | |||
| ''' | ||||
| Suites for our `.trionics.maybe_open_context()` multi-task | ||||
| shared-cached `@acm` API. | ||||
| Async context manager cache api testing: ``trionics.maybe_open_context():`` | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
|  | @ -10,15 +9,6 @@ from typing import Awaitable | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.trionics import ( | ||||
|     maybe_open_context, | ||||
| ) | ||||
| from tractor.log import ( | ||||
|     get_console_log, | ||||
|     get_logger, | ||||
| ) | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| _resource: int = 0 | ||||
|  | @ -62,7 +52,7 @@ def test_resource_only_entered_once(key_on): | |||
|                 # different task names per task will be used | ||||
|                 kwargs = {'task_name': name} | ||||
| 
 | ||||
|             async with maybe_open_context( | ||||
|             async with tractor.trionics.maybe_open_context( | ||||
|                 maybe_increment_counter, | ||||
|                 kwargs=kwargs, | ||||
|                 key=key, | ||||
|  | @ -82,13 +72,11 @@ def test_resource_only_entered_once(key_on): | |||
|         with trio.move_on_after(0.5): | ||||
|             async with ( | ||||
|                 tractor.open_root_actor(), | ||||
|                 trio.open_nursery() as tn, | ||||
|                 trio.open_nursery() as n, | ||||
|             ): | ||||
| 
 | ||||
|                 for i in range(10): | ||||
|                     tn.start_soon( | ||||
|                         enter_cached_mngr, | ||||
|                         f'task_{i}', | ||||
|                     ) | ||||
|                     n.start_soon(enter_cached_mngr, f'task_{i}') | ||||
|                     await trio.sleep(0.001) | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -110,55 +98,27 @@ async def streamer( | |||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_stream() -> Awaitable[ | ||||
|     tuple[ | ||||
|         tractor.ActorNursery, | ||||
|         tractor.MsgStream, | ||||
|     ] | ||||
| ]: | ||||
|     try: | ||||
|         async with tractor.open_nursery() as an: | ||||
|             portal = await an.start_actor( | ||||
|                 'streamer', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             try: | ||||
|                 async with ( | ||||
|                     portal.open_context(streamer) as (ctx, first), | ||||
|                     ctx.open_stream() as stream, | ||||
|                 ): | ||||
|                     print('Entered open_stream() caller') | ||||
|                     yield an, stream | ||||
|                     print('Exited open_stream() caller') | ||||
| async def open_stream() -> Awaitable[tractor.MsgStream]: | ||||
| 
 | ||||
|             finally: | ||||
|                 print( | ||||
|                     'Cancelling streamer with,\n' | ||||
|                     '=> `Portal.cancel_actor()`' | ||||
|                 ) | ||||
|                 await portal.cancel_actor() | ||||
|                 print('Cancelled streamer') | ||||
|     async with tractor.open_nursery() as tn: | ||||
|         portal = await tn.start_actor('streamer', enable_modules=[__name__]) | ||||
|         async with ( | ||||
|             portal.open_context(streamer) as (ctx, first), | ||||
|             ctx.open_stream() as stream, | ||||
|         ): | ||||
|             yield stream | ||||
| 
 | ||||
|     except Exception as err: | ||||
|         print( | ||||
|             f'`open_stream()` errored?\n' | ||||
|             f'{err!r}\n' | ||||
|         ) | ||||
|         await tractor.pause(shield=True) | ||||
|         raise err | ||||
|         await portal.cancel_actor() | ||||
|     print('CANCELLED STREAMER') | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_stream(taskname: str): | ||||
|     async with maybe_open_context( | ||||
|     async with tractor.trionics.maybe_open_context( | ||||
|         # NOTE: all secondary tasks should cache hit on the same key | ||||
|         acm_func=open_stream, | ||||
|     ) as ( | ||||
|         cache_hit, | ||||
|         (an, stream) | ||||
|     ): | ||||
|         # when the actor + portal + ctx + stream has already been | ||||
|         # allocated we want to just bcast to this task. | ||||
|     ) as (cache_hit, stream): | ||||
| 
 | ||||
|         if cache_hit: | ||||
|             print(f'{taskname} loaded from cache') | ||||
| 
 | ||||
|  | @ -166,77 +126,27 @@ async def maybe_open_stream(taskname: str): | |||
|             # if this feed is already allocated by the first | ||||
|             # task that entereed | ||||
|             async with stream.subscribe() as bstream: | ||||
|                 yield an, bstream | ||||
|                 print( | ||||
|                     f'cached task exited\n' | ||||
|                     f')>\n' | ||||
|                     f' |_{taskname}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             # we should always unreg the "cloned" bcrc for this | ||||
|             # consumer-task | ||||
|             assert id(bstream) not in bstream._state.subs | ||||
| 
 | ||||
|                 yield bstream | ||||
|         else: | ||||
|             # yield the actual stream | ||||
|             try: | ||||
|                 yield an, stream | ||||
|             finally: | ||||
|                 print( | ||||
|                     f'NON-cached task exited\n' | ||||
|                     f')>\n' | ||||
|                     f' |_{taskname}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         first_bstream = stream._broadcaster | ||||
|         bcrx_state = first_bstream._state | ||||
|         subs: dict[int, int] = bcrx_state.subs | ||||
|         if len(subs) == 1: | ||||
|             assert id(first_bstream) in subs | ||||
|             # ^^TODO! the bcrx should always de-allocate all subs, | ||||
|             # including the implicit first one allocated on entry | ||||
|             # by the first subscribing peer task, no? | ||||
|             # | ||||
|             # -[ ] adjust `MsgStream.subscribe()` to do this mgmt! | ||||
|             #  |_ allows reverting `MsgStream.receive()` to the | ||||
|             #    non-bcaster method. | ||||
|             #  |_ we can decide whether to reset `._broadcaster`? | ||||
|             # | ||||
|             # await tractor.pause(shield=True) | ||||
|             yield stream | ||||
| 
 | ||||
| 
 | ||||
| def test_open_local_sub_to_stream( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| def test_open_local_sub_to_stream(): | ||||
|     ''' | ||||
|     Verify a single inter-actor stream can can be fanned-out shared to | ||||
|     N local tasks using `trionics.maybe_open_context()`. | ||||
|     N local tasks using ``trionics.maybe_open_context():``. | ||||
| 
 | ||||
|     ''' | ||||
|     timeout: float = 3.6 | ||||
|     if platform.system() == "Windows": | ||||
|         timeout: float = 10 | ||||
| 
 | ||||
|     if debug_mode: | ||||
|         timeout = 999 | ||||
|         print(f'IN debug_mode, setting large timeout={timeout!r}..') | ||||
|     timeout: float = 3.6 if platform.system() != "Windows" else 10 | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         full = list(range(1000)) | ||||
|         an: tractor.ActorNursery|None = None | ||||
|         num_tasks: int = 10 | ||||
| 
 | ||||
|         async def get_sub_and_pull(taskname: str): | ||||
| 
 | ||||
|             nonlocal an | ||||
| 
 | ||||
|             stream: tractor.MsgStream | ||||
|             async with ( | ||||
|                 maybe_open_stream(taskname) as ( | ||||
|                     an, | ||||
|                     stream, | ||||
|                 ), | ||||
|                 maybe_open_stream(taskname) as stream, | ||||
|             ): | ||||
|                 if '0' in taskname: | ||||
|                     assert isinstance(stream, tractor.MsgStream) | ||||
|  | @ -248,159 +158,24 @@ def test_open_local_sub_to_stream( | |||
| 
 | ||||
|                 first = await stream.receive() | ||||
|                 print(f'{taskname} started with value {first}') | ||||
|                 seq: list[int] = [] | ||||
|                 seq = [] | ||||
|                 async for msg in stream: | ||||
|                     seq.append(msg) | ||||
| 
 | ||||
|                 assert set(seq).issubset(set(full)) | ||||
| 
 | ||||
|             # end of @acm block | ||||
|             print(f'{taskname} finished') | ||||
| 
 | ||||
|         root: tractor.Actor | ||||
|         with trio.fail_after(timeout) as cs: | ||||
|         with trio.fail_after(timeout): | ||||
|             # TODO: turns out this isn't multi-task entrant XD | ||||
|             # We probably need an indepotent entry semantic? | ||||
|             async with tractor.open_root_actor( | ||||
|                 debug_mode=debug_mode, | ||||
|                 # maybe_enable_greenback=True, | ||||
|                 # | ||||
|                 # ^TODO? doesn't seem to mk breakpoint() usage work | ||||
|                 # bc each bg task needs to open a portal?? | ||||
|                 # - [ ] we should consider making this part of | ||||
|                 #      our taskman defaults? | ||||
|                 #   |_see https://github.com/goodboy/tractor/pull/363 | ||||
|                 # | ||||
|             ) as root: | ||||
|                 assert root.is_registrar | ||||
| 
 | ||||
|             async with tractor.open_root_actor(): | ||||
|                 async with ( | ||||
|                     trio.open_nursery() as tn, | ||||
|                     trio.open_nursery() as nurse, | ||||
|                 ): | ||||
|                     for i in range(num_tasks): | ||||
|                         tn.start_soon( | ||||
|                             get_sub_and_pull, | ||||
|                             f'task_{i}', | ||||
|                         ) | ||||
|                     for i in range(10): | ||||
|                         nurse.start_soon(get_sub_and_pull, f'task_{i}') | ||||
|                         await trio.sleep(0.001) | ||||
| 
 | ||||
|                 print('all consumer tasks finished!') | ||||
| 
 | ||||
|                 # ?XXX, ensure actor-nursery is shutdown or we might | ||||
|                 # hang here due to a minor task deadlock/race-condition? | ||||
|                 # | ||||
|                 # - seems that all we need is a checkpoint to ensure | ||||
|                 #   the last suspended task, which is inside | ||||
|                 #   `.maybe_open_context()`, can do the | ||||
|                 #   `Portal.cancel_actor()` call? | ||||
|                 # | ||||
|                 # - if that bg task isn't resumed, then this blocks | ||||
|                 #   timeout might hit before that? | ||||
|                 # | ||||
|                 if root.ipc_server.has_peers(): | ||||
|                     await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|                     # alt approach, cancel the entire `an` | ||||
|                     # await tractor.pause() | ||||
|                     # await an.cancel() | ||||
| 
 | ||||
|             # end of runtime scope | ||||
|             print('root actor terminated.') | ||||
| 
 | ||||
|         if cs.cancelled_caught: | ||||
|             pytest.fail( | ||||
|                 'Should NOT time out in `open_root_actor()` ?' | ||||
|             ) | ||||
| 
 | ||||
|         print('exiting main.') | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def cancel_outer_cs( | ||||
|     cs: trio.CancelScope|None = None, | ||||
|     delay: float = 0, | ||||
| ): | ||||
|     # on first task delay this enough to block | ||||
|     # the 2nd task but then cancel it mid sleep | ||||
|     # so that the tn.start() inside the key-err handler block | ||||
|     # is cancelled and would previously corrupt the | ||||
|     # mutext state. | ||||
|     log.info(f'task entering sleep({delay})') | ||||
|     await trio.sleep(delay) | ||||
|     if cs: | ||||
|         log.info('task calling cs.cancel()') | ||||
|         cs.cancel() | ||||
|     trio.lowlevel.checkpoint() | ||||
|     yield | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| def test_lock_not_corrupted_on_fast_cancel( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that if the caching-task (the first to enter | ||||
|     `maybe_open_context()`) is cancelled mid-cache-miss, the embedded | ||||
|     mutex can never be left in a corrupted state. | ||||
| 
 | ||||
|     That is, the lock is always eventually released ensuring a peer | ||||
|     (cache-hitting) task will never, | ||||
| 
 | ||||
|     - be left to inf-block/hang on the `lock.acquire()`. | ||||
|     - try to release the lock when still owned by the caching-task | ||||
|       due to it having erronously exited without calling | ||||
|       `lock.release()`. | ||||
| 
 | ||||
| 
 | ||||
|     ''' | ||||
|     delay: float = 1. | ||||
| 
 | ||||
|     async def use_moc( | ||||
|         cs: trio.CancelScope|None, | ||||
|         delay: float, | ||||
|     ): | ||||
|         log.info('task entering moc') | ||||
|         async with maybe_open_context( | ||||
|             cancel_outer_cs, | ||||
|             kwargs={ | ||||
|                 'cs': cs, | ||||
|                 'delay': delay, | ||||
|             }, | ||||
|         ) as (cache_hit, _null): | ||||
|             if cache_hit: | ||||
|                 log.info('2nd task entered') | ||||
|             else: | ||||
|                 log.info('1st task entered') | ||||
| 
 | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|     async def main(): | ||||
|         with trio.fail_after(delay + 2): | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 get_console_log('info') | ||||
|                 log.info('yo starting') | ||||
|                 cs = tn.cancel_scope | ||||
|                 tn.start_soon( | ||||
|                     use_moc, | ||||
|                     cs, | ||||
|                     delay, | ||||
|                     name='child', | ||||
|                 ) | ||||
|                 with trio.CancelScope() as rent_cs: | ||||
|                     await use_moc( | ||||
|                         cs=rent_cs, | ||||
|                         delay=delay, | ||||
|                     ) | ||||
| 
 | ||||
|                 print('all consumer tasks finished') | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  |  | |||
|  | @ -1,211 +0,0 @@ | |||
| import time | ||||
| 
 | ||||
| import trio | ||||
| import pytest | ||||
| 
 | ||||
| import tractor | ||||
| from tractor.ipc._ringbuf import ( | ||||
|     open_ringbuf, | ||||
|     RBToken, | ||||
|     RingBuffSender, | ||||
|     RingBuffReceiver | ||||
| ) | ||||
| from tractor._testing.samples import ( | ||||
|     generate_sample_messages, | ||||
| ) | ||||
| 
 | ||||
| # in case you don't want to melt your cores, uncomment dis! | ||||
| pytestmark = pytest.mark.skip | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_read_shm( | ||||
|     ctx: tractor.Context, | ||||
|     msg_amount: int, | ||||
|     token: RBToken, | ||||
|     total_bytes: int, | ||||
| ) -> None: | ||||
|     recvd_bytes = 0 | ||||
|     await ctx.started() | ||||
|     start_ts = time.time() | ||||
|     async with RingBuffReceiver(token) as receiver: | ||||
|         while recvd_bytes < total_bytes: | ||||
|             msg = await receiver.receive_some() | ||||
|             recvd_bytes += len(msg) | ||||
| 
 | ||||
|         # make sure we dont hold any memoryviews | ||||
|         # before the ctx manager aclose() | ||||
|         msg = None | ||||
| 
 | ||||
|     end_ts = time.time() | ||||
|     elapsed = end_ts - start_ts | ||||
|     elapsed_ms = int(elapsed * 1000) | ||||
| 
 | ||||
|     print(f'\n\telapsed ms: {elapsed_ms}') | ||||
|     print(f'\tmsg/sec: {int(msg_amount / elapsed):,}') | ||||
|     print(f'\tbytes/sec: {int(recvd_bytes / elapsed):,}') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_write_shm( | ||||
|     ctx: tractor.Context, | ||||
|     msg_amount: int, | ||||
|     rand_min: int, | ||||
|     rand_max: int, | ||||
|     token: RBToken, | ||||
| ) -> None: | ||||
|     msgs, total_bytes = generate_sample_messages( | ||||
|         msg_amount, | ||||
|         rand_min=rand_min, | ||||
|         rand_max=rand_max, | ||||
|     ) | ||||
|     await ctx.started(total_bytes) | ||||
|     async with RingBuffSender(token) as sender: | ||||
|         for msg in msgs: | ||||
|             await sender.send_all(msg) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'msg_amount,rand_min,rand_max,buf_size', | ||||
|     [ | ||||
|         # simple case, fixed payloads, large buffer | ||||
|         (100_000, 0, 0, 10 * 1024), | ||||
| 
 | ||||
|         # guaranteed wrap around on every write | ||||
|         (100, 10 * 1024, 20 * 1024, 10 * 1024), | ||||
| 
 | ||||
|         # large payload size, but large buffer | ||||
|         (10_000, 256 * 1024, 512 * 1024, 10 * 1024 * 1024) | ||||
|     ], | ||||
|     ids=[ | ||||
|         'fixed_payloads_large_buffer', | ||||
|         'wrap_around_every_write', | ||||
|         'large_payloads_large_buffer', | ||||
|     ] | ||||
| ) | ||||
| def test_ringbuf( | ||||
|     msg_amount: int, | ||||
|     rand_min: int, | ||||
|     rand_max: int, | ||||
|     buf_size: int | ||||
| ): | ||||
|     async def main(): | ||||
|         with open_ringbuf( | ||||
|             'test_ringbuf', | ||||
|             buf_size=buf_size | ||||
|         ) as token: | ||||
|             proc_kwargs = { | ||||
|                 'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|             } | ||||
| 
 | ||||
|             common_kwargs = { | ||||
|                 'msg_amount': msg_amount, | ||||
|                 'token': token, | ||||
|             } | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 send_p = await an.start_actor( | ||||
|                     'ring_sender', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs=proc_kwargs | ||||
|                 ) | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_receiver', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs=proc_kwargs | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     send_p.open_context( | ||||
|                         child_write_shm, | ||||
|                         rand_min=rand_min, | ||||
|                         rand_max=rand_max, | ||||
|                         **common_kwargs | ||||
|                     ) as (sctx, total_bytes), | ||||
|                     recv_p.open_context( | ||||
|                         child_read_shm, | ||||
|                         **common_kwargs, | ||||
|                         total_bytes=total_bytes, | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await recv_p.result() | ||||
| 
 | ||||
|                 await send_p.cancel_actor() | ||||
|                 await recv_p.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_blocked_receiver( | ||||
|     ctx: tractor.Context, | ||||
|     token: RBToken | ||||
| ): | ||||
|     async with RingBuffReceiver(token) as receiver: | ||||
|         await ctx.started() | ||||
|         await receiver.receive_some() | ||||
| 
 | ||||
| 
 | ||||
| def test_ring_reader_cancel(): | ||||
|     async def main(): | ||||
|         with open_ringbuf('test_ring_cancel_reader') as token: | ||||
|             async with ( | ||||
|                 tractor.open_nursery() as an, | ||||
|                 RingBuffSender(token) as _sender, | ||||
|             ): | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_blocked_receiver', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs={ | ||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|                     } | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     recv_p.open_context( | ||||
|                         child_blocked_receiver, | ||||
|                         token=token | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await trio.sleep(1) | ||||
|                     await an.cancel() | ||||
| 
 | ||||
| 
 | ||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_blocked_sender( | ||||
|     ctx: tractor.Context, | ||||
|     token: RBToken | ||||
| ): | ||||
|     async with RingBuffSender(token) as sender: | ||||
|         await ctx.started() | ||||
|         await sender.send_all(b'this will wrap') | ||||
| 
 | ||||
| 
 | ||||
| def test_ring_sender_cancel(): | ||||
|     async def main(): | ||||
|         with open_ringbuf( | ||||
|             'test_ring_cancel_sender', | ||||
|             buf_size=1 | ||||
|         ) as token: | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 recv_p = await an.start_actor( | ||||
|                     'ring_blocked_sender', | ||||
|                     enable_modules=[__name__], | ||||
|                     proc_kwargs={ | ||||
|                         'pass_fds': (token.write_eventfd, token.wrap_eventfd) | ||||
|                     } | ||||
|                 ) | ||||
|                 async with ( | ||||
|                     recv_p.open_context( | ||||
|                         child_blocked_sender, | ||||
|                         token=token | ||||
|                     ) as (sctx, _sent), | ||||
|                 ): | ||||
|                     await trio.sleep(1) | ||||
|                     await an.cancel() | ||||
| 
 | ||||
| 
 | ||||
|     with pytest.raises(tractor._exceptions.ContextCancelled): | ||||
|         trio.run(main) | ||||
|  | @ -39,7 +39,7 @@ def test_infected_root_actor( | |||
| 
 | ||||
|     ''' | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2 if not debug_mode else 999): | ||||
|         with trio.fail_after(2): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             async with ( | ||||
|  | @ -59,11 +59,7 @@ def test_infected_root_actor( | |||
|                     assert out == i | ||||
|                     print(f'asyncio echoing {i}') | ||||
| 
 | ||||
|                     if ( | ||||
|                         raise_error_mid_stream | ||||
|                         and | ||||
|                         i == 500 | ||||
|                     ): | ||||
|                     if raise_error_mid_stream and i == 500: | ||||
|                         raise raise_error_mid_stream | ||||
| 
 | ||||
|                     if out is None: | ||||
|  | @ -147,7 +143,8 @@ def test_trio_prestarted_task_bubbles( | |||
|         await trio.sleep_forever() | ||||
| 
 | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2 if not debug_mode else 999): | ||||
|         # with trio.fail_after(2): | ||||
|         with trio.fail_after(999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             aio_ev = asyncio.Event() | ||||
|  | @ -216,25 +213,32 @@ def test_trio_prestarted_task_bubbles( | |||
|                         ): | ||||
|                             aio_ev.set() | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=ExceptionGroup, | ||||
|     ) as excinfo: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
|     eg = excinfo.value | ||||
|     rte_eg, rest_eg = eg.split(RuntimeError) | ||||
| 
 | ||||
|     # ensure the trio-task's error bubbled despite the aio-side | ||||
|     # having (maybe) errored first. | ||||
|     if aio_err_trigger in ( | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ): | ||||
|         patt: str = 'trio-side' | ||||
|         expect_exc = TypeError | ||||
|         assert len(errs := rest_eg.exceptions) == 1 | ||||
|         typerr = errs[0] | ||||
|         assert ( | ||||
|             type(typerr) is TypeError | ||||
|             and | ||||
|             'trio-side' in typerr.args | ||||
|         ) | ||||
| 
 | ||||
|     # when aio errors BEFORE (last) trio task is scheduled, we should | ||||
|     # never see anythinb but the aio-side. | ||||
|     else: | ||||
|         patt: str = 'asyncio-side' | ||||
|         expect_exc = RuntimeError | ||||
| 
 | ||||
|     with pytest.raises(expect_exc) as excinfo: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
|     caught_exc = excinfo.value | ||||
|     assert patt in caught_exc.args | ||||
|         assert len(rtes := rte_eg.exceptions) == 1 | ||||
|         assert 'asyncio-side' in rtes[0].args[0] | ||||
|  |  | |||
|  | @ -1,108 +0,0 @@ | |||
| ''' | ||||
| Runtime boot/init sanity. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor._exceptions import RuntimeFailure | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_new_root_in_sub( | ||||
|     ctx: tractor.Context, | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with tractor.open_root_actor(): | ||||
|         pass | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'open_root_in', | ||||
|     ['root', 'sub'], | ||||
|     ids='open_2nd_root_in={}'.format, | ||||
| ) | ||||
| def test_only_one_root_actor( | ||||
|     open_root_in: str, | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     Verify we specially fail whenever more then one root actor | ||||
|     is attempted to be opened within an already opened tree. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
| 
 | ||||
|             if open_root_in == 'root': | ||||
|                 async with tractor.open_root_actor( | ||||
|                     registry_addrs=[reg_addr], | ||||
|                 ): | ||||
|                     pass | ||||
| 
 | ||||
|             ptl: tractor.Portal = await an.start_actor( | ||||
|                 name='bad_rooty_boi', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             async with ptl.open_context( | ||||
|                 open_new_root_in_sub, | ||||
|             ) as (ctx, first): | ||||
|                 pass | ||||
| 
 | ||||
|     if open_root_in == 'root': | ||||
|         with pytest.raises( | ||||
|             RuntimeFailure | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|     else: | ||||
|         with pytest.raises( | ||||
|             tractor.RemoteActorError, | ||||
|         ) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.boxed_type is RuntimeFailure | ||||
| 
 | ||||
| 
 | ||||
| def test_implicit_root_via_first_nursery( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     The first `ActorNursery` open should implicitly call | ||||
|     `_root.open_root_actor()`. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
|             assert an._implicit_runtime_started | ||||
|             assert tractor.current_actor().aid.name == 'root' | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_runtime_vars_unset( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool | ||||
| ): | ||||
|     ''' | ||||
|     Ensure any `._state._runtime_vars` are restored to default values | ||||
|     after the root actor-runtime exits! | ||||
| 
 | ||||
|     ''' | ||||
|     assert not tractor._state._runtime_vars['_debug_mode'] | ||||
|     async def main(): | ||||
|         assert not tractor._state._runtime_vars['_debug_mode'] | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|         ): | ||||
|             assert tractor._state._runtime_vars['_debug_mode'] | ||||
| 
 | ||||
|         # after runtime closure, should be reverted! | ||||
|         assert not tractor._state._runtime_vars['_debug_mode'] | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -8,7 +8,7 @@ import uuid | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.ipc._shm import ( | ||||
| from tractor._shm import ( | ||||
|     open_shm_list, | ||||
|     attach_shm_list, | ||||
| ) | ||||
|  |  | |||
|  | @ -2,7 +2,6 @@ | |||
| Spawning basics | ||||
| 
 | ||||
| """ | ||||
| from functools import partial | ||||
| from typing import ( | ||||
|     Any, | ||||
| ) | ||||
|  | @ -13,99 +12,74 @@ import tractor | |||
| 
 | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| data_to_pass_down = { | ||||
|     'doggy': 10, | ||||
|     'kitty': 4, | ||||
| } | ||||
| data_to_pass_down = {'doggy': 10, 'kitty': 4} | ||||
| 
 | ||||
| 
 | ||||
| async def spawn( | ||||
|     should_be_root: bool, | ||||
|     is_arbiter: bool, | ||||
|     data: dict, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
|     debug_mode: bool = False, | ||||
| ): | ||||
|     namespaces = [__name__] | ||||
| 
 | ||||
|     await trio.sleep(0.1) | ||||
|     actor = tractor.current_actor(err_on_no_runtime=False) | ||||
| 
 | ||||
|     if should_be_root: | ||||
|         assert actor is None  # no runtime yet | ||||
|         async with ( | ||||
|             tractor.open_root_actor( | ||||
|                 arbiter_addr=reg_addr, | ||||
|             ), | ||||
|             tractor.open_nursery() as an, | ||||
|         ): | ||||
|             # now runtime exists | ||||
|             actor: tractor.Actor = tractor.current_actor() | ||||
|             assert actor.is_arbiter == should_be_root | ||||
|     async with tractor.open_root_actor( | ||||
|         arbiter_addr=reg_addr, | ||||
|     ): | ||||
|         actor = tractor.current_actor() | ||||
|         assert actor.is_arbiter == is_arbiter | ||||
|         data = data_to_pass_down | ||||
| 
 | ||||
|             # spawns subproc here | ||||
|             portal: tractor.Portal = await an.run_in_actor( | ||||
|                 fn=spawn, | ||||
|         if actor.is_arbiter: | ||||
|             async with tractor.open_nursery() as nursery: | ||||
| 
 | ||||
|                 # spawning args | ||||
|                 name='sub-actor', | ||||
|                 enable_modules=[__name__], | ||||
|                 # forks here | ||||
|                 portal = await nursery.run_in_actor( | ||||
|                     spawn, | ||||
|                     is_arbiter=False, | ||||
|                     name='sub-actor', | ||||
|                     data=data, | ||||
|                     reg_addr=reg_addr, | ||||
|                     enable_modules=namespaces, | ||||
|                 ) | ||||
| 
 | ||||
|                 # passed to a subactor-recursive RPC invoke | ||||
|                 # of this same `spawn()` fn. | ||||
|                 should_be_root=False, | ||||
|                 data=data_to_pass_down, | ||||
|                 reg_addr=reg_addr, | ||||
|             ) | ||||
| 
 | ||||
|             assert len(an._children) == 1 | ||||
|             assert ( | ||||
|                 portal.channel.uid | ||||
|                 in | ||||
|                 tractor.current_actor().ipc_server._peers | ||||
|             ) | ||||
| 
 | ||||
|             # get result from child subactor | ||||
|             result = await portal.result() | ||||
|             assert result == 10 | ||||
|             return result | ||||
|     else: | ||||
|         assert actor.is_arbiter == should_be_root | ||||
|         return 10 | ||||
|                 assert len(nursery._children) == 1 | ||||
|                 assert portal.channel.uid in tractor.current_actor()._peers | ||||
|                 # be sure we can still get the result | ||||
|                 result = await portal.result() | ||||
|                 assert result == 10 | ||||
|                 return result | ||||
|         else: | ||||
|             return 10 | ||||
| 
 | ||||
| 
 | ||||
| def test_run_in_actor_same_func_in_child( | ||||
|     reg_addr: tuple, | ||||
|     debug_mode: bool, | ||||
| def test_local_arbiter_subactor_global_state( | ||||
|     reg_addr, | ||||
| ): | ||||
|     result = trio.run( | ||||
|         partial( | ||||
|             spawn, | ||||
|             should_be_root=True, | ||||
|             data=data_to_pass_down, | ||||
|             reg_addr=reg_addr, | ||||
|             debug_mode=debug_mode, | ||||
|         ) | ||||
|         spawn, | ||||
|         True, | ||||
|         data_to_pass_down, | ||||
|         reg_addr, | ||||
|     ) | ||||
|     assert result == 10 | ||||
| 
 | ||||
| 
 | ||||
| async def movie_theatre_question(): | ||||
|     ''' | ||||
|     A question asked in a dark theatre, in a tangent | ||||
|     """A question asked in a dark theatre, in a tangent | ||||
|     (errr, I mean different) process. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     return 'have you ever seen a portal?' | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_movie_theatre_convo(start_method): | ||||
|     ''' | ||||
|     The main ``tractor`` routine. | ||||
|     """The main ``tractor`` routine. | ||||
|     """ | ||||
|     async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery(debug_mode=True) as an: | ||||
| 
 | ||||
|         portal = await an.start_actor( | ||||
|         portal = await n.start_actor( | ||||
|             'frank', | ||||
|             # enable the actor to run funcs from this current module | ||||
|             enable_modules=[__name__], | ||||
|  | @ -144,8 +118,8 @@ async def test_most_beautiful_word( | |||
|     with trio.fail_after(1): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.run_in_actor( | ||||
|         ) as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 cellar_door, | ||||
|                 return_value=return_value, | ||||
|                 name='some_linguist', | ||||
|  |  | |||
|  | @ -2,9 +2,7 @@ | |||
| Broadcast channels for fan-out to local tasks. | ||||
| 
 | ||||
| """ | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from contextlib import asynccontextmanager | ||||
| from functools import partial | ||||
| from itertools import cycle | ||||
| import time | ||||
|  | @ -17,7 +15,6 @@ import tractor | |||
| from tractor.trionics import ( | ||||
|     broadcast_receiver, | ||||
|     Lagged, | ||||
|     collapse_eg, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -65,7 +62,7 @@ async def ensure_sequence( | |||
|                 break | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| @asynccontextmanager | ||||
| async def open_sequence_streamer( | ||||
| 
 | ||||
|     sequence: list[int], | ||||
|  | @ -77,9 +74,9 @@ async def open_sequence_streamer( | |||
|     async with tractor.open_nursery( | ||||
|         arbiter_addr=reg_addr, | ||||
|         start_method=start_method, | ||||
|     ) as an: | ||||
|     ) as tn: | ||||
| 
 | ||||
|         portal = await an.start_actor( | ||||
|         portal = await tn.start_actor( | ||||
|             'sequence_echoer', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
|  | @ -158,12 +155,9 @@ def test_consumer_and_parent_maybe_lag( | |||
|         ) as stream: | ||||
| 
 | ||||
|             try: | ||||
|                 async with ( | ||||
|                     collapse_eg(), | ||||
|                     trio.open_nursery() as tn, | ||||
|                 ): | ||||
|                 async with trio.open_nursery() as n: | ||||
| 
 | ||||
|                     tn.start_soon( | ||||
|                     n.start_soon( | ||||
|                         ensure_sequence, | ||||
|                         stream, | ||||
|                         sequence.copy(), | ||||
|  | @ -236,8 +230,8 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
| 
 | ||||
|         ) as stream: | ||||
| 
 | ||||
|             async with trio.open_nursery() as tn: | ||||
|                 tn.start_soon( | ||||
|             async with trio.open_nursery() as n: | ||||
|                 n.start_soon( | ||||
|                     ensure_sequence, | ||||
|                     stream, | ||||
|                     sequence.copy(), | ||||
|  | @ -259,7 +253,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
|                         continue | ||||
| 
 | ||||
|                 print('cancelling faster subtask') | ||||
|                 tn.cancel_scope.cancel() | ||||
|                 n.cancel_scope.cancel() | ||||
| 
 | ||||
|             try: | ||||
|                 value = await stream.receive() | ||||
|  | @ -377,13 +371,13 @@ def test_ensure_slow_consumers_lag_out( | |||
|                                     f'on {lags}:{value}') | ||||
|                                 return | ||||
| 
 | ||||
|             async with trio.open_nursery() as tn: | ||||
|             async with trio.open_nursery() as nursery: | ||||
| 
 | ||||
|                 for i in range(1, num_laggers): | ||||
| 
 | ||||
|                     task_name = f'sub_{i}' | ||||
|                     laggers[task_name] = 0 | ||||
|                     tn.start_soon( | ||||
|                     nursery.start_soon( | ||||
|                         partial( | ||||
|                             sub_and_print, | ||||
|                             delay=i*0.001, | ||||
|  | @ -503,7 +497,6 @@ def test_no_raise_on_lag(): | |||
|                 # internals when the no raise flag is set. | ||||
|                 loglevel='warning', | ||||
|             ), | ||||
|             collapse_eg(), | ||||
|             trio.open_nursery() as n, | ||||
|         ): | ||||
|             n.start_soon(slow) | ||||
|  |  | |||
|  | @ -6,18 +6,10 @@ want to see changed. | |||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from types import ModuleType | ||||
| 
 | ||||
| from functools import partial | ||||
| 
 | ||||
| import pytest | ||||
| from _pytest import pathlib | ||||
| from tractor.trionics import collapse_eg | ||||
| import trio | ||||
| from trio import TaskStatus | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -72,7 +64,6 @@ def test_stashed_child_nursery(use_start_soon): | |||
|     async def main(): | ||||
| 
 | ||||
|         async with ( | ||||
|             collapse_eg(), | ||||
|             trio.open_nursery() as pn, | ||||
|         ): | ||||
|             cn = await pn.start(mk_child_nursery) | ||||
|  | @ -110,24 +101,68 @@ def test_stashed_child_nursery(use_start_soon): | |||
| def test_acm_embedded_nursery_propagates_enter_err( | ||||
|     canc_from_finally: bool, | ||||
|     unmask_from_canc: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Demo how a masking `trio.Cancelled` could be handled by unmasking | ||||
|     from the `.__context__` field when a user (by accident) re-raises | ||||
|     from a `finally:`. | ||||
|     Demo how a masking `trio.Cancelled` could be handled by unmasking from the | ||||
|     `.__context__` field when a user (by accident) re-raises from a `finally:`. | ||||
| 
 | ||||
|     ''' | ||||
|     import tractor | ||||
| 
 | ||||
|     @acm | ||||
|     async def maybe_raise_from_masking_exc( | ||||
|         tn: trio.Nursery, | ||||
|         unmask_from: BaseException|None = trio.Cancelled | ||||
| 
 | ||||
|         # TODO, maybe offer a collection? | ||||
|         # unmask_from: set[BaseException] = { | ||||
|         #     trio.Cancelled, | ||||
|         # }, | ||||
|     ): | ||||
|         if not unmask_from: | ||||
|             yield | ||||
|             return | ||||
| 
 | ||||
|         try: | ||||
|             yield | ||||
|         except* unmask_from as be_eg: | ||||
| 
 | ||||
|             # TODO, if we offer `unmask_from: set` | ||||
|             # for masker_exc_type in unmask_from: | ||||
| 
 | ||||
|             matches, rest = be_eg.split(unmask_from) | ||||
|             if not matches: | ||||
|                 raise | ||||
| 
 | ||||
|             for exc_match in be_eg.exceptions: | ||||
|                 if ( | ||||
|                     (exc_ctx := exc_match.__context__) | ||||
|                     and | ||||
|                     type(exc_ctx) not in { | ||||
|                         # trio.Cancelled,  # always by default? | ||||
|                         unmask_from, | ||||
|                     } | ||||
|                 ): | ||||
|                     exc_ctx.add_note( | ||||
|                         f'\n' | ||||
|                         f'WARNING: the above error was masked by a {unmask_from!r} !?!\n' | ||||
|                         f'Are you always cancelling? Say from a `finally:` ?\n\n' | ||||
| 
 | ||||
|                         f'{tn!r}' | ||||
|                     ) | ||||
|                     raise exc_ctx from exc_match | ||||
| 
 | ||||
| 
 | ||||
|     @acm | ||||
|     async def wraps_tn_that_always_cancels(): | ||||
|         async with ( | ||||
|             trio.open_nursery() as tn, | ||||
|             tractor.trionics.maybe_raise_from_masking_exc( | ||||
|             maybe_raise_from_masking_exc( | ||||
|                 tn=tn, | ||||
|                 unmask_from=( | ||||
|                     (trio.Cancelled,) if unmask_from_canc | ||||
|                     else () | ||||
|                     trio.Cancelled | ||||
|                     if unmask_from_canc | ||||
|                     else None | ||||
|                 ), | ||||
|             ) | ||||
|         ): | ||||
|  | @ -139,9 +174,7 @@ def test_acm_embedded_nursery_propagates_enter_err( | |||
|                     await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|     async def _main(): | ||||
|         with tractor.devx.maybe_open_crash_handler( | ||||
|             pdb=debug_mode, | ||||
|         ) as bxerr: | ||||
|         with tractor.devx.open_crash_handler() as bxerr: | ||||
|             assert not bxerr.value | ||||
| 
 | ||||
|             async with ( | ||||
|  | @ -150,12 +183,11 @@ def test_acm_embedded_nursery_propagates_enter_err( | |||
|                 assert not tn.cancel_scope.cancel_called | ||||
|                 assert 0 | ||||
| 
 | ||||
|         if debug_mode: | ||||
|             assert ( | ||||
|                 (err := bxerr.value) | ||||
|                 and | ||||
|                 type(err) is AssertionError | ||||
|             ) | ||||
|         assert ( | ||||
|             (err := bxerr.value) | ||||
|             and | ||||
|             type(err) is AssertionError | ||||
|         ) | ||||
| 
 | ||||
|     with pytest.raises(ExceptionGroup) as excinfo: | ||||
|         trio.run(_main) | ||||
|  | @ -164,139 +196,3 @@ def test_acm_embedded_nursery_propagates_enter_err( | |||
|     assert_eg, rest_eg = eg.split(AssertionError) | ||||
| 
 | ||||
|     assert len(assert_eg.exceptions) == 1 | ||||
| 
 | ||||
| 
 | ||||
| def test_gatherctxs_with_memchan_breaks_multicancelled( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Demo how a using an `async with sndchan` inside | ||||
|     a `.trionics.gather_contexts()` task will break a strict-eg-tn's | ||||
|     multi-cancelled absorption.. | ||||
| 
 | ||||
|     ''' | ||||
|     from tractor import ( | ||||
|         trionics, | ||||
|     ) | ||||
| 
 | ||||
|     @acm | ||||
|     async def open_memchan() -> trio.abc.ReceiveChannel: | ||||
| 
 | ||||
|         task: trio.Task = trio.lowlevel.current_task() | ||||
|         print( | ||||
|             f'Opening {task!r}\n' | ||||
|         ) | ||||
| 
 | ||||
|         # 1 to force eager sending | ||||
|         send, recv = trio.open_memory_channel(16) | ||||
| 
 | ||||
|         try: | ||||
|             async with send: | ||||
|                 yield recv | ||||
|         finally: | ||||
|             print( | ||||
|                 f'Closed {task!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with ( | ||||
|             # XXX should ensure ONLY the KBI | ||||
|             # is relayed upward | ||||
|             collapse_eg(), | ||||
|             trio.open_nursery(), # as tn, | ||||
| 
 | ||||
|             trionics.gather_contexts([ | ||||
|                 open_memchan(), | ||||
|                 open_memchan(), | ||||
|             ]) as recv_chans, | ||||
|         ): | ||||
|             assert len(recv_chans) == 2 | ||||
| 
 | ||||
|             await trio.sleep(1) | ||||
|             raise KeyboardInterrupt | ||||
|             # tn.cancel_scope.cancel() | ||||
| 
 | ||||
|     with pytest.raises(KeyboardInterrupt): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_unmasked', [ | ||||
|         True, | ||||
|         pytest.param( | ||||
|             False, | ||||
|             marks=pytest.mark.xfail( | ||||
|                 reason="see examples/trio/send_chan_aclose_masks.py" | ||||
|             ) | ||||
|         ), | ||||
|     ] | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'child_errors_mid_stream', | ||||
|     [True, False], | ||||
| ) | ||||
| def test_unmask_aclose_as_checkpoint_on_aexit( | ||||
|     raise_unmasked: bool, | ||||
|     child_errors_mid_stream: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that our unmasker util works over the common case where | ||||
|     a mem-chan's `.aclose()` is included in an `@acm` stack | ||||
|     and it being currently a checkpoint, can `trio.Cancelled`-mask an embedded | ||||
|     exception from user code resulting in a silent failure which | ||||
|     appears like graceful cancellation. | ||||
| 
 | ||||
|     This test suite is mostly implemented as an example script so it | ||||
|     could more easily be shared with `trio`-core peeps as `tractor`-less | ||||
|     minimum reproducing example. | ||||
| 
 | ||||
|     ''' | ||||
|     mod: ModuleType = pathlib.import_path( | ||||
|         examples_dir() | ||||
|         / 'trio' | ||||
|         / 'send_chan_aclose_masks_beg.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
|     with pytest.raises(RuntimeError): | ||||
|         trio.run(partial( | ||||
|             mod.main, | ||||
|             raise_unmasked=raise_unmasked, | ||||
|             child_errors_mid_stream=child_errors_mid_stream, | ||||
|         )) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'ignore_special_cases', [ | ||||
|         True, | ||||
|         pytest.param( | ||||
|             False, | ||||
|             marks=pytest.mark.xfail( | ||||
|                 reason="see examples/trio/lockacquire_not_umasked.py" | ||||
|             ) | ||||
|         ), | ||||
|     ] | ||||
| ) | ||||
| def test_cancelled_lockacquire_in_ipctx_not_unmasked( | ||||
|     ignore_special_cases: bool, | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     mod: ModuleType = pathlib.import_path( | ||||
|         examples_dir() | ||||
|         / 'trio' | ||||
|         / 'lockacquire_not_unmasked.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
|     async def _main(): | ||||
|         with trio.fail_after(2): | ||||
|             await mod.main( | ||||
|                 ignore_special_cases=ignore_special_cases, | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=debug_mode, | ||||
|             ) | ||||
| 
 | ||||
|     trio.run(_main) | ||||
|  |  | |||
|  | @ -18,7 +18,6 @@ | |||
| tractor: structured concurrent ``trio``-"actors". | ||||
| 
 | ||||
| """ | ||||
| 
 | ||||
| from ._clustering import ( | ||||
|     open_actor_cluster as open_actor_cluster, | ||||
| ) | ||||
|  | @ -44,7 +43,6 @@ from ._state import ( | |||
|     current_actor as current_actor, | ||||
|     is_root_process as is_root_process, | ||||
|     current_ipc_ctx as current_ipc_ctx, | ||||
|     debug_mode as debug_mode | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     ContextCancelled as ContextCancelled, | ||||
|  | @ -64,7 +62,6 @@ from ._root import ( | |||
|     run_daemon as run_daemon, | ||||
|     open_root_actor as open_root_actor, | ||||
| ) | ||||
| from .ipc import Channel as Channel | ||||
| from ._ipc import Channel as Channel | ||||
| from ._portal import Portal as Portal | ||||
| from ._runtime import Actor as Actor | ||||
| # from . import hilevel as hilevel | ||||
|  |  | |||
							
								
								
									
										282
									
								
								tractor/_addr.py
								
								
								
								
							
							
						
						
									
										282
									
								
								tractor/_addr.py
								
								
								
								
							|  | @ -1,282 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| from __future__ import annotations | ||||
| from uuid import uuid4 | ||||
| from typing import ( | ||||
|     Protocol, | ||||
|     ClassVar, | ||||
|     Type, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from bidict import bidict | ||||
| from trio import ( | ||||
|     SocketListener, | ||||
| ) | ||||
| 
 | ||||
| from .log import get_logger | ||||
| from ._state import ( | ||||
|     _def_tpt_proto, | ||||
| ) | ||||
| from .ipc._tcp import TCPAddress | ||||
| from .ipc._uds import UDSAddress | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, maybe breakout the netns key to a struct? | ||||
| # class NetNs(Struct)[str, int]: | ||||
| #     ... | ||||
| 
 | ||||
| # TODO, can't we just use a type alias | ||||
| # for this? namely just some `tuple[str, int, str, str]`? | ||||
| # | ||||
| # -[ ] would also just be simpler to keep this as SockAddr[tuple] | ||||
| #     or something, implying it's just a simple pair of values which can | ||||
| #     presumably be mapped to all transports? | ||||
| # -[ ] `pydoc socket.socket.getsockname()` delivers a 4-tuple for | ||||
| #     ipv6 `(hostaddr, port, flowinfo, scope_id)`.. so how should we | ||||
| #     handle that? | ||||
| # -[ ] as a further alternative to this wrap()/unwrap() approach we | ||||
| #     could just implement `enc/dec_hook()`s for the `Address`-types | ||||
| #     and just deal with our internal objs directly and always and | ||||
| #     leave it to the codec layer to figure out marshalling? | ||||
| #    |_ would mean only one spot to do the `.unwrap()` (which we may | ||||
| #       end up needing to call from the hook()s anyway?) | ||||
| # -[x] rename to `UnwrappedAddress[Descriptor]` ?? | ||||
| #    seems like the right name as per, | ||||
| #    https://www.geeksforgeeks.org/introduction-to-address-descriptor/ | ||||
| # | ||||
| UnwrappedAddress = ( | ||||
|     # tcp/udp/uds | ||||
|     tuple[ | ||||
|         str,  # host/domain(tcp), filesys-dir(uds) | ||||
|         int|str,  # port/path(uds) | ||||
|     ] | ||||
|     # ?TODO? should we also include another 2 fields from | ||||
|     # our `Aid` msg such that we include the runtime `Actor.uid` | ||||
|     # of `.name` and `.uuid`? | ||||
|     # - would ensure uniqueness across entire net? | ||||
|     # - allows for easier runtime-level filtering of "actors by | ||||
|     #   service name" | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, maybe rename to `SocketAddress`? | ||||
| class Address(Protocol): | ||||
|     proto_key: ClassVar[str] | ||||
|     unwrapped_type: ClassVar[UnwrappedAddress] | ||||
| 
 | ||||
|     # TODO, i feel like an `.is_bound()` is a better thing to | ||||
|     # support? | ||||
|     # Lke, what use does this have besides a noop and if it's not | ||||
|     # valid why aren't we erroring on creation/use? | ||||
|     @property | ||||
|     def is_valid(self) -> bool: | ||||
|         ... | ||||
| 
 | ||||
|     # TODO, maybe `.netns` is a better name? | ||||
|     @property | ||||
|     def namespace(self) -> tuple[str, int]|None: | ||||
|         ''' | ||||
|         The if-available, OS-specific "network namespace" key. | ||||
| 
 | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     @property | ||||
|     def bindspace(self) -> str: | ||||
|         ''' | ||||
|         Deliver the socket address' "bindable space" from | ||||
|         a `socket.socket.bind()` and thus from the perspective of | ||||
|         specific transport protocol domain. | ||||
| 
 | ||||
|         I.e. for most (layer-4) network-socket protocols this is | ||||
|         normally the ipv4/6 address, for UDS this is normally | ||||
|         a filesystem (sub-directory). | ||||
| 
 | ||||
|         For (distributed) network protocols this is normally the routing | ||||
|         layer's domain/(ip-)address, though it might also include a "network namespace" | ||||
|         key different then the default. | ||||
| 
 | ||||
|         For local-host-only transports this is either an explicit | ||||
|         namespace (with types defined by the OS: netns, Cgroup, IPC, | ||||
|         pid, etc. on linux) or failing that the sub-directory in the | ||||
|         filesys in which socket/shm files are located *under*. | ||||
| 
 | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_addr(cls, addr: UnwrappedAddress) -> Address: | ||||
|         ... | ||||
| 
 | ||||
|     def unwrap(self) -> UnwrappedAddress: | ||||
|         ''' | ||||
|         Deliver the underying minimum field set in | ||||
|         a primitive python data type-structure. | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     @classmethod | ||||
|     def get_random( | ||||
|         cls, | ||||
|         current_actor: Actor, | ||||
|         bindspace: str|None = None, | ||||
|     ) -> Address: | ||||
|         ... | ||||
| 
 | ||||
|     # TODO, this should be something like a `.get_def_registar_addr()` | ||||
|     # or similar since, | ||||
|     # - it should be a **host singleton** (not root/tree singleton) | ||||
|     # - we **only need this value** when one isn't provided to the | ||||
|     #   runtime at boot and we want to implicitly provide a host-wide | ||||
|     #   registrar. | ||||
|     # - each rooted-actor-tree should likely have its own | ||||
|     #   micro-registry (likely the root being it), also see | ||||
|     @classmethod | ||||
|     def get_root(cls) -> Address: | ||||
|         ... | ||||
| 
 | ||||
|     def __repr__(self) -> str: | ||||
|         ... | ||||
| 
 | ||||
|     def __eq__(self, other) -> bool: | ||||
|         ... | ||||
| 
 | ||||
|     async def open_listener( | ||||
|         self, | ||||
|         **kwargs, | ||||
|     ) -> SocketListener: | ||||
|         ... | ||||
| 
 | ||||
|     async def close_listener(self): | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| _address_types: bidict[str, Type[Address]] = { | ||||
|     'tcp': TCPAddress, | ||||
|     'uds': UDSAddress | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| # TODO! really these are discovery sys default addrs ONLY useful for | ||||
| # when none is provided to a root actor on first boot. | ||||
| _default_lo_addrs: dict[ | ||||
|     str, | ||||
|     UnwrappedAddress | ||||
| ] = { | ||||
|     'tcp': TCPAddress.get_root().unwrap(), | ||||
|     'uds': UDSAddress.get_root().unwrap(), | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def get_address_cls(name: str) -> Type[Address]: | ||||
|     return _address_types[name] | ||||
| 
 | ||||
| 
 | ||||
| def is_wrapped_addr(addr: any) -> bool: | ||||
|     return type(addr) in _address_types.values() | ||||
| 
 | ||||
| 
 | ||||
| def mk_uuid() -> str: | ||||
|     ''' | ||||
|     Encapsulate creation of a uuid4 as `str` as used | ||||
|     for creating `Actor.uid: tuple[str, str]` and/or | ||||
|     `.msg.types.Aid`. | ||||
| 
 | ||||
|     ''' | ||||
|     return str(uuid4()) | ||||
| 
 | ||||
| 
 | ||||
| def wrap_address( | ||||
|     addr: UnwrappedAddress | ||||
| ) -> Address: | ||||
|     ''' | ||||
|     Wrap an `UnwrappedAddress` as an `Address`-type based | ||||
|     on matching builtin python data-structures which we adhoc | ||||
|     use for each. | ||||
| 
 | ||||
|     XXX NOTE, careful care must be placed to ensure | ||||
|     `UnwrappedAddress` cases are **definitely unique** otherwise the | ||||
|     wrong transport backend may be loaded and will break many | ||||
|     low-level things in our runtime in a not-fun-to-debug way! | ||||
| 
 | ||||
|     XD | ||||
| 
 | ||||
|     ''' | ||||
|     if is_wrapped_addr(addr): | ||||
|         return addr | ||||
| 
 | ||||
|     cls: Type|None = None | ||||
|     # if 'sock' in addr[0]: | ||||
|     #     import pdbp; pdbp.set_trace() | ||||
|     match addr: | ||||
| 
 | ||||
|         # classic network socket-address as tuple/list | ||||
|         case ( | ||||
|             (str(), int()) | ||||
|             | | ||||
|             [str(), int()] | ||||
|         ): | ||||
|             cls = TCPAddress | ||||
| 
 | ||||
|         case ( | ||||
|             # (str()|Path(), str()|Path()), | ||||
|             # ^TODO? uhh why doesn't this work!? | ||||
| 
 | ||||
|             (_, filename) | ||||
|         ) if type(filename) is str: | ||||
|             cls = UDSAddress | ||||
| 
 | ||||
|         # likely an unset UDS or TCP reg address as defaulted in | ||||
|         # `_state._runtime_vars['_root_mailbox']` | ||||
|         # | ||||
|         # TODO? figure out when/if we even need this? | ||||
|         case ( | ||||
|             None | ||||
|             | | ||||
|             [None, None] | ||||
|         ): | ||||
|             cls: Type[Address] = get_address_cls(_def_tpt_proto) | ||||
|             addr: UnwrappedAddress = cls.get_root().unwrap() | ||||
| 
 | ||||
|         case _: | ||||
|             # import pdbp; pdbp.set_trace() | ||||
|             raise TypeError( | ||||
|                 f'Can not wrap unwrapped-address ??\n' | ||||
|                 f'type(addr): {type(addr)!r}\n' | ||||
|                 f'addr: {addr!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|     return cls.from_addr(addr) | ||||
| 
 | ||||
| 
 | ||||
| def default_lo_addrs( | ||||
|     transports: list[str], | ||||
| ) -> list[Type[Address]]: | ||||
|     ''' | ||||
|     Return the default, host-singleton, registry address | ||||
|     for an input transport key set. | ||||
| 
 | ||||
|     ''' | ||||
|     return [ | ||||
|         _default_lo_addrs[transport] | ||||
|         for transport in transports | ||||
|     ] | ||||
|  | @ -31,12 +31,8 @@ def parse_uid(arg): | |||
|     return str(name), str(uuid)  # ensures str encoding | ||||
| 
 | ||||
| def parse_ipaddr(arg): | ||||
|     try: | ||||
|         return literal_eval(arg) | ||||
| 
 | ||||
|     except (ValueError, SyntaxError): | ||||
|         # UDS: try to interpret as a straight up str | ||||
|         return arg | ||||
|     host, port = literal_eval(arg) | ||||
|     return (str(host), int(port)) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|  | @ -50,8 +46,8 @@ if __name__ == "__main__": | |||
|     args = parser.parse_args() | ||||
| 
 | ||||
|     subactor = Actor( | ||||
|         name=args.uid[0], | ||||
|         uuid=args.uid[1], | ||||
|         args.uid[0], | ||||
|         uid=args.uid[1], | ||||
|         loglevel=args.loglevel, | ||||
|         spawn_method="trio" | ||||
|     ) | ||||
|  |  | |||
|  | @ -19,13 +19,10 @@ Actor cluster helpers. | |||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from multiprocessing import cpu_count | ||||
| from typing import ( | ||||
|     AsyncGenerator, | ||||
| ) | ||||
| from typing import AsyncGenerator, Optional | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
|  | @ -55,17 +52,10 @@ async def open_actor_cluster( | |||
|         raise ValueError( | ||||
|             'Number of names is {len(names)} but count it {count}') | ||||
| 
 | ||||
|     async with ( | ||||
|         # tractor.trionics.collapse_eg(), | ||||
|         tractor.open_nursery( | ||||
|             **runtime_kwargs, | ||||
|         ) as an | ||||
|     ): | ||||
|         async with ( | ||||
|             # tractor.trionics.collapse_eg(), | ||||
|             trio.open_nursery() as tn, | ||||
|             tractor.trionics.maybe_raise_from_masking_exc() | ||||
|         ): | ||||
|     async with tractor.open_nursery( | ||||
|         **runtime_kwargs, | ||||
|     ) as an: | ||||
|         async with trio.open_nursery() as n: | ||||
|             uid = tractor.current_actor().uid | ||||
| 
 | ||||
|             async def _start(name: str) -> None: | ||||
|  | @ -76,8 +66,9 @@ async def open_actor_cluster( | |||
|                 ) | ||||
| 
 | ||||
|             for name in names: | ||||
|                 tn.start_soon(_start, name) | ||||
|                 n.start_soon(_start, name) | ||||
| 
 | ||||
|         assert len(portals) == count | ||||
|         yield portals | ||||
| 
 | ||||
|         await an.cancel(hard_kill=hard_kill) | ||||
|  |  | |||
|  | @ -47,9 +47,6 @@ from functools import partial | |||
| import inspect | ||||
| from pprint import pformat | ||||
| import textwrap | ||||
| from types import ( | ||||
|     UnionType, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncGenerator, | ||||
|  | @ -82,14 +79,13 @@ from .msg import ( | |||
|     MsgType, | ||||
|     NamespacePath, | ||||
|     PayloadT, | ||||
|     Return, | ||||
|     Started, | ||||
|     Stop, | ||||
|     Yield, | ||||
|     pretty_struct, | ||||
|     _ops as msgops, | ||||
| ) | ||||
| from .ipc import ( | ||||
| from ._ipc import ( | ||||
|     Channel, | ||||
| ) | ||||
| from ._streaming import ( | ||||
|  | @ -101,14 +97,11 @@ from ._state import ( | |||
|     debug_mode, | ||||
|     _ctxvar_Context, | ||||
| ) | ||||
| from .trionics import ( | ||||
|     collapse_eg, | ||||
| ) | ||||
| # ------ - ------ | ||||
| if TYPE_CHECKING: | ||||
|     from ._portal import Portal | ||||
|     from ._runtime import Actor | ||||
|     from .ipc._transport import MsgTransport | ||||
|     from ._ipc import MsgTransport | ||||
|     from .devx._frame_stack import ( | ||||
|         CallerInfo, | ||||
|     ) | ||||
|  | @ -154,7 +147,7 @@ class Context: | |||
|     2 cancel-scope-linked, communicating and parallel executing | ||||
|     `Task`s. Contexts are allocated on each side of any task | ||||
|     RPC-linked msg dialog, i.e. for every request to a remote | ||||
|     actor from a `Portal`. On the "child" side a context is | ||||
|     actor from a `Portal`. On the "callee" side a context is | ||||
|     always allocated inside `._rpc._invoke()`. | ||||
| 
 | ||||
|     TODO: more detailed writeup on cancellation, error and | ||||
|  | @ -222,8 +215,8 @@ class Context: | |||
|     # `._runtime.invoke()`. | ||||
|     _remote_func_type: str | None = None | ||||
| 
 | ||||
|     # NOTE: (for now) only set (a portal) on the parent side since | ||||
|     # the child doesn't generally need a ref to one and should | ||||
|     # NOTE: (for now) only set (a portal) on the caller side since | ||||
|     # the callee doesn't generally need a ref to one and should | ||||
|     # normally need to explicitly ask for handle to its peer if | ||||
|     # more the the `Context` is needed? | ||||
|     _portal: Portal | None = None | ||||
|  | @ -249,15 +242,13 @@ class Context: | |||
|     # a drain loop? | ||||
|     # _res_scope: trio.CancelScope|None = None | ||||
| 
 | ||||
|     _outcome_msg: Return|Error|ContextCancelled = Unresolved | ||||
| 
 | ||||
|     # on a clean exit there should be a final value | ||||
|     # delivered from the far end "child" task, so | ||||
|     # delivered from the far end "callee" task, so | ||||
|     # this value is only set on one side. | ||||
|     # _result: Any | int = None | ||||
|     _result: PayloadT|Unresolved = Unresolved | ||||
|     _result: Any|Unresolved = Unresolved | ||||
| 
 | ||||
|     # if the local "parent"  task errors this value is always set | ||||
|     # if the local "caller"  task errors this value is always set | ||||
|     # to the error that was captured in the | ||||
|     # `Portal.open_context().__aexit__()` teardown block OR, in | ||||
|     # 2 special cases when an (maybe) expected remote error | ||||
|  | @ -293,9 +284,9 @@ class Context: | |||
|     # a `ContextCancelled` due to a call to `.cancel()` triggering | ||||
|     # "graceful closure" on either side: | ||||
|     # - `._runtime._invoke()` will check this flag before engaging | ||||
|     #   the crash handler REPL in such cases where the "child" | ||||
|     #   the crash handler REPL in such cases where the "callee" | ||||
|     #   raises the cancellation, | ||||
|     # - `.devx.debug.lock_stdio_for_peer()` will set it to `False` if | ||||
|     # - `.devx._debug.lock_stdio_for_peer()` will set it to `False` if | ||||
|     #   the global tty-lock has been configured to filter out some | ||||
|     #   actors from being able to acquire the debugger lock. | ||||
|     _enter_debugger_on_cancel: bool = True | ||||
|  | @ -307,8 +298,8 @@ class Context: | |||
|     _stream_opened: bool = False | ||||
|     _stream: MsgStream|None = None | ||||
| 
 | ||||
|     # the parent-task's calling-fn's frame-info, the frame above | ||||
|     # `Portal.open_context()`, for introspection/logging. | ||||
|     # caller of `Portal.open_context()` for | ||||
|     # logging purposes mostly | ||||
|     _caller_info: CallerInfo|None = None | ||||
| 
 | ||||
|     # overrun handling machinery | ||||
|  | @ -369,7 +360,7 @@ class Context: | |||
|             # f'   ---\n' | ||||
|             f' |_ipc: {self.dst_maddr}\n' | ||||
|             # f'   dst_maddr{ds}{self.dst_maddr}\n' | ||||
|             f"   uid{ds}'{self.chan.aid}'\n" | ||||
|             f"   uid{ds}'{self.chan.uid}'\n" | ||||
|             f"   cid{ds}'{self.cid}'\n" | ||||
|             # f'   ---\n' | ||||
|             f'\n' | ||||
|  | @ -442,25 +433,25 @@ class Context: | |||
|         ''' | ||||
|         Records whether cancellation has been requested for this context | ||||
|         by a call to  `.cancel()` either due to, | ||||
|         - an explicit call by some local task, | ||||
|         - either an explicit call by some local task, | ||||
|         - or an implicit call due to an error caught inside | ||||
|           the `Portal.open_context()` block. | ||||
|           the ``Portal.open_context()`` block. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._cancel_called | ||||
| 
 | ||||
|     # XXX, to debug who frickin sets it.. | ||||
|     # @cancel_called.setter | ||||
|     # def cancel_called(self, val: bool) -> None: | ||||
|     #     ''' | ||||
|     #     Set the self-cancelled request `bool` value. | ||||
|     @cancel_called.setter | ||||
|     def cancel_called(self, val: bool) -> None: | ||||
|         ''' | ||||
|         Set the self-cancelled request `bool` value. | ||||
| 
 | ||||
|     #     ''' | ||||
|     #     if val: | ||||
|     #         from .devx import pause_from_sync | ||||
|     #         pause_from_sync() | ||||
|         ''' | ||||
|         # to debug who frickin sets it.. | ||||
|         # if val: | ||||
|         #     from .devx import pause_from_sync | ||||
|         #     pause_from_sync() | ||||
| 
 | ||||
|     #     self._cancel_called = val | ||||
|         self._cancel_called = val | ||||
| 
 | ||||
|     @property | ||||
|     def canceller(self) -> tuple[str, str]|None: | ||||
|  | @ -529,11 +520,11 @@ class Context: | |||
|         ''' | ||||
|         Exactly the value of `self._scope.cancelled_caught` | ||||
|         (delegation) and should only be (able to be read as) | ||||
|         `True` for a `.side == "parent"` ctx wherein the | ||||
|         `True` for a `.side == "caller"` ctx wherein the | ||||
|         `Portal.open_context()` block was exited due to a call to | ||||
|         `._scope.cancel()` - which should only ocurr in 2 cases: | ||||
| 
 | ||||
|         - a parent side calls `.cancel()`, the far side cancels | ||||
|         - a caller side calls `.cancel()`, the far side cancels | ||||
|           and delivers back a `ContextCancelled` (making | ||||
|           `.cancel_acked == True`) and `._scope.cancel()` is | ||||
|           called by `._maybe_cancel_and_set_remote_error()` which | ||||
|  | @ -542,20 +533,20 @@ class Context: | |||
|           => `._scope.cancelled_caught == True` by normal `trio` | ||||
|           cs semantics. | ||||
| 
 | ||||
|         - a parent side is delivered a `._remote_error: | ||||
|         - a caller side is delivered a `._remote_error: | ||||
|           RemoteActorError` via `._deliver_msg()` and a transitive | ||||
|           call to `_maybe_cancel_and_set_remote_error()` calls | ||||
|           `._scope.cancel()` and that cancellation eventually | ||||
|           results in `trio.Cancelled`(s) caught in the | ||||
|           `.open_context()` handling around the @acm's `yield`. | ||||
| 
 | ||||
|         Only as an FYI, in the "child" side case it can also be | ||||
|         Only as an FYI, in the "callee" side case it can also be | ||||
|         set but never is readable by any task outside the RPC | ||||
|         machinery in `._invoke()` since,: | ||||
|         - when a child side calls `.cancel()`, `._scope.cancel()` | ||||
|         - when a callee side calls `.cancel()`, `._scope.cancel()` | ||||
|           is called immediately and handled specially inside | ||||
|           `._invoke()` to raise a `ContextCancelled` which is then | ||||
|           sent to the parent side. | ||||
|           sent to the caller side. | ||||
| 
 | ||||
|           However, `._scope.cancelled_caught` can NEVER be | ||||
|           accessed/read as `True` by any RPC invoked task since it | ||||
|  | @ -635,71 +626,6 @@ class Context: | |||
|         ''' | ||||
|         await self.chan.send(Stop(cid=self.cid)) | ||||
| 
 | ||||
|     @property | ||||
|     def parent_task(self) -> trio.Task: | ||||
|         ''' | ||||
|         This IPC context's "owning task" which is a `trio.Task` | ||||
|         on one of the "sides" of the IPC. | ||||
| 
 | ||||
|         Note that the "parent_" prefix here refers to the local | ||||
|         `trio` task tree using the same interface as | ||||
|         `trio.Nursery.parent_task` whereas for IPC contexts, | ||||
|         a different cross-actor task hierarchy exists: | ||||
| 
 | ||||
|         - a "parent"-side which originally entered | ||||
|           `Portal.open_context()`, | ||||
| 
 | ||||
|         - the "child"-side which was spawned and scheduled to invoke | ||||
|           a function decorated with `@tractor.context`. | ||||
| 
 | ||||
|         This task is thus a handle to mem-domain-distinct/per-process | ||||
|         `Nursery.parent_task` depending on in which of the above | ||||
|         "sides" this context exists. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._task | ||||
| 
 | ||||
|     def _is_blocked_on_rx_chan(self) -> bool: | ||||
|         ''' | ||||
|         Predicate to indicate whether the owner `._task: trio.Task` is | ||||
|         currently blocked (by `.receive()`-ing) on its underlying RPC | ||||
|         feeder `._rx_chan`. | ||||
| 
 | ||||
|         This knowledge is highly useful when handling so called | ||||
|         "out-of-band" (OoB) cancellation conditions where a peer | ||||
|         actor's task transmitted some remote error/cancel-msg and we | ||||
|         must know whether to signal-via-cancel currently executing | ||||
|         "user-code" (user defined code embedded in `ctx._scope`) or | ||||
|         simply to forward the IPC-msg-as-error **without calling** | ||||
|         `._scope.cancel()`. | ||||
| 
 | ||||
|         In the latter case it is presumed that if the owner task is | ||||
|         blocking for the next IPC msg, it will eventually receive, | ||||
|         process and raise the equivalent local error **without** | ||||
|         requiring `._scope.cancel()` to be explicitly called by the | ||||
|         *delivering OoB RPC-task* (via `_deliver_msg()`). | ||||
| 
 | ||||
|         ''' | ||||
|         # NOTE, see the mem-chan meth-impls for *why* this | ||||
|         # logic works, | ||||
|         # `trio._channel.MemoryReceiveChannel.receive[_nowait]()` | ||||
|         # | ||||
|         # XXX realize that this is NOT an | ||||
|         # official/will-be-loudly-deprecated API: | ||||
|         # - https://trio.readthedocs.io/en/stable/reference-lowlevel.html#trio.lowlevel.Task.custom_sleep_data | ||||
|         #  |_https://trio.readthedocs.io/en/stable/reference-lowlevel.html#trio.lowlevel.wait_task_rescheduled | ||||
|         # | ||||
|         # orig repo intro in the mem-chan change over patch: | ||||
|         # - https://github.com/python-trio/trio/pull/586#issuecomment-414039117 | ||||
|         #  |_https://github.com/python-trio/trio/pull/616 | ||||
|         #  |_https://github.com/njsmith/trio/commit/98c38cef6f62e731bf8c7190e8756976bface8f0 | ||||
|         # | ||||
|         return ( | ||||
|             self._task.custom_sleep_data | ||||
|             is | ||||
|             self._rx_chan | ||||
|         ) | ||||
| 
 | ||||
|     def _maybe_cancel_and_set_remote_error( | ||||
|         self, | ||||
|         error: BaseException, | ||||
|  | @ -731,7 +657,7 @@ class Context: | |||
|         when called/closed by actor local task(s). | ||||
| 
 | ||||
|         NOTEs:  | ||||
|           - It is expected that the parent has previously unwrapped | ||||
|           - It is expected that the caller has previously unwrapped | ||||
|             the remote error using a call to `unpack_error()` and | ||||
|             provides that output exception value as the input | ||||
|             `error` argument *here*. | ||||
|  | @ -741,7 +667,7 @@ class Context: | |||
|             `Portal.open_context()` (ideally) we want to interrupt | ||||
|             any ongoing local tasks operating within that | ||||
|             `Context`'s cancel-scope so as to be notified ASAP of | ||||
|             the remote error and engage any parent handling (eg. | ||||
|             the remote error and engage any caller handling (eg. | ||||
|             for cross-process task supervision). | ||||
| 
 | ||||
|           - In some cases we may want to raise the remote error | ||||
|  | @ -808,8 +734,6 @@ class Context: | |||
|             # cancelled, NOT their reported canceller. IOW in the | ||||
|             # latter case we're cancelled by someone else getting | ||||
|             # cancelled. | ||||
|             # | ||||
|             # !TODO, switching to `Actor.aid` here! | ||||
|             if (canc := error.canceller) == self._actor.uid: | ||||
|                 whom: str = 'us' | ||||
|                 self._canceller = canc | ||||
|  | @ -852,27 +776,13 @@ class Context: | |||
|         if self._canceller is None: | ||||
|             log.error('Ctx has no canceller set!?') | ||||
| 
 | ||||
|         cs: trio.CancelScope = self._scope | ||||
| 
 | ||||
|         # ?TODO? see comment @ .start_remote_task()` | ||||
|         # | ||||
|         # if not cs: | ||||
|         #     from .devx import mk_pdb | ||||
|         #     mk_pdb().set_trace() | ||||
|         #     raise RuntimeError( | ||||
|         #         f'IPC ctx was not be opened prior to remote error delivery !?\n' | ||||
|         #         f'{self}\n' | ||||
|         #         f'\n' | ||||
|         #         f'`Portal.open_context()` must be entered (somewhere) beforehand!\n' | ||||
|         #     ) | ||||
| 
 | ||||
|         # Cancel the local `._scope`, catch that | ||||
|         # `._scope.cancelled_caught` and re-raise any remote error | ||||
|         # once exiting (or manually calling `.wait_for_result()`) the | ||||
|         # `.open_context()`  block. | ||||
|         cs: trio.CancelScope = self._scope | ||||
|         if ( | ||||
|             cs | ||||
|             and not cs.cancel_called | ||||
| 
 | ||||
|             # XXX this is an expected cancel request response | ||||
|             # message and we **don't need to raise it** in the | ||||
|  | @ -881,7 +791,8 @@ class Context: | |||
|             # if `._cancel_called` then `.cancel_acked and .cancel_called` | ||||
|             # always should be set. | ||||
|             and not self._is_self_cancelled() | ||||
|             # and not cs.cancelled_caught | ||||
|             and not cs.cancel_called | ||||
|             and not cs.cancelled_caught | ||||
|         ): | ||||
|             if ( | ||||
|                 msgerr | ||||
|  | @ -892,7 +803,7 @@ class Context: | |||
|                 not self._cancel_on_msgerr | ||||
|             ): | ||||
|                 message: str = ( | ||||
|                     f'NOT Cancelling `Context._scope` since,\n' | ||||
|                     'NOT Cancelling `Context._scope` since,\n' | ||||
|                     f'Context._cancel_on_msgerr = {self._cancel_on_msgerr}\n\n' | ||||
|                     f'AND we got a msg-type-error!\n' | ||||
|                     f'{error}\n' | ||||
|  | @ -902,43 +813,13 @@ class Context: | |||
|                 # `trio.Cancelled` subtype here ;) | ||||
|                 # https://github.com/goodboy/tractor/issues/368 | ||||
|                 message: str = 'Cancelling `Context._scope` !\n\n' | ||||
|                 cs.cancel() | ||||
| 
 | ||||
|         # TODO, explicit condition for OoB (self-)cancellation? | ||||
|         # - we called `Portal.cancel_actor()` from this actor | ||||
|         #   and the peer ctx task delivered ctxc due to it. | ||||
|         # - currently `self._is_self_cancelled()` will be true | ||||
|         #   since the ctxc.canceller check will match us even though it | ||||
|         #   wasn't from this ctx specifically! | ||||
|         elif ( | ||||
|             cs | ||||
|             and self._is_self_cancelled() | ||||
|             and not cs.cancel_called | ||||
|         ): | ||||
|             message: str = ( | ||||
|                 'Cancelling `ctx._scope` due to OoB self-cancel ?!\n' | ||||
|                 '\n' | ||||
|             ) | ||||
|                 # from .devx import pause_from_sync | ||||
|                 # pause_from_sync() | ||||
|                 self._scope.cancel() | ||||
|         else: | ||||
|             message: str = 'NOT cancelling `Context._scope` !\n\n' | ||||
|             # from .devx import mk_pdb | ||||
|             # mk_pdb().set_trace() | ||||
|             # TODO XXX, required to fix timeout failure in | ||||
|             # `test_cancelled_lockacquire_in_ipctx_not_unmaskeed` | ||||
|             # | ||||
| 
 | ||||
|             # XXX NOTE XXX, this is SUPER SUBTLE! | ||||
|             # we only want to cancel our embedded `._scope` | ||||
|             # if the ctx's current/using task is NOT blocked | ||||
|             # on `._rx_chan.receive()` and on some other | ||||
|             # `trio`-checkpoint since in the former case | ||||
|             # any `._remote_error` will be relayed through | ||||
|             # the rx-chan and appropriately raised by the owning | ||||
|             # `._task` directly. IF the owner task is however | ||||
|             # blocking elsewhere we need to interrupt it **now**. | ||||
|             if not self._is_blocked_on_rx_chan(): | ||||
|                 cs.cancel() | ||||
|         else: | ||||
|             # rx_stats = self._rx_chan.statistics() | ||||
|             message: str = 'NOT cancelling `Context._scope` !\n\n' | ||||
| 
 | ||||
|         fmt_str: str = 'No `self._scope: CancelScope` was set/used ?\n' | ||||
|         if ( | ||||
|  | @ -962,7 +843,6 @@ class Context: | |||
|                 + | ||||
|                 cs_fmt | ||||
|             ) | ||||
| 
 | ||||
|         log.cancel( | ||||
|             message | ||||
|             + | ||||
|  | @ -973,10 +853,19 @@ class Context: | |||
|     @property | ||||
|     def dst_maddr(self) -> str: | ||||
|         chan: Channel = self.chan | ||||
|         dst_addr, dst_port = chan.raddr | ||||
|         trans: MsgTransport = chan.transport | ||||
|         # cid: str = self.cid | ||||
|         # cid_head, cid_tail = cid[:6], cid[-6:] | ||||
|         return trans.maddr | ||||
|         return ( | ||||
|             f'/ipv4/{dst_addr}' | ||||
|             f'/{trans.name_key}/{dst_port}' | ||||
|             # f'/{self.chan.uid[0]}' | ||||
|             # f'/{self.cid}' | ||||
| 
 | ||||
|             # f'/cid={cid_head}..{cid_tail}' | ||||
|             # TODO: ? not use this ^ right ? | ||||
|         ) | ||||
| 
 | ||||
|     dmaddr = dst_maddr | ||||
| 
 | ||||
|  | @ -995,11 +884,6 @@ class Context: | |||
| 
 | ||||
|     @property | ||||
|     def repr_caller(self) -> str: | ||||
|         ''' | ||||
|         Render a "namespace-path" style representation of the calling | ||||
|         task-fn. | ||||
| 
 | ||||
|         ''' | ||||
|         ci: CallerInfo|None = self._caller_info | ||||
|         if ci: | ||||
|             return ( | ||||
|  | @ -1013,7 +897,7 @@ class Context: | |||
|     def repr_api(self) -> str: | ||||
|         return 'Portal.open_context()' | ||||
| 
 | ||||
|         # TODO: use `.dev._frame_stack` scanning to find caller fn! | ||||
|         # TODO: use `.dev._frame_stack` scanning to find caller! | ||||
|         # ci: CallerInfo|None = self._caller_info | ||||
|         # if ci: | ||||
|         #     return ( | ||||
|  | @ -1048,27 +932,26 @@ class Context: | |||
|         => That is, an IPC `Context` (this) **does not** | ||||
|            have the same semantics as a `trio.CancelScope`. | ||||
| 
 | ||||
|         If the parent (who entered the `Portal.open_context()`) | ||||
|         If the caller (who entered the `Portal.open_context()`) | ||||
|         desires that the internal block's cancel-scope  be | ||||
|         cancelled it should open its own `trio.CancelScope` and | ||||
|         manage it as needed. | ||||
| 
 | ||||
|         ''' | ||||
|         side: str = self.side | ||||
|         self._cancel_called = True | ||||
|         # ^ XXX for debug via the `@.setter` | ||||
|         # self.cancel_called = True | ||||
|         # XXX for debug via the `@.setter` | ||||
|         self.cancel_called = True | ||||
| 
 | ||||
|         header: str = ( | ||||
|             f'Cancelling ctx from {side!r}-side\n' | ||||
|             f'Cancelling ctx from {side.upper()}-side\n' | ||||
|         ) | ||||
|         reminfo: str = ( | ||||
|             # ' =>\n' | ||||
|             # f'Context.cancel() => {self.chan.uid}\n' | ||||
|             f'\n' | ||||
|             f'c)=> {self.chan.uid}\n' | ||||
|             f'   |_[{self.dst_maddr}\n' | ||||
|             f'     >> {self.repr_rpc}\n' | ||||
|             # f'{self.chan.uid}\n' | ||||
|             f' |_ @{self.dst_maddr}\n' | ||||
|             f'    >> {self.repr_rpc}\n' | ||||
|             # f'    >> {self._nsf}() -> {codec}[dict]:\n\n' | ||||
|             # TODO: pull msg-type from spec re #320 | ||||
|         ) | ||||
|  | @ -1120,7 +1003,7 @@ class Context: | |||
|                     ) | ||||
|                 else: | ||||
|                     log.cancel( | ||||
|                         f'Timed out on cancel request of remote task?\n' | ||||
|                         'Timed out on cancel request of remote task?\n' | ||||
|                         f'{reminfo}' | ||||
|                     ) | ||||
| 
 | ||||
|  | @ -1131,7 +1014,7 @@ class Context: | |||
|         # `_invoke()` RPC task. | ||||
|         # | ||||
|         # NOTE: on this side we ALWAYS cancel the local scope | ||||
|         # since the parent expects a `ContextCancelled` to be sent | ||||
|         # since the caller expects a `ContextCancelled` to be sent | ||||
|         # from `._runtime._invoke()` back to the other side. The | ||||
|         # logic for catching the result of the below | ||||
|         # `._scope.cancel()` is inside the `._runtime._invoke()` | ||||
|  | @ -1188,25 +1071,9 @@ class Context: | |||
|         |RemoteActorError  # stream overrun caused and ignored by us | ||||
|     ): | ||||
|         ''' | ||||
|         Maybe raise a remote error depending on the type of error and | ||||
|         *who*, i.e. which side of the task pair across actors, | ||||
|         requested a cancellation (if any). | ||||
| 
 | ||||
|         Depending on the input config-params suppress raising | ||||
|         certain remote excs: | ||||
| 
 | ||||
|         - if `remote_error: ContextCancelled` (ctxc) AND this side's | ||||
|           task is the "requester", it at somem point called | ||||
|           `Context.cancel()`, then the peer's ctxc is treated | ||||
|           as a "cancel ack". | ||||
| 
 | ||||
|          |_ this behaves exactly like how `trio.Nursery.cancel_scope` | ||||
|             absorbs any `BaseExceptionGroup[trio.Cancelled]` wherein the | ||||
|             owning parent task never will raise a `trio.Cancelled` | ||||
|             if `CancelScope.cancel_called == True`. | ||||
| 
 | ||||
|         - `remote_error: StreamOverrrun` (overrun) AND | ||||
|            `raise_overrun_from_self` is set. | ||||
|         Maybe raise a remote error depending on the type of error | ||||
|         and *who* (i.e. which task from which actor) requested | ||||
|         a  cancellation (if any). | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|  | @ -1248,19 +1115,18 @@ class Context: | |||
|             # for this ^, NO right? | ||||
| 
 | ||||
|         ) or ( | ||||
|             # NOTE: whenever this side is the cause of an | ||||
|             # overrun on the peer side, i.e. we sent msgs too | ||||
|             # fast and the peer task was overrun according | ||||
|             # to `MsgStream` buffer settings, AND this was | ||||
|             # called with `raise_overrun_from_self=True` (the | ||||
|             # default), silently absorb any `StreamOverrun`. | ||||
|             # | ||||
|             # XXX, this is namely useful for supressing such faults | ||||
|             # during cancellation/error/final-result handling inside | ||||
|             # `.msg._ops.drain_to_final_msg()` such that we do not | ||||
|             # raise during a cancellation-request, i.e. when | ||||
|             # NOTE: whenever this context is the cause of an | ||||
|             # overrun on the remote side (aka we sent msgs too | ||||
|             # fast that the remote task was overrun according | ||||
|             # to `MsgStream` buffer settings) AND the caller | ||||
|             # has requested to not raise overruns this side | ||||
|             # caused, we also silently absorb any remotely | ||||
|             # boxed `StreamOverrun`. This is mostly useful for | ||||
|             # supressing such faults during | ||||
|             # cancellation/error/final-result handling inside | ||||
|             # `msg._ops.drain_to_final_msg()` such that we do not | ||||
|             # raise such errors particularly in the case where | ||||
|             # `._cancel_called == True`. | ||||
|             # | ||||
|             not raise_overrun_from_self | ||||
|             and isinstance(remote_error, RemoteActorError) | ||||
|             and remote_error.boxed_type is StreamOverrun | ||||
|  | @ -1304,8 +1170,8 @@ class Context: | |||
| 
 | ||||
|     ) -> Any|Exception: | ||||
|         ''' | ||||
|         From some (parent) side task, wait for and return the final | ||||
|         result from the remote (child) side's task. | ||||
|         From some (caller) side task, wait for and return the final | ||||
|         result from the remote (callee) side's task. | ||||
| 
 | ||||
|         This provides a mechanism for one task running in some actor to wait | ||||
|         on another task at the other side, in some other actor, to terminate. | ||||
|  | @ -1329,11 +1195,9 @@ class Context: | |||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         if not self._portal: | ||||
|             raise RuntimeError( | ||||
|                 'Invalid usage of `Context.wait_for_result()`!\n' | ||||
|                 'Not valid on child-side IPC ctx!\n' | ||||
|             ) | ||||
|         assert self._portal, ( | ||||
|             '`Context.wait_for_result()` can not be called from callee side!' | ||||
|         ) | ||||
|         if self._final_result_is_set(): | ||||
|             return self._result | ||||
| 
 | ||||
|  | @ -1354,8 +1218,6 @@ class Context: | |||
|             # since every message should be delivered via the normal | ||||
|             # `._deliver_msg()` route which will appropriately set | ||||
|             # any `.maybe_error`. | ||||
|             outcome_msg: Return|Error|ContextCancelled | ||||
|             drained_msgs: list[MsgType] | ||||
|             ( | ||||
|                 outcome_msg, | ||||
|                 drained_msgs, | ||||
|  | @ -1363,19 +1225,11 @@ class Context: | |||
|                 ctx=self, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
| 
 | ||||
|             drained_status: str = ( | ||||
|                 'Ctx drained to final outcome msg\n\n' | ||||
|                 f'{outcome_msg}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # ?XXX, should already be set in `._deliver_msg()` right? | ||||
|             if self._outcome_msg is not Unresolved: | ||||
|                 # from .devx import debug | ||||
|                 # await debug.pause() | ||||
|                 assert self._outcome_msg is outcome_msg | ||||
|             else: | ||||
|                 self._outcome_msg = outcome_msg | ||||
| 
 | ||||
|             if drained_msgs: | ||||
|                 drained_status += ( | ||||
|                     '\n' | ||||
|  | @ -1601,12 +1455,6 @@ class Context: | |||
|                 ): | ||||
|                     status = 'peer-cancelled' | ||||
| 
 | ||||
|             case ( | ||||
|                 Unresolved, | ||||
|                 trio.Cancelled(),  # any error-type | ||||
|             ) if self.canceller: | ||||
|                 status = 'actor-cancelled' | ||||
| 
 | ||||
|             # (remote) error condition | ||||
|             case ( | ||||
|                 Unresolved, | ||||
|  | @ -1712,15 +1560,15 @@ class Context: | |||
|                     strict_pld_parity=strict_pld_parity, | ||||
|                     hide_tb=hide_tb, | ||||
|                 ) | ||||
|             except BaseException as _bexc: | ||||
|                 err = _bexc | ||||
|             except BaseException as err: | ||||
|                 if not isinstance(err, MsgTypeError): | ||||
|                     __tracebackhide__: bool = False | ||||
| 
 | ||||
|                 raise err | ||||
|                 raise | ||||
| 
 | ||||
| 
 | ||||
|         # TODO: maybe a flag to by-pass encode op if already done | ||||
|         # here in parent? | ||||
|         # here in caller? | ||||
|         await self.chan.send(started_msg) | ||||
| 
 | ||||
|         # set msg-related internal runtime-state | ||||
|  | @ -1796,7 +1644,7 @@ class Context: | |||
| 
 | ||||
|          XXX RULES XXX | ||||
|         ------ - ------ | ||||
|         - NEVER raise remote errors from this method; a calling runtime-task. | ||||
|         - NEVER raise remote errors from this method; a runtime task caller. | ||||
|           An error "delivered" to a ctx should always be raised by | ||||
|           the corresponding local task operating on the | ||||
|           `Portal`/`Context` APIs. | ||||
|  | @ -1872,7 +1720,7 @@ class Context: | |||
| 
 | ||||
|             else: | ||||
|                 report = ( | ||||
|                     'Queueing OVERRUN msg on parent task:\n\n' | ||||
|                     'Queueing OVERRUN msg on caller task:\n\n' | ||||
|                     + report | ||||
|                 ) | ||||
|                 log.debug(report) | ||||
|  | @ -1889,6 +1737,7 @@ class Context: | |||
| 
 | ||||
|                 f'{structfmt(msg)}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # NOTE: if an error is deteced we should always still | ||||
|             # send it through the feeder-mem-chan and expect | ||||
|             # it to be raised by any context (stream) consumer | ||||
|  | @ -1900,21 +1749,6 @@ class Context: | |||
|             # normally the task that should get cancelled/error | ||||
|             # from some remote fault! | ||||
|             send_chan.send_nowait(msg) | ||||
|             match msg: | ||||
|                 case Stop(): | ||||
|                     if (stream := self._stream): | ||||
|                         stream._stop_msg = msg | ||||
| 
 | ||||
|                 case Return(): | ||||
|                     if not self._outcome_msg: | ||||
|                         log.warning( | ||||
|                             f'Setting final outcome msg AFTER ' | ||||
|                             f'`._rx_chan.send()`??\n' | ||||
|                             f'\n' | ||||
|                             f'{msg}' | ||||
|                         ) | ||||
|                         self._outcome_msg = msg | ||||
| 
 | ||||
|             return True | ||||
| 
 | ||||
|         except trio.BrokenResourceError: | ||||
|  | @ -2068,12 +1902,12 @@ async def open_context_from_portal( | |||
|     IPC protocol. | ||||
| 
 | ||||
|     The yielded `tuple` is a pair delivering a `tractor.Context` | ||||
|     and any first value "sent" by the "child" task via a call | ||||
|     and any first value "sent" by the "callee" task via a call | ||||
|     to `Context.started(<value: Any>)`; this side of the | ||||
|     context does not unblock until the "child" task calls | ||||
|     context does not unblock until the "callee" task calls | ||||
|     `.started()` in similar style to `trio.Nursery.start()`. | ||||
|     When the "child" (side that is "called"/started by a call | ||||
|     to *this* method) returns, the parent side (this) unblocks | ||||
|     When the "callee" (side that is "called"/started by a call | ||||
|     to *this* method) returns, the caller side (this) unblocks | ||||
|     and any final value delivered from the other end can be | ||||
|     retrieved using the `Contex.wait_for_result()` api. | ||||
| 
 | ||||
|  | @ -2086,7 +1920,7 @@ async def open_context_from_portal( | |||
|     __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|     # denote this frame as a "runtime frame" for stack | ||||
|     # introspection where we report the parent code in logging | ||||
|     # introspection where we report the caller code in logging | ||||
|     # and error message content. | ||||
|     # NOTE: 2 bc of the wrapping `@acm` | ||||
|     __runtimeframe__: int = 2  # noqa | ||||
|  | @ -2121,9 +1955,6 @@ async def open_context_from_portal( | |||
|             f'|_{portal.actor}\n' | ||||
|         ) | ||||
| 
 | ||||
|     # ?TODO? could we move this to inside the `tn` block? | ||||
|     # -> would allow doing `ctx.parent_task = tn.parent_task` ? | ||||
|     # -> would allow a `if not ._scope: => raise RTE` ? | ||||
|     ctx: Context = await portal.actor.start_remote_task( | ||||
|         portal.channel, | ||||
|         nsf=nsf, | ||||
|  | @ -2148,11 +1979,9 @@ async def open_context_from_portal( | |||
|     # placeholder for any exception raised in the runtime | ||||
|     # or by user tasks which cause this context's closure. | ||||
|     scope_err: BaseException|None = None | ||||
|     ctxc_from_child: ContextCancelled|None = None | ||||
|     ctxc_from_callee: ContextCancelled|None = None | ||||
|     try: | ||||
|         # from .devx import pause | ||||
|         async with ( | ||||
|             collapse_eg(), | ||||
|             trio.open_nursery() as tn, | ||||
|             msgops.maybe_limit_plds( | ||||
|                 ctx=ctx, | ||||
|  | @ -2173,11 +2002,7 @@ async def open_context_from_portal( | |||
|             # the dialog, the `Error` msg should be raised from the `msg` | ||||
|             # handling block below. | ||||
|             try: | ||||
|                 log.runtime( | ||||
|                     f'IPC ctx parent waiting on Started msg..\n' | ||||
|                     f'ctx.cid: {ctx.cid!r}\n' | ||||
|                 ) | ||||
|                 started_msg, first = await ctx._pld_rx.recv_msg( | ||||
|                 started_msg, first = await ctx._pld_rx.recv_msg_w_pld( | ||||
|                     ipc=ctx, | ||||
|                     expect_msg=Started, | ||||
|                     passthrough_non_pld_msgs=False, | ||||
|  | @ -2185,16 +2010,16 @@ async def open_context_from_portal( | |||
|                 ) | ||||
|             except trio.Cancelled as taskc: | ||||
|                 ctx_cs: trio.CancelScope = ctx._scope | ||||
|                 log.cancel( | ||||
|                     f'IPC ctx was cancelled during "child" task sync due to\n\n' | ||||
|                     f'.cid: {ctx.cid!r}\n' | ||||
|                     f'.maybe_error: {ctx.maybe_error!r}\n' | ||||
|                 ) | ||||
|                 # await pause(shield=True) | ||||
| 
 | ||||
|                 if not ctx_cs.cancel_called: | ||||
|                     raise | ||||
| 
 | ||||
|                 # from .devx import pause | ||||
|                 # await pause(shield=True) | ||||
| 
 | ||||
|                 log.cancel( | ||||
|                     'IPC ctx was cancelled during "child" task sync due to\n\n' | ||||
|                     f'{ctx.maybe_error}\n' | ||||
|                 ) | ||||
|                 # OW if the ctx's scope was cancelled manually, | ||||
|                 # likely the `Context` was cancelled via a call to | ||||
|                 # `._maybe_cancel_and_set_remote_error()` so ensure | ||||
|  | @ -2232,7 +2057,7 @@ async def open_context_from_portal( | |||
|             # that we can re-use it around the `yield` ^ here | ||||
|             # or vice versa? | ||||
|             # | ||||
|             # maybe TODO NOTE: between the parent exiting and | ||||
|             # maybe TODO NOTE: between the caller exiting and | ||||
|             # arriving here the far end may have sent a ctxc-msg or | ||||
|             # other error, so the quetion is whether we should check | ||||
|             # for it here immediately and maybe raise so as to engage | ||||
|  | @ -2298,16 +2123,16 @@ async def open_context_from_portal( | |||
|     #   request in which case we DO let the error bubble to the | ||||
|     #   opener. | ||||
|     # | ||||
|     # 2-THIS "parent" task somewhere invoked `Context.cancel()` | ||||
|     #   and received a `ContextCanclled` from the "child" | ||||
|     # 2-THIS "caller" task somewhere invoked `Context.cancel()` | ||||
|     #   and received a `ContextCanclled` from the "callee" | ||||
|     #   task, in which case we mask the `ContextCancelled` from | ||||
|     #   bubbling to this "parent" (much like how `trio.Nursery` | ||||
|     #   bubbling to this "caller" (much like how `trio.Nursery` | ||||
|     #   swallows any `trio.Cancelled` bubbled by a call to | ||||
|     #   `Nursery.cancel_scope.cancel()`) | ||||
|     except ContextCancelled as ctxc: | ||||
|         scope_err = ctxc | ||||
|         ctx._local_error: BaseException = scope_err | ||||
|         ctxc_from_child = ctxc | ||||
|         ctxc_from_callee = ctxc | ||||
| 
 | ||||
|         # XXX TODO XXX: FIX THIS debug_mode BUGGGG!!! | ||||
|         # using this code and then resuming the REPL will | ||||
|  | @ -2317,8 +2142,8 @@ async def open_context_from_portal( | |||
|         #   documenting it as a definittive example of | ||||
|         #   debugging the tractor-runtime itself using it's | ||||
|         #   own `.devx.` tooling! | ||||
|         # | ||||
|         # await debug.pause() | ||||
|         #  | ||||
|         # await _debug.pause() | ||||
| 
 | ||||
|         # CASE 2: context was cancelled by local task calling | ||||
|         # `.cancel()`, we don't raise and the exit block should | ||||
|  | @ -2344,11 +2169,11 @@ async def open_context_from_portal( | |||
|     # the above `._scope` can be cancelled due to: | ||||
|     # 1. an explicit self cancel via `Context.cancel()` or | ||||
|     #    `Actor.cancel()`, | ||||
|     # 2. any "child"-side remote error, possibly also a cancellation | ||||
|     # 2. any "callee"-side remote error, possibly also a cancellation | ||||
|     #    request by some peer, | ||||
|     # 3. any "parent" (aka THIS scope's) local error raised in the above `yield` | ||||
|     # 3. any "caller" (aka THIS scope's) local error raised in the above `yield` | ||||
|     except ( | ||||
|         # CASE 3: standard local error in this parent/yieldee | ||||
|         # CASE 3: standard local error in this caller/yieldee | ||||
|         Exception, | ||||
| 
 | ||||
|         # CASES 1 & 2: can manifest as a `ctx._scope_nursery` | ||||
|  | @ -2362,9 +2187,9 @@ async def open_context_from_portal( | |||
|         #   any `Context._maybe_raise_remote_err()` call. | ||||
|         # | ||||
|         # 2.-`BaseExceptionGroup[ContextCancelled | RemoteActorError]` | ||||
|         #    from any error delivered from the "child" side | ||||
|         #    from any error delivered from the "callee" side | ||||
|         #    AND a group-exc is only raised if there was > 1 | ||||
|         #    tasks started *here* in the "parent" / opener | ||||
|         #    tasks started *here* in the "caller" / opener | ||||
|         #    block. If any one of those tasks calls | ||||
|         #    `.wait_for_result()` or `MsgStream.receive()` | ||||
|         #    `._maybe_raise_remote_err()` will be transitively | ||||
|  | @ -2377,42 +2202,39 @@ async def open_context_from_portal( | |||
|         trio.Cancelled,  # NOTE: NOT from inside the ctx._scope | ||||
|         KeyboardInterrupt, | ||||
| 
 | ||||
|     ) as rent_err: | ||||
|         scope_err = rent_err | ||||
|     ) as caller_err: | ||||
|         scope_err = caller_err | ||||
|         ctx._local_error: BaseException = scope_err | ||||
| 
 | ||||
|         # XXX: ALWAYS request the context to CANCEL ON any ERROR. | ||||
|         # NOTE: `Context.cancel()` is conversely NEVER CALLED in | ||||
|         # the `ContextCancelled` "self cancellation absorbed" case | ||||
|         # handled in the block above ^^^ !! | ||||
|         # await debug.pause() | ||||
|         # await _debug.pause() | ||||
|         # log.cancel( | ||||
|         match scope_err: | ||||
|             case trio.Cancelled(): | ||||
|             case trio.Cancelled: | ||||
|                 logmeth = log.cancel | ||||
|                 cause: str = 'cancelled' | ||||
| 
 | ||||
|             # XXX explicitly report on any non-graceful-taskc cases | ||||
|             case _: | ||||
|                 cause: str = 'errored' | ||||
|                 logmeth = log.exception | ||||
| 
 | ||||
|         logmeth( | ||||
|             f'ctx {ctx.side!r}-side {cause!r} with,\n' | ||||
|             f'{ctx.repr_outcome()!r}\n' | ||||
|             f'ctx {ctx.side!r}-side exited with {ctx.repr_outcome()}\n' | ||||
|         ) | ||||
| 
 | ||||
|         if debug_mode(): | ||||
|             # async with debug.acquire_debug_lock(portal.actor.uid): | ||||
|             # async with _debug.acquire_debug_lock(portal.actor.uid): | ||||
|             #     pass | ||||
|             # TODO: factor ^ into below for non-root cases? | ||||
|             # | ||||
|             from .devx.debug import maybe_wait_for_debugger | ||||
|             from .devx import maybe_wait_for_debugger | ||||
|             was_acquired: bool = await maybe_wait_for_debugger( | ||||
|                 # header_msg=( | ||||
|                 #     'Delaying `ctx.cancel()` until debug lock ' | ||||
|                 #     'acquired..\n' | ||||
|                 # ), | ||||
|                 header_msg=( | ||||
|                     'Delaying `ctx.cancel()` until debug lock ' | ||||
|                     'acquired..\n' | ||||
|                 ), | ||||
|             ) | ||||
|             if was_acquired: | ||||
|                 log.pdb( | ||||
|  | @ -2420,11 +2242,10 @@ async def open_context_from_portal( | |||
|                     'Calling `ctx.cancel()`!\n' | ||||
|                 ) | ||||
| 
 | ||||
|         # we don't need to cancel the child if it already | ||||
|         # we don't need to cancel the callee if it already | ||||
|         # told us it's cancelled ;p | ||||
|         if ctxc_from_child is None: | ||||
|         if ctxc_from_callee is None: | ||||
|             try: | ||||
|                 # await pause(shield=True) | ||||
|                 await ctx.cancel() | ||||
|             except ( | ||||
|                 trio.BrokenResourceError, | ||||
|  | @ -2454,8 +2275,8 @@ async def open_context_from_portal( | |||
|             # via a call to | ||||
|             # `Context._maybe_cancel_and_set_remote_error()`. | ||||
|             # As per `Context._deliver_msg()`, that error IS | ||||
|             # ALWAYS SET any time "child" side fails and causes | ||||
|             # "parent side" cancellation via a `ContextCancelled` here. | ||||
|             # ALWAYS SET any time "callee" side fails and causes "caller | ||||
|             # side" cancellation via a `ContextCancelled` here. | ||||
|             try: | ||||
|                 result_or_err: Exception|Any = await ctx.wait_for_result() | ||||
|             except BaseException as berr: | ||||
|  | @ -2471,8 +2292,8 @@ async def open_context_from_portal( | |||
|                 raise | ||||
| 
 | ||||
|             # yes this worx! | ||||
|             # from .devx import debug | ||||
|             # await debug.pause() | ||||
|             # from .devx import _debug | ||||
|             # await _debug.pause() | ||||
| 
 | ||||
|             # an exception type boxed in a `RemoteActorError` | ||||
|             # is returned (meaning it was obvi not raised) | ||||
|  | @ -2491,7 +2312,7 @@ async def open_context_from_portal( | |||
|                     ) | ||||
|                 case (None, _): | ||||
|                     log.runtime( | ||||
|                         'Context returned final result from child task:\n' | ||||
|                         'Context returned final result from callee task:\n' | ||||
|                         f'<= peer: {uid}\n' | ||||
|                         f'  |_ {nsf}()\n\n' | ||||
| 
 | ||||
|  | @ -2507,7 +2328,7 @@ async def open_context_from_portal( | |||
|         # where the root is waiting on the lock to clear but the | ||||
|         # child has already cleared it and clobbered IPC. | ||||
|         if debug_mode(): | ||||
|             from .devx.debug import maybe_wait_for_debugger | ||||
|             from .devx import maybe_wait_for_debugger | ||||
|             await maybe_wait_for_debugger() | ||||
| 
 | ||||
|         # though it should be impossible for any tasks | ||||
|  | @ -2546,8 +2367,7 @@ async def open_context_from_portal( | |||
|             # displaying `ContextCancelled` traces where the | ||||
|             # cause of crash/exit IS due to something in | ||||
|             # user/app code on either end of the context. | ||||
|             and | ||||
|             not rxchan._closed | ||||
|             and not rxchan._closed | ||||
|         ): | ||||
|             # XXX NOTE XXX: and again as per above, we mask any | ||||
|             # `trio.Cancelled` raised here so as to NOT mask | ||||
|  | @ -2581,14 +2401,12 @@ async def open_context_from_portal( | |||
|                 log.cancel( | ||||
|                     f'Context cancelled by local {ctx.side!r}-side task\n' | ||||
|                     f'c)>\n' | ||||
|                     f'  |_{ctx.parent_task}\n' | ||||
|                     f'   .cid={ctx.cid!r}\n' | ||||
|                     f'\n' | ||||
|                     f'{scope_err!r}\n' | ||||
|                     f' |_{ctx._task}\n\n' | ||||
|                     f'{repr(scope_err)}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             # TODO: should we add a `._cancel_req_received` | ||||
|             # flag to determine if the child manually called | ||||
|             # flag to determine if the callee manually called | ||||
|             # `ctx.cancel()`? | ||||
|             # -[ ] going to need a cid check no? | ||||
| 
 | ||||
|  | @ -2608,7 +2426,6 @@ async def open_context_from_portal( | |||
|         # FINALLY, remove the context from runtime tracking and | ||||
|         # exit! | ||||
|         log.runtime( | ||||
|         # log.cancel( | ||||
|             f'De-allocating IPC ctx opened with {ctx.side!r} peer \n' | ||||
|             f'uid: {uid}\n' | ||||
|             f'cid: {ctx.cid}\n' | ||||
|  | @ -2644,7 +2461,7 @@ def mk_context( | |||
|     recv_chan: trio.MemoryReceiveChannel | ||||
|     send_chan, recv_chan = trio.open_memory_channel(msg_buffer_size) | ||||
| 
 | ||||
|     # TODO: only scan parent-info if log level so high! | ||||
|     # TODO: only scan caller-info if log level so high! | ||||
|     from .devx._frame_stack import find_caller_info | ||||
|     caller_info: CallerInfo|None = find_caller_info() | ||||
| 
 | ||||
|  | @ -2664,6 +2481,7 @@ def mk_context( | |||
|         _caller_info=caller_info, | ||||
|         **kwargs, | ||||
|     ) | ||||
|     pld_rx._ctx = ctx | ||||
|     ctx._result = Unresolved | ||||
|     return ctx | ||||
| 
 | ||||
|  | @ -2726,14 +2544,7 @@ def context( | |||
|     name: str | ||||
|     param: Type | ||||
|     for name, param in annots.items(): | ||||
|         if ( | ||||
|             param is Context | ||||
|             or ( | ||||
|                 isinstance(param, UnionType) | ||||
|                 and | ||||
|                 Context in param.__args__ | ||||
|             ) | ||||
|         ): | ||||
|         if param is Context: | ||||
|             ctx_var_name: str = name | ||||
|             break | ||||
|     else: | ||||
|  |  | |||
|  | @ -28,16 +28,8 @@ from typing import ( | |||
| from contextlib import asynccontextmanager as acm | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| from .trionics import ( | ||||
|     gather_contexts, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from .ipc import _connect_chan, Channel | ||||
| from ._addr import ( | ||||
|     UnwrappedAddress, | ||||
|     Address, | ||||
|     wrap_address | ||||
| ) | ||||
| from .trionics import gather_contexts | ||||
| from ._ipc import _connect_chan, Channel | ||||
| from ._portal import ( | ||||
|     Portal, | ||||
|     open_portal, | ||||
|  | @ -46,7 +38,6 @@ from ._portal import ( | |||
| from ._state import ( | ||||
|     current_actor, | ||||
|     _runtime_vars, | ||||
|     _def_tpt_proto, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|  | @ -58,7 +49,9 @@ log = get_logger(__name__) | |||
| 
 | ||||
| @acm | ||||
| async def get_registry( | ||||
|     addr: UnwrappedAddress|None = None, | ||||
|     host: str, | ||||
|     port: int, | ||||
| 
 | ||||
| ) -> AsyncGenerator[ | ||||
|     Portal | LocalPortal | None, | ||||
|     None, | ||||
|  | @ -76,20 +69,19 @@ async def get_registry( | |||
|         # (likely a re-entrant call from the arbiter actor) | ||||
|         yield LocalPortal( | ||||
|             actor, | ||||
|             Channel(transport=None) | ||||
|             # ^XXX, we DO NOT actually provide nor connect an | ||||
|             # underlying transport since this is merely an API shim. | ||||
|             Channel((host, port)) | ||||
|         ) | ||||
|     else: | ||||
|         # TODO: try to look pre-existing connection from | ||||
|         # `Server._peers` and use it instead? | ||||
|         # `Actor._peers` and use it instead? | ||||
|         async with ( | ||||
|             _connect_chan(addr) as chan, | ||||
|             _connect_chan(host, port) as chan, | ||||
|             open_portal(chan) as regstr_ptl, | ||||
|         ): | ||||
|             yield regstr_ptl | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_root( | ||||
|     **kwargs, | ||||
|  | @ -97,10 +89,11 @@ async def get_root( | |||
| 
 | ||||
|     # TODO: rename mailbox to `_root_maddr` when we finally | ||||
|     # add and impl libp2p multi-addrs? | ||||
|     addr = _runtime_vars['_root_mailbox'] | ||||
|     host, port = _runtime_vars['_root_mailbox'] | ||||
|     assert host is not None | ||||
| 
 | ||||
|     async with ( | ||||
|         _connect_chan(addr) as chan, | ||||
|         _connect_chan(host, port) as chan, | ||||
|         open_portal(chan, **kwargs) as portal, | ||||
|     ): | ||||
|         yield portal | ||||
|  | @ -113,23 +106,17 @@ def get_peer_by_name( | |||
| ) -> list[Channel]|None:  # at least 1 | ||||
|     ''' | ||||
|     Scan for an existing connection (set) to a named actor | ||||
|     and return any channels from `Server._peers: dict`. | ||||
|     and return any channels from `Actor._peers`. | ||||
| 
 | ||||
|     This is an optimization method over querying the registrar for | ||||
|     the same info. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
|     to_scan: dict[tuple, list[Channel]] = actor.ipc_server._peers.copy() | ||||
| 
 | ||||
|     # TODO: is this ever needed? creates a duplicate channel on actor._peers | ||||
|     # when multiple find_actor calls are made to same actor from a single ctx | ||||
|     # which causes actor exit to hang waiting forever on | ||||
|     # `actor._no_more_peers.wait()` in `_runtime.async_main` | ||||
| 
 | ||||
|     # pchan: Channel|None = actor._parent_chan | ||||
|     # if pchan and pchan.uid not in to_scan: | ||||
|     #     to_scan[pchan.uid].append(pchan) | ||||
|     to_scan: dict[tuple, list[Channel]] = actor._peers.copy() | ||||
|     pchan: Channel|None = actor._parent_chan | ||||
|     if pchan: | ||||
|         to_scan[pchan.uid].append(pchan) | ||||
| 
 | ||||
|     for aid, chans in to_scan.items(): | ||||
|         _, peer_name = aid | ||||
|  | @ -147,10 +134,10 @@ def get_peer_by_name( | |||
| @acm | ||||
| async def query_actor( | ||||
|     name: str, | ||||
|     regaddr: UnwrappedAddress|None = None, | ||||
|     regaddr: tuple[str, int]|None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[ | ||||
|     UnwrappedAddress|None, | ||||
|     tuple[str, int]|None, | ||||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|  | @ -176,31 +163,31 @@ async def query_actor( | |||
|         return | ||||
| 
 | ||||
|     reg_portal: Portal | ||||
|     regaddr: Address = wrap_address(regaddr) or actor.reg_addrs[0] | ||||
|     async with get_registry(regaddr) as reg_portal: | ||||
|     regaddr: tuple[str, int] = regaddr or actor.reg_addrs[0] | ||||
|     async with get_registry(*regaddr) as reg_portal: | ||||
|         # TODO: return portals to all available actors - for now | ||||
|         # just the last one that registered | ||||
|         addr: UnwrappedAddress = await reg_portal.run_from_ns( | ||||
|         sockaddr: tuple[str, int] = await reg_portal.run_from_ns( | ||||
|             'self', | ||||
|             'find_actor', | ||||
|             name=name, | ||||
|         ) | ||||
|         yield addr | ||||
|         yield sockaddr | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_portal( | ||||
|     addr: UnwrappedAddress, | ||||
|     addr: tuple[str, int], | ||||
|     name: str, | ||||
| ): | ||||
|     async with query_actor( | ||||
|         name=name, | ||||
|         regaddr=addr, | ||||
|     ) as addr: | ||||
|     ) as sockaddr: | ||||
|         pass | ||||
| 
 | ||||
|     if addr: | ||||
|         async with _connect_chan(addr) as chan: | ||||
|     if sockaddr: | ||||
|         async with _connect_chan(*sockaddr) as chan: | ||||
|             async with open_portal(chan) as portal: | ||||
|                 yield portal | ||||
|     else: | ||||
|  | @ -210,8 +197,7 @@ async def maybe_open_portal( | |||
| @acm | ||||
| async def find_actor( | ||||
|     name: str, | ||||
|     registry_addrs: list[UnwrappedAddress]|None = None, | ||||
|     enable_transports: list[str] = [_def_tpt_proto], | ||||
|     registry_addrs: list[tuple[str, int]]|None = None, | ||||
| 
 | ||||
|     only_first: bool = True, | ||||
|     raise_on_none: bool = False, | ||||
|  | @ -238,15 +224,15 @@ async def find_actor( | |||
|         # XXX NOTE: make sure to dynamically read the value on | ||||
|         # every call since something may change it globally (eg. | ||||
|         # like in our discovery test suite)! | ||||
|         from ._addr import default_lo_addrs | ||||
|         from . import _root | ||||
|         registry_addrs = ( | ||||
|             _runtime_vars['_registry_addrs'] | ||||
|             or | ||||
|             default_lo_addrs(enable_transports) | ||||
|             _root._default_lo_addrs | ||||
|         ) | ||||
| 
 | ||||
|     maybe_portals: list[ | ||||
|         AsyncContextManager[UnwrappedAddress] | ||||
|         AsyncContextManager[tuple[str, int]] | ||||
|     ] = list( | ||||
|         maybe_open_portal( | ||||
|             addr=addr, | ||||
|  | @ -255,12 +241,9 @@ async def find_actor( | |||
|         for addr in registry_addrs | ||||
|     ) | ||||
|     portals: list[Portal] | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         gather_contexts( | ||||
|             mngrs=maybe_portals, | ||||
|         ) as portals, | ||||
|     ): | ||||
|     async with gather_contexts( | ||||
|         mngrs=maybe_portals, | ||||
|     ) as portals: | ||||
|         # log.runtime( | ||||
|         #     'Gathered portals:\n' | ||||
|         #     f'{portals}' | ||||
|  | @ -291,7 +274,7 @@ async def find_actor( | |||
| @acm | ||||
| async def wait_for_actor( | ||||
|     name: str, | ||||
|     registry_addr: UnwrappedAddress | None = None, | ||||
|     registry_addr: tuple[str, int] | None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[Portal, None]: | ||||
|     ''' | ||||
|  | @ -308,7 +291,7 @@ async def wait_for_actor( | |||
|             yield peer_portal | ||||
|             return | ||||
| 
 | ||||
|     regaddr: UnwrappedAddress = ( | ||||
|     regaddr: tuple[str, int] = ( | ||||
|         registry_addr | ||||
|         or | ||||
|         actor.reg_addrs[0] | ||||
|  | @ -316,8 +299,8 @@ async def wait_for_actor( | |||
|     # TODO: use `.trionics.gather_contexts()` like | ||||
|     # above in `find_actor()` as well? | ||||
|     reg_portal: Portal | ||||
|     async with get_registry(regaddr) as reg_portal: | ||||
|         addrs = await reg_portal.run_from_ns( | ||||
|     async with get_registry(*regaddr) as reg_portal: | ||||
|         sockaddrs = await reg_portal.run_from_ns( | ||||
|             'self', | ||||
|             'wait_for_actor', | ||||
|             name=name, | ||||
|  | @ -325,8 +308,8 @@ async def wait_for_actor( | |||
| 
 | ||||
|         # get latest registered addr by default? | ||||
|         # TODO: offer multi-portal yields in multi-homed case? | ||||
|         addr: UnwrappedAddress = addrs[-1] | ||||
|         sockaddr: tuple[str, int] = sockaddrs[-1] | ||||
| 
 | ||||
|         async with _connect_chan(addr) as chan: | ||||
|         async with _connect_chan(*sockaddr) as chan: | ||||
|             async with open_portal(chan) as portal: | ||||
|                 yield portal | ||||
|  |  | |||
|  | @ -21,7 +21,8 @@ Sub-process entry points. | |||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| import multiprocessing as mp | ||||
| # import os | ||||
| import os | ||||
| import textwrap | ||||
| from typing import ( | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
|  | @ -34,13 +35,8 @@ from .log import ( | |||
|     get_logger, | ||||
| ) | ||||
| from . import _state | ||||
| from .devx import ( | ||||
|     _frame_stack, | ||||
|     pformat, | ||||
| ) | ||||
| # from .msg import pretty_struct | ||||
| from .devx import _debug | ||||
| from .to_asyncio import run_as_asyncio_guest | ||||
| from ._addr import UnwrappedAddress | ||||
| from ._runtime import ( | ||||
|     async_main, | ||||
|     Actor, | ||||
|  | @ -56,10 +52,10 @@ log = get_logger(__name__) | |||
| def _mp_main( | ||||
| 
 | ||||
|     actor: Actor, | ||||
|     accept_addrs: list[UnwrappedAddress], | ||||
|     accept_addrs: list[tuple[str, int]], | ||||
|     forkserver_info: tuple[Any, Any, Any, Any, Any], | ||||
|     start_method: SpawnMethodKey, | ||||
|     parent_addr: UnwrappedAddress | None = None, | ||||
|     parent_addr: tuple[str, int] | None = None, | ||||
|     infect_asyncio: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -106,10 +102,111 @@ def _mp_main( | |||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: move this func to some kinda `.devx._conc_lang.py` eventually | ||||
| # as we work out our multi-domain state-flow-syntax! | ||||
| def nest_from_op( | ||||
|     input_op: str, | ||||
|     # | ||||
|     # ?TODO? an idea for a syntax to the state of concurrent systems | ||||
|     # as a "3-domain" (execution, scope, storage) model and using | ||||
|     # a minimal ascii/utf-8 operator-set. | ||||
|     # | ||||
|     # try not to take any of this seriously yet XD | ||||
|     # | ||||
|     # > is a "play operator" indicating (CPU bound) | ||||
|     #   exec/work/ops required at the "lowest level computing" | ||||
|     # | ||||
|     # execution primititves (tasks, threads, actors..) denote their | ||||
|     # lifetime with '(' and ')' since parentheses normally are used | ||||
|     # in many langs to denote function calls. | ||||
|     # | ||||
|     # starting = ( | ||||
|     # >(  opening/starting; beginning of the thread-of-exec (toe?) | ||||
|     # (>  opened/started,  (finished spawning toe) | ||||
|     # |_<Task: blah blah..>  repr of toe, in py these look like <objs> | ||||
|     # | ||||
|     # >) closing/exiting/stopping, | ||||
|     # )> closed/exited/stopped, | ||||
|     # |_<Task: blah blah..> | ||||
|     #   [OR <), )< ?? ] | ||||
|     # | ||||
|     # ending = ) | ||||
|     # >c) cancelling to close/exit | ||||
|     # c)> cancelled (caused close), OR? | ||||
|     #  |_<Actor: ..> | ||||
|     #   OR maybe "<c)" which better indicates the cancel being | ||||
|     #   "delivered/returned" / returned" to LHS? | ||||
|     # | ||||
|     # >x)  erroring to eventuall exit | ||||
|     # x)>  errored and terminated | ||||
|     #  |_<Actor: ...> | ||||
|     # | ||||
|     # scopes: supers/nurseries, IPC-ctxs, sessions, perms, etc. | ||||
|     # >{  opening | ||||
|     # {>  opened | ||||
|     # }>  closed | ||||
|     # >}  closing | ||||
|     # | ||||
|     # storage: like queues, shm-buffers, files, etc.. | ||||
|     # >[  opening | ||||
|     # [>  opened | ||||
|     #  |_<FileObj: ..> | ||||
|     # | ||||
|     # >]  closing | ||||
|     # ]>  closed | ||||
| 
 | ||||
|     # IPC ops: channels, transports, msging | ||||
|     # =>  req msg | ||||
|     # <=  resp msg | ||||
|     # <=> 2-way streaming (of msgs) | ||||
|     # <-  recv 1 msg | ||||
|     # ->  send 1 msg | ||||
|     # | ||||
|     # TODO: still not sure on R/L-HS approach..? | ||||
|     # =>(  send-req to exec start (task, actor, thread..) | ||||
|     # (<=  recv-req to ^ | ||||
|     # | ||||
|     # (<=  recv-req ^ | ||||
|     # <=(  recv-resp opened remote exec primitive | ||||
|     # <=)  recv-resp closed | ||||
|     # | ||||
|     # )<=c req to stop due to cancel | ||||
|     # c=>) req to stop due to cancel | ||||
|     # | ||||
|     # =>{  recv-req to open | ||||
|     # <={  send-status that it closed | ||||
| 
 | ||||
|     tree_str: str, | ||||
| 
 | ||||
|     # NOTE: so move back-from-the-left of the `input_op` by | ||||
|     # this amount. | ||||
|     back_from_op: int = 0, | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Depth-increment the input (presumably hierarchy/supervision) | ||||
|     input "tree string" below the provided `input_op` execution | ||||
|     operator, so injecting a `"\n|_{input_op}\n"`and indenting the | ||||
|     `tree_str` to nest content aligned with the ops last char. | ||||
| 
 | ||||
|     ''' | ||||
|     return ( | ||||
|         f'{input_op}\n' | ||||
|         + | ||||
|         textwrap.indent( | ||||
|             tree_str, | ||||
|             prefix=( | ||||
|                 len(input_op) | ||||
|                 - | ||||
|                 (back_from_op + 1) | ||||
|             ) * ' ', | ||||
|         ) | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def _trio_main( | ||||
|     actor: Actor, | ||||
|     *, | ||||
|     parent_addr: UnwrappedAddress|None = None, | ||||
|     parent_addr: tuple[str, int] | None = None, | ||||
|     infect_asyncio: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -117,7 +214,7 @@ def _trio_main( | |||
|     Entry point for a `trio_run_in_process` subactor. | ||||
| 
 | ||||
|     ''' | ||||
|     _frame_stack.hide_runtime_frames() | ||||
|     _debug.hide_runtime_frames() | ||||
| 
 | ||||
|     _state._current_actor = actor | ||||
|     trio_main = partial( | ||||
|  | @ -128,23 +225,30 @@ def _trio_main( | |||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         get_console_log(actor.loglevel) | ||||
|         actor_info: str = ( | ||||
|             f'|_{actor}\n' | ||||
|             f'  uid: {actor.uid}\n' | ||||
|             f'  pid: {os.getpid()}\n' | ||||
|             f'  parent_addr: {parent_addr}\n' | ||||
|             f'  loglevel: {actor.loglevel}\n' | ||||
|         ) | ||||
|         log.info( | ||||
|             f'Starting `trio` subactor from parent @ ' | ||||
|             f'{parent_addr}\n' | ||||
|             'Starting new `trio` subactor:\n' | ||||
|             + | ||||
|             pformat.nest_from_op( | ||||
|             nest_from_op( | ||||
|                 input_op='>(',  # see syntax ideas above | ||||
|                 text=f'{actor}', | ||||
|                 tree_str=actor_info, | ||||
|                 back_from_op=1, | ||||
|             ) | ||||
|         ) | ||||
|     logmeth = log.info | ||||
|     exit_status: str = ( | ||||
|         'Subactor exited\n' | ||||
|         + | ||||
|         pformat.nest_from_op( | ||||
|         nest_from_op( | ||||
|             input_op=')>',  # like a "closed-to-play"-icon from super perspective | ||||
|             text=f'{actor}', | ||||
|             nest_indent=1, | ||||
|             tree_str=actor_info, | ||||
|             back_from_op=1, | ||||
|         ) | ||||
|     ) | ||||
|     try: | ||||
|  | @ -159,9 +263,9 @@ def _trio_main( | |||
|         exit_status: str = ( | ||||
|             'Actor received KBI (aka an OS-cancel)\n' | ||||
|             + | ||||
|             pformat.nest_from_op( | ||||
|             nest_from_op( | ||||
|                 input_op='c)>',  # closed due to cancel (see above) | ||||
|                 text=f'{actor}', | ||||
|                 tree_str=actor_info, | ||||
|             ) | ||||
|         ) | ||||
|     except BaseException as err: | ||||
|  | @ -169,9 +273,9 @@ def _trio_main( | |||
|         exit_status: str = ( | ||||
|             'Main actor task exited due to crash?\n' | ||||
|             + | ||||
|             pformat.nest_from_op( | ||||
|             nest_from_op( | ||||
|                 input_op='x)>',  # closed by error | ||||
|                 text=f'{actor}', | ||||
|                 tree_str=actor_info, | ||||
|             ) | ||||
|         ) | ||||
|         # NOTE since we raise a tb will already be shown on the | ||||
|  |  | |||
|  | @ -22,7 +22,7 @@ from __future__ import annotations | |||
| import builtins | ||||
| import importlib | ||||
| from pprint import pformat | ||||
| from pdb import bdb | ||||
| import sys | ||||
| from types import ( | ||||
|     TracebackType, | ||||
| ) | ||||
|  | @ -64,29 +64,15 @@ if TYPE_CHECKING: | |||
|     from ._context import Context | ||||
|     from .log import StackLevelAdapter | ||||
|     from ._stream import MsgStream | ||||
|     from .ipc import Channel | ||||
|     from ._ipc import Channel | ||||
| 
 | ||||
| log = get_logger('tractor') | ||||
| 
 | ||||
| _this_mod = importlib.import_module(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class RuntimeFailure(RuntimeError): | ||||
|     ''' | ||||
|     General `Actor`-runtime failure due to, | ||||
| 
 | ||||
|     - a bad runtime-env, | ||||
|     - falied spawning (bad input to process), | ||||
|     -   API usage. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| 
 | ||||
| class ActorFailure(RuntimeFailure): | ||||
|     ''' | ||||
|     `Actor` failed to boot before/after spawn | ||||
| 
 | ||||
|     ''' | ||||
| class ActorFailure(Exception): | ||||
|     "General actor failure" | ||||
| 
 | ||||
| 
 | ||||
| class InternalError(RuntimeError): | ||||
|  | @ -96,54 +82,6 @@ class InternalError(RuntimeError): | |||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| class AsyncioCancelled(Exception): | ||||
|     ''' | ||||
|     Asyncio cancelled translation (non-base) error | ||||
|     for use with the ``to_asyncio`` module | ||||
|     to be raised in the ``trio`` side task | ||||
| 
 | ||||
|     NOTE: this should NOT inherit from `asyncio.CancelledError` or | ||||
|     tests should break! | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| 
 | ||||
| class AsyncioTaskExited(Exception): | ||||
|     ''' | ||||
|     asyncio.Task "exited" translation error for use with the | ||||
|     `to_asyncio` APIs to be raised in the `trio` side task indicating | ||||
|     on `.run_task()`/`.open_channel_from()` exit that the aio side | ||||
|     exited early/silently. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| class TrioCancelled(Exception): | ||||
|     ''' | ||||
|     Trio cancelled translation (non-base) error | ||||
|     for use with the `to_asyncio` module | ||||
|     to be raised in the `asyncio.Task` to indicate | ||||
|     that the `trio` side raised `Cancelled` or an error. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| class TrioTaskExited(Exception): | ||||
|     ''' | ||||
|     The `trio`-side task exited without explicitly cancelling the | ||||
|     `asyncio.Task` peer. | ||||
| 
 | ||||
|     This is very similar to how `trio.ClosedResource` acts as | ||||
|     a "clean shutdown" signal to the consumer side of a mem-chan, | ||||
| 
 | ||||
|     https://trio.readthedocs.io/en/stable/reference-core.html#clean-shutdown-with-channels | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| 
 | ||||
| class DebugRequestError(RuntimeError): | ||||
|     ''' | ||||
|     Failed to request stdio lock from root actor! | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| # NOTE: more or less should be close to these: | ||||
| # 'boxed_type', | ||||
|  | @ -189,8 +127,8 @@ _body_fields: list[str] = list( | |||
| 
 | ||||
| def get_err_type(type_name: str) -> BaseException|None: | ||||
|     ''' | ||||
|     Look up an exception type by name from the set of locally known | ||||
|     namespaces: | ||||
|     Look up an exception type by name from the set of locally | ||||
|     known namespaces: | ||||
| 
 | ||||
|     - `builtins` | ||||
|     - `tractor._exceptions` | ||||
|  | @ -201,7 +139,6 @@ def get_err_type(type_name: str) -> BaseException|None: | |||
|         builtins, | ||||
|         _this_mod, | ||||
|         trio, | ||||
|         bdb, | ||||
|     ]: | ||||
|         if type_ref := getattr( | ||||
|             ns, | ||||
|  | @ -210,8 +147,6 @@ def get_err_type(type_name: str) -> BaseException|None: | |||
|         ): | ||||
|             return type_ref | ||||
| 
 | ||||
|     return None | ||||
| 
 | ||||
| 
 | ||||
| def pack_from_raise( | ||||
|     local_err: ( | ||||
|  | @ -423,13 +358,6 @@ class RemoteActorError(Exception): | |||
|                 self._ipc_msg.src_type_str | ||||
|             ) | ||||
| 
 | ||||
|             if not self._src_type: | ||||
|                 raise TypeError( | ||||
|                     f'Failed to lookup src error type with ' | ||||
|                     f'`tractor._exceptions.get_err_type()` :\n' | ||||
|                     f'{self.src_type_str}' | ||||
|                 ) | ||||
| 
 | ||||
|         return self._src_type | ||||
| 
 | ||||
|     @property | ||||
|  | @ -438,9 +366,6 @@ class RemoteActorError(Exception): | |||
|         String-name of the (last hop's) boxed error type. | ||||
| 
 | ||||
|         ''' | ||||
|         # TODO, maybe support also serializing the | ||||
|         # `ExceptionGroup.exeptions: list[BaseException]` set under | ||||
|         # certain conditions? | ||||
|         bt: Type[BaseException] = self.boxed_type | ||||
|         if bt: | ||||
|             return str(bt.__name__) | ||||
|  | @ -453,13 +378,9 @@ class RemoteActorError(Exception): | |||
|         Error type boxed by last actor IPC hop. | ||||
| 
 | ||||
|         ''' | ||||
|         if ( | ||||
|             self._boxed_type is None | ||||
|             and | ||||
|             (ipc_msg := self._ipc_msg) | ||||
|         ): | ||||
|         if self._boxed_type is None: | ||||
|             self._boxed_type = get_err_type( | ||||
|                 ipc_msg.boxed_type_str | ||||
|                 self._ipc_msg.boxed_type_str | ||||
|             ) | ||||
| 
 | ||||
|         return self._boxed_type | ||||
|  | @ -542,6 +463,7 @@ class RemoteActorError(Exception): | |||
|             if val: | ||||
|                 _repr += f'{key}={val_str}{end_char}' | ||||
| 
 | ||||
| 
 | ||||
|         return _repr | ||||
| 
 | ||||
|     def reprol(self) -> str: | ||||
|  | @ -620,9 +542,56 @@ class RemoteActorError(Exception): | |||
|             the type name is already implicitly shown by python). | ||||
| 
 | ||||
|         ''' | ||||
|         header: str = '' | ||||
|         body: str = '' | ||||
|         message: str = '' | ||||
| 
 | ||||
|         # XXX when the currently raised exception is this instance, | ||||
|         # we do not ever use the "type header" style repr. | ||||
|         is_being_raised: bool = False | ||||
|         if ( | ||||
|             (exc := sys.exception()) | ||||
|             and | ||||
|             exc is self | ||||
|         ): | ||||
|             is_being_raised: bool = True | ||||
| 
 | ||||
|         with_type_header: bool = ( | ||||
|             with_type_header | ||||
|             and | ||||
|             not is_being_raised | ||||
|         ) | ||||
| 
 | ||||
|         # <RemoteActorError( .. )> style | ||||
|         if with_type_header: | ||||
|             header: str = f'<{type(self).__name__}(' | ||||
| 
 | ||||
|         if message := self._message: | ||||
| 
 | ||||
|             # split off the first line so, if needed, it isn't | ||||
|             # indented the same like the "boxed content" which | ||||
|             # since there is no `.tb_str` is just the `.message`. | ||||
|             lines: list[str] = message.splitlines() | ||||
|             first: str = lines[0] | ||||
|             message: str = message.removeprefix(first) | ||||
| 
 | ||||
|             # with a type-style header we, | ||||
|             # - have no special message "first line" extraction/handling | ||||
|             # - place the message a space in from the header: | ||||
|             #  `MsgTypeError( <message> ..` | ||||
|             #                 ^-here | ||||
|             # - indent the `.message` inside the type body. | ||||
|             if with_type_header: | ||||
|                 first = f' {first} )>' | ||||
| 
 | ||||
|             message: str = textwrap.indent( | ||||
|                 message, | ||||
|                 prefix=' '*2, | ||||
|             ) | ||||
|             message: str = first + message | ||||
| 
 | ||||
|         # IFF there is an embedded traceback-str we always | ||||
|         # draw the ascii-box around it. | ||||
|         body: str = '' | ||||
|         if tb_str := self.tb_str: | ||||
|             fields: str = self._mk_fields_str( | ||||
|                 _body_fields | ||||
|  | @ -643,15 +612,21 @@ class RemoteActorError(Exception): | |||
|                 boxer_header=self.relay_uid, | ||||
|             ) | ||||
| 
 | ||||
|         # !TODO, it'd be nice to import these top level without | ||||
|         # cycles! | ||||
|         from tractor.devx.pformat import ( | ||||
|             pformat_exc, | ||||
|         ) | ||||
|         return pformat_exc( | ||||
|             exc=self, | ||||
|             with_type_header=with_type_header, | ||||
|             body=body, | ||||
|         tail = '' | ||||
|         if ( | ||||
|             with_type_header | ||||
|             and not message | ||||
|         ): | ||||
|             tail: str = '>' | ||||
| 
 | ||||
|         return ( | ||||
|             header | ||||
|             + | ||||
|             message | ||||
|             + | ||||
|             f'{body}' | ||||
|             + | ||||
|             tail | ||||
|         ) | ||||
| 
 | ||||
|     __repr__ = pformat | ||||
|  | @ -677,10 +652,16 @@ class RemoteActorError(Exception): | |||
|         failing actor's remote env. | ||||
| 
 | ||||
|         ''' | ||||
|         src_type_ref: Type[BaseException] = self.src_type | ||||
|         if not src_type_ref: | ||||
|             raise TypeError( | ||||
|                 'Failed to lookup src error type:\n' | ||||
|                 f'{self.src_type_str}' | ||||
|             ) | ||||
| 
 | ||||
|         # TODO: better tb insertion and all the fancier dunder | ||||
|         # metadata stuff as per `.__context__` etc. and friends: | ||||
|         # https://github.com/python-trio/trio/issues/611 | ||||
|         src_type_ref: Type[BaseException] = self.src_type | ||||
|         return src_type_ref(self.tb_str) | ||||
| 
 | ||||
|     # TODO: local recontruction of nested inception for a given | ||||
|  | @ -806,11 +787,8 @@ class MsgTypeError( | |||
|         ''' | ||||
|         if ( | ||||
|             (_bad_msg := self.msgdata.get('_bad_msg')) | ||||
|             and ( | ||||
|                 isinstance(_bad_msg, PayloadMsg) | ||||
|                 or | ||||
|                 isinstance(_bad_msg, msgtypes.Start) | ||||
|             ) | ||||
|             and | ||||
|             isinstance(_bad_msg, PayloadMsg) | ||||
|         ): | ||||
|             return _bad_msg | ||||
| 
 | ||||
|  | @ -929,7 +907,7 @@ class StreamOverrun( | |||
|     ''' | ||||
| 
 | ||||
| 
 | ||||
| class TransportClosed(Exception): | ||||
| class TransportClosed(trio.BrokenResourceError): | ||||
|     ''' | ||||
|     IPC transport (protocol) connection was closed or broke and | ||||
|     indicates that the wrapping communication `Channel` can no longer | ||||
|  | @ -940,39 +918,24 @@ class TransportClosed(Exception): | |||
|         self, | ||||
|         message: str, | ||||
|         loglevel: str = 'transport', | ||||
|         src_exc: Exception|None = None, | ||||
|         cause: BaseException|None = None, | ||||
|         raise_on_report: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         self.message: str = message | ||||
|         self._loglevel: str = loglevel | ||||
|         self._loglevel = loglevel | ||||
|         super().__init__(message) | ||||
| 
 | ||||
|         self._src_exc = src_exc | ||||
|         # set the cause manually if not already set by python | ||||
|         if ( | ||||
|             src_exc is not None | ||||
|             and | ||||
|             not self.__cause__ | ||||
|         ): | ||||
|             self.__cause__ = src_exc | ||||
|         if cause is not None: | ||||
|             self.__cause__ = cause | ||||
| 
 | ||||
|         # flag to toggle whether the msg loop should raise | ||||
|         # the exc in its `TransportClosed` handler block. | ||||
|         self._raise_on_report = raise_on_report | ||||
| 
 | ||||
|     @property | ||||
|     def src_exc(self) -> Exception: | ||||
|         return ( | ||||
|             self.__cause__ | ||||
|             or | ||||
|             self._src_exc | ||||
|         ) | ||||
| 
 | ||||
|     def report_n_maybe_raise( | ||||
|         self, | ||||
|         message: str|None = None, | ||||
|         hide_tb: bool = True, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|  | @ -980,10 +943,9 @@ class TransportClosed(Exception): | |||
|         for this error. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         message: str = message or self.message | ||||
|         # when a cause is set, slap it onto the log emission. | ||||
|         if cause := self.src_exc: | ||||
|         if cause := self.__cause__: | ||||
|             cause_tb_str: str = ''.join( | ||||
|                 traceback.format_tb(cause.__traceback__) | ||||
|             ) | ||||
|  | @ -992,86 +954,13 @@ class TransportClosed(Exception): | |||
|                 f'    {cause}\n'  # exc repr | ||||
|             ) | ||||
| 
 | ||||
|         getattr( | ||||
|             log, | ||||
|             self._loglevel | ||||
|         )(message) | ||||
|         getattr(log, self._loglevel)(message) | ||||
| 
 | ||||
|         # some errors we want to blow up from | ||||
|         # inside the RPC msg loop | ||||
|         if self._raise_on_report: | ||||
|             raise self from cause | ||||
| 
 | ||||
|     @classmethod | ||||
|     def repr_src_exc( | ||||
|         self, | ||||
|         src_exc: Exception|None = None, | ||||
|     ) -> str: | ||||
| 
 | ||||
|         if src_exc is None: | ||||
|             return '<unknown>' | ||||
| 
 | ||||
|         src_msg: tuple[str] = src_exc.args | ||||
|         src_exc_repr: str = ( | ||||
|             f'{type(src_exc).__name__}[ {src_msg} ]' | ||||
|         ) | ||||
|         return src_exc_repr | ||||
| 
 | ||||
|     def pformat(self) -> str: | ||||
|         from tractor.devx.pformat import ( | ||||
|             pformat_exc, | ||||
|         ) | ||||
|         return pformat_exc( | ||||
|             exc=self, | ||||
|         ) | ||||
| 
 | ||||
|     # delegate to `str`-ified pformat | ||||
|     __repr__ = pformat | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_src_exc( | ||||
|         cls, | ||||
|         src_exc: ( | ||||
|             Exception| | ||||
|             trio.ClosedResource| | ||||
|             trio.BrokenResourceError | ||||
|         ), | ||||
|         message: str, | ||||
|         body: str = '', | ||||
|         **init_kws, | ||||
|     ) -> TransportClosed: | ||||
|         ''' | ||||
|         Convenience constructor for creation from an underlying | ||||
|         `trio`-sourced async-resource/chan/stream error. | ||||
| 
 | ||||
|         Embeds the original `src_exc`'s repr within the | ||||
|         `Exception.args` via a first-line-in-`.message`-put-in-header | ||||
|         pre-processing and allows inserting additional content beyond | ||||
|         the main message via a `body: str`. | ||||
| 
 | ||||
|         ''' | ||||
|         repr_src_exc: str = cls.repr_src_exc( | ||||
|             src_exc, | ||||
|         ) | ||||
|         next_line: str = f'  src_exc: {repr_src_exc}\n' | ||||
|         if body: | ||||
|             body: str = textwrap.indent( | ||||
|                 body, | ||||
|                 prefix=' '*2, | ||||
|             ) | ||||
| 
 | ||||
|         return TransportClosed( | ||||
|             message=( | ||||
|                 message | ||||
|                 + | ||||
|                 next_line | ||||
|                 + | ||||
|                 body | ||||
|             ), | ||||
|             src_exc=src_exc, | ||||
|             **init_kws, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| class NoResult(RuntimeError): | ||||
|     "No final result is expected for this actor" | ||||
|  | @ -1092,6 +981,18 @@ class MessagingError(Exception): | |||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| class AsyncioCancelled(Exception): | ||||
|     ''' | ||||
|     Asyncio cancelled translation (non-base) error | ||||
|     for use with the ``to_asyncio`` module | ||||
|     to be raised in the ``trio`` side task | ||||
| 
 | ||||
|     NOTE: this should NOT inherit from `asyncio.CancelledError` or | ||||
|     tests should break! | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| 
 | ||||
| def pack_error( | ||||
|     exc: BaseException|RemoteActorError, | ||||
| 
 | ||||
|  | @ -1203,8 +1104,6 @@ def unpack_error( | |||
|     which is the responsibilitiy of the caller. | ||||
| 
 | ||||
|     ''' | ||||
|     # XXX, apparently we pass all sorts of msgs here? | ||||
|     # kinda odd but seems like maybe they shouldn't be? | ||||
|     if not isinstance(msg, Error): | ||||
|         return None | ||||
| 
 | ||||
|  | @ -1246,6 +1145,55 @@ def unpack_error( | |||
|     return exc | ||||
| 
 | ||||
| 
 | ||||
| def is_multi_cancelled( | ||||
|     exc: BaseException|BaseExceptionGroup, | ||||
| 
 | ||||
|     ignore_nested: set[BaseException] = set(), | ||||
| 
 | ||||
| ) -> bool|BaseExceptionGroup: | ||||
|     ''' | ||||
|     Predicate to determine if an `BaseExceptionGroup` only contains | ||||
|     some (maybe nested) set of sub-grouped exceptions (like only | ||||
|     `trio.Cancelled`s which get swallowed silently by default) and is | ||||
|     thus the result of "gracefully cancelling" a collection of | ||||
|     sub-tasks (or other conc primitives) and receiving a "cancelled | ||||
|     ACK" from each after termination. | ||||
| 
 | ||||
|     Docs: | ||||
|     ---- | ||||
|     - https://docs.python.org/3/library/exceptions.html#exception-groups | ||||
|     - https://docs.python.org/3/library/exceptions.html#BaseExceptionGroup.subgroup | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
|     if ( | ||||
|         not ignore_nested | ||||
|         or | ||||
|         trio.Cancelled in ignore_nested | ||||
|         # XXX always count-in `trio`'s native signal | ||||
|     ): | ||||
|         ignore_nested |= {trio.Cancelled} | ||||
| 
 | ||||
|     if isinstance(exc, BaseExceptionGroup): | ||||
|         matched_exc: BaseExceptionGroup|None = exc.subgroup( | ||||
|             tuple(ignore_nested), | ||||
| 
 | ||||
|             # TODO, complain about why not allowed XD | ||||
|             # condition=tuple(ignore_nested), | ||||
|         ) | ||||
|         if matched_exc is not None: | ||||
|             return matched_exc | ||||
| 
 | ||||
|     # NOTE, IFF no excs types match (throughout the error-tree) | ||||
|     # -> return `False`, OW return the matched sub-eg. | ||||
|     # | ||||
|     # IOW, for the inverse of ^ for the purpose of | ||||
|     # maybe-enter-REPL--logic: "only debug when the err-tree contains | ||||
|     # at least one exc-type NOT in `ignore_nested`" ; i.e. the case where | ||||
|     # we fallthrough and return `False` here. | ||||
|     return False | ||||
| 
 | ||||
| 
 | ||||
| def _raise_from_unexpected_msg( | ||||
|     ctx: Context, | ||||
|     msg: MsgType, | ||||
|  |  | |||
|  | @ -0,0 +1,820 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Inter-process comms abstractions | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from collections.abc import ( | ||||
|     AsyncGenerator, | ||||
|     AsyncIterator, | ||||
| ) | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| import platform | ||||
| from pprint import pformat | ||||
| import struct | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     runtime_checkable, | ||||
|     Protocol, | ||||
|     Type, | ||||
|     TypeVar, | ||||
| ) | ||||
| 
 | ||||
| import msgspec | ||||
| from tricycle import BufferedReceiveStream | ||||
| import trio | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| from tractor._exceptions import ( | ||||
|     MsgTypeError, | ||||
|     pack_from_raise, | ||||
|     TransportClosed, | ||||
|     _mk_send_mte, | ||||
|     _mk_recv_mte, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _ctxvar_MsgCodec, | ||||
|     # _codec,  XXX see `self._codec` sanity/debug checks | ||||
|     MsgCodec, | ||||
|     types as msgtypes, | ||||
|     pretty_struct, | ||||
| ) | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _is_windows = platform.system() == 'Windows' | ||||
| 
 | ||||
| 
 | ||||
| def get_stream_addrs( | ||||
|     stream: trio.SocketStream | ||||
| ) -> tuple[ | ||||
|     tuple[str, int],  # local | ||||
|     tuple[str, int],  # remote | ||||
| ]: | ||||
|     ''' | ||||
|     Return the `trio` streaming transport prot's socket-addrs for | ||||
|     both the local and remote sides as a pair. | ||||
| 
 | ||||
|     ''' | ||||
|     # rn, should both be IP sockets | ||||
|     lsockname = stream.socket.getsockname() | ||||
|     rsockname = stream.socket.getpeername() | ||||
|     return ( | ||||
|         tuple(lsockname[:2]), | ||||
|         tuple(rsockname[:2]), | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| # from tractor.msg.types import MsgType | ||||
| # ?TODO? this should be our `Union[*msgtypes.__spec__]` alias now right..? | ||||
| # => BLEH, except can't bc prots must inherit typevar or param-spec | ||||
| #   vars.. | ||||
| MsgType = TypeVar('MsgType') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: break up this mod into a subpkg so we can start adding new | ||||
| # backends and move this type stuff into a dedicated file.. Bo | ||||
| # | ||||
| @runtime_checkable | ||||
| class MsgTransport(Protocol[MsgType]): | ||||
| # | ||||
| # ^-TODO-^ consider using a generic def and indexing with our | ||||
| # eventual msg definition/types? | ||||
| # - https://docs.python.org/3/library/typing.html#typing.Protocol | ||||
| 
 | ||||
|     stream: trio.SocketStream | ||||
|     drained: list[MsgType] | ||||
| 
 | ||||
|     def __init__(self, stream: trio.SocketStream) -> None: | ||||
|         ... | ||||
| 
 | ||||
|     # XXX: should this instead be called `.sendall()`? | ||||
|     async def send(self, msg: MsgType) -> None: | ||||
|         ... | ||||
| 
 | ||||
|     async def recv(self) -> MsgType: | ||||
|         ... | ||||
| 
 | ||||
|     def __aiter__(self) -> MsgType: | ||||
|         ... | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         ... | ||||
| 
 | ||||
|     # defining this sync otherwise it causes a mypy error because it | ||||
|     # can't figure out it's a generator i guess?..? | ||||
|     def drain(self) -> AsyncIterator[dict]: | ||||
|         ... | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]: | ||||
|         ... | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> tuple[str, int]: | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| # TODO: typing oddity.. not sure why we have to inherit here, but it | ||||
| # seems to be an issue with `get_msg_transport()` returning | ||||
| # a `Type[Protocol]`; probably should make a `mypy` issue? | ||||
| class MsgpackTCPStream(MsgTransport): | ||||
|     ''' | ||||
|     A ``trio.SocketStream`` delivering ``msgpack`` formatted data | ||||
|     using the ``msgspec`` codec lib. | ||||
| 
 | ||||
|     ''' | ||||
|     layer_key: int = 4 | ||||
|     name_key: str = 'tcp' | ||||
| 
 | ||||
|     # TODO: better naming for this? | ||||
|     # -[ ] check how libp2p does naming for such things? | ||||
|     codec_key: str = 'msgpack' | ||||
| 
 | ||||
|     def __init__( | ||||
|         self, | ||||
|         stream: trio.SocketStream, | ||||
|         prefix_size: int = 4, | ||||
| 
 | ||||
|         # XXX optionally provided codec pair for `msgspec`: | ||||
|         # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
|         # | ||||
|         # TODO: define this as a `Codec` struct which can be | ||||
|         # overriden dynamically by the application/runtime? | ||||
|         codec: tuple[ | ||||
|             Callable[[Any], Any]|None,  # coder | ||||
|             Callable[[type, Any], Any]|None,  # decoder | ||||
|         ]|None = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self.stream = stream | ||||
|         assert self.stream.socket | ||||
| 
 | ||||
|         # should both be IP sockets | ||||
|         self._laddr, self._raddr = get_stream_addrs(stream) | ||||
| 
 | ||||
|         # create read loop instance | ||||
|         self._aiter_pkts = self._iter_packets() | ||||
|         self._send_lock = trio.StrictFIFOLock() | ||||
| 
 | ||||
|         # public i guess? | ||||
|         self.drained: list[dict] = [] | ||||
| 
 | ||||
|         self.recv_stream = BufferedReceiveStream( | ||||
|             transport_stream=stream | ||||
|         ) | ||||
|         self.prefix_size = prefix_size | ||||
| 
 | ||||
|         # allow for custom IPC msg interchange format | ||||
|         # dynamic override Bo | ||||
|         self._task = trio.lowlevel.current_task() | ||||
| 
 | ||||
|         # XXX for ctxvar debug only! | ||||
|         # self._codec: MsgCodec = ( | ||||
|         #     codec | ||||
|         #     or | ||||
|         #     _codec._ctxvar_MsgCodec.get() | ||||
|         # ) | ||||
| 
 | ||||
|     async def _iter_packets(self) -> AsyncGenerator[dict, None]: | ||||
|         ''' | ||||
|         Yield `bytes`-blob decoded packets from the underlying TCP | ||||
|         stream using the current task's `MsgCodec`. | ||||
| 
 | ||||
|         This is a streaming routine implemented as an async generator | ||||
|         func (which was the original design, but could be changed?) | ||||
|         and is allocated by a `.__call__()` inside `.__init__()` where | ||||
|         it is assigned to the `._aiter_pkts` attr. | ||||
| 
 | ||||
|         ''' | ||||
|         decodes_failed: int = 0 | ||||
| 
 | ||||
|         while True: | ||||
|             try: | ||||
|                 header: bytes = await self.recv_stream.receive_exactly(4) | ||||
|             except ( | ||||
|                 ValueError, | ||||
|                 ConnectionResetError, | ||||
| 
 | ||||
|                 # not sure entirely why we need this but without it we | ||||
|                 # seem to be getting racy failures here on | ||||
|                 # arbiter/registry name subs.. | ||||
|                 trio.BrokenResourceError, | ||||
| 
 | ||||
|             ) as trans_err: | ||||
| 
 | ||||
|                 loglevel = 'transport' | ||||
|                 match trans_err: | ||||
|                     # case ( | ||||
|                     #     ConnectionResetError() | ||||
|                     # ): | ||||
|                     #     loglevel = 'transport' | ||||
| 
 | ||||
|                     # peer actor (graceful??) TCP EOF but `tricycle` | ||||
|                     # seems to raise a 0-bytes-read? | ||||
|                     case ValueError() if ( | ||||
|                         'unclean EOF' in trans_err.args[0] | ||||
|                     ): | ||||
|                         pass | ||||
| 
 | ||||
|                     # peer actor (task) prolly shutdown quickly due | ||||
|                     # to cancellation | ||||
|                     case trio.BrokenResourceError() if ( | ||||
|                         'Connection reset by peer' in trans_err.args[0] | ||||
|                     ): | ||||
|                         pass | ||||
| 
 | ||||
|                     # unless the disconnect condition falls under "a | ||||
|                     # normal operation breakage" we usualy console warn | ||||
|                     # about it. | ||||
|                     case _: | ||||
|                         loglevel: str = 'warning' | ||||
| 
 | ||||
| 
 | ||||
|                 raise TransportClosed( | ||||
|                     message=( | ||||
|                         f'IPC transport already closed by peer\n' | ||||
|                         f'x)> {type(trans_err)}\n' | ||||
|                         f' |_{self}\n' | ||||
|                     ), | ||||
|                     loglevel=loglevel, | ||||
|                 ) from trans_err | ||||
| 
 | ||||
|             # XXX definitely can happen if transport is closed | ||||
|             # manually by another `trio.lowlevel.Task` in the | ||||
|             # same actor; we use this in some simulated fault | ||||
|             # testing for ex, but generally should never happen | ||||
|             # under normal operation! | ||||
|             # | ||||
|             # NOTE: as such we always re-raise this error from the | ||||
|             #       RPC msg loop! | ||||
|             except trio.ClosedResourceError as closure_err: | ||||
|                 raise TransportClosed( | ||||
|                     message=( | ||||
|                         f'IPC transport already manually closed locally?\n' | ||||
|                         f'x)> {type(closure_err)} \n' | ||||
|                         f' |_{self}\n' | ||||
|                     ), | ||||
|                     loglevel='error', | ||||
|                     raise_on_report=( | ||||
|                         closure_err.args[0] == 'another task closed this fd' | ||||
|                         or | ||||
|                         closure_err.args[0] in ['another task closed this fd'] | ||||
|                     ), | ||||
|                 ) from closure_err | ||||
| 
 | ||||
|             # graceful TCP EOF disconnect | ||||
|             if header == b'': | ||||
|                 raise TransportClosed( | ||||
|                     message=( | ||||
|                         f'IPC transport already gracefully closed\n' | ||||
|                         f')>\n' | ||||
|                         f'|_{self}\n' | ||||
|                     ), | ||||
|                     loglevel='transport', | ||||
|                     # cause=???  # handy or no? | ||||
|                 ) | ||||
| 
 | ||||
|             size: int | ||||
|             size, = struct.unpack("<I", header) | ||||
| 
 | ||||
|             log.transport(f'received header {size}')  # type: ignore | ||||
|             msg_bytes: bytes = await self.recv_stream.receive_exactly(size) | ||||
| 
 | ||||
|             log.transport(f"received {msg_bytes}")  # type: ignore | ||||
|             try: | ||||
|                 # NOTE: lookup the `trio.Task.context`'s var for | ||||
|                 # the current `MsgCodec`. | ||||
|                 codec: MsgCodec = _ctxvar_MsgCodec.get() | ||||
| 
 | ||||
|                 # XXX for ctxvar debug only! | ||||
|                 # if self._codec.pld_spec != codec.pld_spec: | ||||
|                 #     assert ( | ||||
|                 #         task := trio.lowlevel.current_task() | ||||
|                 #     ) is not self._task | ||||
|                 #     self._task = task | ||||
|                 #     self._codec = codec | ||||
|                 #     log.runtime( | ||||
|                 #         f'Using new codec in {self}.recv()\n' | ||||
|                 #         f'codec: {self._codec}\n\n' | ||||
|                 #         f'msg_bytes: {msg_bytes}\n' | ||||
|                 #     ) | ||||
|                 yield codec.decode(msg_bytes) | ||||
| 
 | ||||
|             # XXX NOTE: since the below error derives from | ||||
|             # `DecodeError` we need to catch is specially | ||||
|             # and always raise such that spec violations | ||||
|             # are never allowed to be caught silently! | ||||
|             except msgspec.ValidationError as verr: | ||||
|                 msgtyperr: MsgTypeError = _mk_recv_mte( | ||||
|                     msg=msg_bytes, | ||||
|                     codec=codec, | ||||
|                     src_validation_error=verr, | ||||
|                 ) | ||||
|                 # XXX deliver up to `Channel.recv()` where | ||||
|                 # a re-raise and `Error`-pack can inject the far | ||||
|                 # end actor `.uid`. | ||||
|                 yield msgtyperr | ||||
| 
 | ||||
|             except ( | ||||
|                 msgspec.DecodeError, | ||||
|                 UnicodeDecodeError, | ||||
|             ): | ||||
|                 if decodes_failed < 4: | ||||
|                     # ignore decoding errors for now and assume they have to | ||||
|                     # do with a channel drop - hope that receiving from the | ||||
|                     # channel will raise an expected error and bubble up. | ||||
|                     try: | ||||
|                         msg_str: str|bytes = msg_bytes.decode() | ||||
|                     except UnicodeDecodeError: | ||||
|                         msg_str = msg_bytes | ||||
| 
 | ||||
|                     log.exception( | ||||
|                         'Failed to decode msg?\n' | ||||
|                         f'{codec}\n\n' | ||||
|                         'Rxed bytes from wire:\n\n' | ||||
|                         f'{msg_str!r}\n' | ||||
|                     ) | ||||
|                     decodes_failed += 1 | ||||
|                 else: | ||||
|                     raise | ||||
| 
 | ||||
|     async def send( | ||||
|         self, | ||||
|         msg: msgtypes.MsgType, | ||||
| 
 | ||||
|         strict_types: bool = True, | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a msgpack encoded py-object-blob-as-msg over TCP. | ||||
| 
 | ||||
|         If `strict_types == True` then a `MsgTypeError` will be raised on any | ||||
|         invalid msg type | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # XXX see `trio._sync.AsyncContextManagerMixin` for details | ||||
|         # on the `.acquire()`/`.release()` sequencing.. | ||||
|         async with self._send_lock: | ||||
| 
 | ||||
|             # NOTE: lookup the `trio.Task.context`'s var for | ||||
|             # the current `MsgCodec`. | ||||
|             codec: MsgCodec = _ctxvar_MsgCodec.get() | ||||
| 
 | ||||
|             # XXX for ctxvar debug only! | ||||
|             # if self._codec.pld_spec != codec.pld_spec: | ||||
|             #     self._codec = codec | ||||
|             #     log.runtime( | ||||
|             #         f'Using new codec in {self}.send()\n' | ||||
|             #         f'codec: {self._codec}\n\n' | ||||
|             #         f'msg: {msg}\n' | ||||
|             #     ) | ||||
| 
 | ||||
|             if type(msg) not in msgtypes.__msg_types__: | ||||
|                 if strict_types: | ||||
|                     raise _mk_send_mte( | ||||
|                         msg, | ||||
|                         codec=codec, | ||||
|                     ) | ||||
|                 else: | ||||
|                     log.warning( | ||||
|                         'Sending non-`Msg`-spec msg?\n\n' | ||||
|                         f'{msg}\n' | ||||
|                     ) | ||||
| 
 | ||||
|             try: | ||||
|                 bytes_data: bytes = codec.encode(msg) | ||||
|             except TypeError as _err: | ||||
|                 typerr = _err | ||||
|                 msgtyperr: MsgTypeError = _mk_send_mte( | ||||
|                     msg, | ||||
|                     codec=codec, | ||||
|                     message=( | ||||
|                         f'IPC-msg-spec violation in\n\n' | ||||
|                         f'{pretty_struct.Struct.pformat(msg)}' | ||||
|                     ), | ||||
|                     src_type_error=typerr, | ||||
|                 ) | ||||
|                 raise msgtyperr from typerr | ||||
| 
 | ||||
|             # supposedly the fastest says, | ||||
|             # https://stackoverflow.com/a/54027962 | ||||
|             size: bytes = struct.pack("<I", len(bytes_data)) | ||||
|             return await self.stream.send_all(size + bytes_data) | ||||
| 
 | ||||
|         # ?TODO? does it help ever to dynamically show this | ||||
|         # frame? | ||||
|         # try: | ||||
|         #     <the-above_code> | ||||
|         # except BaseException as _err: | ||||
|         #     err = _err | ||||
|         #     if not isinstance(err, MsgTypeError): | ||||
|         #         __tracebackhide__: bool = False | ||||
|         #     raise | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]: | ||||
|         return self._laddr | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> tuple[str, int]: | ||||
|         return self._raddr | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         return await self._aiter_pkts.asend(None) | ||||
| 
 | ||||
|     async def drain(self) -> AsyncIterator[dict]: | ||||
|         ''' | ||||
|         Drain the stream's remaining messages sent from | ||||
|         the far end until the connection is closed by | ||||
|         the peer. | ||||
| 
 | ||||
|         ''' | ||||
|         try: | ||||
|             async for msg in self._iter_packets(): | ||||
|                 self.drained.append(msg) | ||||
|         except TransportClosed: | ||||
|             for msg in self.drained: | ||||
|                 yield msg | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._aiter_pkts | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self.stream.socket.fileno() != -1 | ||||
| 
 | ||||
| 
 | ||||
| def get_msg_transport( | ||||
| 
 | ||||
|     key: tuple[str, str], | ||||
| 
 | ||||
| ) -> Type[MsgTransport]: | ||||
| 
 | ||||
|     return { | ||||
|         ('msgpack', 'tcp'): MsgpackTCPStream, | ||||
|     }[key] | ||||
| 
 | ||||
| 
 | ||||
| class Channel: | ||||
|     ''' | ||||
|     An inter-process channel for communication between (remote) actors. | ||||
| 
 | ||||
|     Wraps a ``MsgStream``: transport + encoding IPC connection. | ||||
| 
 | ||||
|     Currently we only support ``trio.SocketStream`` for transport | ||||
|     (aka TCP) and the ``msgpack`` interchange format via the ``msgspec`` | ||||
|     codec libary. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
| 
 | ||||
|         self, | ||||
|         destaddr: tuple[str, int]|None, | ||||
| 
 | ||||
|         msg_transport_type_key: tuple[str, str] = ('msgpack', 'tcp'), | ||||
| 
 | ||||
|         # TODO: optional reconnection support? | ||||
|         # auto_reconnect: bool = False, | ||||
|         # on_reconnect: typing.Callable[..., typing.Awaitable] = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         # self._recon_seq = on_reconnect | ||||
|         # self._autorecon = auto_reconnect | ||||
| 
 | ||||
|         self._destaddr = destaddr | ||||
|         self._transport_key = msg_transport_type_key | ||||
| 
 | ||||
|         # Either created in ``.connect()`` or passed in by | ||||
|         # user in ``.from_stream()``. | ||||
|         self._stream: trio.SocketStream|None = None | ||||
|         self._transport: MsgTransport|None = None | ||||
| 
 | ||||
|         # set after handshake - always uid of far end | ||||
|         self.uid: tuple[str, str]|None = None | ||||
| 
 | ||||
|         self._aiter_msgs = self._iter_msgs() | ||||
|         self._exc: Exception|None = None  # set if far end actor errors | ||||
|         self._closed: bool = False | ||||
| 
 | ||||
|         # flag set by ``Portal.cancel_actor()`` indicating remote | ||||
|         # (possibly peer) cancellation of the far end actor | ||||
|         # runtime. | ||||
|         self._cancel_called: bool = False | ||||
| 
 | ||||
|     @property | ||||
|     def msgstream(self) -> MsgTransport: | ||||
|         log.info( | ||||
|             '`Channel.msgstream` is an old name, use `._transport`' | ||||
|         ) | ||||
|         return self._transport | ||||
| 
 | ||||
|     @property | ||||
|     def transport(self) -> MsgTransport: | ||||
|         return self._transport | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_stream( | ||||
|         cls, | ||||
|         stream: trio.SocketStream, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> Channel: | ||||
| 
 | ||||
|         src, dst = get_stream_addrs(stream) | ||||
|         chan = Channel( | ||||
|             destaddr=dst, | ||||
|             **kwargs, | ||||
|         ) | ||||
| 
 | ||||
|         # set immediately here from provided instance | ||||
|         chan._stream: trio.SocketStream = stream | ||||
|         chan.set_msg_transport(stream) | ||||
|         return chan | ||||
| 
 | ||||
|     def set_msg_transport( | ||||
|         self, | ||||
|         stream: trio.SocketStream, | ||||
|         type_key: tuple[str, str]|None = None, | ||||
| 
 | ||||
|         # XXX optionally provided codec pair for `msgspec`: | ||||
|         # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
|         codec: MsgCodec|None = None, | ||||
| 
 | ||||
|     ) -> MsgTransport: | ||||
|         type_key = ( | ||||
|             type_key | ||||
|             or | ||||
|             self._transport_key | ||||
|         ) | ||||
|         # get transport type, then | ||||
|         self._transport = get_msg_transport( | ||||
|             type_key | ||||
|         # instantiate an instance of the msg-transport | ||||
|         )( | ||||
|             stream, | ||||
|             codec=codec, | ||||
|         ) | ||||
|         return self._transport | ||||
| 
 | ||||
|     @cm | ||||
|     def apply_codec( | ||||
|         self, | ||||
|         codec: MsgCodec, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Temporarily override the underlying IPC msg codec for | ||||
|         dynamic enforcement of messaging schema. | ||||
| 
 | ||||
|         ''' | ||||
|         orig: MsgCodec = self._transport.codec | ||||
|         try: | ||||
|             self._transport.codec = codec | ||||
|             yield | ||||
|         finally: | ||||
|             self._transport.codec = orig | ||||
| 
 | ||||
|     # TODO: do a .src/.dst: str for maddrs? | ||||
|     def __repr__(self) -> str: | ||||
|         if not self._transport: | ||||
|             return '<Channel with inactive transport?>' | ||||
| 
 | ||||
|         return repr( | ||||
|             self._transport.stream.socket._sock | ||||
|         ).replace(  # type: ignore | ||||
|             "socket.socket", | ||||
|             "Channel", | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]|None: | ||||
|         return self._transport.laddr if self._transport else None | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> tuple[str, int]|None: | ||||
|         return self._transport.raddr if self._transport else None | ||||
| 
 | ||||
|     async def connect( | ||||
|         self, | ||||
|         destaddr: tuple[Any, ...] | None = None, | ||||
|         **kwargs | ||||
| 
 | ||||
|     ) -> MsgTransport: | ||||
| 
 | ||||
|         if self.connected(): | ||||
|             raise RuntimeError("channel is already connected?") | ||||
| 
 | ||||
|         destaddr = destaddr or self._destaddr | ||||
|         assert isinstance(destaddr, tuple) | ||||
| 
 | ||||
|         stream = await trio.open_tcp_stream( | ||||
|             *destaddr, | ||||
|             **kwargs | ||||
|         ) | ||||
|         transport = self.set_msg_transport(stream) | ||||
| 
 | ||||
|         log.transport( | ||||
|             f'Opened channel[{type(transport)}]: {self.laddr} -> {self.raddr}' | ||||
|         ) | ||||
|         return transport | ||||
| 
 | ||||
|     # TODO: something like, | ||||
|     # `pdbp.hideframe_on(errors=[MsgTypeError])` | ||||
|     # instead of the `try/except` hack we have rn.. | ||||
|     # seems like a pretty useful thing to have in general | ||||
|     # along with being able to filter certain stack frame(s / sets) | ||||
|     # possibly based on the current log-level? | ||||
|     async def send( | ||||
|         self, | ||||
|         payload: Any, | ||||
| 
 | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a coded msg-blob over the transport. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         try: | ||||
|             log.transport( | ||||
|                 '=> send IPC msg:\n\n' | ||||
|                 f'{pformat(payload)}\n' | ||||
|             ) | ||||
|             # assert self._transport  # but why typing? | ||||
|             await self._transport.send( | ||||
|                 payload, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
|         except BaseException as _err: | ||||
|             err = _err  # bind for introspection | ||||
|             if not isinstance(_err, MsgTypeError): | ||||
|                 # assert err | ||||
|                 __tracebackhide__: bool = False | ||||
|             else: | ||||
|                 assert err.cid | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         assert self._transport | ||||
|         return await self._transport.recv() | ||||
| 
 | ||||
|         # TODO: auto-reconnect features like 0mq/nanomsg? | ||||
|         # -[ ] implement it manually with nods to SC prot | ||||
|         #      possibly on multiple transport backends? | ||||
|         #  -> seems like that might be re-inventing scalability | ||||
|         #     prots tho no? | ||||
|         # try: | ||||
|         #     return await self._transport.recv() | ||||
|         # except trio.BrokenResourceError: | ||||
|         #     if self._autorecon: | ||||
|         #         await self._reconnect() | ||||
|         #         return await self.recv() | ||||
|         #     raise | ||||
| 
 | ||||
|     async def aclose(self) -> None: | ||||
| 
 | ||||
|         log.transport( | ||||
|             f'Closing channel to {self.uid} ' | ||||
|             f'{self.laddr} -> {self.raddr}' | ||||
|         ) | ||||
|         assert self._transport | ||||
|         await self._transport.stream.aclose() | ||||
|         self._closed = True | ||||
| 
 | ||||
|     async def __aenter__(self): | ||||
|         await self.connect() | ||||
|         return self | ||||
| 
 | ||||
|     async def __aexit__(self, *args): | ||||
|         await self.aclose(*args) | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._aiter_msgs | ||||
| 
 | ||||
|     # ?TODO? run any reconnection sequence? | ||||
|     # -[ ] prolly should be impl-ed as deco-API? | ||||
|     # | ||||
|     # async def _reconnect(self) -> None: | ||||
|     #     """Handle connection failures by polling until a reconnect can be | ||||
|     #     established. | ||||
|     #     """ | ||||
|     #     down = False | ||||
|     #     while True: | ||||
|     #         try: | ||||
|     #             with trio.move_on_after(3) as cancel_scope: | ||||
|     #                 await self.connect() | ||||
|     #             cancelled = cancel_scope.cancelled_caught | ||||
|     #             if cancelled: | ||||
|     #                 log.transport( | ||||
|     #                     "Reconnect timed out after 3 seconds, retrying...") | ||||
|     #                 continue | ||||
|     #             else: | ||||
|     #                 log.transport("Stream connection re-established!") | ||||
| 
 | ||||
|     #                 # on_recon = self._recon_seq | ||||
|     #                 # if on_recon: | ||||
|     #                 #     await on_recon(self) | ||||
| 
 | ||||
|     #                 break | ||||
|     #         except (OSError, ConnectionRefusedError): | ||||
|     #             if not down: | ||||
|     #                 down = True | ||||
|     #                 log.transport( | ||||
|     #                     f"Connection to {self.raddr} went down, waiting" | ||||
|     #                     " for re-establishment") | ||||
|     #             await trio.sleep(1) | ||||
| 
 | ||||
|     async def _iter_msgs( | ||||
|         self | ||||
|     ) -> AsyncGenerator[Any, None]: | ||||
|         ''' | ||||
|         Yield `MsgType` IPC msgs decoded and deliverd from | ||||
|         an underlying `MsgTransport` protocol. | ||||
| 
 | ||||
|         This is a streaming routine alo implemented as an async-gen | ||||
|         func (same a `MsgTransport._iter_pkts()`) gets allocated by | ||||
|         a `.__call__()` inside `.__init__()` where it is assigned to | ||||
|         the `._aiter_msgs` attr. | ||||
| 
 | ||||
|         ''' | ||||
|         assert self._transport | ||||
|         while True: | ||||
|             try: | ||||
|                 async for msg in self._transport: | ||||
|                     match msg: | ||||
|                         # NOTE: if transport/interchange delivers | ||||
|                         # a type error, we pack it with the far | ||||
|                         # end peer `Actor.uid` and relay the | ||||
|                         # `Error`-msg upward to the `._rpc` stack | ||||
|                         # for normal RAE handling. | ||||
|                         case MsgTypeError(): | ||||
|                             yield pack_from_raise( | ||||
|                                 local_err=msg, | ||||
|                                 cid=msg.cid, | ||||
| 
 | ||||
|                                 # XXX we pack it here bc lower | ||||
|                                 # layers have no notion of an | ||||
|                                 # actor-id ;) | ||||
|                                 src_uid=self.uid, | ||||
|                             ) | ||||
|                         case _: | ||||
|                             yield msg | ||||
| 
 | ||||
|             except trio.BrokenResourceError: | ||||
| 
 | ||||
|                 # if not self._autorecon: | ||||
|                 raise | ||||
| 
 | ||||
|             await self.aclose() | ||||
| 
 | ||||
|             # if self._autorecon:  # attempt reconnect | ||||
|             #     await self._reconnect() | ||||
|             #     continue | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self._transport.connected() if self._transport else False | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def _connect_chan( | ||||
|     host: str, | ||||
|     port: int | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[Channel, None]: | ||||
|     ''' | ||||
|     Create and connect a channel with disconnect on context manager | ||||
|     teardown. | ||||
| 
 | ||||
|     ''' | ||||
|     chan = Channel((host, port)) | ||||
|     await chan.connect() | ||||
|     yield chan | ||||
|     with trio.CancelScope(shield=True): | ||||
|         await chan.aclose() | ||||
|  | @ -39,14 +39,11 @@ import warnings | |||
| 
 | ||||
| import trio | ||||
| 
 | ||||
| from .trionics import ( | ||||
|     maybe_open_nursery, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from .trionics import maybe_open_nursery | ||||
| from ._state import ( | ||||
|     current_actor, | ||||
| ) | ||||
| from .ipc import Channel | ||||
| from ._ipc import Channel | ||||
| from .log import get_logger | ||||
| from .msg import ( | ||||
|     # Error, | ||||
|  | @ -55,8 +52,8 @@ from .msg import ( | |||
|     Return, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     # unpack_error, | ||||
|     NoResult, | ||||
|     TransportClosed, | ||||
| ) | ||||
| from ._context import ( | ||||
|     Context, | ||||
|  | @ -110,18 +107,10 @@ class Portal: | |||
|         # point. | ||||
|         self._expect_result_ctx: Context|None = None | ||||
|         self._streams: set[MsgStream] = set() | ||||
| 
 | ||||
|         # TODO, this should be PRIVATE (and never used publicly)! since it's just | ||||
|         # a cached ref to the local runtime instead of calling | ||||
|         # `current_actor()` everywhere.. XD | ||||
|         self.actor: Actor = current_actor() | ||||
| 
 | ||||
|     @property | ||||
|     def chan(self) -> Channel: | ||||
|         ''' | ||||
|         Ref to this ctx's underlying `tractor.ipc.Channel`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._chan | ||||
| 
 | ||||
|     @property | ||||
|  | @ -181,17 +170,10 @@ class Portal: | |||
| 
 | ||||
|         # not expecting a "main" result | ||||
|         if self._expect_result_ctx is None: | ||||
|             peer_id: str = f'{self.channel.aid.reprol()!r}' | ||||
|             log.warning( | ||||
|                 f'Portal to peer {peer_id} will not deliver a final result?\n' | ||||
|                 f'\n' | ||||
|                 f'Context.result() can only be called by the parent of ' | ||||
|                 f'a sub-actor when it was spawned with ' | ||||
|                 f'`ActorNursery.run_in_actor()`' | ||||
|                 f'\n' | ||||
|                 f'Further this `ActorNursery`-method-API will deprecated in the' | ||||
|                 f'near fututre!\n' | ||||
|             ) | ||||
|                 f"Portal for {self.channel.uid} not expecting a final" | ||||
|                 " result?\nresult() should only be called if subactor" | ||||
|                 " was spawned with `ActorNursery.run_in_actor()`") | ||||
|             return NoResult | ||||
| 
 | ||||
|         # expecting a "main" result | ||||
|  | @ -202,7 +184,7 @@ class Portal: | |||
|                 ( | ||||
|                     self._final_result_msg, | ||||
|                     self._final_result_pld, | ||||
|                 ) = await self._expect_result_ctx._pld_rx.recv_msg( | ||||
|                 ) = await self._expect_result_ctx._pld_rx.recv_msg_w_pld( | ||||
|                     ipc=self._expect_result_ctx, | ||||
|                     expect_msg=Return, | ||||
|                 ) | ||||
|  | @ -224,7 +206,6 @@ class Portal: | |||
|         typname: str = type(self).__name__ | ||||
|         log.warning( | ||||
|             f'`{typname}.result()` is DEPRECATED!\n' | ||||
|             f'\n' | ||||
|             f'Use `{typname}.wait_for_result()` instead!\n' | ||||
|         ) | ||||
|         return await self.wait_for_result( | ||||
|  | @ -236,10 +217,8 @@ class Portal: | |||
|         # terminate all locally running async generator | ||||
|         # IPC calls | ||||
|         if self._streams: | ||||
|             peer_id: str = f'{self.channel.aid.reprol()!r}' | ||||
|             report: str = ( | ||||
|                 f'Cancelling all msg-streams with {peer_id}\n' | ||||
|             ) | ||||
|             log.cancel( | ||||
|                 f"Cancelling all streams with {self.channel.uid}") | ||||
|             for stream in self._streams.copy(): | ||||
|                 try: | ||||
|                     await stream.aclose() | ||||
|  | @ -248,18 +227,10 @@ class Portal: | |||
|                     # (unless of course at some point down the road we | ||||
|                     # won't expect this to always be the case or need to | ||||
|                     # detect it for respawning purposes?) | ||||
|                     report += ( | ||||
|                         f'->) {stream!r} already closed\n' | ||||
|                     ) | ||||
| 
 | ||||
|             log.cancel(report) | ||||
|                     log.debug(f"{stream} was already closed.") | ||||
| 
 | ||||
|     async def aclose(self): | ||||
|         log.debug( | ||||
|             f'Closing portal\n' | ||||
|             f'>}}\n' | ||||
|             f'|_{self}\n' | ||||
|         ) | ||||
|         log.debug(f"Closing {self}") | ||||
|         # TODO: once we move to implementing our own `ReceiveChannel` | ||||
|         # (including remote task cancellation inside its `.aclose()`) | ||||
|         # we'll need to .aclose all those channels here | ||||
|  | @ -285,22 +256,23 @@ class Portal: | |||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         chan: Channel = self.channel | ||||
|         peer_id: str = f'{self.channel.aid.reprol()!r}' | ||||
|         if not chan.connected(): | ||||
|             log.runtime( | ||||
|                 'Peer {peer_id} is already disconnected\n' | ||||
|                 '-> skipping cancel request..\n' | ||||
|                 'This channel is already closed, skipping cancel request..' | ||||
|             ) | ||||
|             return False | ||||
| 
 | ||||
|         reminfo: str = ( | ||||
|             f'c)=> {self.channel.uid}\n' | ||||
|             f'  |_{chan}\n' | ||||
|         ) | ||||
|         log.cancel( | ||||
|             f'Sending actor-runtime-cancel-req to peer\n' | ||||
|             f'\n' | ||||
|             f'c)=> {peer_id}\n' | ||||
|             f'Requesting actor-runtime cancel for peer\n\n' | ||||
|             f'{reminfo}' | ||||
|         ) | ||||
| 
 | ||||
|         # XXX the one spot we set it? | ||||
|         chan._cancel_called: bool = True | ||||
|         self.channel._cancel_called: bool = True | ||||
|         try: | ||||
|             # send cancel cmd - might not get response | ||||
|             # XXX: sure would be nice to make this work with | ||||
|  | @ -321,43 +293,22 @@ class Portal: | |||
|                 # may timeout and we never get an ack (obvi racy) | ||||
|                 # but that doesn't mean it wasn't cancelled. | ||||
|                 log.debug( | ||||
|                     f'May have failed to cancel peer?\n' | ||||
|                     f'\n' | ||||
|                     f'c)=?> {peer_id}\n' | ||||
|                     'May have failed to cancel peer?\n' | ||||
|                     f'{reminfo}' | ||||
|                 ) | ||||
| 
 | ||||
|             # if we get here some weird cancellation case happened | ||||
|             return False | ||||
| 
 | ||||
|         except ( | ||||
|             # XXX, should never really get raised unless we aren't | ||||
|             # wrapping them in the below type by mistake? | ||||
|             # | ||||
|             # Leaving the catch here for now until we're very sure | ||||
|             # all the cases (for various tpt protos) have indeed been | ||||
|             # re-wrapped ;p | ||||
|             trio.ClosedResourceError, | ||||
|             trio.BrokenResourceError, | ||||
| 
 | ||||
|             TransportClosed, | ||||
|         ) as tpt_err: | ||||
|             ipc_borked_report: str = ( | ||||
|                 f'IPC for actor already closed/broken?\n\n' | ||||
|                 f'\n' | ||||
|                 f'c)=x> {peer_id}\n' | ||||
|         ): | ||||
|             log.debug( | ||||
|                 'IPC chan for actor already closed or broken?\n\n' | ||||
|                 f'{self.channel.uid}\n' | ||||
|                 f' |_{self.channel}\n' | ||||
|             ) | ||||
|             match tpt_err: | ||||
|                 case TransportClosed(): | ||||
|                     log.debug(ipc_borked_report) | ||||
|                 case _: | ||||
|                     ipc_borked_report += ( | ||||
|                         f'\n' | ||||
|                         f'Unhandled low-level transport-closed/error during\n' | ||||
|                         f'Portal.cancel_actor()` request?\n' | ||||
|                         f'<{type(tpt_err).__name__}( {tpt_err} )>\n' | ||||
|                     ) | ||||
|                     log.warning(ipc_borked_report) | ||||
| 
 | ||||
|             return False | ||||
| 
 | ||||
|     # TODO: do we still need this for low level `Actor`-runtime | ||||
|  | @ -513,13 +464,10 @@ class Portal: | |||
|                 with trio.CancelScope(shield=True): | ||||
|                     await ctx.cancel() | ||||
| 
 | ||||
|             except trio.ClosedResourceError as cre: | ||||
|             except trio.ClosedResourceError: | ||||
|                 # if the far end terminates before we send a cancel the | ||||
|                 # underlying transport-channel may already be closed. | ||||
|                 log.cancel( | ||||
|                     f'Context.cancel() -> {cre!r}\n' | ||||
|                     f'cid: {ctx.cid!r} already closed?\n' | ||||
|                 ) | ||||
|                 log.cancel(f'Context {ctx} was already closed?') | ||||
| 
 | ||||
|             # XXX: should this always be done? | ||||
|             # await recv_chan.aclose() | ||||
|  | @ -556,12 +504,8 @@ class LocalPortal: | |||
|         return it's result. | ||||
| 
 | ||||
|         ''' | ||||
|         obj = ( | ||||
|             self.actor | ||||
|             if ns == 'self' | ||||
|             else importlib.import_module(ns) | ||||
|         ) | ||||
|         func: Callable = getattr(obj, func_name) | ||||
|         obj = self.actor if ns == 'self' else importlib.import_module(ns) | ||||
|         func = getattr(obj, func_name) | ||||
|         return await func(**kwargs) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -586,30 +530,26 @@ async def open_portal( | |||
|     assert actor | ||||
|     was_connected: bool = False | ||||
| 
 | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         maybe_open_nursery( | ||||
|             tn, | ||||
|             shield=shield, | ||||
|         ) as tn, | ||||
|     ): | ||||
|     async with maybe_open_nursery( | ||||
|         tn, | ||||
|         shield=shield, | ||||
|     ) as tn: | ||||
| 
 | ||||
|         if not channel.connected(): | ||||
|             await channel.connect() | ||||
|             was_connected = True | ||||
| 
 | ||||
|         if channel.aid is None: | ||||
|             await channel._do_handshake( | ||||
|                 aid=actor.aid, | ||||
|             ) | ||||
|         if channel.uid is None: | ||||
|             await actor._do_handshake(channel) | ||||
| 
 | ||||
|         msg_loop_cs: trio.CancelScope|None = None | ||||
|         if start_msg_loop: | ||||
|             from . import _rpc | ||||
|             from ._runtime import process_messages | ||||
|             msg_loop_cs = await tn.start( | ||||
|                 partial( | ||||
|                     _rpc.process_messages, | ||||
|                     chan=channel, | ||||
|                     process_messages, | ||||
|                     actor, | ||||
|                     channel, | ||||
|                     # if the local task is cancelled we want to keep | ||||
|                     # the msg loop running until our block ends | ||||
|                     shield=True, | ||||
|  |  | |||
							
								
								
									
										855
									
								
								tractor/_root.py
								
								
								
								
							
							
						
						
									
										855
									
								
								tractor/_root.py
								
								
								
								
							|  | @ -18,9 +18,7 @@ | |||
| Root actor runtime ignition(s). | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from functools import partial | ||||
| import importlib | ||||
| import inspect | ||||
|  | @ -28,55 +26,96 @@ import logging | |||
| import os | ||||
| import signal | ||||
| import sys | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
| ) | ||||
| from typing import Callable | ||||
| import warnings | ||||
| 
 | ||||
| 
 | ||||
| import trio | ||||
| 
 | ||||
| from . import _runtime | ||||
| from .devx import ( | ||||
|     debug, | ||||
|     _frame_stack, | ||||
|     pformat as _pformat, | ||||
| from ._runtime import ( | ||||
|     Actor, | ||||
|     Arbiter, | ||||
|     # TODO: rename and make a non-actor subtype? | ||||
|     # Arbiter as Registry, | ||||
|     async_main, | ||||
| ) | ||||
| from .devx import _debug | ||||
| from . import _spawn | ||||
| from . import _state | ||||
| from . import log | ||||
| from .ipc import ( | ||||
|     _connect_chan, | ||||
| ) | ||||
| from ._addr import ( | ||||
|     Address, | ||||
|     UnwrappedAddress, | ||||
|     default_lo_addrs, | ||||
|     mk_uuid, | ||||
|     wrap_address, | ||||
| ) | ||||
| from .trionics import ( | ||||
|     is_multi_cancelled, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     RuntimeFailure, | ||||
| ) | ||||
| from ._ipc import _connect_chan | ||||
| from ._exceptions import is_multi_cancelled | ||||
| 
 | ||||
| 
 | ||||
| # set at startup and after forks | ||||
| _default_host: str = '127.0.0.1' | ||||
| _default_port: int = 1616 | ||||
| 
 | ||||
| # default registry always on localhost | ||||
| _default_lo_addrs: list[tuple[str, int]] = [( | ||||
|     _default_host, | ||||
|     _default_port, | ||||
| )] | ||||
| 
 | ||||
| 
 | ||||
| logger = log.get_logger('tractor') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: stick this in a `@acm` defined in `devx.debug`? | ||||
| # -[ ] also maybe consider making this a `wrapt`-deco to | ||||
| #     save an indent level? | ||||
| # | ||||
| @acm | ||||
| async def maybe_block_bp( | ||||
|     debug_mode: bool, | ||||
|     maybe_enable_greenback: bool, | ||||
| ) -> bool: | ||||
| async def open_root_actor( | ||||
| 
 | ||||
|     *, | ||||
|     # defaults are above | ||||
|     registry_addrs: list[tuple[str, int]]|None = None, | ||||
| 
 | ||||
|     # defaults are above | ||||
|     arbiter_addr: tuple[str, int]|None = None, | ||||
| 
 | ||||
|     name: str|None = 'root', | ||||
| 
 | ||||
|     # either the `multiprocessing` start method: | ||||
|     # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods | ||||
|     # OR `trio` (the new default). | ||||
|     start_method: _spawn.SpawnMethodKey|None = None, | ||||
| 
 | ||||
|     # enables the multi-process debugger support | ||||
|     debug_mode: bool = False, | ||||
|     maybe_enable_greenback: bool = True,  # `.pause_from_sync()/breakpoint()` support | ||||
|     enable_stack_on_sig: bool = False, | ||||
| 
 | ||||
|     # internal logging | ||||
|     loglevel: str|None = None, | ||||
| 
 | ||||
|     enable_modules: list|None = None, | ||||
|     rpc_module_paths: list|None = None, | ||||
| 
 | ||||
|     # NOTE: allow caller to ensure that only one registry exists | ||||
|     # and that this call creates it. | ||||
|     ensure_registry: bool = False, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
|     # XXX, proxied directly to `.devx._debug._maybe_enter_pm()` | ||||
|     # for REPL-entry logic. | ||||
|     debug_filter: Callable[ | ||||
|         [BaseException|BaseExceptionGroup], | ||||
|         bool, | ||||
|     ] = lambda err: not is_multi_cancelled(err), | ||||
| 
 | ||||
|     # TODO, a way for actors to augment passing derived | ||||
|     # read-only state to sublayers? | ||||
|     # extra_rt_vars: dict|None = None, | ||||
| 
 | ||||
| ) -> Actor: | ||||
|     ''' | ||||
|     Runtime init entry point for ``tractor``. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     _debug.hide_runtime_frames() | ||||
| 
 | ||||
|     # TODO: stick this in a `@cm` defined in `devx._debug`? | ||||
|     # | ||||
|     # Override the global debugger hook to make it play nice with | ||||
|     # ``trio``, see much discussion in: | ||||
|     # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018 | ||||
|  | @ -85,25 +124,23 @@ async def maybe_block_bp( | |||
|         'PYTHONBREAKPOINT', | ||||
|         None, | ||||
|     ) | ||||
|     bp_blocked: bool | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and maybe_enable_greenback | ||||
|         and ( | ||||
|             maybe_mod := await debug.maybe_init_greenback( | ||||
|             maybe_mod := await _debug.maybe_init_greenback( | ||||
|                 raise_not_found=False, | ||||
|             ) | ||||
|         ) | ||||
|     ): | ||||
|         logger.info( | ||||
|             f'Found `greenback` installed @ {maybe_mod}\n' | ||||
|             f'Enabling `tractor.pause_from_sync()` support!\n' | ||||
|             'Enabling `tractor.pause_from_sync()` support!\n' | ||||
|         ) | ||||
|         os.environ['PYTHONBREAKPOINT'] = ( | ||||
|             'tractor.devx.debug._sync_pause_from_builtin' | ||||
|             'tractor.devx._debug._sync_pause_from_builtin' | ||||
|         ) | ||||
|         _state._runtime_vars['use_greenback'] = True | ||||
|         bp_blocked = False | ||||
| 
 | ||||
|     else: | ||||
|         # TODO: disable `breakpoint()` by default (without | ||||
|  | @ -122,505 +159,310 @@ async def maybe_block_bp( | |||
|         # lol ok, | ||||
|         # https://docs.python.org/3/library/sys.html#sys.breakpointhook | ||||
|         os.environ['PYTHONBREAKPOINT'] = "0" | ||||
|         bp_blocked = True | ||||
| 
 | ||||
|     try: | ||||
|         yield bp_blocked | ||||
|     finally: | ||||
|         # restore any prior built-in `breakpoint()` hook state | ||||
|         if builtin_bp_handler is not None: | ||||
|             sys.breakpointhook = builtin_bp_handler | ||||
|     # attempt to retreive ``trio``'s sigint handler and stash it | ||||
|     # on our debugger lock state. | ||||
|     _debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT) | ||||
| 
 | ||||
|         if orig_bp_path is not None: | ||||
|             os.environ['PYTHONBREAKPOINT'] = orig_bp_path | ||||
|     # mark top most level process as root actor | ||||
|     _state._runtime_vars['_is_root'] = True | ||||
| 
 | ||||
|         else: | ||||
|             # clear env back to having no entry | ||||
|             os.environ.pop('PYTHONBREAKPOINT', None) | ||||
|     # caps based rpc list | ||||
|     enable_modules = ( | ||||
|         enable_modules | ||||
|         or | ||||
|         [] | ||||
|     ) | ||||
| 
 | ||||
|     if rpc_module_paths: | ||||
|         warnings.warn( | ||||
|             "`rpc_module_paths` is now deprecated, use " | ||||
|             " `enable_modules` instead.", | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         enable_modules.extend(rpc_module_paths) | ||||
| 
 | ||||
|     if start_method is not None: | ||||
|         _spawn.try_set_start_method(start_method) | ||||
| 
 | ||||
| @acm | ||||
| async def open_root_actor( | ||||
|     *, | ||||
|     # defaults are above | ||||
|     registry_addrs: list[UnwrappedAddress]|None = None, | ||||
|     if arbiter_addr is not None: | ||||
|         warnings.warn( | ||||
|             '`arbiter_addr` is now deprecated\n' | ||||
|             'Use `registry_addrs: list[tuple]` instead..', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         registry_addrs = [arbiter_addr] | ||||
| 
 | ||||
|     # defaults are above | ||||
|     arbiter_addr: tuple[UnwrappedAddress]|None = None, | ||||
|     registry_addrs: list[tuple[str, int]] = ( | ||||
|         registry_addrs | ||||
|         or | ||||
|         _default_lo_addrs | ||||
|     ) | ||||
|     assert registry_addrs | ||||
| 
 | ||||
|     enable_transports: list[ | ||||
|         # TODO, this should eventually be the pairs as | ||||
|         # defined by (codec, proto) as on `MsgTransport. | ||||
|         _state.TransportProtocolKey, | ||||
|     ]|None = None, | ||||
|     loglevel = ( | ||||
|         loglevel | ||||
|         or log._default_loglevel | ||||
|     ).upper() | ||||
| 
 | ||||
|     name: str|None = 'root', | ||||
| 
 | ||||
|     # either the `multiprocessing` start method: | ||||
|     # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods | ||||
|     # OR `trio` (the new default). | ||||
|     start_method: _spawn.SpawnMethodKey|None = None, | ||||
| 
 | ||||
|     # enables the multi-process debugger support | ||||
|     debug_mode: bool = False, | ||||
|     maybe_enable_greenback: bool = False,  # `.pause_from_sync()/breakpoint()` support | ||||
|     # ^XXX NOTE^ the perf implications of use, | ||||
|     # https://greenback.readthedocs.io/en/latest/principle.html#performance | ||||
|     enable_stack_on_sig: bool = False, | ||||
| 
 | ||||
|     # internal logging | ||||
|     loglevel: str|None = None, | ||||
| 
 | ||||
|     enable_modules: list|None = None, | ||||
|     rpc_module_paths: list|None = None, | ||||
| 
 | ||||
|     # NOTE: allow caller to ensure that only one registry exists | ||||
|     # and that this call creates it. | ||||
|     ensure_registry: bool = False, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
|     # XXX, proxied directly to `.devx.debug._maybe_enter_pm()` | ||||
|     # for REPL-entry logic. | ||||
|     debug_filter: Callable[ | ||||
|         [BaseException|BaseExceptionGroup], | ||||
|         bool, | ||||
|     ] = lambda err: not is_multi_cancelled(err), | ||||
| 
 | ||||
|     # TODO, a way for actors to augment passing derived | ||||
|     # read-only state to sublayers? | ||||
|     # extra_rt_vars: dict|None = None, | ||||
| 
 | ||||
| ) -> _runtime.Actor: | ||||
|     ''' | ||||
|     Initialize the `tractor` runtime by starting a "root actor" in | ||||
|     a parent-most Python process. | ||||
| 
 | ||||
|     All (disjoint) actor-process-trees-as-programs are created via | ||||
|     this entrypoint. | ||||
| 
 | ||||
|     ''' | ||||
|     # XXX NEVER allow nested actor-trees! | ||||
|     if already_actor := _state.current_actor( | ||||
|         err_on_no_runtime=False, | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and _spawn._spawn_method == 'trio' | ||||
|     ): | ||||
|         rtvs: dict[str, Any] = _state._runtime_vars | ||||
|         root_mailbox: list[str, int] = rtvs['_root_mailbox'] | ||||
|         registry_addrs: list[list[str, int]] = rtvs['_registry_addrs'] | ||||
|         raise RuntimeFailure( | ||||
|             f'A current actor already exists !?\n' | ||||
|             f'({already_actor}\n' | ||||
|             f'\n' | ||||
|             f'You can NOT open a second root actor from within ' | ||||
|             f'an existing tree and the current root of this ' | ||||
|             f'already exists !!\n' | ||||
|             f'\n' | ||||
|             f'_root_mailbox: {root_mailbox!r}\n' | ||||
|             f'_registry_addrs: {registry_addrs!r}\n' | ||||
|         _state._runtime_vars['_debug_mode'] = True | ||||
| 
 | ||||
|         # expose internal debug module to every actor allowing for | ||||
|         # use of ``await tractor.pause()`` | ||||
|         enable_modules.append('tractor.devx._debug') | ||||
| 
 | ||||
|         # if debug mode get's enabled *at least* use that level of | ||||
|         # logging for some informative console prompts. | ||||
|         if ( | ||||
|             logging.getLevelName( | ||||
|                 # lul, need the upper case for the -> int map? | ||||
|                 # sweet "dynamic function behaviour" stdlib... | ||||
|                 loglevel, | ||||
|             ) > logging.getLevelName('PDB') | ||||
|         ): | ||||
|             loglevel = 'PDB' | ||||
| 
 | ||||
| 
 | ||||
|     elif debug_mode: | ||||
|         raise RuntimeError( | ||||
|             "Debug mode is only supported for the `trio` backend!" | ||||
|         ) | ||||
| 
 | ||||
|     async with maybe_block_bp( | ||||
|         debug_mode=debug_mode, | ||||
|         maybe_enable_greenback=maybe_enable_greenback, | ||||
|     assert loglevel | ||||
|     _log = log.get_console_log(loglevel) | ||||
|     assert _log | ||||
| 
 | ||||
|     # TODO: factor this into `.devx._stackscope`!! | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and | ||||
|         enable_stack_on_sig | ||||
|     ): | ||||
|         if enable_transports is None: | ||||
|             enable_transports: list[str] = _state.current_ipc_protos() | ||||
|         else: | ||||
|             _state._runtime_vars['_enable_tpts'] = enable_transports | ||||
|         from .devx._stackscope import enable_stack_on_sig | ||||
|         enable_stack_on_sig() | ||||
| 
 | ||||
|         # TODO! support multi-tpts per actor! | ||||
|         # Bo | ||||
|         if not len(enable_transports) == 1: | ||||
|             raise RuntimeError( | ||||
|                 f'No multi-tpt support yet!\n' | ||||
|                 f'enable_transports={enable_transports!r}\n' | ||||
|             ) | ||||
|     # closed into below ping task-func | ||||
|     ponged_addrs: list[tuple[str, int]] = [] | ||||
| 
 | ||||
|         _frame_stack.hide_runtime_frames() | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|     async def ping_tpt_socket( | ||||
|         addr: tuple[str, int], | ||||
|         timeout: float = 1, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Attempt temporary connection to see if a registry is | ||||
|         listening at the requested address by a tranport layer | ||||
|         ping. | ||||
| 
 | ||||
|         # attempt to retreive ``trio``'s sigint handler and stash it | ||||
|         # on our debugger lock state. | ||||
|         debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT) | ||||
|         If a connection can't be made quickly we assume none no | ||||
|         server is listening at that addr. | ||||
| 
 | ||||
|         # mark top most level process as root actor | ||||
|         _state._runtime_vars['_is_root'] = True | ||||
| 
 | ||||
|         # caps based rpc list | ||||
|         enable_modules = ( | ||||
|             enable_modules | ||||
|             or | ||||
|             [] | ||||
|         ) | ||||
| 
 | ||||
|         if rpc_module_paths: | ||||
|             warnings.warn( | ||||
|                 "`rpc_module_paths` is now deprecated, use " | ||||
|                 " `enable_modules` instead.", | ||||
|                 DeprecationWarning, | ||||
|                 stacklevel=2, | ||||
|             ) | ||||
|             enable_modules.extend(rpc_module_paths) | ||||
| 
 | ||||
|         if start_method is not None: | ||||
|             _spawn.try_set_start_method(start_method) | ||||
| 
 | ||||
|         # TODO! remove this ASAP! | ||||
|         if arbiter_addr is not None: | ||||
|             warnings.warn( | ||||
|                 '`arbiter_addr` is now deprecated\n' | ||||
|                 'Use `registry_addrs: list[tuple]` instead..', | ||||
|                 DeprecationWarning, | ||||
|                 stacklevel=2, | ||||
|             ) | ||||
|             uw_reg_addrs = [arbiter_addr] | ||||
| 
 | ||||
|         uw_reg_addrs = registry_addrs | ||||
|         if not uw_reg_addrs: | ||||
|             uw_reg_addrs: list[UnwrappedAddress] = default_lo_addrs( | ||||
|                 enable_transports | ||||
|             ) | ||||
| 
 | ||||
|         # must exist by now since all below code is dependent | ||||
|         assert uw_reg_addrs | ||||
|         registry_addrs: list[Address] = [ | ||||
|             wrap_address(uw_addr) | ||||
|             for uw_addr in uw_reg_addrs | ||||
|         ] | ||||
| 
 | ||||
|         loglevel = ( | ||||
|             loglevel | ||||
|             or log._default_loglevel | ||||
|         ).upper() | ||||
| 
 | ||||
|         if ( | ||||
|             debug_mode | ||||
|             and | ||||
|             _spawn._spawn_method == 'trio' | ||||
|         ): | ||||
|             _state._runtime_vars['_debug_mode'] = True | ||||
| 
 | ||||
|             # expose internal debug module to every actor allowing for | ||||
|             # use of ``await tractor.pause()`` | ||||
|             enable_modules.append('tractor.devx.debug._tty_lock') | ||||
| 
 | ||||
|             # if debug mode get's enabled *at least* use that level of | ||||
|             # logging for some informative console prompts. | ||||
|             if ( | ||||
|                 logging.getLevelName( | ||||
|                     # lul, need the upper case for the -> int map? | ||||
|                     # sweet "dynamic function behaviour" stdlib... | ||||
|                     loglevel, | ||||
|                 ) > logging.getLevelName('PDB') | ||||
|             ): | ||||
|                 loglevel = 'PDB' | ||||
| 
 | ||||
| 
 | ||||
|         elif debug_mode: | ||||
|             raise RuntimeError( | ||||
|                 "Debug mode is only supported for the `trio` backend!" | ||||
|             ) | ||||
| 
 | ||||
|         assert loglevel | ||||
|         _log = log.get_console_log(loglevel) | ||||
|         assert _log | ||||
| 
 | ||||
|         # TODO: factor this into `.devx._stackscope`!! | ||||
|         if ( | ||||
|             debug_mode | ||||
|             and | ||||
|             enable_stack_on_sig | ||||
|         ): | ||||
|             from .devx._stackscope import enable_stack_on_sig | ||||
|             enable_stack_on_sig() | ||||
| 
 | ||||
|         # closed into below ping task-func | ||||
|         ponged_addrs: list[Address] = [] | ||||
| 
 | ||||
|         async def ping_tpt_socket( | ||||
|             addr: Address, | ||||
|             timeout: float = 1, | ||||
|         ) -> None: | ||||
|             ''' | ||||
|             Attempt temporary connection to see if a registry is | ||||
|             listening at the requested address by a tranport layer | ||||
|             ping. | ||||
| 
 | ||||
|             If a connection can't be made quickly we assume none no | ||||
|             server is listening at that addr. | ||||
| 
 | ||||
|             ''' | ||||
|             try: | ||||
|                 # TODO: this connect-and-bail forces us to have to | ||||
|                 # carefully rewrap TCP 104-connection-reset errors as | ||||
|                 # EOF so as to avoid propagating cancel-causing errors | ||||
|                 # to the channel-msg loop machinery. Likely it would | ||||
|                 # be better to eventually have a "discovery" protocol | ||||
|                 # with basic handshake instead? | ||||
|                 with trio.move_on_after(timeout): | ||||
|                     async with _connect_chan(addr.unwrap()): | ||||
|                         ponged_addrs.append(addr) | ||||
| 
 | ||||
|             except OSError: | ||||
|                 # ?TODO, make this a "discovery" log level? | ||||
|                 logger.info( | ||||
|                     f'No root-actor registry found @ {addr!r}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         # !TODO, this is basically just another (abstract) | ||||
|         # happy-eyeballs, so we should try for formalize it somewhere | ||||
|         # in a `.[_]discovery` ya? | ||||
|         # | ||||
|         async with trio.open_nursery() as tn: | ||||
|             for uw_addr in uw_reg_addrs: | ||||
|                 addr: Address = wrap_address(uw_addr) | ||||
|                 tn.start_soon( | ||||
|                     ping_tpt_socket, | ||||
|                     addr, | ||||
|                 ) | ||||
| 
 | ||||
|         trans_bind_addrs: list[UnwrappedAddress] = [] | ||||
| 
 | ||||
|         # Create a new local root-actor instance which IS NOT THE | ||||
|         # REGISTRAR | ||||
|         if ponged_addrs: | ||||
|             if ensure_registry: | ||||
|                 raise RuntimeError( | ||||
|                      f'Failed to open `{name}`@{ponged_addrs}: ' | ||||
|                     'registry socket(s) already bound' | ||||
|                 ) | ||||
| 
 | ||||
|             # we were able to connect to an arbiter | ||||
|             logger.info( | ||||
|                 f'Registry(s) seem(s) to exist @ {ponged_addrs}' | ||||
|             ) | ||||
| 
 | ||||
|             actor = _runtime.Actor( | ||||
|                 name=name or 'anonymous', | ||||
|                 uuid=mk_uuid(), | ||||
|                 registry_addrs=ponged_addrs, | ||||
|                 loglevel=loglevel, | ||||
|                 enable_modules=enable_modules, | ||||
|             ) | ||||
|             # **DO NOT** use the registry_addrs as the | ||||
|             # ipc-transport-server's bind-addrs as this is | ||||
|             # a new NON-registrar, ROOT-actor. | ||||
|             # | ||||
|             # XXX INSTEAD, bind random addrs using the same tpt | ||||
|             # proto. | ||||
|             for addr in ponged_addrs: | ||||
|                 trans_bind_addrs.append( | ||||
|                     addr.get_random( | ||||
|                         bindspace=addr.bindspace, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|         # Start this local actor as the "registrar", aka a regular | ||||
|         # actor who manages the local registry of "mailboxes" of | ||||
|         # other process-tree-local sub-actors. | ||||
|         else: | ||||
|             # NOTE that if the current actor IS THE REGISTAR, the | ||||
|             # following init steps are taken: | ||||
|             # - the tranport layer server is bound to each addr | ||||
|             #   pair defined in provided registry_addrs, or the default. | ||||
|             trans_bind_addrs = uw_reg_addrs | ||||
| 
 | ||||
|             # - it is normally desirable for any registrar to stay up | ||||
|             #   indefinitely until either all registered (child/sub) | ||||
|             #   actors are terminated (via SC supervision) or, | ||||
|             #   a re-election process has taken place. | ||||
|             # NOTE: all of ^ which is not implemented yet - see: | ||||
|             # https://github.com/goodboy/tractor/issues/216 | ||||
|             # https://github.com/goodboy/tractor/pull/348 | ||||
|             # https://github.com/goodboy/tractor/issues/296 | ||||
| 
 | ||||
|             # TODO: rename as `RootActor` or is that even necessary? | ||||
|             actor = _runtime.Arbiter( | ||||
|                 name=name or 'registrar', | ||||
|                 uuid=mk_uuid(), | ||||
|                 registry_addrs=registry_addrs, | ||||
|                 loglevel=loglevel, | ||||
|                 enable_modules=enable_modules, | ||||
|             ) | ||||
|             # XXX, in case the root actor runtime was actually run from | ||||
|             # `tractor.to_asyncio.run_as_asyncio_guest()` and NOt | ||||
|             # `.trio.run()`. | ||||
|             actor._infected_aio = _state._runtime_vars['_is_infected_aio'] | ||||
| 
 | ||||
|         # NOTE, only set the loopback addr for the | ||||
|         # process-tree-global "root" mailbox since all sub-actors | ||||
|         # should be able to speak to their root actor over that | ||||
|         # channel. | ||||
|         raddrs: list[Address] = _state._runtime_vars['_root_addrs'] | ||||
|         raddrs.extend(trans_bind_addrs) | ||||
|         # TODO, remove once we have also removed all usage; | ||||
|         # eventually all (root-)registry apis should expect > 1 addr. | ||||
|         _state._runtime_vars['_root_mailbox'] = raddrs[0] | ||||
| 
 | ||||
|         # Start up main task set via core actor-runtime nurseries. | ||||
|         ''' | ||||
|         try: | ||||
|             # assign process-local actor | ||||
|             _state._current_actor = actor | ||||
| 
 | ||||
|             # start local channel-server and fake the portal API | ||||
|             # NOTE: this won't block since we provide the nursery | ||||
|             report: str = f'Starting actor-runtime for {actor.aid.reprol()!r}\n' | ||||
|             if reg_addrs := actor.registry_addrs: | ||||
|                 report += ( | ||||
|                     '-> Opening new registry @ ' | ||||
|                     + | ||||
|                     '\n'.join( | ||||
|                         f'{addr}' for addr in reg_addrs | ||||
|                     ) | ||||
|                 ) | ||||
|             logger.info(f'{report}\n') | ||||
| 
 | ||||
|             # start runtime in a bg sub-task, yield to caller. | ||||
|             async with ( | ||||
|                 collapse_eg(), | ||||
|                 trio.open_nursery() as root_tn, | ||||
| 
 | ||||
|                 # ?TODO? finally-footgun below? | ||||
|                 # -> see note on why shielding. | ||||
|                 # maybe_raise_from_masking_exc(), | ||||
|             ): | ||||
|                 actor._root_tn = root_tn | ||||
|                 # `_runtime.async_main()` creates an internal nursery | ||||
|                 # and blocks here until any underlying actor(-process) | ||||
|                 # tree has terminated thereby conducting so called | ||||
|                 # "end-to-end" structured concurrency throughout an | ||||
|                 # entire hierarchical python sub-process set; all | ||||
|                 # "actor runtime" primitives are SC-compat and thus all | ||||
|                 # transitively spawned actors/processes must be as | ||||
|                 # well. | ||||
|                 await root_tn.start( | ||||
|                     partial( | ||||
|                         _runtime.async_main, | ||||
|                         actor, | ||||
|                         accept_addrs=trans_bind_addrs, | ||||
|                         parent_addr=None | ||||
|                     ) | ||||
|                 ) | ||||
|                 try: | ||||
|                     yield actor | ||||
|                 except ( | ||||
|                     Exception, | ||||
|                     BaseExceptionGroup, | ||||
|                 ) as err: | ||||
| 
 | ||||
|                     # TODO, in beginning to handle the subsubactor with | ||||
|                     # crashed grandparent cases.. | ||||
|                     # | ||||
|                     # was_locked: bool = await debug.maybe_wait_for_debugger( | ||||
|                     #     child_in_debug=True, | ||||
|                     # ) | ||||
|                     # XXX NOTE XXX see equiv note inside | ||||
|                     # `._runtime.Actor._stream_handler()` where in the | ||||
|                     # non-root or root-that-opened-this-mahually case we | ||||
|                     # wait for the local actor-nursery to exit before | ||||
|                     # exiting the transport channel handler. | ||||
|                     entered: bool = await debug._maybe_enter_pm( | ||||
|                         err, | ||||
|                         api_frame=inspect.currentframe(), | ||||
|                         debug_filter=debug_filter, | ||||
| 
 | ||||
|                         # XXX NOTE, required to debug root-actor | ||||
|                         # crashes under cancellation conditions; so | ||||
|                         # most of them! | ||||
|                         shield=root_tn.cancel_scope.cancel_called, | ||||
|                     ) | ||||
| 
 | ||||
|                     if ( | ||||
|                         not entered | ||||
|                         and | ||||
|                         not is_multi_cancelled( | ||||
|                             err, | ||||
|                         ) | ||||
|                     ): | ||||
|                         logger.exception( | ||||
|                             'Root actor crashed\n' | ||||
|                             f'>x)\n' | ||||
|                             f' |_{actor}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                     # ALWAYS re-raise any error bubbled up from the | ||||
|                     # runtime! | ||||
|                     raise | ||||
| 
 | ||||
|                 finally: | ||||
|                     # NOTE/TODO?, not sure if we'll ever need this but it's | ||||
|                     # possibly better for even more determinism? | ||||
|                     # logger.cancel( | ||||
|                     #     f'Waiting on {len(nurseries)} nurseries in root..') | ||||
|                     # nurseries = actor._actoruid2nursery.values() | ||||
|                     # async with trio.open_nursery() as tempn: | ||||
|                     #     for an in nurseries: | ||||
|                     #         tempn.start_soon(an.exited.wait) | ||||
| 
 | ||||
|                     op_nested_actor_repr: str = _pformat.nest_from_op( | ||||
|                         input_op='>) ', | ||||
|                         text=actor.pformat(), | ||||
|                         nest_prefix='|_', | ||||
|                     ) | ||||
|                     logger.info( | ||||
|                         f'Closing down root actor\n' | ||||
|                         f'{op_nested_actor_repr}' | ||||
|                     ) | ||||
|                     # XXX, THIS IS A *finally-footgun*! | ||||
|                     # (also mentioned in with-block above) | ||||
|                     # -> though already shields iternally it can | ||||
|                     # taskc here and mask underlying errors raised in | ||||
|                     # the try-block above? | ||||
|                     with trio.CancelScope(shield=True): | ||||
|                         await actor.cancel(None)  # self cancel | ||||
|         finally: | ||||
|             # revert all process-global runtime state | ||||
|             if ( | ||||
|                 debug_mode | ||||
|                 and | ||||
|                 _spawn._spawn_method == 'trio' | ||||
|             ): | ||||
|                 _state._runtime_vars['_debug_mode'] = False | ||||
| 
 | ||||
|             _state._current_actor = None | ||||
|             _state._last_actor_terminated = actor | ||||
| 
 | ||||
|             sclang_repr: str = _pformat.nest_from_op( | ||||
|                 input_op=')>', | ||||
|                 text=actor.pformat(), | ||||
|                 nest_prefix='|_', | ||||
|                 nest_indent=1, | ||||
|             ) | ||||
|             # TODO: this connect-and-bail forces us to have to | ||||
|             # carefully rewrap TCP 104-connection-reset errors as | ||||
|             # EOF so as to avoid propagating cancel-causing errors | ||||
|             # to the channel-msg loop machinery. Likely it would | ||||
|             # be better to eventually have a "discovery" protocol | ||||
|             # with basic handshake instead? | ||||
|             with trio.move_on_after(timeout): | ||||
|                 async with _connect_chan(*addr): | ||||
|                     ponged_addrs.append(addr) | ||||
| 
 | ||||
|         except OSError: | ||||
|             # TODO: make this a "discovery" log level? | ||||
|             logger.info( | ||||
|                 f'Root actor terminated\n' | ||||
|                 f'{sclang_repr}' | ||||
|                 f'No actor registry found @ {addr}\n' | ||||
|             ) | ||||
| 
 | ||||
|     async with trio.open_nursery() as tn: | ||||
|         for addr in registry_addrs: | ||||
|             tn.start_soon( | ||||
|                 ping_tpt_socket, | ||||
|                 tuple(addr),  # TODO: just drop this requirement? | ||||
|             ) | ||||
| 
 | ||||
|     trans_bind_addrs: list[tuple[str, int]] = [] | ||||
| 
 | ||||
|     # Create a new local root-actor instance which IS NOT THE | ||||
|     # REGISTRAR | ||||
|     if ponged_addrs: | ||||
|         if ensure_registry: | ||||
|             raise RuntimeError( | ||||
|                  f'Failed to open `{name}`@{ponged_addrs}: ' | ||||
|                 'registry socket(s) already bound' | ||||
|             ) | ||||
| 
 | ||||
|         # we were able to connect to an arbiter | ||||
|         logger.info( | ||||
|             f'Registry(s) seem(s) to exist @ {ponged_addrs}' | ||||
|         ) | ||||
| 
 | ||||
|         actor = Actor( | ||||
|             name=name or 'anonymous', | ||||
|             registry_addrs=ponged_addrs, | ||||
|             loglevel=loglevel, | ||||
|             enable_modules=enable_modules, | ||||
|         ) | ||||
|         # DO NOT use the registry_addrs as the transport server | ||||
|         # addrs for this new non-registar, root-actor. | ||||
|         for host, port in ponged_addrs: | ||||
|             # NOTE: zero triggers dynamic OS port allocation | ||||
|             trans_bind_addrs.append((host, 0)) | ||||
| 
 | ||||
|     # Start this local actor as the "registrar", aka a regular | ||||
|     # actor who manages the local registry of "mailboxes" of | ||||
|     # other process-tree-local sub-actors. | ||||
|     else: | ||||
| 
 | ||||
|         # NOTE that if the current actor IS THE REGISTAR, the | ||||
|         # following init steps are taken: | ||||
|         # - the tranport layer server is bound to each (host, port) | ||||
|         #   pair defined in provided registry_addrs, or the default. | ||||
|         trans_bind_addrs = registry_addrs | ||||
| 
 | ||||
|         # - it is normally desirable for any registrar to stay up | ||||
|         #   indefinitely until either all registered (child/sub) | ||||
|         #   actors are terminated (via SC supervision) or, | ||||
|         #   a re-election process has taken place.  | ||||
|         # NOTE: all of ^ which is not implemented yet - see: | ||||
|         # https://github.com/goodboy/tractor/issues/216 | ||||
|         # https://github.com/goodboy/tractor/pull/348 | ||||
|         # https://github.com/goodboy/tractor/issues/296 | ||||
| 
 | ||||
|         actor = Arbiter( | ||||
|             name or 'registrar', | ||||
|             registry_addrs=registry_addrs, | ||||
|             loglevel=loglevel, | ||||
|             enable_modules=enable_modules, | ||||
|         ) | ||||
|         # XXX, in case the root actor runtime was actually run from | ||||
|         # `tractor.to_asyncio.run_as_asyncio_guest()` and NOt | ||||
|         # `.trio.run()`. | ||||
|         actor._infected_aio = _state._runtime_vars['_is_infected_aio'] | ||||
| 
 | ||||
|     # Start up main task set via core actor-runtime nurseries. | ||||
|     try: | ||||
|         # assign process-local actor | ||||
|         _state._current_actor = actor | ||||
| 
 | ||||
|         # start local channel-server and fake the portal API | ||||
|         # NOTE: this won't block since we provide the nursery | ||||
|         ml_addrs_str: str = '\n'.join( | ||||
|             f'@{addr}' for addr in trans_bind_addrs | ||||
|         ) | ||||
|         logger.info( | ||||
|             f'Starting local {actor.uid} on the following transport addrs:\n' | ||||
|             f'{ml_addrs_str}' | ||||
|         ) | ||||
| 
 | ||||
|         # start the actor runtime in a new task | ||||
|         async with trio.open_nursery() as nursery: | ||||
| 
 | ||||
|             # ``_runtime.async_main()`` creates an internal nursery | ||||
|             # and blocks here until any underlying actor(-process) | ||||
|             # tree has terminated thereby conducting so called | ||||
|             # "end-to-end" structured concurrency throughout an | ||||
|             # entire hierarchical python sub-process set; all | ||||
|             # "actor runtime" primitives are SC-compat and thus all | ||||
|             # transitively spawned actors/processes must be as | ||||
|             # well. | ||||
|             await nursery.start( | ||||
|                 partial( | ||||
|                     async_main, | ||||
|                     actor, | ||||
|                     accept_addrs=trans_bind_addrs, | ||||
|                     parent_addr=None | ||||
|                 ) | ||||
|             ) | ||||
|             try: | ||||
|                 yield actor | ||||
|             except ( | ||||
|                 Exception, | ||||
|                 BaseExceptionGroup, | ||||
|             ) as err: | ||||
| 
 | ||||
|                 # XXX NOTE XXX see equiv note inside | ||||
|                 # `._runtime.Actor._stream_handler()` where in the | ||||
|                 # non-root or root-that-opened-this-mahually case we | ||||
|                 # wait for the local actor-nursery to exit before | ||||
|                 # exiting the transport channel handler. | ||||
|                 entered: bool = await _debug._maybe_enter_pm( | ||||
|                     err, | ||||
|                     api_frame=inspect.currentframe(), | ||||
|                     debug_filter=debug_filter, | ||||
|                 ) | ||||
| 
 | ||||
|                 if ( | ||||
|                     not entered | ||||
|                     and | ||||
|                     not is_multi_cancelled( | ||||
|                         err, | ||||
|                     ) | ||||
|                 ): | ||||
|                     logger.exception('Root actor crashed\n') | ||||
| 
 | ||||
|                 # ALWAYS re-raise any error bubbled up from the | ||||
|                 # runtime! | ||||
|                 raise | ||||
| 
 | ||||
|             finally: | ||||
|                 # NOTE: not sure if we'll ever need this but it's | ||||
|                 # possibly better for even more determinism? | ||||
|                 # logger.cancel( | ||||
|                 #     f'Waiting on {len(nurseries)} nurseries in root..') | ||||
|                 # nurseries = actor._actoruid2nursery.values() | ||||
|                 # async with trio.open_nursery() as tempn: | ||||
|                 #     for an in nurseries: | ||||
|                 #         tempn.start_soon(an.exited.wait) | ||||
| 
 | ||||
|                 logger.info( | ||||
|                     'Closing down root actor' | ||||
|                 ) | ||||
|                 await actor.cancel(None)  # self cancel | ||||
|     finally: | ||||
|         _state._current_actor = None | ||||
|         _state._last_actor_terminated = actor | ||||
| 
 | ||||
|         # restore built-in `breakpoint()` hook state | ||||
|         if ( | ||||
|             debug_mode | ||||
|             and | ||||
|             maybe_enable_greenback | ||||
|         ): | ||||
|             if builtin_bp_handler is not None: | ||||
|                 sys.breakpointhook = builtin_bp_handler | ||||
| 
 | ||||
|             if orig_bp_path is not None: | ||||
|                 os.environ['PYTHONBREAKPOINT'] = orig_bp_path | ||||
| 
 | ||||
|             else: | ||||
|                 # clear env back to having no entry | ||||
|                 os.environ.pop('PYTHONBREAKPOINT', None) | ||||
| 
 | ||||
|         logger.runtime("Root actor terminated") | ||||
| 
 | ||||
| 
 | ||||
| def run_daemon( | ||||
|     enable_modules: list[str], | ||||
| 
 | ||||
|     # runtime kwargs | ||||
|     name: str | None = 'root', | ||||
|     registry_addrs: list[UnwrappedAddress]|None = None, | ||||
|     registry_addrs: list[tuple[str, int]] = _default_lo_addrs, | ||||
| 
 | ||||
|     start_method: str | None = None, | ||||
|     debug_mode: bool = False, | ||||
| 
 | ||||
|     # TODO, support `infected_aio=True` mode by, | ||||
|     # - calling the appropriate entrypoint-func from `.to_asyncio` | ||||
|     # - maybe init-ing `greenback` as done above in | ||||
|     #   `open_root_actor()`. | ||||
| 
 | ||||
|     **kwargs | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Spawn a root (daemon) actor which will respond to RPC; the main | ||||
|     task simply starts the runtime and then blocks via embedded | ||||
|     `trio.sleep_forever()`. | ||||
|     Spawn daemon actor which will respond to RPC; the main task simply | ||||
|     starts the runtime and then sleeps forever. | ||||
| 
 | ||||
|     This is a very minimal convenience wrapper around starting | ||||
|     a "run-until-cancelled" root actor which can be started with a set | ||||
|  | @ -633,6 +475,7 @@ def run_daemon( | |||
|         importlib.import_module(path) | ||||
| 
 | ||||
|     async def _main(): | ||||
| 
 | ||||
|         async with open_root_actor( | ||||
|             registry_addrs=registry_addrs, | ||||
|             name=name, | ||||
|  |  | |||
							
								
								
									
										291
									
								
								tractor/_rpc.py
								
								
								
								
							
							
						
						
									
										291
									
								
								tractor/_rpc.py
								
								
								
								
							|  | @ -37,13 +37,12 @@ import warnings | |||
| 
 | ||||
| import trio | ||||
| from trio import ( | ||||
|     Cancelled, | ||||
|     CancelScope, | ||||
|     Nursery, | ||||
|     TaskStatus, | ||||
| ) | ||||
| 
 | ||||
| from .ipc import Channel | ||||
| from ._ipc import Channel | ||||
| from ._context import ( | ||||
|     Context, | ||||
| ) | ||||
|  | @ -53,18 +52,14 @@ from ._exceptions import ( | |||
|     ModuleNotExposed, | ||||
|     MsgTypeError, | ||||
|     TransportClosed, | ||||
|     is_multi_cancelled, | ||||
|     pack_error, | ||||
|     unpack_error, | ||||
| ) | ||||
| from .trionics import ( | ||||
|     collapse_eg, | ||||
|     is_multi_cancelled, | ||||
|     maybe_raise_from_masking_exc, | ||||
| ) | ||||
| from .devx import ( | ||||
|     debug, | ||||
|     maybe_wait_for_debugger, | ||||
|     _debug, | ||||
|     add_div, | ||||
|     pformat as _pformat, | ||||
| ) | ||||
| from . import _state | ||||
| from .log import get_logger | ||||
|  | @ -73,7 +68,7 @@ from .msg import ( | |||
|     MsgCodec, | ||||
|     PayloadT, | ||||
|     NamespacePath, | ||||
|     pretty_struct, | ||||
|     # pretty_struct, | ||||
|     _ops as msgops, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|  | @ -221,18 +216,11 @@ async def _invoke_non_context( | |||
|             task_status.started(ctx) | ||||
|             result = await coro | ||||
|             fname: str = func.__name__ | ||||
| 
 | ||||
|             op_nested_task: str = _pformat.nest_from_op( | ||||
|                 input_op=f')> cid: {ctx.cid!r}', | ||||
|                 text=f'{ctx._task}', | ||||
|                 nest_indent=1,  # under > | ||||
|             ) | ||||
|             log.runtime( | ||||
|                 f'RPC task complete\n' | ||||
|                 f'\n' | ||||
|                 f'{op_nested_task}\n' | ||||
|                 f'\n' | ||||
|                 f')> {fname}() -> {pformat(result)}\n' | ||||
|                 'RPC complete:\n' | ||||
|                 f'task: {ctx._task}\n' | ||||
|                 f'|_cid={ctx.cid}\n' | ||||
|                 f'|_{fname}() -> {pformat(result)}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # NOTE: only send result if we know IPC isn't down | ||||
|  | @ -263,7 +251,7 @@ async def _errors_relayed_via_ipc( | |||
|     ctx: Context, | ||||
|     is_rpc: bool, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
|     hide_tb: bool = False, | ||||
|     debug_kbis: bool = False, | ||||
|     task_status: TaskStatus[ | ||||
|         Context | BaseException | ||||
|  | @ -279,7 +267,7 @@ async def _errors_relayed_via_ipc( | |||
| 
 | ||||
|     # TODO: a debug nursery when in debug mode! | ||||
|     # async with maybe_open_debugger_nursery() as debug_tn: | ||||
|     # => see matching comment in side `.debug._pause()` | ||||
|     # => see matching comment in side `._debug._pause()` | ||||
|     rpc_err: BaseException|None = None | ||||
|     try: | ||||
|         yield  # run RPC invoke body | ||||
|  | @ -331,7 +319,7 @@ async def _errors_relayed_via_ipc( | |||
|                     'RPC task crashed, attempting to enter debugger\n' | ||||
|                     f'|_{ctx}' | ||||
|                 ) | ||||
|                 entered_debug = await debug._maybe_enter_pm( | ||||
|                 entered_debug = await _debug._maybe_enter_pm( | ||||
|                     err, | ||||
|                     api_frame=inspect.currentframe(), | ||||
|                 ) | ||||
|  | @ -384,13 +372,13 @@ async def _errors_relayed_via_ipc( | |||
| 
 | ||||
|     # RPC task bookeeping. | ||||
|     # since RPC tasks are scheduled inside a flat | ||||
|     # `Actor._service_tn`, we add "handles" to each such that | ||||
|     # `Actor._service_n`, we add "handles" to each such that | ||||
|     # they can be individually ccancelled. | ||||
|     finally: | ||||
| 
 | ||||
|         # if the error is not from user code and instead a failure of | ||||
|         # an internal-runtime-RPC or IPC-connection, we do (prolly) want | ||||
|         # to show this frame! | ||||
|         # if the error is not from user code and instead a failure | ||||
|         # of a runtime RPC or transport failure we do prolly want to | ||||
|         # show this frame | ||||
|         if ( | ||||
|             rpc_err | ||||
|             and ( | ||||
|  | @ -462,7 +450,7 @@ async def _invoke( | |||
|     connected IPC channel. | ||||
| 
 | ||||
|     This is the core "RPC" `trio.Task` scheduling machinery used to start every | ||||
|     remotely invoked function, normally in `Actor._service_tn: Nursery`. | ||||
|     remotely invoked function, normally in `Actor._service_n: Nursery`. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|  | @ -475,7 +463,7 @@ async def _invoke( | |||
|     ): | ||||
|         # XXX for .pause_from_sync()` usage we need to make sure | ||||
|         # `greenback` is boostrapped in the subactor! | ||||
|         await debug.maybe_init_greenback() | ||||
|         await _debug.maybe_init_greenback() | ||||
| 
 | ||||
|     # TODO: possibly a specially formatted traceback | ||||
|     # (not sure what typing is for this..)? | ||||
|  | @ -629,39 +617,28 @@ async def _invoke( | |||
|         #  -> the below scope is never exposed to the | ||||
|         #     `@context` marked RPC function. | ||||
|         # - `._portal` is never set. | ||||
|         scope_err: BaseException|None = None | ||||
|         try: | ||||
|             # TODO: better `trionics` primitive/tooling usage here! | ||||
|             # -[ ] should would be nice to have our `TaskMngr` | ||||
|             #   nursery here! | ||||
|             # -[ ] payload value checking like we do with | ||||
|             #   `.started()` such that the debbuger can engage | ||||
|             #   here in the child task instead of waiting for the | ||||
|             #   parent to crash with it's own MTE.. | ||||
|             # | ||||
|             tn: Nursery | ||||
|             tn: trio.Nursery | ||||
|             rpc_ctx_cs: CancelScope | ||||
|             async with ( | ||||
|                 collapse_eg(hide_tb=False), | ||||
|                 trio.open_nursery() as tn, | ||||
|                 msgops.maybe_limit_plds( | ||||
|                     ctx=ctx, | ||||
|                     spec=ctx_meta.get('pld_spec'), | ||||
|                     dec_hook=ctx_meta.get('dec_hook'), | ||||
|                 ), | ||||
| 
 | ||||
|                 # XXX NOTE, this being the "most embedded" | ||||
|                 # scope ensures unasking of the `await coro` below | ||||
|                 # *should* never be interfered with!! | ||||
|                 maybe_raise_from_masking_exc( | ||||
|                     unmask_from=(Cancelled,), | ||||
|                 ) as _mbme,  # maybe boxed masked exc | ||||
|             ): | ||||
|                 ctx._scope_nursery = tn | ||||
|                 rpc_ctx_cs = ctx._scope = tn.cancel_scope | ||||
|                 task_status.started(ctx) | ||||
| 
 | ||||
|                 # invoke user endpoint fn. | ||||
|                 # TODO: better `trionics` tooling: | ||||
|                 # -[ ] should would be nice to have our `TaskMngr` | ||||
|                 #   nursery here! | ||||
|                 # -[ ] payload value checking like we do with | ||||
|                 #   `.started()` such that the debbuger can engage | ||||
|                 #   here in the child task instead of waiting for the | ||||
|                 #   parent to crash with it's own MTE.. | ||||
|                 res: Any|PayloadT = await coro | ||||
|                 return_msg: Return|CancelAck = return_msg_type( | ||||
|                     cid=cid, | ||||
|  | @ -669,11 +646,6 @@ async def _invoke( | |||
|                 ) | ||||
|                 # set and shuttle final result to "parent"-side task. | ||||
|                 ctx._result = res | ||||
|                 log.runtime( | ||||
|                     f'Sending result msg and exiting {ctx.side!r}\n' | ||||
|                     f'\n' | ||||
|                     f'{pretty_struct.pformat(return_msg)}\n' | ||||
|                 ) | ||||
|                 await chan.send(return_msg) | ||||
| 
 | ||||
|             # NOTE: this happens IFF `ctx._scope.cancel()` is | ||||
|  | @ -762,54 +734,45 @@ async def _invoke( | |||
|         # XXX: do we ever trigger this block any more? | ||||
|         except ( | ||||
|             BaseExceptionGroup, | ||||
|             BaseException, | ||||
|             trio.Cancelled, | ||||
|         ) as _scope_err: | ||||
|             scope_err = _scope_err | ||||
|             BaseException, | ||||
| 
 | ||||
|         ) as scope_error: | ||||
|             if ( | ||||
|                 isinstance(scope_err, RuntimeError) | ||||
|                 and | ||||
|                 scope_err.args | ||||
|                 and | ||||
|                 'Cancel scope stack corrupted' in scope_err.args[0] | ||||
|                 isinstance(scope_error, RuntimeError) | ||||
|                 and scope_error.args | ||||
|                 and 'Cancel scope stack corrupted' in scope_error.args[0] | ||||
|             ): | ||||
|                 log.exception('Cancel scope stack corrupted!?\n') | ||||
|                 # debug.mk_pdb().set_trace() | ||||
|                 # _debug.mk_pdb().set_trace() | ||||
| 
 | ||||
|             # always set this (child) side's exception as the | ||||
|             # local error on the context | ||||
|             ctx._local_error: BaseException = scope_err | ||||
|             ctx._local_error: BaseException = scope_error | ||||
|             # ^-TODO-^ question, | ||||
|             # does this matter other then for | ||||
|             # consistentcy/testing? | ||||
|             # |_ no user code should be in this scope at this point | ||||
|             #    AND we already set this in the block below? | ||||
| 
 | ||||
|             # XXX if a remote error was set then likely the | ||||
|             # exc group was raised due to that, so | ||||
|             # if a remote error was set then likely the | ||||
|             # exception group was raised due to that, so | ||||
|             # and we instead raise that error immediately! | ||||
|             maybe_re: ( | ||||
|                 ContextCancelled|RemoteActorError | ||||
|             ) = ctx.maybe_raise() | ||||
|             if maybe_re: | ||||
|                 log.cancel( | ||||
|                     f'Suppressing remote-exc from peer,\n' | ||||
|                     f'{maybe_re!r}\n' | ||||
|                 ) | ||||
|             ctx.maybe_raise() | ||||
| 
 | ||||
|             # maybe TODO: pack in come kinda | ||||
|             # `trio.Cancelled.__traceback__` here so they can be | ||||
|             # unwrapped and displayed on the caller side? no se.. | ||||
|             raise scope_err | ||||
|             raise | ||||
| 
 | ||||
|         # `@context` entrypoint task bookeeping. | ||||
|         # i.e. only pop the context tracking if used ;) | ||||
|         finally: | ||||
|             assert chan.aid | ||||
|             assert chan.uid | ||||
| 
 | ||||
|             # don't pop the local context until we know the | ||||
|             # associated child isn't in debug any more | ||||
|             await debug.maybe_wait_for_debugger() | ||||
|             await maybe_wait_for_debugger() | ||||
|             ctx: Context = actor._contexts.pop(( | ||||
|                 chan.uid, | ||||
|                 cid, | ||||
|  | @ -822,49 +785,26 @@ async def _invoke( | |||
|                 f'after having {ctx.repr_state!r}\n' | ||||
|             ) | ||||
|             if merr: | ||||
| 
 | ||||
|                 logmeth: Callable = log.error | ||||
|                 if ( | ||||
|                     # ctxc: by `Context.cancel()` | ||||
|                     isinstance(merr, ContextCancelled) | ||||
|                 if isinstance(merr, ContextCancelled): | ||||
|                     logmeth: Callable = log.runtime | ||||
| 
 | ||||
|                     # out-of-layer cancellation, one of: | ||||
|                     # - actorc: by `Portal.cancel_actor()` | ||||
|                     # - OSc: by SIGINT or `Process.signal()` | ||||
|                     or ( | ||||
|                         isinstance(merr, trio.Cancelled) | ||||
|                         and | ||||
|                         ctx.canceller | ||||
|                     ) | ||||
|                 ): | ||||
|                     logmeth: Callable = log.cancel | ||||
|                     descr_str += ( | ||||
|                         f' with {merr!r}\n' | ||||
|                     ) | ||||
| 
 | ||||
|                 elif ( | ||||
|                     not isinstance(merr, RemoteActorError) | ||||
|                 ): | ||||
|                     tb_str: str = ''.join( | ||||
|                         traceback.format_exception(merr) | ||||
|                     ) | ||||
|                 if not isinstance(merr, RemoteActorError): | ||||
|                     tb_str: str = ''.join(traceback.format_exception(merr)) | ||||
|                     descr_str += ( | ||||
|                         f'\n{merr!r}\n'  # needed? | ||||
|                         f'{tb_str}\n' | ||||
|                     ) | ||||
|                 else: | ||||
|                     descr_str += ( | ||||
|                         f'{merr!r}\n' | ||||
|                     ) | ||||
|                     descr_str += f'\n{merr!r}\n' | ||||
|             else: | ||||
|                 descr_str += ( | ||||
|                     f'\n' | ||||
|                     f'with final result {ctx.outcome!r}\n' | ||||
|                 ) | ||||
|                 descr_str += f'\nand final result {ctx.outcome!r}\n' | ||||
| 
 | ||||
|             logmeth( | ||||
|                 f'{message}\n' | ||||
|                 f'\n' | ||||
|                 f'{descr_str}\n' | ||||
|                 message | ||||
|                 + | ||||
|                 descr_str | ||||
|             ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -908,8 +848,8 @@ async def try_ship_error_to_remote( | |||
|             log.critical( | ||||
|                 'IPC transport failure -> ' | ||||
|                 f'failed to ship error to {remote_descr}!\n\n' | ||||
|                 f'{type(msg)!r}[{msg.boxed_type_str}] X=> {channel.uid}\n' | ||||
|                 f'\n' | ||||
|                 f'X=> {channel.uid}\n\n' | ||||
| 
 | ||||
|                 # TODO: use `.msg.preetty_struct` for this! | ||||
|                 f'{msg}\n' | ||||
|             ) | ||||
|  | @ -922,6 +862,7 @@ async def try_ship_error_to_remote( | |||
| 
 | ||||
| 
 | ||||
| async def process_messages( | ||||
|     actor: Actor, | ||||
|     chan: Channel, | ||||
|     shield: bool = False, | ||||
|     task_status: TaskStatus[CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
|  | @ -935,7 +876,7 @@ async def process_messages( | |||
| 
 | ||||
|     Receive (multiplexed) per-`Channel` RPC requests as msgs from | ||||
|     remote processes; schedule target async funcs as local | ||||
|     `trio.Task`s inside the `Actor._service_tn: Nursery`. | ||||
|     `trio.Task`s inside the `Actor._service_n: Nursery`. | ||||
| 
 | ||||
|     Depending on msg type, non-`cmd` (task spawning/starting) | ||||
|     request payloads (eg. `started`, `yield`, `return`, `error`) | ||||
|  | @ -959,8 +900,7 @@ async def process_messages( | |||
|       (as utilized inside `Portal.cancel_actor()` ). | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = _state.current_actor() | ||||
|     assert actor._service_tn  # runtime state sanity | ||||
|     assert actor._service_n  # runtime state sanity | ||||
| 
 | ||||
|     # TODO: once `trio` get's an "obvious way" for req/resp we | ||||
|     # should use it? | ||||
|  | @ -1031,10 +971,12 @@ async def process_messages( | |||
|                         cid=cid, | ||||
|                         kwargs=kwargs, | ||||
|                     ): | ||||
|                         kwargs |= {'req_chan': chan} | ||||
| 
 | ||||
|                         # XXX NOTE XXX don't start entire actor | ||||
|                         # runtime cancellation if this actor is | ||||
|                         # currently in debug mode! | ||||
|                         pdb_complete: trio.Event|None = debug.DebugStatus.repl_release | ||||
|                         pdb_complete: trio.Event|None = _debug.DebugStatus.repl_release | ||||
|                         if pdb_complete: | ||||
|                             await pdb_complete.wait() | ||||
| 
 | ||||
|  | @ -1049,14 +991,14 @@ async def process_messages( | |||
|                                 cid, | ||||
|                                 chan, | ||||
|                                 actor.cancel, | ||||
|                                 kwargs | {'req_chan': chan}, | ||||
|                                 kwargs, | ||||
|                                 is_rpc=False, | ||||
|                                 return_msg_type=CancelAck, | ||||
|                             ) | ||||
| 
 | ||||
|                         log.runtime( | ||||
|                             'Cancelling RPC-msg-loop with peer\n' | ||||
|                             f'->c}} {chan.aid.reprol()}@[{chan.maddr}]\n' | ||||
|                             'Cancelling IPC transport msg-loop with peer:\n' | ||||
|                             f'|_{chan}\n' | ||||
|                         ) | ||||
|                         loop_cs.cancel() | ||||
|                         break | ||||
|  | @ -1069,7 +1011,7 @@ async def process_messages( | |||
|                     ): | ||||
|                         target_cid: str = kwargs['cid'] | ||||
|                         kwargs |= { | ||||
|                             'requesting_aid': chan.aid, | ||||
|                             'requesting_uid': chan.uid, | ||||
|                             'ipc_msg': msg, | ||||
| 
 | ||||
|                             # XXX NOTE! ONLY the rpc-task-owning | ||||
|  | @ -1105,34 +1047,21 @@ async def process_messages( | |||
|                         ns=ns, | ||||
|                         func=funcname, | ||||
|                         kwargs=kwargs,  # type-spec this? see `msg.types` | ||||
|                         uid=actor_uuid, | ||||
|                         uid=actorid, | ||||
|                     ): | ||||
|                         if actor_uuid != chan.aid.uid: | ||||
|                             raise RuntimeError( | ||||
|                                 f'IPC <Start> msg <-> chan.aid mismatch!?\n' | ||||
|                                 f'Channel.aid = {chan.aid!r}\n' | ||||
|                                 f'Start.uid = {actor_uuid!r}\n' | ||||
|                             ) | ||||
|                         # await debug.pause() | ||||
|                         op_repr: str = 'Start <=) ' | ||||
|                         req_repr: str = _pformat.nest_from_op( | ||||
|                             input_op=op_repr, | ||||
|                             op_suffix='', | ||||
|                             nest_prefix='', | ||||
|                             text=f'{chan}', | ||||
| 
 | ||||
|                             nest_indent=len(op_repr)-1, | ||||
|                             rm_from_first_ln='<', | ||||
|                             # ^XXX, subtract -1 to account for | ||||
|                             # <Channel | ||||
|                             # ^_chevron to be stripped | ||||
|                         ) | ||||
|                         start_status: str = ( | ||||
|                             'Handling RPC request\n' | ||||
|                             f'{req_repr}\n' | ||||
|                             f'\n' | ||||
|                             f'->{{ ipc-context-id: {cid!r}\n' | ||||
|                             f'->{{ nsp for fn: `{ns}.{funcname}({kwargs})`\n' | ||||
|                             'Handling RPC `Start` request\n' | ||||
|                             f'<= peer: {actorid}\n\n' | ||||
|                             f'  |_{chan}\n' | ||||
|                             f'  |_cid: {cid}\n\n' | ||||
|                             # f'  |_{ns}.{funcname}({kwargs})\n' | ||||
|                             f'>> {actor.uid}\n' | ||||
|                             f'  |_{actor}\n' | ||||
|                             f'   -> nsp: `{ns}.{funcname}({kwargs})`\n' | ||||
| 
 | ||||
|                             # f'  |_{ns}.{funcname}({kwargs})\n\n' | ||||
| 
 | ||||
|                             # f'{pretty_struct.pformat(msg)}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                         # runtime-internal endpoint: `Actor.<funcname>` | ||||
|  | @ -1161,6 +1090,10 @@ async def process_messages( | |||
|                                 await chan.send(err_msg) | ||||
|                                 continue | ||||
| 
 | ||||
|                         start_status += ( | ||||
|                             f'   -> func: {func}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                         # schedule a task for the requested RPC function | ||||
|                         # in the actor's main "service nursery". | ||||
|                         # | ||||
|  | @ -1168,10 +1101,10 @@ async def process_messages( | |||
|                         # supervision isolation? would avoid having to | ||||
|                         # manage RPC tasks individually in `._rpc_tasks` | ||||
|                         # table? | ||||
|                         start_status += '->( scheduling new task..\n' | ||||
|                         start_status += '   -> scheduling new task..\n' | ||||
|                         log.runtime(start_status) | ||||
|                         try: | ||||
|                             ctx: Context = await actor._service_tn.start( | ||||
|                             ctx: Context = await actor._service_n.start( | ||||
|                                 partial( | ||||
|                                     _invoke, | ||||
|                                     actor, | ||||
|  | @ -1216,7 +1149,7 @@ async def process_messages( | |||
|                                 trio.Event(), | ||||
|                             ) | ||||
| 
 | ||||
|                     # XXX RUNTIME-SCOPED! remote (likely internal) error | ||||
|                     # runtime-scoped remote (internal) error | ||||
|                     # (^- bc no `Error.cid` -^) | ||||
|                     # | ||||
|                     # NOTE: this is the non-rpc error case, that | ||||
|  | @ -1252,24 +1185,12 @@ async def process_messages( | |||
|             # END-OF `async for`: | ||||
|             # IPC disconnected via `trio.EndOfChannel`, likely | ||||
|             # due to a (graceful) `Channel.aclose()`. | ||||
| 
 | ||||
|             chan_op_repr: str = '<=x] ' | ||||
|             chan_repr: str = _pformat.nest_from_op( | ||||
|                 input_op=chan_op_repr, | ||||
|                 op_suffix='', | ||||
|                 nest_prefix='', | ||||
|                 text=chan.pformat(), | ||||
|                 nest_indent=len(chan_op_repr)-1, | ||||
|                 rm_from_first_ln='<', | ||||
|             ) | ||||
|             log.runtime( | ||||
|                 f'IPC channel disconnected\n' | ||||
|                 f'{chan_repr}\n' | ||||
|                 f'\n' | ||||
|                 f'->c) cancelling RPC tasks.\n' | ||||
|                 f'channel for {chan.uid} disconnected, cancelling RPC tasks\n' | ||||
|                 f'|_{chan}\n' | ||||
|             ) | ||||
|             await actor.cancel_rpc_tasks( | ||||
|                 req_aid=actor.aid, | ||||
|                 req_uid=actor.uid, | ||||
|                 # a "self cancel" in terms of the lifetime of the | ||||
|                 # IPC connection which is presumed to be the | ||||
|                 # source of any requests for spawned tasks. | ||||
|  | @ -1291,10 +1212,8 @@ async def process_messages( | |||
|         # -[ ] figure out how this will break with other transports? | ||||
|         tc.report_n_maybe_raise( | ||||
|             message=( | ||||
|                 f'peer IPC channel closed abruptly?\n' | ||||
|                 f'\n' | ||||
|                 f'<=x[\n' | ||||
|                 f'  {chan}\n' | ||||
|                 f'peer IPC channel closed abruptly?\n\n' | ||||
|                 f'<=x {chan}\n' | ||||
|                 f'  |_{chan.raddr}\n\n' | ||||
|             ) | ||||
|             + | ||||
|  | @ -1311,7 +1230,7 @@ async def process_messages( | |||
|     ) as err: | ||||
| 
 | ||||
|         if nursery_cancelled_before_task: | ||||
|             sn: Nursery = actor._service_tn | ||||
|             sn: Nursery = actor._service_n | ||||
|             assert sn and sn.cancel_scope.cancel_called  # sanity | ||||
|             log.cancel( | ||||
|                 f'Service nursery cancelled before it handled {funcname}' | ||||
|  | @ -1341,37 +1260,13 @@ async def process_messages( | |||
|     finally: | ||||
|         # msg debugging for when he machinery is brokey | ||||
|         if msg is None: | ||||
|             message: str = 'Exiting RPC-loop without receiving a msg?' | ||||
|             message: str = 'Exiting IPC msg loop without receiving a msg?' | ||||
|         else: | ||||
|             task_op_repr: str = ')>' | ||||
|             task: trio.Task = trio.lowlevel.current_task() | ||||
| 
 | ||||
|             # maybe add cancelled opt prefix | ||||
|             if task._cancel_status.effectively_cancelled: | ||||
|                 task_op_repr = 'c' + task_op_repr | ||||
| 
 | ||||
|             task_repr: str = _pformat.nest_from_op( | ||||
|                 input_op=task_op_repr, | ||||
|                 text=f'{task!r}', | ||||
|                 nest_indent=1, | ||||
|             ) | ||||
|             # chan_op_repr: str = '<=} ' | ||||
|             # chan_repr: str = _pformat.nest_from_op( | ||||
|             #     input_op=chan_op_repr, | ||||
|             #     op_suffix='', | ||||
|             #     nest_prefix='', | ||||
|             #     text=chan.pformat(), | ||||
|             #     nest_indent=len(chan_op_repr)-1, | ||||
|             #     rm_from_first_ln='<', | ||||
|             # ) | ||||
|             message: str = ( | ||||
|                 f'Exiting RPC-loop with final msg\n' | ||||
|                 f'\n' | ||||
|                 # f'{chan_repr}\n' | ||||
|                 f'{task_repr}\n' | ||||
|                 f'\n' | ||||
|                 f'{pretty_struct.pformat(msg)}' | ||||
|                 f'\n' | ||||
|                 'Exiting IPC msg loop with final msg\n\n' | ||||
|                 f'<= peer: {chan.uid}\n' | ||||
|                 f'  |_{chan}\n\n' | ||||
|                 # f'{pretty_struct.pformat(msg)}' | ||||
|             ) | ||||
| 
 | ||||
|         log.runtime(message) | ||||
|  |  | |||
							
								
								
									
										1497
									
								
								tractor/_runtime.py
								
								
								
								
							
							
						
						
									
										1497
									
								
								tractor/_runtime.py
								
								
								
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -23,24 +23,19 @@ considered optional within the context of this runtime-library. | |||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from multiprocessing import shared_memory as shm | ||||
| from multiprocessing.shared_memory import ( | ||||
|     # SharedMemory, | ||||
|     ShareableList, | ||||
| ) | ||||
| import platform | ||||
| from sys import byteorder | ||||
| import time | ||||
| from typing import Optional | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     Struct, | ||||
|     to_builtins | ||||
| from multiprocessing import shared_memory as shm | ||||
| from multiprocessing.shared_memory import ( | ||||
|     SharedMemory, | ||||
|     ShareableList, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import Struct | ||||
| import tractor | ||||
| 
 | ||||
| from tractor.ipc._mp_bs import disable_mantracker | ||||
| from tractor.log import get_logger | ||||
| from .log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| _USE_POSIX = getattr(shm, '_USE_POSIX', False) | ||||
|  | @ -51,10 +46,7 @@ if _USE_POSIX: | |||
| try: | ||||
|     import numpy as np | ||||
|     from numpy.lib import recfunctions as rfn | ||||
|     # TODO ruff complains with, | ||||
|     # warning| F401: `nptyping` imported but unused; consider using | ||||
|     # `importlib.util.find_spec` to test for availability | ||||
|     import nptyping  # noqa | ||||
|     # import nptyping | ||||
| except ImportError: | ||||
|     pass | ||||
| 
 | ||||
|  | @ -62,7 +54,35 @@ except ImportError: | |||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| SharedMemory = disable_mantracker() | ||||
| def disable_mantracker(): | ||||
|     ''' | ||||
|     Disable all ``multiprocessing``` "resource tracking" machinery since | ||||
|     it's an absolute multi-threaded mess of non-SC madness. | ||||
| 
 | ||||
|     ''' | ||||
|     from multiprocessing import resource_tracker as mantracker | ||||
| 
 | ||||
|     # Tell the "resource tracker" thing to fuck off. | ||||
|     class ManTracker(mantracker.ResourceTracker): | ||||
|         def register(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def unregister(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def ensure_running(self): | ||||
|             pass | ||||
| 
 | ||||
|     # "know your land and know your prey" | ||||
|     # https://www.dailymotion.com/video/x6ozzco | ||||
|     mantracker._resource_tracker = ManTracker() | ||||
|     mantracker.register = mantracker._resource_tracker.register | ||||
|     mantracker.ensure_running = mantracker._resource_tracker.ensure_running | ||||
|     mantracker.unregister = mantracker._resource_tracker.unregister | ||||
|     mantracker.getfd = mantracker._resource_tracker.getfd | ||||
| 
 | ||||
| 
 | ||||
| disable_mantracker() | ||||
| 
 | ||||
| 
 | ||||
| class SharedInt: | ||||
|  | @ -122,7 +142,7 @@ class NDToken(Struct, frozen=True): | |||
|         ).descr | ||||
| 
 | ||||
|     def as_msg(self): | ||||
|         return to_builtins(self) | ||||
|         return self.to_dict() | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_msg(cls, msg: dict) -> NDToken: | ||||
|  | @ -790,23 +810,11 @@ def open_shm_list( | |||
|         readonly=readonly, | ||||
|     ) | ||||
| 
 | ||||
|     # TODO, factor into a @actor_fixture acm-API? | ||||
|     # -[ ] also `@maybe_actor_fixture()` which inludes | ||||
|     #     the .current_actor() convenience check? | ||||
|     #   |_ orr can that just be in the sin-maybe-version? | ||||
|     # | ||||
|     # "close" attached shm on actor teardown | ||||
|     try: | ||||
|         actor = tractor.current_actor() | ||||
| 
 | ||||
|         actor.lifetime_stack.callback(shml.shm.close) | ||||
| 
 | ||||
|         # XXX on 3.13+ we don't need to call this? | ||||
|         # -> bc we pass `track=False` for `SharedMemeory` orr? | ||||
|         if ( | ||||
|             platform.python_version_tuple()[:-1] < ('3', '13') | ||||
|         ): | ||||
|             actor.lifetime_stack.callback(shml.shm.unlink) | ||||
|         actor.lifetime_stack.callback(shml.shm.unlink) | ||||
|     except RuntimeError: | ||||
|         log.warning('tractor runtime not active, skipping teardown steps') | ||||
| 
 | ||||
|  | @ -34,9 +34,9 @@ from typing import ( | |||
| import trio | ||||
| from trio import TaskStatus | ||||
| 
 | ||||
| from .devx import ( | ||||
|     debug, | ||||
|     pformat as _pformat | ||||
| from tractor.devx import ( | ||||
|     maybe_wait_for_debugger, | ||||
|     acquire_debug_lock, | ||||
| ) | ||||
| from tractor._state import ( | ||||
|     current_actor, | ||||
|  | @ -46,26 +46,19 @@ from tractor._state import ( | |||
|     _runtime_vars, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| from tractor._addr import UnwrappedAddress | ||||
| from tractor._portal import Portal | ||||
| from tractor._runtime import Actor | ||||
| from tractor._entry import _mp_main | ||||
| from tractor._exceptions import ActorFailure | ||||
| from tractor.msg import ( | ||||
|     types as msgtypes, | ||||
|     pretty_struct, | ||||
| from tractor.msg.types import ( | ||||
|     SpawnSpec, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ipc import ( | ||||
|         _server, | ||||
|         Channel, | ||||
|     ) | ||||
|     from ._supervise import ActorNursery | ||||
|     ProcessType = TypeVar('ProcessType', mp.Process, trio.Process) | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger('tractor') | ||||
| 
 | ||||
| # placeholder for an mp start context if so using that backend | ||||
|  | @ -170,7 +163,7 @@ async def exhaust_portal( | |||
|         # TODO: merge with above? | ||||
|         log.warning( | ||||
|             'Cancelled portal result waiter task:\n' | ||||
|             f'uid: {portal.channel.aid}\n' | ||||
|             f'uid: {portal.channel.uid}\n' | ||||
|             f'error: {err}\n' | ||||
|         ) | ||||
|         return err | ||||
|  | @ -178,7 +171,7 @@ async def exhaust_portal( | |||
|     else: | ||||
|         log.debug( | ||||
|             f'Returning final result from portal:\n' | ||||
|             f'uid: {portal.channel.aid}\n' | ||||
|             f'uid: {portal.channel.uid}\n' | ||||
|             f'result: {final}\n' | ||||
|         ) | ||||
|         return final | ||||
|  | @ -297,23 +290,6 @@ async def hard_kill( | |||
|     # zombies (as a feature) we ask the OS to do send in the | ||||
|     # removal swad as the last resort. | ||||
|     if cs.cancelled_caught: | ||||
| 
 | ||||
|         # TODO? attempt at intermediary-rent-sub | ||||
|         # with child in debug lock? | ||||
|         # |_https://github.com/goodboy/tractor/issues/320 | ||||
|         # | ||||
|         # if not is_root_process(): | ||||
|         #     log.warning( | ||||
|         #         'Attempting to acquire debug-REPL-lock before zombie reap!' | ||||
|         #     ) | ||||
|         #     with trio.CancelScope(shield=True): | ||||
|         #         async with debug.acquire_debug_lock( | ||||
|         #             subactor_uid=current_actor().uid, | ||||
|         #         ) as _ctx: | ||||
|         #             log.warning( | ||||
|         #                 'Acquired debug lock, child ready to be killed ??\n' | ||||
|         #             ) | ||||
| 
 | ||||
|         # TODO: toss in the skynet-logo face as ascii art? | ||||
|         log.critical( | ||||
|             # 'Well, the #ZOMBIE_LORD_IS_HERE# to collect\n' | ||||
|  | @ -344,21 +320,19 @@ async def soft_kill( | |||
|     see `.hard_kill()`). | ||||
| 
 | ||||
|     ''' | ||||
|     chan: Channel = portal.channel | ||||
|     peer_aid: msgtypes.Aid = chan.aid | ||||
|     uid: tuple[str, str] = portal.channel.uid | ||||
|     try: | ||||
|         log.cancel( | ||||
|             f'Soft killing sub-actor via portal request\n' | ||||
|             f'\n' | ||||
|             f'c)=> {peer_aid.reprol()}@[{chan.maddr}]\n' | ||||
|             f'   |_{proc}\n' | ||||
|             'Soft killing sub-actor via portal request\n' | ||||
|             f'c)> {portal.chan.uid}\n' | ||||
|             f' |_{proc}\n' | ||||
|         ) | ||||
|         # wait on sub-proc to signal termination | ||||
|         await wait_func(proc) | ||||
| 
 | ||||
|     except trio.Cancelled: | ||||
|         with trio.CancelScope(shield=True): | ||||
|             await debug.maybe_wait_for_debugger( | ||||
|             await maybe_wait_for_debugger( | ||||
|                 child_in_debug=_runtime_vars.get( | ||||
|                     '_debug_mode', False | ||||
|                 ), | ||||
|  | @ -399,7 +373,7 @@ async def soft_kill( | |||
|             if proc.poll() is None:  # type: ignore | ||||
|                 log.warning( | ||||
|                     'Subactor still alive after cancel request?\n\n' | ||||
|                     f'uid: {peer_aid}\n' | ||||
|                     f'uid: {uid}\n' | ||||
|                     f'|_{proc}\n' | ||||
|                 ) | ||||
|                 n.cancel_scope.cancel() | ||||
|  | @ -413,15 +387,14 @@ async def new_proc( | |||
|     errors: dict[tuple[str, str], Exception], | ||||
| 
 | ||||
|     # passed through to actor main | ||||
|     bind_addrs: list[UnwrappedAddress], | ||||
|     parent_addr: UnwrappedAddress, | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
| 
 | ||||
|     *, | ||||
| 
 | ||||
|     infect_asyncio: bool = False, | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, | ||||
|     proc_kwargs: dict[str, any] = {} | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -441,7 +414,6 @@ async def new_proc( | |||
|         _runtime_vars,  # run time vars | ||||
|         infect_asyncio=infect_asyncio, | ||||
|         task_status=task_status, | ||||
|         proc_kwargs=proc_kwargs | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -452,13 +424,12 @@ async def trio_proc( | |||
|     errors: dict[tuple[str, str], Exception], | ||||
| 
 | ||||
|     # passed through to actor main | ||||
|     bind_addrs: list[UnwrappedAddress], | ||||
|     parent_addr: UnwrappedAddress, | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
|     *, | ||||
|     infect_asyncio: bool = False, | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, | ||||
|     proc_kwargs: dict[str, any] = {} | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -480,9 +451,6 @@ async def trio_proc( | |||
|         # the OS; it otherwise can be passed via the parent channel if | ||||
|         # we prefer in the future (for privacy). | ||||
|         "--uid", | ||||
|         # TODO, how to pass this over "wire" encodings like | ||||
|         # cmdline args? | ||||
|         # -[ ] maybe we can add an `msgtypes.Aid.min_tuple()` ? | ||||
|         str(subactor.uid), | ||||
|         # Address the child must connect to on startup | ||||
|         "--parent_addr", | ||||
|  | @ -500,20 +468,18 @@ async def trio_proc( | |||
| 
 | ||||
|     cancelled_during_spawn: bool = False | ||||
|     proc: trio.Process|None = None | ||||
|     ipc_server: _server.Server = actor_nursery._actor.ipc_server | ||||
|     try: | ||||
|         try: | ||||
|             proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd, **proc_kwargs) | ||||
|             proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd) | ||||
|             log.runtime( | ||||
|                 f'Started new child subproc\n' | ||||
|                 f'(>\n' | ||||
|                 f' |_{proc}\n' | ||||
|                 'Started new child\n' | ||||
|                 f'|_{proc}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # wait for actor to spawn and connect back to us | ||||
|             # channel should have handshake completed by the | ||||
|             # local actor by the time we get a ref to it | ||||
|             event, chan = await ipc_server.wait_for_peer( | ||||
|             event, chan = await actor_nursery._actor.wait_for_peer( | ||||
|                 subactor.uid | ||||
|             ) | ||||
| 
 | ||||
|  | @ -525,10 +491,10 @@ async def trio_proc( | |||
|                 with trio.CancelScope(shield=True): | ||||
|                     # don't clobber an ongoing pdb | ||||
|                     if is_root_process(): | ||||
|                         await debug.maybe_wait_for_debugger() | ||||
|                         await maybe_wait_for_debugger() | ||||
| 
 | ||||
|                     elif proc is not None: | ||||
|                         async with debug.acquire_debug_lock(subactor.uid): | ||||
|                         async with acquire_debug_lock(subactor.uid): | ||||
|                             # soft wait on the proc to terminate | ||||
|                             with trio.move_on_after(0.5): | ||||
|                                 await proc.wait() | ||||
|  | @ -546,20 +512,15 @@ async def trio_proc( | |||
| 
 | ||||
|         # send a "spawning specification" which configures the | ||||
|         # initial runtime state of the child. | ||||
|         sspec = msgtypes.SpawnSpec( | ||||
|             _parent_main_data=subactor._parent_main_data, | ||||
|             enable_modules=subactor.enable_modules, | ||||
|             reg_addrs=subactor.reg_addrs, | ||||
|             bind_addrs=bind_addrs, | ||||
|             _runtime_vars=_runtime_vars, | ||||
|         await chan.send( | ||||
|             SpawnSpec( | ||||
|                 _parent_main_data=subactor._parent_main_data, | ||||
|                 enable_modules=subactor.enable_modules, | ||||
|                 reg_addrs=subactor.reg_addrs, | ||||
|                 bind_addrs=bind_addrs, | ||||
|                 _runtime_vars=_runtime_vars, | ||||
|             ) | ||||
|         ) | ||||
|         log.runtime( | ||||
|             f'Sending spawn spec to child\n' | ||||
|             f'{{}}=> {chan.aid.reprol()!r}\n' | ||||
|             f'\n' | ||||
|             f'{pretty_struct.pformat(sspec)}\n' | ||||
|         ) | ||||
|         await chan.send(sspec) | ||||
| 
 | ||||
|         # track subactor in current nursery | ||||
|         curr_actor: Actor = current_actor() | ||||
|  | @ -586,7 +547,7 @@ async def trio_proc( | |||
|             # condition. | ||||
|             await soft_kill( | ||||
|                 proc, | ||||
|                 trio.Process.wait,  # XXX, uses `pidfd_open()` below. | ||||
|                 trio.Process.wait, | ||||
|                 portal | ||||
|             ) | ||||
| 
 | ||||
|  | @ -594,7 +555,8 @@ async def trio_proc( | |||
|             # tandem if not done already | ||||
|             log.cancel( | ||||
|                 'Cancelling portal result reaper task\n' | ||||
|                 f'c)> {subactor.aid.reprol()!r}\n' | ||||
|                 f'>c)\n' | ||||
|                 f' |_{subactor.uid}\n' | ||||
|             ) | ||||
|             nursery.cancel_scope.cancel() | ||||
| 
 | ||||
|  | @ -603,24 +565,21 @@ async def trio_proc( | |||
|         # allowed! Do this **after** cancellation/teardown to avoid | ||||
|         # killing the process too early. | ||||
|         if proc: | ||||
|             reap_repr: str = _pformat.nest_from_op( | ||||
|                 input_op='>x)', | ||||
|                 text=subactor.pformat(), | ||||
|             ) | ||||
|             log.cancel( | ||||
|                 f'Hard reap sequence starting for subactor\n' | ||||
|                 f'{reap_repr}' | ||||
|                 f'>x)\n' | ||||
|                 f' |_{subactor}@{subactor.uid}\n' | ||||
|             ) | ||||
| 
 | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 # don't clobber an ongoing pdb | ||||
|                 if cancelled_during_spawn: | ||||
|                     # Try again to avoid TTY clobbering. | ||||
|                     async with debug.acquire_debug_lock(subactor.uid): | ||||
|                     async with acquire_debug_lock(subactor.uid): | ||||
|                         with trio.move_on_after(0.5): | ||||
|                             await proc.wait() | ||||
| 
 | ||||
|                 await debug.maybe_wait_for_debugger( | ||||
|                 await maybe_wait_for_debugger( | ||||
|                     child_in_debug=_runtime_vars.get( | ||||
|                         '_debug_mode', False | ||||
|                     ), | ||||
|  | @ -649,7 +608,7 @@ async def trio_proc( | |||
|                 #     acquire the lock and get notified of who has it, | ||||
|                 #     check that uid against our known children? | ||||
|                 # this_uid: tuple[str, str] = current_actor().uid | ||||
|                 # await debug.acquire_debug_lock(this_uid) | ||||
|                 # await acquire_debug_lock(this_uid) | ||||
| 
 | ||||
|                 if proc.poll() is None: | ||||
|                     log.cancel(f"Attempting to hard kill {proc}") | ||||
|  | @ -671,13 +630,12 @@ async def mp_proc( | |||
|     subactor: Actor, | ||||
|     errors: dict[tuple[str, str], Exception], | ||||
|     # passed through to actor main | ||||
|     bind_addrs: list[UnwrappedAddress], | ||||
|     parent_addr: UnwrappedAddress, | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
|     *, | ||||
|     infect_asyncio: bool = False, | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED, | ||||
|     proc_kwargs: dict[str, any] = {} | ||||
|     task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -752,14 +710,12 @@ async def mp_proc( | |||
| 
 | ||||
|     log.runtime(f"Started {proc}") | ||||
| 
 | ||||
|     ipc_server: _server.Server = actor_nursery._actor.ipc_server | ||||
|     try: | ||||
|         # wait for actor to spawn and connect back to us | ||||
|         # channel should have handshake completed by the | ||||
|         # local actor by the time we get a ref to it | ||||
|         event, chan = await ipc_server.wait_for_peer( | ||||
|             subactor.uid, | ||||
|         ) | ||||
|         event, chan = await actor_nursery._actor.wait_for_peer( | ||||
|             subactor.uid) | ||||
| 
 | ||||
|         # XXX: monkey patch poll API to match the ``subprocess`` API.. | ||||
|         # not sure why they don't expose this but kk. | ||||
|  |  | |||
|  | @ -14,19 +14,16 @@ | |||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Per actor-process runtime state mgmt APIs. | ||||
| """ | ||||
| Per process state | ||||
| 
 | ||||
| ''' | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from contextvars import ( | ||||
|     ContextVar, | ||||
| ) | ||||
| import os | ||||
| from pathlib import Path | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Literal, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
|  | @ -37,39 +34,20 @@ if TYPE_CHECKING: | |||
|     from ._context import Context | ||||
| 
 | ||||
| 
 | ||||
| # default IPC transport protocol settings | ||||
| TransportProtocolKey = Literal[ | ||||
|     'tcp', | ||||
|     'uds', | ||||
| ] | ||||
| _def_tpt_proto: TransportProtocolKey = 'tcp' | ||||
| 
 | ||||
| _current_actor: Actor|None = None  # type: ignore # noqa | ||||
| _last_actor_terminated: Actor|None = None | ||||
| 
 | ||||
| # TODO: mk this a `msgspec.Struct`! | ||||
| # -[ ] type out all fields obvi! | ||||
| # -[ ] (eventually) mk wire-ready for monitoring? | ||||
| _runtime_vars: dict[str, Any] = { | ||||
|     # root of actor-process tree info | ||||
|     '_is_root': False,  # bool | ||||
|     '_root_mailbox': (None, None),  # tuple[str|None, str|None] | ||||
|     '_root_addrs': [],  # tuple[str|None, str|None] | ||||
| 
 | ||||
|     # parent->chld ipc protocol caps | ||||
|     '_enable_tpts': [_def_tpt_proto], | ||||
| 
 | ||||
|     # registrar info | ||||
|     '_debug_mode': False, | ||||
|     '_is_root': False, | ||||
|     '_root_mailbox': (None, None), | ||||
|     '_registry_addrs': [], | ||||
| 
 | ||||
|     # `debug_mode: bool` settings | ||||
|     '_debug_mode': False,  # bool | ||||
|     'repl_fixture': False,  # |AbstractContextManager[bool] | ||||
|     '_is_infected_aio': False, | ||||
| 
 | ||||
|     # for `tractor.pause_from_sync()` & `breakpoint()` support | ||||
|     'use_greenback': False, | ||||
| 
 | ||||
|     # infected-`asyncio`-mode: `trio` running as guest. | ||||
|     '_is_infected_aio': False, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
|  | @ -121,7 +99,7 @@ def current_actor( | |||
|     return _current_actor | ||||
| 
 | ||||
| 
 | ||||
| def is_root_process() -> bool: | ||||
| def is_main_process() -> bool: | ||||
|     ''' | ||||
|     Bool determining if this actor is running in the top-most process. | ||||
| 
 | ||||
|  | @ -130,10 +108,7 @@ def is_root_process() -> bool: | |||
|     return mp.current_process().name == 'MainProcess' | ||||
| 
 | ||||
| 
 | ||||
| is_main_process = is_root_process | ||||
| 
 | ||||
| 
 | ||||
| def is_debug_mode() -> bool: | ||||
| def debug_mode() -> bool: | ||||
|     ''' | ||||
|     Bool determining if "debug mode" is on which enables | ||||
|     remote subactor pdb entry on crashes. | ||||
|  | @ -142,9 +117,6 @@ def is_debug_mode() -> bool: | |||
|     return bool(_runtime_vars['_debug_mode']) | ||||
| 
 | ||||
| 
 | ||||
| debug_mode = is_debug_mode | ||||
| 
 | ||||
| 
 | ||||
| def is_root_process() -> bool: | ||||
|     return _runtime_vars['_is_root'] | ||||
| 
 | ||||
|  | @ -170,34 +142,3 @@ def current_ipc_ctx( | |||
|             f'|_{current_task()}\n' | ||||
|         ) | ||||
|     return ctx | ||||
| 
 | ||||
| 
 | ||||
| # std ODE (mutable) app state location | ||||
| _rtdir: Path = Path(os.environ['XDG_RUNTIME_DIR']) | ||||
| 
 | ||||
| 
 | ||||
| def get_rt_dir( | ||||
|     subdir: str = 'tractor' | ||||
| ) -> Path: | ||||
|     ''' | ||||
|     Return the user "runtime dir" where most userspace apps stick | ||||
|     their IPC and cache related system util-files; we take hold | ||||
|     of a `'XDG_RUNTIME_DIR'/tractor/` subdir by default. | ||||
| 
 | ||||
|     ''' | ||||
|     rtdir: Path = _rtdir / subdir | ||||
|     if not rtdir.is_dir(): | ||||
|         rtdir.mkdir() | ||||
|     return rtdir | ||||
| 
 | ||||
| 
 | ||||
| def current_ipc_protos() -> list[str]: | ||||
|     ''' | ||||
|     Return the list of IPC transport protocol keys currently | ||||
|     in use by this actor. | ||||
| 
 | ||||
|     The keys are as declared by `MsgTransport` and `Address` | ||||
|     concrete-backend sub-types defined throughout `tractor.ipc`. | ||||
| 
 | ||||
|     ''' | ||||
|     return _runtime_vars['_enable_tpts'] | ||||
|  |  | |||
|  | @ -45,18 +45,16 @@ from .trionics import ( | |||
|     BroadcastReceiver, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     Error, | ||||
|     Return, | ||||
|     Stop, | ||||
|     # Return, | ||||
|     # Stop, | ||||
|     MsgType, | ||||
|     PayloadT, | ||||
|     Yield, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
|     from ._context import Context | ||||
|     from .ipc import Channel | ||||
|     from ._ipc import Channel | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -72,7 +70,8 @@ class MsgStream(trio.abc.Channel): | |||
|     A bidirectional message stream for receiving logically sequenced | ||||
|     values over an inter-actor IPC `Channel`. | ||||
| 
 | ||||
| 
 | ||||
|     This is the type returned to a local task which entered either | ||||
|     `Portal.open_stream_from()` or `Context.open_stream()`. | ||||
| 
 | ||||
|     Termination rules: | ||||
| 
 | ||||
|  | @ -95,9 +94,6 @@ class MsgStream(trio.abc.Channel): | |||
|         self._rx_chan = rx_chan | ||||
|         self._broadcaster = _broadcaster | ||||
| 
 | ||||
|         # any actual IPC msg which is effectively an `EndOfStream` | ||||
|         self._stop_msg: bool|Stop = False | ||||
| 
 | ||||
|         # flag to denote end of stream | ||||
|         self._eoc: bool|trio.EndOfChannel = False | ||||
|         self._closed: bool|trio.ClosedResourceError = False | ||||
|  | @ -129,67 +125,16 @@ class MsgStream(trio.abc.Channel): | |||
|     def receive_nowait( | ||||
|         self, | ||||
|         expect_msg: MsgType = Yield, | ||||
|     ) -> PayloadT: | ||||
|     ): | ||||
|         ctx: Context = self._ctx | ||||
|         ( | ||||
|             msg, | ||||
|             pld, | ||||
|         ) = ctx._pld_rx.recv_msg_nowait( | ||||
|         return ctx._pld_rx.recv_pld_nowait( | ||||
|             ipc=self, | ||||
|             expect_msg=expect_msg, | ||||
|         ) | ||||
| 
 | ||||
|         # ?TODO, maybe factor this into a hyper-common `unwrap_pld()` | ||||
|         # | ||||
|         match msg: | ||||
| 
 | ||||
|             # XXX, these never seems to ever hit? cool? | ||||
|             case Stop(): | ||||
|                 log.cancel( | ||||
|                     f'Msg-stream was ended via stop msg\n' | ||||
|                     f'{msg}' | ||||
|                 ) | ||||
|             case Error(): | ||||
|                 log.error( | ||||
|                     f'Msg-stream was ended via error msg\n' | ||||
|                     f'{msg}' | ||||
|                 ) | ||||
| 
 | ||||
|             # XXX NOTE, always set any final result on the ctx to | ||||
|             # avoid teardown race conditions where previously this msg | ||||
|             # would be consumed silently (by `.aclose()` doing its | ||||
|             # own "msg drain loop" but WITHOUT those `drained: lists[MsgType]` | ||||
|             # being post-close-processed! | ||||
|             # | ||||
|             # !!TODO, see the equiv todo-comment in `.receive()` | ||||
|             # around the `if drained:` where we should prolly | ||||
|             # ACTUALLY be doing this post-close processing?? | ||||
|             # | ||||
|             case Return(pld=pld): | ||||
|                 log.warning( | ||||
|                     f'Msg-stream final result msg for IPC ctx?\n' | ||||
|                     f'{msg}' | ||||
|                 ) | ||||
|                 # XXX TODO, this **should be covered** by higher | ||||
|                 # scoped runtime-side method calls such as | ||||
|                 # `Context._deliver_msg()`, so you should never | ||||
|                 # really see the warning above or else something | ||||
|                 # racy/out-of-order is likely going on between | ||||
|                 # actor-runtime-side push tasks and the user-app-side | ||||
|                 # consume tasks! | ||||
|                 # -[ ] figure out that set of race cases and fix! | ||||
|                 # -[ ] possibly return the `msg` given an input | ||||
|                 #     arg-flag is set so we can process the `Return` | ||||
|                 #     from the `.aclose()` caller? | ||||
|                 # | ||||
|                 # breakpoint()  # to debug this RACE CASE! | ||||
|                 ctx._result = pld | ||||
|                 ctx._outcome_msg = msg | ||||
| 
 | ||||
|         return pld | ||||
| 
 | ||||
|     async def receive( | ||||
|         self, | ||||
| 
 | ||||
|         hide_tb: bool = False, | ||||
|     ): | ||||
|         ''' | ||||
|  | @ -209,7 +154,7 @@ class MsgStream(trio.abc.Channel): | |||
|         #     except trio.EndOfChannel: | ||||
|         #         raise StopAsyncIteration | ||||
|         # | ||||
|         # see `.aclose()` for notes on the old behaviour prior to | ||||
|         # see ``.aclose()`` for notes on the old behaviour prior to | ||||
|         # introducing this | ||||
|         if self._eoc: | ||||
|             raise self._eoc | ||||
|  | @ -220,11 +165,7 @@ class MsgStream(trio.abc.Channel): | |||
|         src_err: Exception|None = None  # orig tb | ||||
|         try: | ||||
|             ctx: Context = self._ctx | ||||
|             pld = await ctx._pld_rx.recv_pld( | ||||
|                 ipc=self, | ||||
|                 expect_msg=Yield, | ||||
|             ) | ||||
|             return pld | ||||
|             return await ctx._pld_rx.recv_pld(ipc=self) | ||||
| 
 | ||||
|         # XXX: the stream terminates on either of: | ||||
|         # - `self._rx_chan.receive()` raising  after manual closure | ||||
|  | @ -233,7 +174,7 @@ class MsgStream(trio.abc.Channel): | |||
|         # - via a `Stop`-msg received from remote peer task. | ||||
|         #   NOTE | ||||
|         #   |_ previously this was triggered by calling | ||||
|         #   `._rx_chan.aclose()` on the send side of the channel | ||||
|         #   ``._rx_chan.aclose()`` on the send side of the channel | ||||
|         #   inside `Actor._deliver_ctx_payload()`, but now the 'stop' | ||||
|         #   message handling gets delegated to `PldRFx.recv_pld()` | ||||
|         #   internals. | ||||
|  | @ -257,14 +198,11 @@ class MsgStream(trio.abc.Channel): | |||
|         # terminated and signal this local iterator to stop | ||||
|         drained: list[Exception|dict] = await self.aclose() | ||||
|         if drained: | ||||
|         #  ^^^^^^^^TODO? pass these to the `._ctx._drained_msgs: | ||||
|         #  deque` and then iterate them as part of any | ||||
|         #  `.wait_for_result()` call? | ||||
|         # | ||||
|         # -[ ] move the match-case processing from | ||||
|         #     `.receive_nowait()` instead to right here, use it from | ||||
|         #     a for msg in drained:` post-proc loop? | ||||
|         # | ||||
|             # ?TODO? pass these to the `._ctx._drained_msgs: deque` | ||||
|             # and then iterate them as part of any `.wait_for_result()` call? | ||||
|             # | ||||
|             # from .devx import pause | ||||
|             # await pause() | ||||
|             log.warning( | ||||
|                 'Drained context msgs during closure\n\n' | ||||
|                 f'{drained}' | ||||
|  | @ -327,6 +265,9 @@ class MsgStream(trio.abc.Channel): | |||
|          - more or less we try to maintain adherance to trio's `.aclose()` semantics: | ||||
|            https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||
|         ''' | ||||
| 
 | ||||
|         # rx_chan = self._rx_chan | ||||
| 
 | ||||
|         # XXX NOTE XXX | ||||
|         # it's SUPER IMPORTANT that we ensure we don't DOUBLE | ||||
|         # DRAIN msgs on closure so avoid getting stuck handing on | ||||
|  | @ -338,16 +279,15 @@ class MsgStream(trio.abc.Channel): | |||
|             # this stream has already been closed so silently succeed as | ||||
|             # per ``trio.AsyncResource`` semantics. | ||||
|             # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||
|             # import tractor | ||||
|             # await tractor.pause() | ||||
|             return [] | ||||
| 
 | ||||
|         ctx: Context = self._ctx | ||||
|         drained: list[Exception|dict] = [] | ||||
|         while not drained: | ||||
|             try: | ||||
|                 maybe_final_msg: Yield|Return = self.receive_nowait( | ||||
|                     expect_msg=Yield|Return, | ||||
|                 maybe_final_msg = self.receive_nowait( | ||||
|                     # allow_msgs=[Yield, Return], | ||||
|                     expect_msg=Yield, | ||||
|                 ) | ||||
|                 if maybe_final_msg: | ||||
|                     log.debug( | ||||
|  | @ -426,37 +366,24 @@ class MsgStream(trio.abc.Channel): | |||
|             self._closed = re | ||||
| 
 | ||||
|         # if caught_eoc: | ||||
|         #     # from .devx import debug | ||||
|         #     # await debug.pause() | ||||
|         #     # from .devx import _debug | ||||
|         #     # await _debug.pause() | ||||
|         #     with trio.CancelScope(shield=True): | ||||
|         #         await rx_chan.aclose() | ||||
| 
 | ||||
|         if not self._eoc: | ||||
|             this_side: str = self._ctx.side | ||||
|             peer_side: str = self._ctx.peer_side | ||||
|             message: str = ( | ||||
|                 f'Stream self-closed by {this_side!r}-side before EoC from {peer_side!r}\n' | ||||
|                 f'Stream self-closed by {self._ctx.side!r}-side before EoC\n' | ||||
|                 # } bc a stream is a "scope"/msging-phase inside an IPC | ||||
|                 f'c}}>\n' | ||||
|                 f'  |_{self}\n' | ||||
|                 f'x}}>\n' | ||||
|                 f'|_{self}\n' | ||||
|             ) | ||||
|             if ( | ||||
|                 (rx_chan := self._rx_chan) | ||||
|                 and | ||||
|                 (stats := rx_chan.statistics()).tasks_waiting_receive | ||||
|             ): | ||||
|                 message += ( | ||||
|                     f'AND there is still reader tasks,\n' | ||||
|                     f'\n' | ||||
|                     f'{stats}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             log.cancel(message) | ||||
|             self._eoc = trio.EndOfChannel(message) | ||||
| 
 | ||||
|         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? | ||||
|         # => NO, DEFINITELY NOT! <= | ||||
|         # if we're a bi-dir `MsgStream` BECAUSE this same | ||||
|         # if we're a bi-dir ``MsgStream`` BECAUSE this same | ||||
|         # core-msg-loop mem recv-chan is used to deliver the | ||||
|         # potential final result from the surrounding inter-actor | ||||
|         # `Context` so we don't want to close it until that | ||||
|  | @ -596,17 +523,8 @@ class MsgStream(trio.abc.Channel): | |||
|             trio.ClosedResourceError, | ||||
|             trio.BrokenResourceError, | ||||
|             BrokenPipeError, | ||||
|         ) as _trans_err: | ||||
|             trans_err = _trans_err | ||||
|             if ( | ||||
|                 hide_tb | ||||
|                 and | ||||
|                 self._ctx.chan._exc is trans_err | ||||
|                 # ^XXX, IOW, only if the channel is marked errored | ||||
|                 # for the same reason as whatever its underlying | ||||
|                 # transport raised, do we keep the full low-level tb | ||||
|                 # suppressed from the user. | ||||
|             ): | ||||
|         ) as trans_err: | ||||
|             if hide_tb: | ||||
|                 raise type(trans_err)( | ||||
|                     *trans_err.args | ||||
|                 ) from trans_err | ||||
|  | @ -812,12 +730,13 @@ async def open_stream_from_ctx( | |||
|                 # sanity, can remove? | ||||
|                 assert eoc is stream._eoc | ||||
| 
 | ||||
|                 log.runtime( | ||||
|                 log.warning( | ||||
|                     'Stream was terminated by EoC\n\n' | ||||
|                     # NOTE: won't show the error <Type> but | ||||
|                     # does show txt followed by IPC msg. | ||||
|                     f'{str(eoc)}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         finally: | ||||
|             if ctx._portal: | ||||
|                 try: | ||||
|  |  | |||
|  | @ -21,49 +21,34 @@ | |||
| from contextlib import asynccontextmanager as acm | ||||
| from functools import partial | ||||
| import inspect | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from pprint import pformat | ||||
| from typing import TYPE_CHECKING | ||||
| import typing | ||||
| import warnings | ||||
| 
 | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| from .devx import ( | ||||
|     debug, | ||||
|     pformat as _pformat, | ||||
| ) | ||||
| from ._addr import ( | ||||
|     UnwrappedAddress, | ||||
|     mk_uuid, | ||||
| ) | ||||
| from .devx._debug import maybe_wait_for_debugger | ||||
| from ._state import current_actor, is_main_process | ||||
| from .log import get_logger, get_loglevel | ||||
| from ._runtime import Actor | ||||
| from ._portal import Portal | ||||
| from .trionics import ( | ||||
|     is_multi_cancelled, | ||||
|     collapse_eg, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     is_multi_cancelled, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| from ._root import ( | ||||
|     open_root_actor, | ||||
| ) | ||||
| from ._root import open_root_actor | ||||
| from . import _state | ||||
| from . import _spawn | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     import multiprocessing as mp | ||||
|     # from .ipc._server import IPCServer | ||||
|     from .ipc import IPCServer | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _default_bind_addr: tuple[str, int] = ('127.0.0.1', 0) | ||||
| 
 | ||||
| 
 | ||||
| class ActorNursery: | ||||
|     ''' | ||||
|  | @ -117,6 +102,7 @@ class ActorNursery: | |||
|             ] | ||||
|         ] = {} | ||||
| 
 | ||||
|         self.cancelled: bool = False | ||||
|         self._join_procs = trio.Event() | ||||
|         self._at_least_one_child_in_debug: bool = False | ||||
|         self.errors = errors | ||||
|  | @ -134,62 +120,18 @@ class ActorNursery: | |||
|         # TODO: remove the `.run_in_actor()` API and thus this 2ndary | ||||
|         # nursery when that API get's moved outside this primitive! | ||||
|         self._ria_nursery = ria_nursery | ||||
| 
 | ||||
|         # TODO, factor this into a .hilevel api! | ||||
|         # | ||||
|         # portals spawned with ``run_in_actor()`` are | ||||
|         # cancelled when their "main" result arrives | ||||
|         self._cancel_after_result_on_exit: set = set() | ||||
| 
 | ||||
|         # trio.Nursery-like cancel (request) statuses | ||||
|         self._cancelled_caught: bool = False | ||||
|         self._cancel_called: bool = False | ||||
| 
 | ||||
|     @property | ||||
|     def cancel_called(self) -> bool: | ||||
|         ''' | ||||
|         Records whether cancellation has been requested for this | ||||
|         actor-nursery by a call to  `.cancel()` either due to, | ||||
|         - an explicit call by some actor-local-task, | ||||
|         - an implicit call due to an error/cancel emited inside | ||||
|           the `tractor.open_nursery()` block. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._cancel_called | ||||
| 
 | ||||
|     @property | ||||
|     def cancelled_caught(self) -> bool: | ||||
|         ''' | ||||
|         Set when this nursery was able to cance all spawned subactors | ||||
|         gracefully via an (implicit) call to `.cancel()`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._cancelled_caught | ||||
| 
 | ||||
|     # TODO! remove internal/test-suite usage! | ||||
|     @property | ||||
|     def cancelled(self) -> bool: | ||||
|         warnings.warn( | ||||
|             "`ActorNursery.cancelled` is now deprecated, use " | ||||
|             " `.cancel_called` instead.", | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         return ( | ||||
|             self._cancel_called | ||||
|             # and | ||||
|             # self._cancelled_caught | ||||
|         ) | ||||
| 
 | ||||
|     async def start_actor( | ||||
|         self, | ||||
|         name: str, | ||||
| 
 | ||||
|         *, | ||||
| 
 | ||||
|         bind_addrs: list[UnwrappedAddress]|None = None, | ||||
|         bind_addrs: list[tuple[str, int]] = [_default_bind_addr], | ||||
|         rpc_module_paths: list[str]|None = None, | ||||
|         enable_transports: list[str] = [_state._def_tpt_proto], | ||||
|         enable_modules: list[str]|None = None, | ||||
|         loglevel: str|None = None,  # set log level per subactor | ||||
|         debug_mode: bool|None = None, | ||||
|  | @ -199,7 +141,6 @@ class ActorNursery: | |||
|         # a `._ria_nursery` since the dependent APIs have been | ||||
|         # removed! | ||||
|         nursery: trio.Nursery|None = None, | ||||
|         proc_kwargs: dict[str, any] = {} | ||||
| 
 | ||||
|     ) -> Portal: | ||||
|         ''' | ||||
|  | @ -236,17 +177,15 @@ class ActorNursery: | |||
|             enable_modules.extend(rpc_module_paths) | ||||
| 
 | ||||
|         subactor = Actor( | ||||
|             name=name, | ||||
|             uuid=mk_uuid(), | ||||
| 
 | ||||
|             name, | ||||
|             # modules allowed to invoked funcs from | ||||
|             enable_modules=enable_modules, | ||||
|             loglevel=loglevel, | ||||
| 
 | ||||
|             # verbatim relay this actor's registrar addresses | ||||
|             registry_addrs=current_actor().registry_addrs, | ||||
|             registry_addrs=current_actor().reg_addrs, | ||||
|         ) | ||||
|         parent_addr: UnwrappedAddress = self._actor.accept_addr | ||||
|         parent_addr = self._actor.accept_addr | ||||
|         assert parent_addr | ||||
| 
 | ||||
|         # start a task to spawn a process | ||||
|  | @ -265,7 +204,6 @@ class ActorNursery: | |||
|                 parent_addr, | ||||
|                 _rtv,  # run time vars | ||||
|                 infect_asyncio=infect_asyncio, | ||||
|                 proc_kwargs=proc_kwargs | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|  | @ -284,12 +222,11 @@ class ActorNursery: | |||
|         *, | ||||
| 
 | ||||
|         name: str | None = None, | ||||
|         bind_addrs: UnwrappedAddress|None = None, | ||||
|         bind_addrs: tuple[str, int] = [_default_bind_addr], | ||||
|         rpc_module_paths: list[str] | None = None, | ||||
|         enable_modules: list[str] | None = None, | ||||
|         loglevel: str | None = None,  # set log level per subactor | ||||
|         infect_asyncio: bool = False, | ||||
|         proc_kwargs: dict[str, any] = {}, | ||||
| 
 | ||||
|         **kwargs,  # explicit args to ``fn`` | ||||
| 
 | ||||
|  | @ -320,7 +257,6 @@ class ActorNursery: | |||
|             # use the run_in_actor nursery | ||||
|             nursery=self._ria_nursery, | ||||
|             infect_asyncio=infect_asyncio, | ||||
|             proc_kwargs=proc_kwargs | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: don't allow stream funcs | ||||
|  | @ -358,21 +294,15 @@ class ActorNursery: | |||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         self._cancel_called = True | ||||
|         self.cancelled = True | ||||
| 
 | ||||
|         # TODO: impl a repr for spawn more compact | ||||
|         # then `._children`.. | ||||
|         children: dict = self._children | ||||
|         child_count: int = len(children) | ||||
|         msg: str = f'Cancelling actor nursery with {child_count} children\n' | ||||
| 
 | ||||
|         server: IPCServer = self._actor.ipc_server | ||||
| 
 | ||||
|         with trio.move_on_after(3) as cs: | ||||
|             async with ( | ||||
|                 collapse_eg(), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|             async with trio.open_nursery() as tn: | ||||
| 
 | ||||
|                 subactor: Actor | ||||
|                 proc: trio.Process | ||||
|  | @ -391,7 +321,7 @@ class ActorNursery: | |||
| 
 | ||||
|                     else: | ||||
|                         if portal is None:  # actor hasn't fully spawned yet | ||||
|                             event: trio.Event = server._peer_connected[subactor.uid] | ||||
|                             event = self._actor._peer_connected[subactor.uid] | ||||
|                             log.warning( | ||||
|                                 f"{subactor.uid} never 't finished spawning?" | ||||
|                             ) | ||||
|  | @ -407,7 +337,7 @@ class ActorNursery: | |||
|                             if portal is None: | ||||
|                                 # cancelled while waiting on the event | ||||
|                                 # to arrive | ||||
|                                 chan = server._peers[subactor.uid][-1] | ||||
|                                 chan = self._actor._peers[subactor.uid][-1] | ||||
|                                 if chan: | ||||
|                                     portal = Portal(chan) | ||||
|                                 else:  # there's no other choice left | ||||
|  | @ -436,8 +366,6 @@ class ActorNursery: | |||
|             ) in children.values(): | ||||
|                 log.warning(f"Hard killing process {proc}") | ||||
|                 proc.terminate() | ||||
|         else: | ||||
|             self._cancelled_caught | ||||
| 
 | ||||
|         # mark ourselves as having (tried to have) cancelled all subactors | ||||
|         self._join_procs.set() | ||||
|  | @ -446,12 +374,12 @@ class ActorNursery: | |||
| @acm | ||||
| async def _open_and_supervise_one_cancels_all_nursery( | ||||
|     actor: Actor, | ||||
|     hide_tb: bool = True, | ||||
|     tb_hide: bool = False, | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||
| 
 | ||||
|     # normally don't need to show user by default | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     __tracebackhide__: bool = tb_hide | ||||
| 
 | ||||
|     outer_err: BaseException|None = None | ||||
|     inner_err: BaseException|None = None | ||||
|  | @ -467,23 +395,18 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|     # `ActorNursery.start_actor()`). | ||||
| 
 | ||||
|     # errors from this daemon actor nursery bubble up to caller | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         trio.open_nursery() as da_nursery, | ||||
|     ): | ||||
|     async with trio.open_nursery() as da_nursery: | ||||
|         try: | ||||
|             # This is the inner level "run in actor" nursery. It is | ||||
|             # awaited first since actors spawned in this way (using | ||||
|             # `ActorNusery.run_in_actor()`) are expected to only | ||||
|             # ``ActorNusery.run_in_actor()``) are expected to only | ||||
|             # return a single result and then complete (i.e. be canclled | ||||
|             # gracefully). Errors collected from these actors are | ||||
|             # immediately raised for handling by a supervisor strategy. | ||||
|             # As such if the strategy propagates any error(s) upwards | ||||
|             # the above "daemon actor" nursery will be notified. | ||||
|             async with ( | ||||
|                 collapse_eg(), | ||||
|                 trio.open_nursery() as ria_nursery, | ||||
|             ): | ||||
|             async with trio.open_nursery() as ria_nursery: | ||||
| 
 | ||||
|                 an = ActorNursery( | ||||
|                     actor, | ||||
|                     ria_nursery, | ||||
|  | @ -500,7 +423,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                     # the "hard join phase". | ||||
|                     log.runtime( | ||||
|                         'Waiting on subactors to complete:\n' | ||||
|                         f'>}} {len(an._children)}\n' | ||||
|                         f'{pformat(an._children)}\n' | ||||
|                     ) | ||||
|                     an._join_procs.set() | ||||
| 
 | ||||
|  | @ -514,7 +437,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                     # will make the pdb repl unusable. | ||||
|                     # Instead try to wait for pdb to be released before | ||||
|                     # tearing down. | ||||
|                     await debug.maybe_wait_for_debugger( | ||||
|                     await maybe_wait_for_debugger( | ||||
|                         child_in_debug=an._at_least_one_child_in_debug | ||||
|                     ) | ||||
| 
 | ||||
|  | @ -549,8 +472,8 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                             ContextCancelled, | ||||
|                         }: | ||||
|                             log.cancel( | ||||
|                                 'Actor-nursery caught remote cancellation\n' | ||||
|                                 '\n' | ||||
|                                 'Actor-nursery caught remote cancellation\n\n' | ||||
| 
 | ||||
|                                 f'{inner_err.tb_str}' | ||||
|                             ) | ||||
|                         else: | ||||
|  | @ -590,7 +513,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
| 
 | ||||
|             # XXX: yet another guard before allowing the cancel | ||||
|             # sequence in case a (single) child is in debug. | ||||
|             await debug.maybe_wait_for_debugger( | ||||
|             await maybe_wait_for_debugger( | ||||
|                 child_in_debug=an._at_least_one_child_in_debug | ||||
|             ) | ||||
| 
 | ||||
|  | @ -639,18 +562,10 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|     # final exit | ||||
| 
 | ||||
| 
 | ||||
| _shutdown_msg: str = ( | ||||
|     'Actor-runtime-shutdown' | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| # @api_frame | ||||
| async def open_nursery( | ||||
|     *,  # named params only! | ||||
|     hide_tb: bool = True, | ||||
|     **kwargs, | ||||
|     # ^TODO, paramspec for `open_root_actor()` | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||
|     ''' | ||||
|  | @ -668,7 +583,7 @@ async def open_nursery( | |||
|     which cancellation scopes correspond to each spawned subactor set. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     __tracebackhide__: bool = True | ||||
|     implicit_runtime: bool = False | ||||
|     actor: Actor = current_actor(err_on_no_runtime=False) | ||||
|     an: ActorNursery|None = None | ||||
|  | @ -684,10 +599,7 @@ async def open_nursery( | |||
|             # mark us for teardown on exit | ||||
|             implicit_runtime: bool = True | ||||
| 
 | ||||
|             async with open_root_actor( | ||||
|                 hide_tb=hide_tb, | ||||
|                 **kwargs, | ||||
|             ) as actor: | ||||
|             async with open_root_actor(**kwargs) as actor: | ||||
|                 assert actor is current_actor() | ||||
| 
 | ||||
|                 try: | ||||
|  | @ -725,33 +637,22 @@ async def open_nursery( | |||
|         # show frame on any internal runtime-scope error | ||||
|         if ( | ||||
|             an | ||||
|             and | ||||
|             not an.cancelled | ||||
|             and | ||||
|             an._scope_error | ||||
|             and not an.cancelled | ||||
|             and an._scope_error | ||||
|         ): | ||||
|             __tracebackhide__: bool = False | ||||
| 
 | ||||
| 
 | ||||
|         op_nested_an_repr: str = _pformat.nest_from_op( | ||||
|             input_op=')>', | ||||
|             text=f'{an}', | ||||
|             # nest_prefix='|_', | ||||
|             nest_indent=1,  # under > | ||||
|         msg: str = ( | ||||
|             'Actor-nursery exited\n' | ||||
|             f'|_{an}\n' | ||||
|         ) | ||||
|         an_msg: str = ( | ||||
|             f'Actor-nursery exited\n' | ||||
|             f'{op_nested_an_repr}\n' | ||||
|         ) | ||||
|         # keep noise low during std operation. | ||||
|         log.runtime(an_msg) | ||||
| 
 | ||||
|         if implicit_runtime: | ||||
|             # shutdown runtime if it was started and report noisly | ||||
|             # that we're did so. | ||||
|             msg: str = ( | ||||
|                 '\n' | ||||
|                 '\n' | ||||
|                 f'{_shutdown_msg} )>\n' | ||||
|             ) | ||||
|             msg += '=> Shutting down actor runtime <=\n' | ||||
|             log.info(msg) | ||||
| 
 | ||||
|         else: | ||||
|             # keep noise low during std operation. | ||||
|             log.runtime(msg) | ||||
|  |  | |||
|  | @ -19,16 +19,10 @@ Various helpers/utils for auditing your `tractor` app and/or the | |||
| core runtime. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| import os | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import pathlib | ||||
| 
 | ||||
| import tractor | ||||
| from tractor.devx.debug import ( | ||||
|     BoxedMaybeException, | ||||
| ) | ||||
| from .pytest import ( | ||||
|     tractor_test as tractor_test | ||||
| ) | ||||
|  | @ -37,9 +31,6 @@ from .fault_simulation import ( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, use dulwhich for this instead? | ||||
| # -> we're going to likely need it (or something similar) | ||||
| #   for supporting hot-coad reload feats eventually anyway! | ||||
| def repodir() -> pathlib.Path: | ||||
|     ''' | ||||
|     Return the abspath to the repo directory. | ||||
|  | @ -68,12 +59,7 @@ def mk_cmd( | |||
|     exs_subpath: str = 'debugging', | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Generate a shell command suitable to pass to `pexpect.spawn()` | ||||
|     which runs the script as a python program's entrypoint. | ||||
| 
 | ||||
|     In particular ensure we disable the new tb coloring via unsetting | ||||
|     `$PYTHON_COLORS` so that `pexpect` can pattern match without | ||||
|     color-escape-codes. | ||||
|     Generate a shell command suitable to pass to ``pexpect.spawn()``. | ||||
| 
 | ||||
|     ''' | ||||
|     script_path: pathlib.Path = ( | ||||
|  | @ -81,15 +67,10 @@ def mk_cmd( | |||
|         / exs_subpath | ||||
|         / f'{ex_name}.py' | ||||
|     ) | ||||
|     py_cmd: str = ' '.join([ | ||||
|     return ' '.join([ | ||||
|         'python', | ||||
|         str(script_path) | ||||
|     ]) | ||||
|     # XXX, required for py 3.13+ | ||||
|     # https://docs.python.org/3/using/cmdline.html#using-on-controlling-color | ||||
|     # https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS | ||||
|     os.environ['PYTHON_COLORS'] = '0' | ||||
|     return py_cmd | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
|  | @ -104,13 +85,12 @@ async def expect_ctxc( | |||
|     ''' | ||||
|     if yay: | ||||
|         try: | ||||
|             yield (maybe_exc := BoxedMaybeException()) | ||||
|             yield | ||||
|             raise RuntimeError('Never raised ctxc?') | ||||
|         except tractor.ContextCancelled as ctxc: | ||||
|             maybe_exc.value = ctxc | ||||
|         except tractor.ContextCancelled: | ||||
|             if reraise: | ||||
|                 raise | ||||
|             else: | ||||
|                 return | ||||
|     else: | ||||
|         yield (maybe_exc := BoxedMaybeException()) | ||||
|         yield | ||||
|  |  | |||
|  | @ -1,70 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Random IPC addr generation for isolating | ||||
| the discovery space between test sessions. | ||||
| 
 | ||||
| Might be eventually useful to expose as a util set from | ||||
| our `tractor.discovery` subsys? | ||||
| 
 | ||||
| ''' | ||||
| import random | ||||
| from typing import ( | ||||
|     Type, | ||||
| ) | ||||
| from tractor import ( | ||||
|     _addr, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def get_rando_addr( | ||||
|     tpt_proto: str, | ||||
|     *, | ||||
| 
 | ||||
|     # choose random port at import time | ||||
|     _rando_port: str = random.randint(1000, 9999) | ||||
| 
 | ||||
| ) -> tuple[str, str|int]: | ||||
|     ''' | ||||
|     Used to globally override the runtime to the | ||||
|     per-test-session-dynamic addr so that all tests never conflict | ||||
|     with any other actor tree using the default. | ||||
| 
 | ||||
|     ''' | ||||
|     addr_type: Type[_addr.Addres] = _addr._address_types[tpt_proto] | ||||
|     def_reg_addr: tuple[str, int] = _addr._default_lo_addrs[tpt_proto] | ||||
| 
 | ||||
|     # this is the "unwrapped" form expected to be passed to | ||||
|     # `.open_root_actor()` by test body. | ||||
|     testrun_reg_addr: tuple[str, int|str] | ||||
|     match tpt_proto: | ||||
|         case 'tcp': | ||||
|             testrun_reg_addr = ( | ||||
|                 addr_type.def_bindspace, | ||||
|                 _rando_port, | ||||
|             ) | ||||
| 
 | ||||
|         # NOTE, file-name uniqueness (no-collisions) will be based on | ||||
|         # the runtime-directory and root (pytest-proc's) pid. | ||||
|         case 'uds': | ||||
|             testrun_reg_addr = addr_type.get_random().unwrap() | ||||
| 
 | ||||
|     # XXX, as sanity it should never the same as the default for the | ||||
|     # host-singleton registry actor. | ||||
|     assert def_reg_addr != testrun_reg_addr | ||||
| 
 | ||||
|     return testrun_reg_addr | ||||
|  | @ -26,46 +26,29 @@ from functools import ( | |||
| import inspect | ||||
| import platform | ||||
| 
 | ||||
| import pytest | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| def tractor_test(fn): | ||||
|     ''' | ||||
|     Decorator for async test fns to decorator-wrap them as "native" | ||||
|     looking sync funcs runnable by `pytest` and auto invoked with | ||||
|     `trio.run()` (much like the `pytest-trio` plugin's approach). | ||||
|     Decorator for async test funcs to present them as "native" | ||||
|     looking sync funcs runnable by `pytest` using `trio.run()`. | ||||
| 
 | ||||
|     Further the test fn body will be invoked AFTER booting the actor | ||||
|     runtime, i.e. from inside a `tractor.open_root_actor()` block AND | ||||
|     with various runtime and tooling parameters implicitly passed as | ||||
|     requested by by the test session's config; see immediately below. | ||||
|     Use: | ||||
| 
 | ||||
|     Basic deco use: | ||||
|     --------------- | ||||
|     @tractor_test | ||||
|     async def test_whatever(): | ||||
|         await ... | ||||
| 
 | ||||
|       @tractor_test | ||||
|       async def test_whatever(): | ||||
|           await ... | ||||
|     If fixtures: | ||||
| 
 | ||||
|         - ``reg_addr`` (a socket addr tuple where arbiter is listening) | ||||
|         - ``loglevel`` (logging level passed to tractor internals) | ||||
|         - ``start_method`` (subprocess spawning backend) | ||||
| 
 | ||||
|     Runtime config via special fixtures: | ||||
|     ------------------------------------ | ||||
|     If any of the following fixture are requested by the wrapped test | ||||
|     fn (via normal func-args declaration), | ||||
| 
 | ||||
|     - `reg_addr` (a socket addr tuple where arbiter is listening) | ||||
|     - `loglevel` (logging level passed to tractor internals) | ||||
|     - `start_method` (subprocess spawning backend) | ||||
| 
 | ||||
|     (TODO support) | ||||
|     - `tpt_proto` (IPC transport protocol key) | ||||
| 
 | ||||
|     they will be automatically injected to each test as normally | ||||
|     expected as well as passed to the initial | ||||
|     `tractor.open_root_actor()` funcargs. | ||||
| 
 | ||||
|     are defined in the `pytest` fixture space they will be automatically | ||||
|     injected to tests declaring these funcargs. | ||||
|     ''' | ||||
|     @wraps(fn) | ||||
|     def wrapper( | ||||
|  | @ -128,164 +111,3 @@ def tractor_test(fn): | |||
|         return trio.run(main) | ||||
| 
 | ||||
|     return wrapper | ||||
| 
 | ||||
| 
 | ||||
| def pytest_addoption( | ||||
|     parser: pytest.Parser, | ||||
| ): | ||||
|     # parser.addoption( | ||||
|     #     "--ll", | ||||
|     #     action="store", | ||||
|     #     dest='loglevel', | ||||
|     #     default='ERROR', help="logging level to set when testing" | ||||
|     # ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--spawn-backend", | ||||
|         action="store", | ||||
|         dest='spawn_backend', | ||||
|         default='trio', | ||||
|         help="Processing spawning backend to use for test run", | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--tpdb", | ||||
|         "--debug-mode", | ||||
|         action="store_true", | ||||
|         dest='tractor_debug_mode', | ||||
|         # default=False, | ||||
|         help=( | ||||
|             'Enable a flag that can be used by tests to to set the ' | ||||
|             '`debug_mode: bool` for engaging the internal ' | ||||
|             'multi-proc debugger sys.' | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
|     # provide which IPC transport protocols opting-in test suites | ||||
|     # should accumulatively run against. | ||||
|     parser.addoption( | ||||
|         "--tpt-proto", | ||||
|         nargs='+',  # accumulate-multiple-args | ||||
|         action="store", | ||||
|         dest='tpt_protos', | ||||
|         default=['tcp'], | ||||
|         help="Transport protocol to use under the `tractor.ipc.Channel`", | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_configure(config): | ||||
|     backend = config.option.spawn_backend | ||||
|     tractor._spawn.try_set_start_method(backend) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def debug_mode(request) -> bool: | ||||
|     ''' | ||||
|     Flag state for whether `--tpdb` (for `tractor`-py-debugger) | ||||
|     was passed to the test run. | ||||
| 
 | ||||
|     Normally tests should pass this directly to `.open_root_actor()` | ||||
|     to allow the user to opt into suite-wide crash handling. | ||||
| 
 | ||||
|     ''' | ||||
|     debug_mode: bool = request.config.option.tractor_debug_mode | ||||
|     return debug_mode | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def spawn_backend(request) -> str: | ||||
|     return request.config.option.spawn_backend | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def tpt_protos(request) -> list[str]: | ||||
| 
 | ||||
|     # allow quoting on CLI | ||||
|     proto_keys: list[str] = [ | ||||
|         proto_key.replace('"', '').replace("'", "") | ||||
|         for proto_key in request.config.option.tpt_protos | ||||
|     ] | ||||
| 
 | ||||
|     # ?TODO, eventually support multiple protos per test-sesh? | ||||
|     if len(proto_keys) > 1: | ||||
|         pytest.fail( | ||||
|             'We only support one `--tpt-proto <key>` atm!\n' | ||||
|         ) | ||||
| 
 | ||||
|     # XXX ensure we support the protocol by name via lookup! | ||||
|     for proto_key in proto_keys: | ||||
|         addr_type = tractor._addr._address_types[proto_key] | ||||
|         assert addr_type.proto_key == proto_key | ||||
| 
 | ||||
|     yield proto_keys | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     scope='session', | ||||
|     autouse=True, | ||||
| ) | ||||
| def tpt_proto( | ||||
|     tpt_protos: list[str], | ||||
| ) -> str: | ||||
|     proto_key: str = tpt_protos[0] | ||||
| 
 | ||||
|     from tractor import _state | ||||
|     if _state._def_tpt_proto != proto_key: | ||||
|         _state._def_tpt_proto = proto_key | ||||
| 
 | ||||
|     yield proto_key | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def reg_addr( | ||||
|     tpt_proto: str, | ||||
| ) -> tuple[str, int|str]: | ||||
|     ''' | ||||
|     Deliver a test-sesh unique registry address such | ||||
|     that each run's (tests which use this fixture) will | ||||
|     have no conflicts/cross-talk when running simultaneously | ||||
|     nor will interfere with other live `tractor` apps active | ||||
|     on the same network-host (namespace). | ||||
| 
 | ||||
|     ''' | ||||
|     from tractor._testing.addr import get_rando_addr | ||||
|     return get_rando_addr( | ||||
|         tpt_proto=tpt_proto, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_generate_tests( | ||||
|     metafunc: pytest.Metafunc, | ||||
| ): | ||||
|     spawn_backend: str = metafunc.config.option.spawn_backend | ||||
| 
 | ||||
|     if not spawn_backend: | ||||
|         # XXX some weird windows bug with `pytest`? | ||||
|         spawn_backend = 'trio' | ||||
| 
 | ||||
|     # TODO: maybe just use the literal `._spawn.SpawnMethodKey`? | ||||
|     assert spawn_backend in ( | ||||
|         'mp_spawn', | ||||
|         'mp_forkserver', | ||||
|         'trio', | ||||
|     ) | ||||
| 
 | ||||
|     # NOTE: used-to-be-used-to dyanmically parametrize tests for when | ||||
|     # you just passed --spawn-backend=`mp` on the cli, but now we expect | ||||
|     # that cli input to be manually specified, BUT, maybe we'll do | ||||
|     # something like this again in the future? | ||||
|     if 'start_method' in metafunc.fixturenames: | ||||
|         metafunc.parametrize( | ||||
|             "start_method", | ||||
|             [spawn_backend], | ||||
|             scope='module', | ||||
|         ) | ||||
| 
 | ||||
|     # TODO, parametrize any `tpt_proto: str` declaring tests! | ||||
|     # proto_tpts: list[str] = metafunc.config.option.proto_tpts | ||||
|     # if 'tpt_proto' in metafunc.fixturenames: | ||||
|     #     metafunc.parametrize( | ||||
|     #         'tpt_proto', | ||||
|     #         proto_tpts,  # TODO, double check this list usage! | ||||
|     #         scope='module', | ||||
|     #     ) | ||||
|  |  | |||
|  | @ -1,35 +0,0 @@ | |||
| import os | ||||
| import random | ||||
| 
 | ||||
| 
 | ||||
| def generate_sample_messages( | ||||
|     amount: int, | ||||
|     rand_min: int = 0, | ||||
|     rand_max: int = 0, | ||||
|     silent: bool = False | ||||
| ) -> tuple[list[bytes], int]: | ||||
| 
 | ||||
|     msgs = [] | ||||
|     size = 0 | ||||
| 
 | ||||
|     if not silent: | ||||
|         print(f'\ngenerating {amount} messages...') | ||||
| 
 | ||||
|     for i in range(amount): | ||||
|         msg = f'[{i:08}]'.encode('utf-8') | ||||
| 
 | ||||
|         if rand_max > 0: | ||||
|             msg += os.urandom( | ||||
|                 random.randint(rand_min, rand_max)) | ||||
| 
 | ||||
|         size += len(msg) | ||||
| 
 | ||||
|         msgs.append(msg) | ||||
| 
 | ||||
|         if not silent and i and i % 10_000 == 0: | ||||
|             print(f'{i} generated') | ||||
| 
 | ||||
|     if not silent: | ||||
|         print(f'done, {size:,} bytes in total') | ||||
| 
 | ||||
|     return msgs, size | ||||
|  | @ -20,7 +20,7 @@ Runtime "developer experience" utils and addons to aid our | |||
| and working with/on the actor runtime. | ||||
| 
 | ||||
| """ | ||||
| from .debug import ( | ||||
| from ._debug import ( | ||||
|     maybe_wait_for_debugger as maybe_wait_for_debugger, | ||||
|     acquire_debug_lock as acquire_debug_lock, | ||||
|     breakpoint as breakpoint, | ||||
|  | @ -43,7 +43,6 @@ from .pformat import ( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, move this to a new `.devx._pdbp` mod? | ||||
| def _enable_readline_feats() -> str: | ||||
|     ''' | ||||
|     Handle `readline` when compiled with `libedit` to avoid breaking | ||||
|  | @ -75,4 +74,5 @@ def _enable_readline_feats() -> str: | |||
|         return 'readline' | ||||
| 
 | ||||
| 
 | ||||
| # TODO, move this to a new `.devx._pdbp` mod? | ||||
| _enable_readline_feats() | ||||
|  |  | |||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -20,18 +20,13 @@ as it pertains to improving the grok-ability of our runtime! | |||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     _GeneratorContextManager, | ||||
|     _AsyncGeneratorContextManager, | ||||
| ) | ||||
| from functools import partial | ||||
| import inspect | ||||
| import textwrap | ||||
| from types import ( | ||||
|     FrameType, | ||||
|     FunctionType, | ||||
|     MethodType, | ||||
|     CodeType, | ||||
|     # CodeType, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|  | @ -39,9 +34,6 @@ from typing import ( | |||
|     Type, | ||||
| ) | ||||
| 
 | ||||
| import pdbp | ||||
| from tractor.log import get_logger | ||||
| import trio | ||||
| from tractor.msg import ( | ||||
|     pretty_struct, | ||||
|     NamespacePath, | ||||
|  | @ -49,8 +41,6 @@ from tractor.msg import ( | |||
| import wrapt | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| # TODO: yeah, i don't love this and we should prolly just | ||||
| # write a decorator that actually keeps a stupid ref to the func | ||||
| # obj.. | ||||
|  | @ -311,70 +301,3 @@ def api_frame( | |||
| #     error_set: set[BaseException], | ||||
| # ) -> TracebackType: | ||||
| #     ... | ||||
| 
 | ||||
| 
 | ||||
def hide_runtime_frames() -> dict[FunctionType, CodeType]:
    '''
    Hide call-stack frames for various std-lib and `trio`-API primitives
    such that the tracebacks presented from our runtime are as minimized
    as possible, particularly from inside a `PdbREPL`.

    Returns a mapping from each patched function to its ORIGINAL
    (pre-`pdbp.hideframe()`) code object so callers can revert the
    patching later if needed.

    '''
    # XXX HACKZONE XXX
    # Suppress the exit frames of nurseries/cancel-scopes (and the
    # stdlib's (a)sync ctx-mngr equivalents) so the `pdbp` REPL does
    # not land on runtime internals when first engaged from inside
    # a `trio.open_nursery()` scope.
    #
    # TODO: originally got this working with `@pdbp.hideframe`
    # around the `wrapper()` def embedded inside trio's
    # `_ki_protection_decorator()` in `trio/_core/_ki.py`.
    #
    # -[ ] upstream a patch to `trio` core? maybe linked to the
    #    long-outstanding https://github.com/python-trio/trio/issues/1155
    #   |_ funny that `._run.py` hides frames throughout but not on
    #      the exit funcs below where it matters..
    # -[ ] propose making `._core._run`'s `__tracebackhide__` values
    #    configurable via a `RunVar` for when scheduler frames are
    #    actually desired?
    # -[ ] maybe dig into the core `pdb` issue of why the extra
    #    frame is shown at all?
    #
    to_hide: list[FunctionType] = [
        trio._core._run.NurseryManager.__aexit__,
        trio._core._run.CancelScope.__exit__,
        _GeneratorContextManager.__exit__,
        _AsyncGeneratorContextManager.__aexit__,
        _AsyncGeneratorContextManager.__aenter__,
        trio.Event.wait,
    ]
    func_list_str: str = textwrap.indent(
        "\n".join(f.__qualname__ for f in to_hide),
        prefix=' |_ ',
    )
    log.devx(
        'Hiding the following runtime frames by default:\n'
        f'{func_list_str}\n'
    )

    # stash each pre-patch code-obj (then patch) so the hiding can
    # be undone later if desired.
    orig_codes: dict[FunctionType, CodeType] = {}
    for fn in to_hide:
        orig_codes[fn] = fn.__code__
        pdbp.hideframe(fn)

    return orig_codes
|  |  | |||
|  | @ -35,7 +35,6 @@ from signal import ( | |||
|     signal, | ||||
|     getsignal, | ||||
|     SIGUSR1, | ||||
|     SIGINT, | ||||
| ) | ||||
| # import traceback | ||||
| from types import ModuleType | ||||
|  | @ -49,7 +48,6 @@ from tractor import ( | |||
|     _state, | ||||
|     log as logmod, | ||||
| ) | ||||
| from tractor.devx import debug | ||||
| 
 | ||||
| log = logmod.get_logger(__name__) | ||||
| 
 | ||||
|  | @ -78,45 +76,22 @@ def dump_task_tree() -> None: | |||
|     ) | ||||
|     actor: Actor = _state.current_actor() | ||||
|     thr: Thread = current_thread() | ||||
|     current_sigint_handler: Callable = getsignal(SIGINT) | ||||
|     if ( | ||||
|         current_sigint_handler | ||||
|         is not | ||||
|         debug.DebugStatus._trio_handler | ||||
|     ): | ||||
|         sigint_handler_report: str = ( | ||||
|             'The default `trio` SIGINT handler was replaced?!' | ||||
|         ) | ||||
|     else: | ||||
|         sigint_handler_report: str = ( | ||||
|             'The default `trio` SIGINT handler is in use?!' | ||||
|         ) | ||||
| 
 | ||||
|     # sclang symbology | ||||
|     # |_<object> | ||||
|     # |_(Task/Thread/Process/Actor | ||||
|     # |_{Supervisor/Scope | ||||
|     # |_[Storage/Memory/IPC-Stream/Data-Struct | ||||
| 
 | ||||
|     log.devx( | ||||
|         f'Dumping `stackscope` tree for actor\n' | ||||
|         f'(>: {actor.uid!r}\n' | ||||
|         f' |_{mp.current_process()}\n' | ||||
|         f'   |_{thr}\n' | ||||
|         f'     |_{actor}\n' | ||||
|         f'\n' | ||||
|         f'{sigint_handler_report}\n' | ||||
|         f'signal.getsignal(SIGINT) -> {current_sigint_handler!r}\n' | ||||
|         # f'\n' | ||||
|         f'{actor.uid}:\n' | ||||
|         f'|_{mp.current_process()}\n' | ||||
|         f'  |_{thr}\n' | ||||
|         f'    |_{actor}\n\n' | ||||
| 
 | ||||
|         # start-of-trace-tree delimiter (mostly for testing) | ||||
|         # f'------ {actor.uid!r} ------\n' | ||||
|         f'\n' | ||||
|         f'------ start-of-{actor.uid!r} ------\n' | ||||
|         f'|\n' | ||||
|         f'{tree_str}' | ||||
|         '------ - ------\n' | ||||
|         '\n' | ||||
|         + | ||||
|         f'{tree_str}\n' | ||||
|         + | ||||
|         # end-of-trace-tree delimiter (mostly for testing) | ||||
|         f'|\n' | ||||
|         f'|_____ end-of-{actor.uid!r} ______\n' | ||||
|         f'\n' | ||||
|         f'------ {actor.uid!r} ------\n' | ||||
|     ) | ||||
|     # TODO: can remove this right? | ||||
|     # -[ ] was original code from author | ||||
|  | @ -148,11 +123,11 @@ def dump_tree_on_sig( | |||
| ) -> None: | ||||
|     global _tree_dumped, _handler_lock | ||||
|     with _handler_lock: | ||||
|         # if _tree_dumped: | ||||
|         #     log.warning( | ||||
|         #         'Already dumped for this actor...??' | ||||
|         #     ) | ||||
|         #     return | ||||
|         if _tree_dumped: | ||||
|             log.warning( | ||||
|                 'Already dumped for this actor...??' | ||||
|             ) | ||||
|             return | ||||
| 
 | ||||
|         _tree_dumped = True | ||||
| 
 | ||||
|  | @ -186,9 +161,9 @@ def dump_tree_on_sig( | |||
|             ) | ||||
|             raise | ||||
| 
 | ||||
|         # log.devx( | ||||
|         #     'Supposedly we dumped just fine..?' | ||||
|         # ) | ||||
|         log.devx( | ||||
|             'Supposedly we dumped just fine..?' | ||||
|         ) | ||||
| 
 | ||||
|     if not relay_to_subs: | ||||
|         return | ||||
|  | @ -227,19 +202,18 @@ def enable_stack_on_sig( | |||
|     (https://www.gnu.org/software/bash/manual/bash.html#Command-Substitution) | ||||
|     you could use: | ||||
| 
 | ||||
|     >> kill -SIGUSR1 $(pgrep -f <part-of-cmd: str>) | ||||
|     >> kill -SIGUSR1 $(pgrep -f '<cmd>') | ||||
| 
 | ||||
|     OR without a sub-shell, | ||||
|     Or with with `xonsh` (which has diff capture-from-subproc syntax) | ||||
| 
 | ||||
|     >> pkill --signal SIGUSR1 -f <part-of-cmd: str> | ||||
|     >> kill -SIGUSR1 @$(pgrep -f '<cmd>') | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         import stackscope | ||||
|     except ImportError: | ||||
|         log.warning( | ||||
|             'The `stackscope` lib is not installed!\n' | ||||
|             '`Ignoring enable_stack_on_sig() call!\n' | ||||
|             '`stackscope` not installed for use in debug mode!' | ||||
|         ) | ||||
|         return None | ||||
| 
 | ||||
|  | @ -256,8 +230,8 @@ def enable_stack_on_sig( | |||
|         dump_tree_on_sig, | ||||
|     ) | ||||
|     log.devx( | ||||
|         f'Enabling trace-trees on `SIGUSR1` ' | ||||
|         f'since `stackscope` is installed @ \n' | ||||
|         'Enabling trace-trees on `SIGUSR1` ' | ||||
|         'since `stackscope` is installed @ \n' | ||||
|         f'{stackscope!r}\n\n' | ||||
|         f'With `SIGUSR1` handler\n' | ||||
|         f'|_{dump_tree_on_sig}\n' | ||||
|  |  | |||
|  | @ -1,100 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or | ||||
| # modify it under the terms of the GNU Affero General Public License | ||||
| # as published by the Free Software Foundation, either version 3 of | ||||
| # the License, or (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, but | ||||
| # WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU | ||||
| # Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public | ||||
| # License along with this program.  If not, see | ||||
| # <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Multi-actor debugging for da peeps! | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from tractor.log import get_logger | ||||
| from ._repl import ( | ||||
|     PdbREPL as PdbREPL, | ||||
|     mk_pdb as mk_pdb, | ||||
|     TractorConfig as TractorConfig, | ||||
| ) | ||||
| from ._tty_lock import ( | ||||
|     DebugStatus as DebugStatus, | ||||
|     DebugStateError as DebugStateError, | ||||
| ) | ||||
| from ._trace import ( | ||||
|     Lock as Lock, | ||||
|     _pause_msg as _pause_msg, | ||||
|     _repl_fail_msg as _repl_fail_msg, | ||||
|     _set_trace as _set_trace, | ||||
|     _sync_pause_from_builtin as _sync_pause_from_builtin, | ||||
|     breakpoint as breakpoint, | ||||
|     maybe_init_greenback as maybe_init_greenback, | ||||
|     maybe_import_greenback as maybe_import_greenback, | ||||
|     pause as pause, | ||||
|     pause_from_sync as pause_from_sync, | ||||
| ) | ||||
| from ._post_mortem import ( | ||||
|     BoxedMaybeException as BoxedMaybeException, | ||||
|     maybe_open_crash_handler as maybe_open_crash_handler, | ||||
|     open_crash_handler as open_crash_handler, | ||||
|     post_mortem as post_mortem, | ||||
|     _crash_msg as _crash_msg, | ||||
|     _maybe_enter_pm as _maybe_enter_pm, | ||||
| ) | ||||
| from ._sync import ( | ||||
|     maybe_wait_for_debugger as maybe_wait_for_debugger, | ||||
|     acquire_debug_lock as acquire_debug_lock, | ||||
| ) | ||||
| from ._sigint import ( | ||||
|     sigint_shield as sigint_shield, | ||||
|     _ctlc_ignore_header as _ctlc_ignore_header | ||||
| ) | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| # ---------------- | ||||
| # XXX PKG TODO XXX | ||||
| # ---------------- | ||||
| # refine the internal impl and APIs! | ||||
| # | ||||
| # -[ ] rework `._pause()` and it's branch-cases for root vs. | ||||
| #     subactor: | ||||
| #  -[ ] `._pause_from_root()` + `_pause_from_subactor()`? | ||||
| #  -[ ]  do the de-factor based on bg-thread usage in | ||||
| #    `.pause_from_sync()` & `_pause_from_bg_root_thread()`. | ||||
| #  -[ ] drop `debug_func == None` case which is confusing af.. | ||||
| #  -[ ]  factor out `_enter_repl_sync()` into a util func for calling | ||||
| #    the `_set_trace()` / `_post_mortem()` APIs? | ||||
| # | ||||
| # -[ ] figure out if we need `acquire_debug_lock()` and/or re-implement | ||||
| #    it as part of the `.pause_from_sync()` rework per above? | ||||
| # | ||||
| # -[ ] pair the `._pause_from_subactor()` impl with a "debug nursery" | ||||
| #   that's dynamically allocated inside the `._rpc` task thus | ||||
| #   avoiding the `._service_n.start()` usage for the IPC request? | ||||
| #  -[ ] see the TODO inside `._rpc._errors_relayed_via_ipc()` | ||||
| # | ||||
| # -[ ] impl a `open_debug_request()` which encaps all | ||||
| #   `request_root_stdio_lock()` task scheduling deats | ||||
| #   + `DebugStatus` state mgmt; which should prolly be re-branded as | ||||
| #   a `DebugRequest` type anyway AND with suppoort for bg-thread | ||||
| #   (from root actor) usage? | ||||
| # | ||||
| # -[ ] handle the `xonsh` case for bg-root-threads in the SIGINT | ||||
| #     handler! | ||||
| #   -[ ] do we need to do the same for subactors? | ||||
| #   -[ ] make the failing tests finally pass XD | ||||
| # | ||||
| # -[ ] simplify `maybe_wait_for_debugger()` to be a root-task only | ||||
| #     API? | ||||
| #   -[ ] currently it's implemented as that so might as well make it | ||||
| #     formal? | ||||
|  | @ -1,412 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or | ||||
| # modify it under the terms of the GNU Affero General Public License | ||||
| # as published by the Free Software Foundation, either version 3 of | ||||
| # the License, or (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, but | ||||
| # WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU | ||||
| # Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public | ||||
| # License along with this program.  If not, see | ||||
| # <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Post-mortem debugging APIs and surrounding machinery for both | ||||
| sync and async contexts. | ||||
| 
 | ||||
| Generally we maintain the same semantics as `pdb.post_mortem()` but | ||||
| with actor-tree-wide sync/cooperation around any (sub)actor's use of | ||||
| the root's TTY. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import bdb | ||||
| from contextlib import ( | ||||
|     AbstractContextManager, | ||||
|     contextmanager as cm, | ||||
|     nullcontext, | ||||
| ) | ||||
| from functools import ( | ||||
|     partial, | ||||
| ) | ||||
| import inspect | ||||
| import sys | ||||
| import traceback | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     Sequence, | ||||
|     Type, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from types import ( | ||||
|     TracebackType, | ||||
|     FrameType, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import Struct | ||||
| import trio | ||||
| from tractor._exceptions import ( | ||||
|     NoRuntime, | ||||
| ) | ||||
| from tractor import _state | ||||
| from tractor._state import ( | ||||
|     current_actor, | ||||
|     debug_mode, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| from tractor.trionics import ( | ||||
|     is_multi_cancelled, | ||||
| ) | ||||
| from ._trace import ( | ||||
|     _pause, | ||||
| ) | ||||
| from ._tty_lock import ( | ||||
|     DebugStatus, | ||||
| ) | ||||
| from ._repl import ( | ||||
|     PdbREPL, | ||||
|     mk_pdb, | ||||
|     TractorConfig as TractorConfig, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from trio.lowlevel import Task | ||||
|     from tractor._runtime import ( | ||||
|         Actor, | ||||
|     ) | ||||
| 
 | ||||
| _crash_msg: str = ( | ||||
|     'Opening a pdb REPL in crashed actor' | ||||
| ) | ||||
| 
 | ||||
| log = get_logger(__package__) | ||||
| 
 | ||||
| 
 | ||||
class BoxedMaybeException(Struct):
    '''
    Container for a maybe-captured exception, populated after an
    `open_crash_handler()` scope exits so callers can introspect
    any crash post-hoc.

    '''
    value: BaseException|None = None

    # handler can suppress crashes dynamically
    raise_on_exit: bool|Sequence[Type[BaseException]] = True

    def pformat(self) -> str:
        '''
        Repr the boxed `.value` error in more-than-string
        repr form.

        '''
        boxed = self.value
        if not boxed:
            # nothing captured (yet)
            return f'<{type(self).__name__}( .value=None )>'

        return (
            f'<{type(boxed).__name__}(\n'
            f' |_.value = {boxed}\n'
            f')>\n'
        )

    __repr__ = pformat
| 
 | ||||
| 
 | ||||
def _post_mortem(
    repl: PdbREPL,  # normally passed by `_pause()`

    # XXX all `partial`-ed in by `post_mortem()` below!
    tb: TracebackType,
    api_frame: FrameType,

    # NOTE(review): `shield` appears unused in this body — confirm
    # whether it's consumed upstream via `partial()` plumbing.
    shield: bool = False,
    hide_tb: bool = True,

    # maybe pre/post REPL entry
    repl_fixture: (
        AbstractContextManager[bool]
        |None
    ) = None,

    boxed_maybe_exc: BoxedMaybeException|None = None,

) -> None:
    '''
    Enter the ``pdbpp`` post mortem entrypoint using our custom
    debugger instance.

    Any `repl_fixture` ctx-mngr is entered before the REPL session
    and may veto entry entirely (when it yields a falsy value); in
    that case we return immediately and only run the release logic.

    ALWAYS releases this actor's `DebugStatus` REPL-request state on
    exit (via the `finally` below) so no other local task nor the
    root actor remains blocked on the TTY lock.

    '''
    __tracebackhide__: bool = hide_tb

    # maybe enter any user fixture
    enter_repl: bool = DebugStatus.maybe_enter_repl_fixture(
        repl=repl,
        repl_fixture=repl_fixture,
        boxed_maybe_exc=boxed_maybe_exc,
    )
    try:
        if not enter_repl:
            # XXX, trigger `.release()` below immediately!
            return
        try:
            actor: Actor = current_actor()
            actor_repr: str = str(actor.uid)
            # ^TODO, instead a nice runtime-info + maddr + uid?
            # -[ ] impl a `Actor.__repr()__`??
            #  |_ <task>:<thread> @ <actor>

        except NoRuntime:
            # no actor runtime to report from
            actor_repr: str = '<no-actor-runtime?>'

        try:
            task_repr: Task = trio.lowlevel.current_task()
        except RuntimeError:
            # not inside a `trio` run (eg. sync/bg-thread usage)
            task_repr: str = '<unknown-Task>'

        # TODO: print the actor supervision tree up to the root
        # here! Bo
        log.pdb(
            f'{_crash_msg}\n'
            f'x>(\n'
            f' |_ {task_repr} @ {actor_repr}\n'

        )

        # XXX NOTE(s) on `pdbp.xpm()` version..
        #
        # - seems to lose the up-stack tb-info?
        # - currently we're (only) replacing this from `pdbp.xpm()`
        #   to add the `end=''` to the print XD
        #
        print(traceback.format_exc(), end='')
        caller_frame: FrameType = api_frame.f_back

        # NOTE, see the impl details of these in the lib to
        # understand usage:
        # - `pdbp.post_mortem()`
        # - `pdbp.xps()`
        # - `bdb.interaction()`
        repl.reset()
        repl.interaction(
            frame=caller_frame,
            # frame=None,
            traceback=tb,
        )
    finally:
        # XXX NOTE XXX: this is abs required to avoid hangs!
        #
        # Since we presume the post-mortem was engaged due to
        # a task-ending error, we MUST release the local REPL request
        # so that no other local task nor the root remains blocked!
        DebugStatus.release()
| 
 | ||||
| 
 | ||||
async def post_mortem(
    *,
    tb: TracebackType|None = None,
    api_frame: FrameType|None = None,
    hide_tb: bool = False,

    # TODO: support shield here just like in `pause()`?
    # shield: bool = False,

    **_pause_kwargs,

) -> None:
    '''
    Our builtin async equivalent of `pdb.post_mortem()` which can be
    used inside exception handlers.

    It's also used for the crash handler when `debug_mode == True` ;)

    '''
    __tracebackhide__: bool = hide_tb

    # default to the in-flight exception's tb when not provided
    if tb is None:
        tb: TracebackType = sys.exc_info()[2]

    # TODO: do upward stack scan for highest @api_frame and
    # use its parent frame as the expected user-app code
    # interact point.
    if api_frame is None:
        api_frame: FrameType = inspect.currentframe()

    # TODO, move to submod `._pausing` or ._api? _trace
    pm_func = partial(
        _post_mortem,
        api_frame=api_frame,
        tb=tb,
    )
    await _pause(
        debug_func=pm_func,
        hide_tb=hide_tb,
        **_pause_kwargs
    )
| 
 | ||||
| 
 | ||||
async def _maybe_enter_pm(
    err: BaseException,
    *,
    tb: TracebackType|None = None,
    api_frame: FrameType|None = None,
    hide_tb: bool = True,

    # only enter debugger REPL when returns `True`
    debug_filter: Callable[
        [BaseException|BaseExceptionGroup],
        bool,
    ] = lambda err: not is_multi_cancelled(err),
    **_pause_kws,
):
    '''
    Conditionally engage the post-mortem REPL for `err`, returning
    `True` when a REPL session was entered and `False` otherwise.

    '''
    # guard: only when the runtime's debug-mode is enabled
    if not debug_mode():
        return False

    # guard: don't re-enter the repl recursively after the user
    # issued the `quit` command from a prior session.
    if isinstance(err, bdb.BdbQuit):
        return False

    # guard: skip errors the caller's filter rejects; by default
    # this masks out (multi-)cancellations which are the likely
    # result of runtime-wide teardown, where there's a race between
    # the parent killing comms and the child trying to contact said
    # parent to acquire the tty lock.
    if not debug_filter(err):
        return False

    await post_mortem(
        api_frame=api_frame or inspect.currentframe(),
        tb=tb or sys.exc_info()[2],
        **_pause_kws,
    )
    return True
| 
 | ||||
| 
 | ||||
| # TODO: better naming and what additionals? | ||||
| # - [ ] optional runtime plugging? | ||||
| # - [ ] detection for sync vs. async code? | ||||
| # - [ ] specialized REPL entry when in distributed mode? | ||||
| # -[x] hide tb by def | ||||
| # - [x] allow ignoring kbi Bo | ||||
# TODO: better naming and what additionals?
# - [ ] optional runtime plugging?
# - [ ] detection for sync vs. async code?
# - [ ] specialized REPL entry when in distributed mode?
# -[x] hide tb by def
# - [x] allow ignoring kbi Bo
@cm
def open_crash_handler(
    # NOTE(review): mutable-default sets; benign since never mutated
    # in this body, but worth confirming and maybe freezing upstream.
    catch: set[BaseException] = {
        BaseException,
    },
    ignore: set[BaseException] = {
        KeyboardInterrupt,
        trio.Cancelled,
    },
    hide_tb: bool = True,

    repl_fixture: (
        AbstractContextManager[bool]  # pre/post REPL entry
        |None
    ) = None,
    raise_on_exit: bool|Sequence[Type[BaseException]] = True,
):
    '''
    Generic "post mortem" crash handler using `pdbp` REPL debugger.

    We expose this as a CLI framework addon to both `click` and
    `typer` users so they can quickly wrap cmd endpoints which get
    automatically wrapped to use the runtime's `debug_mode: bool`
    AND `pdbp.pm()` around any code that is PRE-runtime entry
    - any sync code which runs BEFORE the main call to
      `trio.run()`.

    Yields a `BoxedMaybeException` whose `.value` is set to any
    exception caught from the body; exceptions whose exact type is
    in `ignore` (or which are purely-nested cancellations) skip the
    REPL entry. Whether the caught error is re-raised on exit is
    governed by `raise_on_exit` (a bool or a sequence of exception
    types to re-raise).

    '''
    __tracebackhide__: bool = hide_tb

    # TODO, yield a `outcome.Error`-like boxed type?
    # -[~] use `outcome.Value/Error` X-> frozen!
    # -[x] write our own..?
    # -[ ] consider just wtv is used by `pytest.raises()`?
    #
    boxed_maybe_exc = BoxedMaybeException(
        raise_on_exit=raise_on_exit,
    )
    err: BaseException
    try:
        yield boxed_maybe_exc
    except tuple(catch) as err:
        boxed_maybe_exc.value = err
        # only enter the REPL when the error type isn't explicitly
        # ignored and isn't a (nested) cancellation.
        if (
            type(err) not in ignore
            and
            not is_multi_cancelled(
                err,
                ignore_nested=ignore
            )
        ):
            try:
                # use our re-impl-ed version of `pdbp.xpm()`
                _post_mortem(
                    repl=mk_pdb(),
                    tb=sys.exc_info()[2],
                    api_frame=inspect.currentframe().f_back,
                    hide_tb=hide_tb,

                    repl_fixture=repl_fixture,
                    boxed_maybe_exc=boxed_maybe_exc,
                )
            except bdb.BdbQuit:
                # user quit the REPL; surface the original error
                # with full tb visibility.
                __tracebackhide__: bool = False
                raise err

        # re-raise policy: raise when `raise_on_exit is True`, or
        # when it's a non-empty sequence containing this error's
        # exact type AND the handler didn't dynamically change
        # `boxed_maybe_exc.raise_on_exit`.
        #
        # NOTE(review): due to `or`/`and` precedence the trailing
        # `boxed_maybe_exc.raise_on_exit == raise_on_exit` check is
        # skipped when `raise_on_exit is True`, so dynamic
        # suppression via the boxed obj only works for the
        # sequence-input case — confirm this is intended.
        if (
            raise_on_exit is True
            or (
                raise_on_exit is not False
                and (
                    set(raise_on_exit)
                    and
                    type(err) in raise_on_exit
                )
            )
            and
            boxed_maybe_exc.raise_on_exit == raise_on_exit
        ):
            raise err
| 
 | ||||
| 
 | ||||
@cm
def maybe_open_crash_handler(
    pdb: bool|None = None,
    hide_tb: bool = True,

    **kwargs,
):
    '''
    Same as `open_crash_handler()` but with bool input flag
    to allow conditional handling.

    Normally this is used with CLI endpoints such that if the --pdb
    flag is passed the pdb REPL is engaged on any crashes B)

    '''
    __tracebackhide__: bool = hide_tb

    # fall back to the runtime's debug-mode setting when the caller
    # doesn't explicitly opt in/out.
    use_pdb: bool = (
        _state.is_debug_mode()
        if pdb is None
        else pdb
    )

    if use_pdb:
        ctx = open_crash_handler(
            hide_tb=hide_tb,
            **kwargs,
        )
    else:
        # no-op scope which still delivers a (empty) boxed-exc
        ctx = nullcontext(
            enter_result=BoxedMaybeException()
        )

    with ctx as boxed_maybe_exc:
        yield boxed_maybe_exc
|  | @ -1,207 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or | ||||
| # modify it under the terms of the GNU Affero General Public License | ||||
| # as published by the Free Software Foundation, either version 3 of | ||||
| # the License, or (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, but | ||||
| # WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU | ||||
| # Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public | ||||
| # License along with this program.  If not, see | ||||
| # <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| `pdbp.Pdb` extensions/customization and other delegate usage. | ||||
| 
 | ||||
| ''' | ||||
| from functools import ( | ||||
|     cached_property, | ||||
| ) | ||||
| import os | ||||
| 
 | ||||
| import pdbp | ||||
| from tractor._state import ( | ||||
|     is_root_process, | ||||
| ) | ||||
| 
 | ||||
| from ._tty_lock import ( | ||||
|     Lock, | ||||
|     DebugStatus, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
class TractorConfig(pdbp.DefaultConfig):
    '''
    Custom `pdbp` config which tries to use the best tradeoff
    between pretty and minimal.

    '''
    # presumably enables pygments-based source highlighting —
    # see `pdbp.DefaultConfig` for confirmation.
    use_pygments: bool = True
    # don't auto-enter "sticky" (persistent src listing) mode
    sticky_by_default: bool = False
    # allow the REPL to surface frames marked hidden (eg. via
    # `__tracebackhide__`) when explicitly requested — TODO confirm
    # exact `pdbp` semantics.
    enable_hidden_frames: bool = True

    # much thanks @mdmintz for the hot tip!
    # fixes line spacing issue when resizing terminal B)
    truncate_long_lines: bool = False

    # ------ - ------
    # our own custom config vars mostly
    # for syncing with the actor tree's singleton
    # TTY `Lock`.
| 
 | ||||
| 
 | ||||
class PdbREPL(pdbp.Pdb):
    '''
    Add teardown hooks and local state describing any
    ongoing TTY `Lock` request dialog.

    The relevant `bdb`/`pdb` "session end" methods
    (`.set_continue()`/`.set_quit()`) are overridden to guarantee
    that the actor-tree's root TTY `Lock` and this actor's
    `DebugStatus` REPL-request state are released when the user
    exits the REPL; otherwise other tasks/actors waiting on the
    stdio lock can hang forever.

    '''
    # override the pdbp config with our coolio one
    # NOTE: this is only loaded when no `~/.pdbrc` exists
    # so we should prolly pass it into the .__init__() instead?
    # i dunno, see the `DefaultFactory` and `pdb.Pdb` impls.
    DefaultConfig = TractorConfig

    status = DebugStatus

    # NOTE: `.preloop()` actually hooks but there's no way to detect
    # whether an error was caught.. which is why we just always call
    # `DebugStatus.release` inside `_post_mortem()`.

    # TODO: cleaner re-wrapping of all this?
    # -[ ] figure out how to disallow recursive .set_trace() entry
    #     since that'll cause deadlock for us.
    # -[ ] maybe a `@cm` to call `super().<same_meth_name>()`?
    # -[ ] look at hooking into the `pp` hook specially with our
    #     own set of pretty-printers?
    #
    def _release_repl_state(self) -> None:
        '''
        Release the root process' TTY `Lock` (when held locally) and
        then ALWAYS clear this actor's `DebugStatus` REPL request.

        NOTE: for subactors the stdio lock is released via the
        allocated RPC locker task, so only the root process releases
        manually here; and only from the main `trio` thread to avoid
        cross-thread lock misuse.

        '''
        if (
            is_root_process()
            and
            Lock._debug_lock.locked()
            and
            DebugStatus.is_main_trio_thread()
        ):
            # Lock.release(raise_on_thread=False)
            Lock.release()

        # XXX AFTER `Lock.release()` for root local repl usage
        DebugStatus.release()

    def set_continue(self):
        # release lock + request state even if the super call raises
        try:
            super().set_continue()
        finally:
            self._release_repl_state()

    def set_quit(self):
        # release lock + request state even if the super call raises
        try:
            super().set_quit()
        finally:
            self._release_repl_state()

    # XXX NOTE: we only override this because the stdlib `pdb`
    # insists on (re)setting the SIGINT handler; we manage SIGINT
    # ourselves in the runtime.
    def _cmdloop(self):
        self.cmdloop()

    @cached_property
    def shname(self) -> str | None:
        '''
        Attempt to return the login shell name with a special check for
        the infamous `xonsh` since it seems to have some issues much
        different from std shells when it comes to flushing the prompt?

        '''
        # SUPER HACKY and only really works if `xonsh` is not used
        # before spawning further sub-shells..
        shpath = os.getenv('SHELL', None)

        if shpath:
            if (
                os.getenv('XONSH_LOGIN', default=False)
                or 'xonsh' in shpath
            ):
                return 'xonsh'

            return os.path.basename(shpath)

        return None
| 
 | ||||
| 
 | ||||
def mk_pdb() -> PdbREPL:
    '''
    Factory for a fresh `PdbREPL`: the multi-process safe
    `pdbp.Pdb`-variant built on the magic of `tractor`'s SC-safe
    IPC.

    B)

    Multi-process safe debugging is accomplished by:

    - mutexing access to the root process' std-streams (& thus parent
      process TTY) via an IPC managed `Lock` singleton per
      actor-process tree.

    - temporarily overriding any subactor's SIGINT handler to shield
      during live REPL sessions in sub-actors such that cancellation
      is never (mistakenly) triggered by a ctrl-c and instead only by
      explicit runtime API requests or after the
      `pdb.Pdb.interaction()` call has returned.

    Additionally the instance is configured to be `trio`
    "compatible" from a SIGINT handling perspective; we mask out
    the default `pdb` handler and instead apply `trio`s default
    which mostly addresses all issues described in:

     - https://github.com/python-trio/trio/issues/1155

    Always prefer the instance returned here over the default
    `pdb[p].set_trace()` whenever using a `pdb` REPL inside
    a `trio` based runtime.

    '''
    repl = PdbREPL()

    # XXX: These are the important flags mentioned in
    # https://github.com/python-trio/trio/issues/1155
    # which resolve the traceback spews to console.
    repl.allow_kbdint = True
    repl.nosigint = True
    return repl
|  | @ -1,333 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or | ||||
| # modify it under the terms of the GNU Affero General Public License | ||||
| # as published by the Free Software Foundation, either version 3 of | ||||
| # the License, or (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, but | ||||
| # WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU | ||||
| # Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public | ||||
| # License along with this program.  If not, see | ||||
| # <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| A custom SIGINT handler which mainly shields actor (task) | ||||
| cancellation during REPL interaction. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| import trio | ||||
| from tractor.log import get_logger | ||||
| from tractor._state import ( | ||||
|     current_actor, | ||||
|     is_root_process, | ||||
| ) | ||||
| from ._repl import ( | ||||
|     PdbREPL, | ||||
| ) | ||||
| from ._tty_lock import ( | ||||
|     any_connected_locker_child, | ||||
|     DebugStatus, | ||||
|     Lock, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor.ipc import ( | ||||
|         Channel, | ||||
|     ) | ||||
|     from tractor._runtime import ( | ||||
|         Actor, | ||||
|     ) | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _ctlc_ignore_header: str = ( | ||||
|     'Ignoring SIGINT while debug REPL in use' | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
def sigint_shield(
    signum: int,
    frame: 'frame',  # type: ignore # noqa
    *args,

) -> None:
    '''
    Specialized, debugger-aware SIGINT handler.

    In children we always ignore/shield for SIGINT to avoid
    deadlocks since cancellation should always be managed by the
    supervising parent actor. The root actor-process is always
    cancelled on ctrl-c.

    '''
    __tracebackhide__: bool = True
    actor: Actor = current_actor()

    def do_cancel():
        # If we haven't tried to cancel the runtime then do that instead
        # of raising a KBI (which may non-gracefully destroy
        # a ``trio.run()``).
        if not actor._cancel_called:
            actor.cancel_soon()

        # If the runtime is already cancelled it likely means the user
        # hit ctrl-c again because teardown didn't fully take place in
        # which case we do the "hard" raising of a local KBI.
        else:
            raise KeyboardInterrupt

    # only set in the actor actually running the REPL
    repl: PdbREPL|None = DebugStatus.repl

    # TODO: maybe we should flatten out all these cases using
    # a match/case?
    #
    # root actor branch that reports whether or not a child
    # has locked debugger.
    if is_root_process():
        # log.warning(
        log.devx(
            'Handling SIGINT in root actor\n'
            f'{Lock.repr()}'
            f'{DebugStatus.repr()}\n'
        )
        # try to see if the supposed (sub)actor in debug still
        # has an active connection to *this* actor, and if not
        # it's likely they aren't using the TTY lock / debugger
        # and we should propagate SIGINT normally.
        any_connected: bool = any_connected_locker_child()

        # accumulate a diagnostic msg; remains non-None only when
        # we are NOT in one of the two "ideal" lock-holder cases
        # handled below.
        problem = (
            f'root {actor.uid} handling SIGINT\n'
            f'any_connected: {any_connected}\n\n'

            f'{Lock.repr()}\n'
        )

        if (
            (ctx := Lock.ctx_in_debug)
            and
            (uid_in_debug := ctx.chan.uid) # "someone" is (ostensibly) using debug `Lock`
        ):
            name_in_debug: str = uid_in_debug[0]
            assert not repl
            # if not repl:  # but it's NOT us, the root actor.
            # sanity: since no repl ref is set, we def shouldn't
            # be the lock owner!
            assert name_in_debug != 'root'

            # IDEAL CASE: child has REPL as expected
            if any_connected:  # there are subactors we can contact
                # XXX: only if there is an existing connection to the
                # (sub-)actor in debug do we ignore SIGINT in this
                # parent! Otherwise we may hang waiting for an actor
                # which has already terminated to unlock.
                #
                # NOTE: don't emit this with `.pdb()` level in
                # root without a higher level.
                log.runtime(
                    _ctlc_ignore_header
                    +
                    f' by child '
                    f'{uid_in_debug}\n'
                )
                problem = None

            else:
                problem += (
                    '\n'
                    f'A `pdb` REPL is SUPPOSEDLY in use by child {uid_in_debug}\n'
                    f'BUT, no child actors are IPC contactable!?!?\n'
                )

        # IDEAL CASE: root has REPL as expected
        else:
            # root actor still has this SIGINT handler active without
            # an actor using the `Lock` (a bug state) ??
            # => so immediately cancel any stale lock cs and revert
            # the handler!
            if not DebugStatus.repl:
                # TODO: WHEN should we revert back to ``trio``
                # handler if this one is stale?
                # -[ ] maybe after a counts work of ctl-c mashes?
                # -[ ] use a state var like `stale_handler: bool`?
                problem += (
                    'No subactor is using a `pdb` REPL according `Lock.ctx_in_debug`?\n'
                    'BUT, the root should be using it, WHY this handler ??\n\n'
                    'So either..\n'
                    '- some root-thread is using it but has no `.repl` set?, OR\n'
                    '- something else weird is going on outside the runtime!?\n'
                )
            else:
                # NOTE: since we emit this msg on ctl-c, we should
                # also always re-print the prompt the tail block!
                log.pdb(
                    _ctlc_ignore_header
                    +
                    f' by root actor..\n'
                    f'{DebugStatus.repl_task}\n'
                    f' |_{repl}\n'
                )
                problem = None

        # XXX if one is set it means we ARE NOT operating an ideal
        # case where a child subactor or us (the root) has the
        # lock without any other detected problems.
        if problem:

            # detect, report and maybe clear a stale lock request
            # cancel scope.
            lock_cs: trio.CancelScope = Lock.get_locking_task_cs()
            maybe_stale_lock_cs: bool = (
                lock_cs is not None
                and not lock_cs.cancel_called
            )
            if maybe_stale_lock_cs:
                problem += (
                    '\n'
                    'Stale `Lock.ctx_in_debug._scope: CancelScope` detected?\n'
                    f'{Lock.ctx_in_debug}\n\n'

                    '-> Calling ctx._scope.cancel()!\n'
                )
                lock_cs.cancel()

            # TODO: wen do we actually want/need this, see above.
            # DebugStatus.unshield_sigint()
            log.warning(problem)

    # child actor that has locked the debugger
    elif not is_root_process():
        log.debug(
            f'Subactor {actor.uid} handling SIGINT\n\n'
            f'{Lock.repr()}\n'
        )

        # NOTE(review): when the parent chan is gone we unshield but
        # still fall through to the logging cases below — presumably
        # intentional best-effort reporting; confirm.
        rent_chan: Channel = actor._parent_chan
        if (
            rent_chan is None
            or
            not rent_chan.connected()
        ):
            log.warning(
                'This sub-actor thinks it is debugging '
                'but it has no connection to its parent ??\n'
                f'{actor.uid}\n'
                'Allowing SIGINT propagation..'
            )
            DebugStatus.unshield_sigint()

        repl_task: str|None = DebugStatus.repl_task
        req_task: str|None = DebugStatus.req_task
        if (
            repl_task
            and
            repl
        ):
            log.pdb(
                _ctlc_ignore_header
                +
                f' by local task\n\n'
                f'{repl_task}\n'
                f' |_{repl}\n'
            )
        elif req_task:
            log.debug(
                _ctlc_ignore_header
                +
                f' by local request-task and either,\n'
                f'- someone else is already REPL-in and has the `Lock`, or\n'
                f'- some other local task already is replin?\n\n'
                f'{req_task}\n'
            )

        # TODO can we remove this now?
        # -[ ] does this path ever get hit any more?
        else:
            msg: str = (
                'SIGINT shield handler still active BUT, \n\n'
            )
            if repl_task is None:
                msg += (
                    '- No local task claims to be in debug?\n'
                )

            if repl is None:
                msg += (
                    '- No local REPL is currently active?\n'
                )

            if req_task is None:
                msg += (
                    '- No debug request task is active?\n'
                )

            log.warning(
                msg
                +
                'Reverting handler to `trio` default!\n'
            )
            DebugStatus.unshield_sigint()

            # XXX ensure that the reverted-to-handler actually is
            # able to rx what should have been **this** KBI ;)
            do_cancel()

        # TODO: how to handle the case of an intermediary-child actor
        # that **is not** marked in debug mode? See outstanding issue:
        # https://github.com/goodboy/tractor/issues/320
        # elif debug_mode():

    # maybe redraw/print last REPL output to console since
    # we want to alert the user that more input is expected since
    # nothing has been done due to ignoring sigint.
    if (
        DebugStatus.repl  # only when current actor has a REPL engaged
    ):
        flush_status: str = (
            'Flushing stdout to ensure new prompt line!\n'
        )

        # XXX: yah, mega hack, but how else do we catch this madness XD
        if (
            repl.shname == 'xonsh'
        ):
            flush_status += (
                '-> ALSO re-flushing due to `xonsh`..\n'
            )
            repl.stdout.write(repl.prompt)

        # log.warning(
        log.devx(
            flush_status
        )
        repl.stdout.flush()

        # TODO: better console UX to match the current "mode":
        # -[ ] for example if in sticky mode where if there is output
        #   detected as written to the tty we redraw this part underneath
        #   and erase the past draw of this same bit above?
        # repl.sticky = True
        # repl._print_if_sticky()

        # also see these links for an approach from `ptk`:
        # https://github.com/goodboy/tractor/issues/130#issuecomment-663752040
        # https://github.com/prompt-toolkit/python-prompt-toolkit/blob/c2c6af8a0308f9e5d7c0e28cb8a02963fe0ce07a/prompt_toolkit/patch_stdout.py
    else:
        log.devx(
        # log.warning(
            'Not flushing stdout since not needed?\n'
            f'|_{repl}\n'
        )

    # XXX only for tracing this handler
    log.devx('exiting SIGINT')
|  | @ -1,220 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or | ||||
| # modify it under the terms of the GNU Affero General Public License | ||||
| # as published by the Free Software Foundation, either version 3 of | ||||
| # the License, or (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, but | ||||
| # WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU | ||||
| # Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public | ||||
| # License along with this program.  If not, see | ||||
| # <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Debugger synchronization APIs to ensure orderly access and | ||||
| non-TTY-clobbering graceful teardown. | ||||
| 
 | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from functools import ( | ||||
|     partial, | ||||
| ) | ||||
| from typing import ( | ||||
|     AsyncGenerator, | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| import trio | ||||
| from trio.lowlevel import ( | ||||
|     current_task, | ||||
|     Task, | ||||
| ) | ||||
| from tractor._context import Context | ||||
| from tractor._state import ( | ||||
|     current_actor, | ||||
|     debug_mode, | ||||
|     is_root_process, | ||||
| ) | ||||
| from ._repl import ( | ||||
|     TractorConfig as TractorConfig, | ||||
| ) | ||||
| from ._tty_lock import ( | ||||
|     Lock, | ||||
|     request_root_stdio_lock, | ||||
|     any_connected_locker_child, | ||||
| ) | ||||
| from ._sigint import ( | ||||
|     sigint_shield as sigint_shield, | ||||
|     _ctlc_ignore_header as _ctlc_ignore_header | ||||
| ) | ||||
| 
 | ||||
| log = get_logger(__package__) | ||||
| 
 | ||||
| 
 | ||||
async def maybe_wait_for_debugger(
    poll_steps: int = 2,
    poll_delay: float = 0.1,
    child_in_debug: bool = False,

    header_msg: str = '',
    _ll: str = 'devx',

) -> bool:  # was locked and we polled?
    '''
    From the root actor, poll until the process-tree's stdio/TTY
    `Lock` is released so that (error) teardown never clobbers an
    active `pdb` REPL session.

    Parameters:
      poll_steps: max number of poll iterations before giving up.
      poll_delay: sleep (in secs) between poll iterations.
      child_in_debug: force polling even when `debug_mode()` is off.
      header_msg: optional prefix prepended to emitted log content.
      _ll: name of the `log` level-method used for poll reporting.

    Returns `True` when the lock was (possibly) held and we waited
    on its release, `False` when no debugger sync was needed.

    '''
    if (
        not debug_mode()
        and
        not child_in_debug
    ):
        return False

    logmeth: Callable = getattr(log, _ll)

    msg: str = header_msg
    if (
        is_root_process()
    ):
        # If we error in the root but the debugger is
        # engaged we don't want to prematurely kill (and
        # thus clobber access to) the local tty since it
        # will make the pdb repl unusable.
        # Instead try to wait for pdb to be released before
        # tearing down.
        ctx_in_debug: Context|None = Lock.ctx_in_debug
        in_debug: tuple[str, str]|None = (
            ctx_in_debug.chan.uid
            if ctx_in_debug
            else None
        )
        if in_debug == current_actor().uid:
            log.debug(
                msg
                +
                'Root already owns the TTY LOCK'
            )
            return True

        elif in_debug:
            msg += (
                f'Debug `Lock` in use by subactor\n|\n|_{in_debug}\n'
            )
            # TODO: could this make things more deterministic?
            # wait to see if a sub-actor task will be
            # scheduled and grab the tty lock on the next
            # tick?
            # XXX => but it doesn't seem to work..
            # await trio.testing.wait_all_tasks_blocked(cushion=0)
        else:
            logmeth(
                msg
                +
                'Root immediately acquired debug TTY LOCK'
            )
            return False

        for istep in range(poll_steps):
            # a lock request is still pending completion while some
            # subactor (ostensibly) holds the debug `Lock`; shield
            # and wait for the request handler to signal done.
            if (
                Lock.req_handler_finished is not None
                and not Lock.req_handler_finished.is_set()
                and in_debug is not None
            ):
                # caller_frame_info: str = pformat_caller_frame()
                logmeth(
                    msg
                    +
                    '\n^^ Root is waiting on tty lock release.. ^^\n'
                    # f'{caller_frame_info}\n'
                )

                # locker child disappeared? => cancel its stale
                # lock-request scope so the wait below can complete.
                if not any_connected_locker_child():
                    Lock.get_locking_task_cs().cancel()

                with trio.CancelScope(shield=True):
                    await Lock.req_handler_finished.wait()

                log.devx(
                    f'Subactor released debug lock\n'
                    f'|_{in_debug}\n'
                )
                break

            # is no subactor locking debugger currently?
            if (
                in_debug is None
                and (
                    Lock.req_handler_finished is None
                    or Lock.req_handler_finished.is_set()
                )
            ):
                logmeth(
                    msg
                    +
                    'Root acquired tty lock!'
                )
                break

            else:
                # NOTE: fixed log-msg typo ('delya' -> 'delay').
                logmeth(
                    'Root polling for debug:\n'
                    f'poll step: {istep}\n'
                    f'poll delay: {poll_delay}\n\n'
                    f'{Lock.repr()}\n'
                )
                with trio.CancelScope(shield=True):
                    await trio.sleep(poll_delay)
                    continue

        return True

    # else:
    #     # TODO: non-root call for #320?
    #     this_uid: tuple[str, str] = current_actor().uid
    #     async with acquire_debug_lock(
    #         subactor_uid=this_uid,
    #     ):
    #         pass
    return False
| 
 | ||||
| 
 | ||||
@acm
async def acquire_debug_lock(
    subactor_uid: tuple[str, str],
) -> AsyncGenerator[
    trio.CancelScope|None,
    tuple,
]:
    '''
    Request the TTY `Lock` from the root actor for the duration of
    the `async with` body, cancelling the request-ctx on exit.

    This helper is for actors which don't actually need to acquire
    the debugger themselves but want to wait until the lock is free
    in the process-tree root such that they don't clobber an
    ongoing pdb REPL session in some peer or child!

    '''
    # no-op when debug mode is disabled entirely.
    if not debug_mode():
        yield None
        return

    this_task: Task = current_task()
    async with trio.open_nursery() as tn:
        req_ctx: Context = await tn.start(
            partial(
                request_root_stdio_lock,
                actor_uid=subactor_uid,
                task_uid=(this_task.name, id(this_task)),
            )
        )
        yield req_ctx
        req_ctx.cancel()
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -15,13 +15,10 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Pretty formatters for use throughout our internals. | ||||
| 
 | ||||
| Handy for logging and exception message content but also for `repr()` | ||||
| in REPL(s). | ||||
| Pretty formatters for use throughout the code base. | ||||
| Mostly handy for logging and exception message content. | ||||
| 
 | ||||
| ''' | ||||
| import sys | ||||
| import textwrap | ||||
| import traceback | ||||
| 
 | ||||
|  | @ -118,85 +115,6 @@ def pformat_boxed_tb( | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pformat_exc( | ||||
|     exc: Exception, | ||||
|     header: str = '', | ||||
|     message: str = '', | ||||
|     body: str = '', | ||||
|     with_type_header: bool = True, | ||||
| ) -> str: | ||||
| 
 | ||||
|     # XXX when the currently raised exception is this instance, | ||||
|     # we do not ever use the "type header" style repr. | ||||
|     is_being_raised: bool = False | ||||
|     if ( | ||||
|         (curr_exc := sys.exception()) | ||||
|         and | ||||
|         curr_exc is exc | ||||
|     ): | ||||
|         is_being_raised: bool = True | ||||
| 
 | ||||
|     with_type_header: bool = ( | ||||
|         with_type_header | ||||
|         and | ||||
|         not is_being_raised | ||||
|     ) | ||||
| 
 | ||||
|     # <RemoteActorError( .. )> style | ||||
|     if ( | ||||
|         with_type_header | ||||
|         and | ||||
|         not header | ||||
|     ): | ||||
|         header: str = f'<{type(exc).__name__}(' | ||||
| 
 | ||||
|     message: str = ( | ||||
|         message | ||||
|         or | ||||
|         exc.message | ||||
|     ) | ||||
|     if message: | ||||
|         # split off the first line so, if needed, it isn't | ||||
|         # indented the same like the "boxed content" which | ||||
|         # since there is no `.tb_str` is just the `.message`. | ||||
|         lines: list[str] = message.splitlines() | ||||
|         first: str = lines[0] | ||||
|         message: str = message.removeprefix(first) | ||||
| 
 | ||||
|         # with a type-style header we, | ||||
|         # - have no special message "first line" extraction/handling | ||||
|         # - place the message a space in from the header: | ||||
|         #  `MsgTypeError( <message> ..` | ||||
|         #                 ^-here | ||||
|         # - indent the `.message` inside the type body. | ||||
|         if with_type_header: | ||||
|             first = f' {first} )>' | ||||
| 
 | ||||
|         message: str = textwrap.indent( | ||||
|             message, | ||||
|             prefix=' '*2, | ||||
|         ) | ||||
|         message: str = first + message | ||||
| 
 | ||||
|     tail: str = '' | ||||
|     if ( | ||||
|         with_type_header | ||||
|         and | ||||
|         not message | ||||
|     ): | ||||
|         tail: str = '>' | ||||
| 
 | ||||
|     return ( | ||||
|         header | ||||
|         + | ||||
|         message | ||||
|         + | ||||
|         f'{body}' | ||||
|         + | ||||
|         tail | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pformat_caller_frame( | ||||
|     stack_limit: int = 1, | ||||
|     box_tb: bool = True, | ||||
|  | @ -226,8 +144,8 @@ def pformat_cs( | |||
|     field_prefix: str = ' |_', | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Pretty format info about a `trio.CancelScope` including most of | ||||
|     its public state and `._cancel_status`. | ||||
|     Pretty format info about a `trio.CancelScope` including most | ||||
|     of its public state and `._cancel_status`. | ||||
| 
 | ||||
|     The output can be modified to show a "var name" for the | ||||
|     instance as a field prefix, just a simple str before each | ||||
|  | @ -249,279 +167,3 @@ def pformat_cs( | |||
|         + | ||||
|         fields | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def nest_from_op( | ||||
|     input_op: str,  # TODO, Literal of all op-"symbols" from below? | ||||
|     text: str, | ||||
|     prefix_op: bool = True,  # unset is to suffix the first line | ||||
|     # optionally suffix `text`, by def on a newline | ||||
|     op_suffix='\n', | ||||
| 
 | ||||
|     nest_prefix: str = '|_', | ||||
|     nest_indent: int|None = None, | ||||
|     # XXX indent `next_prefix` "to-the-right-of" `input_op` | ||||
|     # by this count of whitespaces (' '). | ||||
|     rm_from_first_ln: str|None = None, | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Depth-increment the input (presumably hierarchy/supervision) | ||||
|     input "tree string" below the provided `input_op` execution | ||||
|     operator, so injecting a `"\n|_{input_op}\n"`and indenting the | ||||
|     `tree_str` to nest content aligned with the ops last char. | ||||
| 
 | ||||
|     ''' | ||||
|     # `sclang` "structurred-concurrency-language": an ascii-encoded | ||||
|     # symbolic alphabet to describe concurrent systems. | ||||
|     # | ||||
|     # ?TODO? aa more fomal idea for a syntax to the state of | ||||
|     # concurrent systems as a "3-domain" (execution, scope, storage) | ||||
|     # model and using a minimal ascii/utf-8 operator-set. | ||||
|     # | ||||
|     # try not to take any of this seriously yet XD | ||||
|     # | ||||
|     # > is a "play operator" indicating (CPU bound) | ||||
|     #   exec/work/ops required at the "lowest level computing" | ||||
|     # | ||||
|     # execution primititves (tasks, threads, actors..) denote their | ||||
|     # lifetime with '(' and ')' since parentheses normally are used | ||||
|     # in many langs to denote function calls. | ||||
|     # | ||||
|     # starting = ( | ||||
|     # >(  opening/starting; beginning of the thread-of-exec (toe?) | ||||
|     # (>  opened/started,  (finished spawning toe) | ||||
|     # |_<Task: blah blah..>  repr of toe, in py these look like <objs> | ||||
|     # | ||||
|     # >) closing/exiting/stopping, | ||||
|     # )> closed/exited/stopped, | ||||
|     # |_<Task: blah blah..> | ||||
|     #   [OR <), )< ?? ] | ||||
|     # | ||||
|     # ending = ) | ||||
|     # >c) cancelling to close/exit | ||||
|     # c)> cancelled (caused close), OR? | ||||
|     #  |_<Actor: ..> | ||||
|     #   OR maybe "<c)" which better indicates the cancel being | ||||
|     #   "delivered/returned" / returned" to LHS? | ||||
|     # | ||||
|     # >x)  erroring to eventuall exit | ||||
|     # x)>  errored and terminated | ||||
|     #  |_<Actor: ...> | ||||
|     # | ||||
|     # scopes: supers/nurseries, IPC-ctxs, sessions, perms, etc. | ||||
|     # >{  opening | ||||
|     # {>  opened | ||||
|     # }>  closed | ||||
|     # >}  closing | ||||
|     # | ||||
|     # storage: like queues, shm-buffers, files, etc.. | ||||
|     # >[  opening | ||||
|     # [>  opened | ||||
|     #  |_<FileObj: ..> | ||||
|     # | ||||
|     # >]  closing | ||||
|     # ]>  closed | ||||
| 
 | ||||
|     # IPC ops: channels, transports, msging | ||||
|     # =>  req msg | ||||
|     # <=  resp msg | ||||
|     # <=> 2-way streaming (of msgs) | ||||
|     # <-  recv 1 msg | ||||
|     # ->  send 1 msg | ||||
|     # | ||||
|     # TODO: still not sure on R/L-HS approach..? | ||||
|     # =>(  send-req to exec start (task, actor, thread..) | ||||
|     # (<=  recv-req to ^ | ||||
|     # | ||||
|     # (<=  recv-req ^ | ||||
|     # <=(  recv-resp opened remote exec primitive | ||||
|     # <=)  recv-resp closed | ||||
|     # | ||||
|     # )<=c req to stop due to cancel | ||||
|     # c=>) req to stop due to cancel | ||||
|     # | ||||
|     # =>{  recv-req to open | ||||
|     # <={  send-status that it closed | ||||
|     # | ||||
|     if ( | ||||
|         nest_prefix | ||||
|         and | ||||
|         nest_indent != 0 | ||||
|     ): | ||||
|         if nest_indent is not None: | ||||
|             nest_prefix: str = textwrap.indent( | ||||
|                 nest_prefix, | ||||
|                 prefix=nest_indent*' ', | ||||
|             ) | ||||
|         nest_indent: int = len(nest_prefix) | ||||
| 
 | ||||
|     # determine body-text indent either by, | ||||
|     # - using wtv explicit indent value is provided, | ||||
|     # OR | ||||
|     # - auto-calcing the indent to embed `text` under | ||||
|     #   the `nest_prefix` if provided, **IFF** `nest_indent=None`. | ||||
|     tree_str_indent: int = 0 | ||||
|     if nest_indent not in {0, None}: | ||||
|         tree_str_indent = nest_indent | ||||
|     elif ( | ||||
|         nest_prefix | ||||
|         and | ||||
|         nest_indent != 0 | ||||
|     ): | ||||
|         tree_str_indent = len(nest_prefix) | ||||
| 
 | ||||
|     indented_tree_str: str = text | ||||
|     if tree_str_indent: | ||||
|         indented_tree_str: str = textwrap.indent( | ||||
|             text, | ||||
|             prefix=' '*tree_str_indent, | ||||
|         ) | ||||
| 
 | ||||
|     # inject any provided nesting-prefix chars | ||||
|     # into the head of the first line. | ||||
|     if nest_prefix: | ||||
|         indented_tree_str: str = ( | ||||
|             f'{nest_prefix}{indented_tree_str[tree_str_indent:]}' | ||||
|         ) | ||||
| 
 | ||||
|     if ( | ||||
|         not prefix_op | ||||
|         or | ||||
|         rm_from_first_ln | ||||
|     ): | ||||
|         tree_lns: list[str] = indented_tree_str.splitlines() | ||||
|         first: str = tree_lns[0] | ||||
|         if rm_from_first_ln: | ||||
|             first = first.strip().replace( | ||||
|                 rm_from_first_ln, | ||||
|                 '', | ||||
|             ) | ||||
|         indented_tree_str: str = '\n'.join(tree_lns[1:]) | ||||
| 
 | ||||
|         if prefix_op: | ||||
|             indented_tree_str = ( | ||||
|                 f'{first}\n' | ||||
|                 f'{indented_tree_str}' | ||||
|             ) | ||||
| 
 | ||||
|     if prefix_op: | ||||
|         return ( | ||||
|             f'{input_op}{op_suffix}' | ||||
|             f'{indented_tree_str}' | ||||
|         ) | ||||
|     else: | ||||
|         return ( | ||||
|             f'{first}{input_op}{op_suffix}' | ||||
|             f'{indented_tree_str}' | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| # ------ modden.repr ------ | ||||
# XXX originally taken verbatim from `modden.repr`
| ''' | ||||
More "multi-line" representation than the stdlib's `pprint` equivs.
| 
 | ||||
| ''' | ||||
| from inspect import ( | ||||
|     FrameInfo, | ||||
|     stack, | ||||
| ) | ||||
| import pprint | ||||
| import reprlib | ||||
| from typing import ( | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def mk_repr( | ||||
|     **repr_kws, | ||||
| ) -> Callable[[str], str]: | ||||
|     ''' | ||||
|     Allocate and deliver a `repr.Repr` instance with provided input | ||||
|     settings using the std-lib's `reprlib` mod, | ||||
|      * https://docs.python.org/3/library/reprlib.html | ||||
| 
 | ||||
|     ------ Ex. ------ | ||||
|     An up to 6-layer-nested `dict` as multi-line: | ||||
|     - https://stackoverflow.com/a/79102479 | ||||
|     - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel | ||||
| 
 | ||||
|     ''' | ||||
|     def_kws: dict[str, int] = dict( | ||||
|         indent=3,  # indent used for repr of recursive objects | ||||
|         maxlevel=616,  # recursion levels | ||||
|         maxdict=616,  # max items shown for `dict` | ||||
|         maxlist=616,  # max items shown for `dict` | ||||
|         maxstring=616,  # match editor line-len limit | ||||
|         maxtuple=616,  # match editor line-len limit | ||||
|         maxother=616,  # match editor line-len limit | ||||
|     ) | ||||
|     def_kws |= repr_kws | ||||
|     reprr = reprlib.Repr(**def_kws) | ||||
|     return reprr.repr | ||||
| 
 | ||||
| 
 | ||||
def ppfmt(
    obj: object,
    do_print: bool = False,
) -> str:
    '''
    Render `obj` as a (possibly multi-line) repr-string using our
    `mk_repr()` defaults.

    When `do_print=True` also emit the formatted string to stdout.

    Always returns the formatted `str` so the declared `-> str`
    contract holds; previously the `do_print` branch returned
    `pprint.pp()`'s result which is `None`, AND it re-fed the
    already-formatted string through `pprint` causing it to be
    double-`repr()`-ed (quoted with escaped newlines).

    '''
    pprepr: Callable = mk_repr()
    repr_str: str = pprepr(obj)

    if do_print:
        # print the already-formatted string directly; passing it
        # back through `pprint.pp()` would `repr()` it a 2nd time.
        print(repr_str)

    return repr_str
| 
 | ||||
| 
 | ||||
# back-compat alias matching the familiar `pprint.pformat` name.
pformat = ppfmt
| 
 | ||||
| 
 | ||||
| def pfmt_frame_info(fi: FrameInfo) -> str: | ||||
|     ''' | ||||
|     Like a std `inspect.FrameInfo.__repr__()` but multi-line.. | ||||
| 
 | ||||
|     ''' | ||||
|     return ( | ||||
|         'FrameInfo(\n' | ||||
|         '  frame={!r},\n' | ||||
|         '  filename={!r},\n' | ||||
|         '  lineno={!r},\n' | ||||
|         '  function={!r},\n' | ||||
|         '  code_context={!r},\n' | ||||
|         '  index={!r},\n' | ||||
|         '  positions={!r})' | ||||
|         ).format( | ||||
|             fi.frame, | ||||
|             fi.filename, | ||||
|             fi.lineno, | ||||
|             fi.function, | ||||
|             fi.code_context, | ||||
|             fi.index, | ||||
|             fi.positions | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
def pfmt_callstack(frames: int = 1) -> str:
    '''
    Pretty-format the caller's call-stack as one line per
    `inspect.FrameInfo.frame` repr, each layer indented one space
    deeper than the layer above it.

    '''
    # slice off our own frame (index 0) and keep the requested
    # number of caller layers.
    caller_frames: list[FrameInfo] = stack()[1:1 + frames]
    return ''.join(
        textwrap.indent(
            f'{frame_info.frame!r}\n',
            prefix=depth * ' ',
        )
        for depth, frame_info in enumerate(caller_frames)
    )
|  |  | |||
|  | @ -45,8 +45,6 @@ __all__ = ['pub'] | |||
| log = get_logger('messaging') | ||||
| 
 | ||||
| 
 | ||||
| # TODO! this needs to reworked to use the modern | ||||
| # `Context`/`MsgStream` APIs!! | ||||
| async def fan_out_to_ctxs( | ||||
|     pub_async_gen_func: typing.Callable,  # it's an async gen ... gd mypy | ||||
|     topics2ctxs: dict[str, list], | ||||
|  |  | |||
|  | @ -1,24 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| A modular IPC layer supporting the power of cross-process SC! | ||||
| 
 | ||||
| ''' | ||||
| from ._chan import ( | ||||
|     _connect_chan as _connect_chan, | ||||
|     Channel as Channel | ||||
| ) | ||||
|  | @ -1,503 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Inter-process comms abstractions | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from collections.abc import AsyncGenerator | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| import platform | ||||
| from pprint import pformat | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| import warnings | ||||
| 
 | ||||
| import trio | ||||
| 
 | ||||
| from ._types import ( | ||||
|     transport_from_addr, | ||||
|     transport_from_stream, | ||||
| ) | ||||
| from tractor._addr import ( | ||||
|     is_wrapped_addr, | ||||
|     wrap_address, | ||||
|     Address, | ||||
|     UnwrappedAddress, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| from tractor._exceptions import ( | ||||
|     MsgTypeError, | ||||
|     pack_from_raise, | ||||
|     TransportClosed, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     Aid, | ||||
|     MsgCodec, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._transport import MsgTransport | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _is_windows = platform.system() == 'Windows' | ||||
| 
 | ||||
| 
 | ||||
| class Channel: | ||||
|     ''' | ||||
|     An inter-process channel for communication between (remote) actors. | ||||
| 
 | ||||
|     Wraps a ``MsgStream``: transport + encoding IPC connection. | ||||
| 
 | ||||
|     Currently we only support ``trio.SocketStream`` for transport | ||||
|     (aka TCP) and the ``msgpack`` interchange format via the ``msgspec`` | ||||
|     codec libary. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
| 
 | ||||
|         self, | ||||
|         transport: MsgTransport|None = None, | ||||
|         # TODO: optional reconnection support? | ||||
|         # auto_reconnect: bool = False, | ||||
|         # on_reconnect: typing.Callable[..., typing.Awaitable] = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         # self._recon_seq = on_reconnect | ||||
|         # self._autorecon = auto_reconnect | ||||
| 
 | ||||
|         # Either created in ``.connect()`` or passed in by | ||||
|         # user in ``.from_stream()``. | ||||
|         self._transport: MsgTransport|None = transport | ||||
| 
 | ||||
|         # set after handshake - always info from peer end | ||||
|         self.aid: Aid|None = None | ||||
| 
 | ||||
|         self._aiter_msgs = self._iter_msgs() | ||||
|         self._exc: Exception|None = None | ||||
|         # ^XXX! ONLY set if a remote actor sends an `Error`-msg | ||||
|         self._closed: bool = False | ||||
| 
 | ||||
|         # flag set by `Portal.cancel_actor()` indicating remote | ||||
|         # (possibly peer) cancellation of the far end actor runtime. | ||||
|         self._cancel_called: bool = False | ||||
| 
 | ||||
|     @property | ||||
|     def closed(self) -> bool: | ||||
|         ''' | ||||
|         Was `.aclose()` successfully called? | ||||
| 
 | ||||
|         ''' | ||||
|         return self._closed | ||||
| 
 | ||||
|     @property | ||||
|     def cancel_called(self) -> bool: | ||||
|         ''' | ||||
|         Set when `Portal.cancel_actor()` is called on a portal which | ||||
|         wraps this IPC channel. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._cancel_called | ||||
| 
 | ||||
|     @property | ||||
|     def uid(self) -> tuple[str, str]: | ||||
|         ''' | ||||
|         Peer actor's unique id. | ||||
| 
 | ||||
|         ''' | ||||
|         msg: str = ( | ||||
|             f'`{type(self).__name__}.uid` is now deprecated.\n' | ||||
|             'Use the new `.aid: tractor.msg.Aid` (struct) instead ' | ||||
|             'which also provides additional named (optional) fields ' | ||||
|             'beyond just the `.name` and `.uuid`.' | ||||
|         ) | ||||
|         warnings.warn( | ||||
|             msg, | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         peer_aid: Aid = self.aid | ||||
|         return ( | ||||
|             peer_aid.name, | ||||
|             peer_aid.uuid, | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def stream(self) -> trio.abc.Stream | None: | ||||
|         return self._transport.stream if self._transport else None | ||||
| 
 | ||||
|     @property | ||||
|     def msgstream(self) -> MsgTransport: | ||||
|         log.info( | ||||
|             '`Channel.msgstream` is an old name, use `._transport`' | ||||
|         ) | ||||
|         return self._transport | ||||
| 
 | ||||
|     @property | ||||
|     def transport(self) -> MsgTransport: | ||||
|         return self._transport | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_stream( | ||||
|         cls, | ||||
|         stream: trio.abc.Stream, | ||||
|     ) -> Channel: | ||||
|         transport_cls = transport_from_stream(stream) | ||||
|         return Channel( | ||||
|             transport=transport_cls(stream) | ||||
|         ) | ||||
| 
 | ||||
|     @classmethod | ||||
|     async def from_addr( | ||||
|         cls, | ||||
|         addr: UnwrappedAddress, | ||||
|         **kwargs | ||||
|     ) -> Channel: | ||||
| 
 | ||||
|         if not is_wrapped_addr(addr): | ||||
|             addr: Address = wrap_address(addr) | ||||
| 
 | ||||
|         transport_cls = transport_from_addr(addr) | ||||
|         transport = await transport_cls.connect_to( | ||||
|             addr, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         # XXX, for UDS *no!* since we recv the peer-pid and build out | ||||
|         # a new addr.. | ||||
|         # assert transport.raddr == addr | ||||
|         chan = Channel(transport=transport) | ||||
| 
 | ||||
|         # ?TODO, compact this into adapter level-methods? | ||||
|         # -[ ] would avoid extra repr-calcs if level not active? | ||||
|         #   |_ how would the `calc_if_level` look though? func? | ||||
|         if log.at_least_level('runtime'): | ||||
|             from tractor.devx import ( | ||||
|                 pformat as _pformat, | ||||
|             ) | ||||
|             chan_repr: str = _pformat.nest_from_op( | ||||
|                 input_op='[>', | ||||
|                 text=chan.pformat(), | ||||
|                 nest_indent=1, | ||||
|             ) | ||||
|             log.runtime( | ||||
|                 f'Connected channel IPC transport\n' | ||||
|                 f'{chan_repr}' | ||||
|             ) | ||||
|         return chan | ||||
| 
 | ||||
|     @cm | ||||
|     def apply_codec( | ||||
|         self, | ||||
|         codec: MsgCodec, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Temporarily override the underlying IPC msg codec for | ||||
|         dynamic enforcement of messaging schema. | ||||
| 
 | ||||
|         ''' | ||||
|         orig: MsgCodec = self._transport.codec | ||||
|         try: | ||||
|             self._transport.codec = codec | ||||
|             yield | ||||
|         finally: | ||||
|             self._transport.codec = orig | ||||
| 
 | ||||
|     # TODO: do a .src/.dst: str for maddrs? | ||||
|     def pformat( | ||||
|         self, | ||||
|         privates: bool = False, | ||||
|     ) -> str: | ||||
|         if not self._transport: | ||||
|             return '<Channel( with inactive transport? )>' | ||||
| 
 | ||||
|         tpt: MsgTransport = self._transport | ||||
|         tpt_name: str = type(tpt).__name__ | ||||
|         tpt_status: str = ( | ||||
|             'connected' if self.connected() | ||||
|             else 'closed' | ||||
|         ) | ||||
|         repr_str: str = ( | ||||
|             f'<Channel(\n' | ||||
|             f' |_status: {tpt_status!r}\n' | ||||
|         ) + ( | ||||
|             f'   _closed={self._closed}\n' | ||||
|             f'   _cancel_called={self._cancel_called}\n' | ||||
|             if privates else '' | ||||
|         ) + (  # peer-actor (processs) section | ||||
|             f' |_peer: {self.aid.reprol()!r}\n' | ||||
|             if self.aid else ' |_peer: <unknown>\n' | ||||
|         ) + ( | ||||
|             f' |_msgstream: {tpt_name}\n' | ||||
|             f'   maddr: {tpt.maddr!r}\n' | ||||
|             f'   proto: {tpt.laddr.proto_key!r}\n' | ||||
|             f'   layer: {tpt.layer_key!r}\n' | ||||
|             f'   codec: {tpt.codec_key!r}\n' | ||||
|             f'   .laddr={tpt.laddr}\n' | ||||
|             f'   .raddr={tpt.raddr}\n' | ||||
|         ) + ( | ||||
|             f'   ._transport.stream={tpt.stream}\n' | ||||
|             f'   ._transport.drained={tpt.drained}\n' | ||||
|             if privates else '' | ||||
|         ) + ( | ||||
|             f'   _send_lock={tpt._send_lock.statistics()}\n' | ||||
|             if privates else '' | ||||
|         ) + ( | ||||
|             ')>\n' | ||||
|         ) | ||||
|         return repr_str | ||||
| 
 | ||||
|     # NOTE: making this return a value that can be passed to | ||||
|     # `eval()` is entirely **optional** FYI! | ||||
|     # https://docs.python.org/3/library/functions.html#repr | ||||
|     # https://docs.python.org/3/reference/datamodel.html#object.__repr__ | ||||
|     # | ||||
|     # Currently we target **readability** from a (console) | ||||
|     # logging perspective over `eval()`-ability since we do NOT | ||||
|     # target serializing non-struct instances! | ||||
|     # def __repr__(self) -> str: | ||||
|     __str__ = pformat | ||||
|     __repr__ = pformat | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> Address|None: | ||||
|         return self._transport.laddr if self._transport else None | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> Address|None: | ||||
|         return self._transport.raddr if self._transport else None | ||||
| 
 | ||||
|     @property | ||||
|     def maddr(self) -> str: | ||||
|         return self._transport.maddr if self._transport else '<no-tpt>' | ||||
| 
 | ||||
|     # TODO: something like, | ||||
|     # `pdbp.hideframe_on(errors=[MsgTypeError])` | ||||
|     # instead of the `try/except` hack we have rn.. | ||||
|     # seems like a pretty useful thing to have in general | ||||
|     # along with being able to filter certain stack frame(s / sets) | ||||
|     # possibly based on the current log-level? | ||||
|     async def send( | ||||
|         self, | ||||
|         payload: Any, | ||||
| 
 | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a coded msg-blob over the transport. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         try: | ||||
|             log.transport( | ||||
|                 '=> send IPC msg:\n\n' | ||||
|                 f'{pformat(payload)}\n' | ||||
|             ) | ||||
|             # assert self._transport  # but why typing? | ||||
|             await self._transport.send( | ||||
|                 payload, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
|         except ( | ||||
|             BaseException, | ||||
|             MsgTypeError, | ||||
|             TransportClosed, | ||||
|         )as _err: | ||||
|             err = _err  # bind for introspection | ||||
|             match err: | ||||
|                 case MsgTypeError(): | ||||
|                     try: | ||||
|                         assert err.cid | ||||
|                     except KeyError: | ||||
|                         raise err | ||||
|                 case TransportClosed(): | ||||
|                     log.transport( | ||||
|                         f'Transport stream closed due to\n' | ||||
|                         f'{err.repr_src_exc()}\n' | ||||
|                     ) | ||||
| 
 | ||||
|                 case _: | ||||
|                     # never suppress non-tpt sources | ||||
|                     __tracebackhide__: bool = False | ||||
|             raise | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         assert self._transport | ||||
|         return await self._transport.recv() | ||||
| 
 | ||||
|         # TODO: auto-reconnect features like 0mq/nanomsg? | ||||
|         # -[ ] implement it manually with nods to SC prot | ||||
|         #      possibly on multiple transport backends? | ||||
|         #  -> seems like that might be re-inventing scalability | ||||
|         #     prots tho no? | ||||
|         # try: | ||||
|         #     return await self._transport.recv() | ||||
|         # except trio.BrokenResourceError: | ||||
|         #     if self._autorecon: | ||||
|         #         await self._reconnect() | ||||
|         #         return await self.recv() | ||||
|         #     raise | ||||
| 
 | ||||
|     async def aclose(self) -> None: | ||||
| 
 | ||||
|         log.transport( | ||||
|             f'Closing channel to {self.aid} ' | ||||
|             f'{self.laddr} -> {self.raddr}' | ||||
|         ) | ||||
|         assert self._transport | ||||
|         await self._transport.stream.aclose() | ||||
|         self._closed = True | ||||
| 
 | ||||
|     async def __aenter__(self): | ||||
|         await self.connect() | ||||
|         return self | ||||
| 
 | ||||
|     async def __aexit__(self, *args): | ||||
|         await self.aclose(*args) | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._aiter_msgs | ||||
| 
 | ||||
|     # ?TODO? run any reconnection sequence? | ||||
|     # -[ ] prolly should be impl-ed as deco-API? | ||||
|     # | ||||
|     # async def _reconnect(self) -> None: | ||||
|     #     """Handle connection failures by polling until a reconnect can be | ||||
|     #     established. | ||||
|     #     """ | ||||
|     #     down = False | ||||
|     #     while True: | ||||
|     #         try: | ||||
|     #             with trio.move_on_after(3) as cancel_scope: | ||||
|     #                 await self.connect() | ||||
|     #             cancelled = cancel_scope.cancelled_caught | ||||
|     #             if cancelled: | ||||
|     #                 log.transport( | ||||
|     #                     "Reconnect timed out after 3 seconds, retrying...") | ||||
|     #                 continue | ||||
|     #             else: | ||||
|     #                 log.transport("Stream connection re-established!") | ||||
| 
 | ||||
|     #                 # on_recon = self._recon_seq | ||||
|     #                 # if on_recon: | ||||
|     #                 #     await on_recon(self) | ||||
| 
 | ||||
|     #                 break | ||||
|     #         except (OSError, ConnectionRefusedError): | ||||
|     #             if not down: | ||||
|     #                 down = True | ||||
|     #                 log.transport( | ||||
|     #                     f"Connection to {self.raddr} went down, waiting" | ||||
|     #                     " for re-establishment") | ||||
|     #             await trio.sleep(1) | ||||
| 
 | ||||
|     async def _iter_msgs( | ||||
|         self | ||||
|     ) -> AsyncGenerator[Any, None]: | ||||
|         ''' | ||||
|         Yield `MsgType` IPC msgs decoded and deliverd from | ||||
|         an underlying `MsgTransport` protocol. | ||||
| 
 | ||||
|         This is a streaming routine alo implemented as an async-gen | ||||
|         func (same a `MsgTransport._iter_pkts()`) gets allocated by | ||||
|         a `.__call__()` inside `.__init__()` where it is assigned to | ||||
|         the `._aiter_msgs` attr. | ||||
| 
 | ||||
|         ''' | ||||
|         assert self._transport | ||||
|         while True: | ||||
|             try: | ||||
|                 async for msg in self._transport: | ||||
|                     match msg: | ||||
|                         # NOTE: if transport/interchange delivers | ||||
|                         # a type error, we pack it with the far | ||||
|                         # end peer `Actor.uid` and relay the | ||||
|                         # `Error`-msg upward to the `._rpc` stack | ||||
|                         # for normal RAE handling. | ||||
|                         case MsgTypeError(): | ||||
|                             yield pack_from_raise( | ||||
|                                 local_err=msg, | ||||
|                                 cid=msg.cid, | ||||
| 
 | ||||
|                                 # XXX we pack it here bc lower | ||||
|                                 # layers have no notion of an | ||||
|                                 # actor-id ;) | ||||
|                                 src_uid=self.uid, | ||||
|                             ) | ||||
|                         case _: | ||||
|                             yield msg | ||||
| 
 | ||||
|             except trio.BrokenResourceError: | ||||
| 
 | ||||
|                 # if not self._autorecon: | ||||
|                 raise | ||||
| 
 | ||||
|             await self.aclose() | ||||
| 
 | ||||
|             # if self._autorecon:  # attempt reconnect | ||||
|             #     await self._reconnect() | ||||
|             #     continue | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self._transport.connected() if self._transport else False | ||||
| 
 | ||||
|     async def _do_handshake( | ||||
|         self, | ||||
|         aid: Aid, | ||||
| 
 | ||||
|     ) -> Aid: | ||||
|         ''' | ||||
|         Exchange `(name, UUIDs)` identifiers as the first | ||||
|         communication step with any (peer) remote `Actor`. | ||||
| 
 | ||||
|         These are essentially the "mailbox addresses" found in | ||||
|         "actor model" parlance. | ||||
| 
 | ||||
|         ''' | ||||
|         await self.send(aid) | ||||
|         peer_aid: Aid = await self.recv() | ||||
|         log.runtime( | ||||
|             f'Received hanshake with peer\n' | ||||
|             f'<= {peer_aid.reprol(sin_uuid=False)}\n' | ||||
|         ) | ||||
|         # NOTE, we always are referencing the remote peer! | ||||
|         self.aid = peer_aid | ||||
|         return peer_aid | ||||
| 
 | ||||
| 
 | ||||
@acm
async def _connect_chan(
    addr: UnwrappedAddress
) -> typing.AsyncGenerator[Channel, None]:
    '''
    Create and connect a channel with disconnect on context manager
    teardown.

    '''
    channel: Channel = await Channel.from_addr(addr)
    yield channel
    # shield the close so teardown completes even when the
    # enclosing scope was already cancelled.
    with trio.CancelScope(shield=True):
        await channel.aclose()
|  | @ -1,163 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| ''' | ||||
| File-descriptor-sharing on `linux` by "wilhelm_of_bohemia". | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import os | ||||
| import array | ||||
| import socket | ||||
| import tempfile | ||||
| from pathlib import Path | ||||
| from contextlib import ExitStack | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.ipc import RBToken | ||||
| 
 | ||||
| 
 | ||||
| actor_name = 'ringd' | ||||
| 
 | ||||
| 
 | ||||
| _rings: dict[str, dict] = {} | ||||
| 
 | ||||
| 
 | ||||
async def _attach_to_ring(
    ring_name: str
) -> tuple[int, int, int]:
    '''
    Receive the fds for ring `ring_name` from the `ringd` actor over
    a one-shot unix-domain-socket using `SCM_RIGHTS` fd-passing, then
    return the ring's token built from the ctx result.

    '''
    actor = tractor.current_actor()

    # number of fds expected per ring; exact per-fd semantics are
    # defined by `RBToken` — TODO confirm.
    fd_amount = 3
    # unique per-(pid, ring, receiving-actor) socket path in tmpdir.
    sock_path = (
        Path(tempfile.gettempdir())
        /
        f'{os.getpid()}-pass-ring-fds-{ring_name}-to-{actor.name}.sock'
    )
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(sock_path)
    sock.listen(1)

    async with (
        tractor.find_actor(actor_name) as ringd,
        ringd.open_context(
            _pass_fds,
            name=ring_name,
            sock_path=sock_path
        ) as (ctx, _sent)
    ):
        # prepare array to receive FD
        fds = array.array("i", [0] * fd_amount)

        # blocks until `_pass_fds` connects from the ringd side.
        conn, _ = sock.accept()

        # receive FD
        msg, ancdata, flags, addr = conn.recvmsg(
            1024,
            socket.CMSG_LEN(fds.itemsize * fd_amount)
        )

        for (
            cmsg_level,
            cmsg_type,
            cmsg_data,
        ) in ancdata:
            if (
                cmsg_level == socket.SOL_SOCKET
                and
                cmsg_type == socket.SCM_RIGHTS
            ):
                # unpack the kernel-duped fds into our array.
                fds.frombytes(cmsg_data[:fds.itemsize * fd_amount])
                break
            else:
                # NOTE(review): this `else` pairs with the `if`, not
                # the `for` — so a non-SCM_RIGHTS *first* cmsg raises
                # immediately while an *empty* `ancdata` falls through
                # with zeroed fds; a `for/else` was likely intended —
                # confirm.
                raise RuntimeError("Receiver: No FDs received")

        conn.close()
        sock.close()
        sock_path.unlink()

        return RBToken.from_msg(
            await ctx.wait_for_result()
        )
| 
 | ||||
| 
 | ||||
@tractor.context
async def _pass_fds(
    ctx: tractor.Context,
    name: str,
    sock_path: str
) -> RBToken:
    '''
    Ctx endpoint: send the fds of the already-registered ring `name`
    to a peer over the UDS at `sock_path` via `SCM_RIGHTS`.

    '''
    global _rings
    # NOTE(review): `_open_ringbuf()` stores a `{'token', 'stack'}`
    # mapping under `_rings[name]`, so the `.fds` attr access below
    # looks like it'd raise — probably should be
    # `_rings[name]['token']`; confirm.
    token = _rings[name]
    client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    client.connect(sock_path)
    # signal readiness so the receiver side can `.accept()`.
    await ctx.started()
    fds = array.array('i', token.fds)
    # SCM_RIGHTS: the kernel dups the fds into the receiving process.
    client.sendmsg([b'FDs'], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fds)])
    client.close()
    return token
| 
 | ||||
| 
 | ||||
@tractor.context
async def _open_ringbuf(
    ctx: tractor.Context,
    name: str,
    buf_size: int
) -> RBToken:
    '''
    Create (or re-use) the ring registered under `name`, then park
    until the requesting side cancels us; only the creating call
    tears the ring's resources down.

    '''
    global _rings
    created_here = name not in _rings
    if created_here:
        res_stack = ExitStack()
        rb_token = res_stack.enter_context(
            tractor.open_ringbuf(
                name,
                buf_size=buf_size
            )
        )
        _rings[name] = {
            'token': rb_token,
            'stack': res_stack,
        }

    entry = _rings[name]
    await ctx.started()

    try:
        # block until the peer context cancels us
        await trio.sleep_forever()

    except tractor.ContextCancelled:
        # expected teardown path; fall through to cleanup
        ...

    finally:
        # only the creator releases the shm + eventfd resources
        if created_here:
            entry['stack'].close()
| 
 | ||||
| 
 | ||||
async def open_ringbuf(
    name: str,
    buf_size: int
) -> RBToken:
    '''
    Ask the ringd actor to allocate (or re-use) the ring named
    `name`, attach to it locally, and yield the resulting token.

    NOTE(review): this is an async *generator* (it `yield`s) so the
    declared `-> RBToken` annotation is misleading — presumably it
    is wrapped by an `@acm`-style decorator elsewhere; confirm.

    '''
    async with (
        # `actor_name` is the module-level ringd actor name
        tractor.find_actor(actor_name) as ringd,
        ringd.open_context(
            _open_ringbuf,
            name=name,
            buf_size=buf_size
        ) as (rd_ctx, _)
    ):
        yield await _attach_to_ring(name)
        # tear down the ringd-side context once the user block exits
        await rd_ctx.cancel()
|  | @ -1,153 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| ''' | ||||
| Linux specifics, for now we are only exposing EventFD | ||||
| 
 | ||||
| ''' | ||||
| import os | ||||
| import errno | ||||
| 
 | ||||
| import cffi | ||||
| import trio | ||||
| 
 | ||||
# single module-level FFI namespace used for all libc calls below
ffi = cffi.FFI()

# Declare the C functions and types we plan to use.
#    - eventfd: for creating the event file descriptor
#    - write:   for writing to the file descriptor
#    - read:    for reading from the file descriptor
#    - close:   for closing the file descriptor
ffi.cdef(
    '''
    int eventfd(unsigned int initval, int flags);

    ssize_t write(int fd, const void *buf, size_t count);
    ssize_t read(int fd, void *buf, size_t count);

    int close(int fd);
    '''
)


# Open the default dynamic library (essentially 'libc' in most cases)
C = ffi.dlopen(None)


# Constants from <sys/eventfd.h>, if needed.
# (octal values mirror the glibc header definitions)
EFD_SEMAPHORE = 1          # reads decrement counter by 1 instead of zeroing it
EFD_CLOEXEC = 0o2000000    # close-on-exec
EFD_NONBLOCK = 0o4000      # reads fail w/ EAGAIN instead of blocking
| 
 | ||||
| 
 | ||||
def open_eventfd(initval: int = 0, flags: int = 0) -> int:
    '''
    Create a new eventfd(2) object via libc.

    Returns the new file descriptor on success; raises `OSError`
    (carrying the *symbolic* errno name, per this module's
    convention) on failure.

    '''
    ret: int = C.eventfd(initval, flags)
    if ret >= 0:
        return ret
    raise OSError(errno.errorcode[ffi.errno], 'eventfd failed')
| 
 | ||||
| 
 | ||||
def write_eventfd(fd: int, value: int) -> int:
    '''
    Add `value` to the eventfd's 64-bit counter via a raw libc
    `write(2)`.

    Returns the byte count written (always 8 on success).

    '''
    # stage the value as a C uint64_t so write(2) sees its 8 bytes
    val_ptr = ffi.new('uint64_t *', value)
    nwritten = C.write(fd, val_ptr, 8)
    if nwritten < 0:
        raise OSError(errno.errorcode[ffi.errno], 'write to eventfd failed')
    return nwritten
| 
 | ||||
| 
 | ||||
def read_eventfd(fd: int) -> int:
    '''
    Drain the eventfd's 64-bit counter and return its value.

    The kernel resets the counter to 0 on read (unless the fd was
    opened with `EFD_SEMAPHORE`).

    '''
    # 8-byte C buffer to receive the raw uint64_t payload
    raw = ffi.new('char[]', 8)
    nread = C.read(fd, raw, 8)
    if nread < 0:
        raise OSError(errno.errorcode[ffi.errno], 'read from eventfd failed')

    # reassemble the little-endian uint64 into a python int
    return int.from_bytes(
        ffi.unpack(raw, 8),
        byteorder='little',
        signed=False,
    )
| 
 | ||||
| 
 | ||||
def close_eventfd(fd: int) -> int:
    '''
    Close the eventfd via libc `close(2)`.

    Returns `close()`'s result (0) on success; raises `OSError` on
    failure.

    '''
    ret = C.close(fd)
    if ret < 0:
        raise OSError(errno.errorcode[ffi.errno], 'close failed')
    # BUG-FIX: previously fell through returning `None` despite the
    # declared `-> int`; propagate libc's result instead.
    return ret
| 
 | ||||
| 
 | ||||
class EventFD:
    '''
    Use a previously opened eventfd(2), meant to be used in
    sub-actors after root actor opens the eventfds then passes
    them through pass_fds

    '''

    def __init__(
        self,
        fd: int,
        omode: str
    ):
        # raw eventfd file descriptor (e.g. received via SCM_RIGHTS)
        self._fd: int = fd
        # open-mode later handed to `os.fdopen()` ('r'/'w' expected)
        self._omode: str = omode
        # lazily-created file object wrapper, set in `open()`
        self._fobj = None

    @property
    def fd(self) -> int | None:
        return self._fd

    def write(self, value: int) -> int:
        '''
        Synchronously add `value` to the eventfd counter.

        '''
        return write_eventfd(self._fd, value)

    async def read(self) -> int:
        '''
        Read the counter without blocking the event loop by running
        the (blocking) `read(2)` in a worker thread.

        '''
        # abandon_on_cancel: drop the worker thread on trio-cancel
        # rather than waiting for the blocking read to return
        return await trio.to_thread.run_sync(
            read_eventfd, self._fd,
            abandon_on_cancel=True
        )

    def open(self):
        # wrap the raw fd; closing the file object closes the fd
        self._fobj = os.fdopen(self._fd, self._omode)

    def close(self):
        # NOTE(review): if `open()` was never called the raw fd is
        # left open here — confirm callers always use the cm protocol.
        if self._fobj:
            self._fobj.close()

    def __enter__(self):
        self.open()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()
|  | @ -1,75 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| ''' | ||||
| Utils to tame mp non-SC madeness | ||||
| 
 | ||||
| ''' | ||||
| import platform | ||||
| 
 | ||||
| 
 | ||||
def disable_mantracker():
    '''
    Disable all `multiprocessing` "resource tracking" machinery since
    it's an absolute multi-threaded mess of non-SC madness.

    Returns a `SharedMemory`-compatible callable with tracking
    disabled.

    '''
    from multiprocessing.shared_memory import SharedMemory

    # 3.13+ only.. can pass `track=False` to disable
    # all the resource tracker bs.
    # https://docs.python.org/3/library/multiprocessing.shared_memory.html
    #
    # BUG-FIX: the previous check compared *string* tuples
    # (`('3', '9') >= ('3', '13')` is lexicographically True!) which
    # mis-detected e.g. 3.9 as 3.13+; compare as ints instead.
    py_ver: tuple[int, ...] = tuple(
        int(part)
        for part in platform.python_version_tuple()[:2]
    )
    if py_ver >= (3, 13):
        from functools import partial
        return partial(
            SharedMemory,
            track=False,
        )

    # !TODO, once we drop 3.12- we can obvi remove all this!
    else:
        from multiprocessing import (
            resource_tracker as mantracker,
        )

        # Tell the "resource tracker" thing to fuck off.
        class ManTracker(mantracker.ResourceTracker):
            def register(self, name, rtype):
                pass

            def unregister(self, name, rtype):
                pass

            def ensure_running(self):
                pass

        # "know your land and know your prey"
        # https://www.dailymotion.com/video/x6ozzco
        mantracker._resource_tracker = ManTracker()
        mantracker.register = mantracker._resource_tracker.register
        mantracker.ensure_running = mantracker._resource_tracker.ensure_running
        mantracker.unregister = mantracker._resource_tracker.unregister
        mantracker.getfd = mantracker._resource_tracker.getfd

        # use std type verbatim
        shmT = SharedMemory

    return shmT
|  | @ -1,253 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| ''' | ||||
| IPC Reliable RingBuffer implementation | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import contextmanager as cm | ||||
| from multiprocessing.shared_memory import SharedMemory | ||||
| 
 | ||||
| import trio | ||||
| from msgspec import ( | ||||
|     Struct, | ||||
|     to_builtins | ||||
| ) | ||||
| 
 | ||||
| from ._linux import ( | ||||
|     EFD_NONBLOCK, | ||||
|     open_eventfd, | ||||
|     EventFD | ||||
| ) | ||||
| from ._mp_bs import disable_mantracker | ||||
| 
 | ||||
| 
 | ||||
# neuter mp's resource-tracker before any `SharedMemory` use.
# NOTE(review): the return value (a tracking-disabled shm factory on
# py3.13+) is discarded here while the module constructs
# `SharedMemory` directly — on 3.13+ tracking may still be active;
# confirm intent.
disable_mantracker()
| 
 | ||||
| 
 | ||||
class RBToken(Struct, frozen=True):
    '''
    RingBuffer token bundling the necessary info to open the two
    eventfds plus the shared memory segment.

    '''
    shm_name: str       # name of the backing SharedMemory segment
    write_eventfd: int  # fd signaling write byte-counts to the reader
    wrap_eventfd: int   # fd signaling reader wrap-around to the writer
    buf_size: int       # size of the shm buffer in bytes

    def as_msg(self):
        # flatten to plain builtins for IPC transport
        return to_builtins(self)

    @classmethod
    def from_msg(cls, msg: dict) -> RBToken:
        # pass instances through untouched, else rebuild from a dict
        return msg if isinstance(msg, RBToken) else RBToken(**msg)
| 
 | ||||
| 
 | ||||
@cm
def open_ringbuf(
    shm_name: str,
    buf_size: int = 10 * 1024,
    write_efd_flags: int = 0,
    wrap_efd_flags: int = 0
) -> RBToken:
    '''
    Create a new shared-memory segment plus the two sync eventfds and
    yield them bundled as an `RBToken`.

    On exit the creator's shm mapping is closed and the segment
    unlinked.

    '''
    shm = SharedMemory(
        name=shm_name,
        size=buf_size,
        create=True
    )
    try:
        token = RBToken(
            shm_name=shm_name,
            write_eventfd=open_eventfd(flags=write_efd_flags),
            wrap_eventfd=open_eventfd(flags=wrap_efd_flags),
            buf_size=buf_size
        )
        yield token

    finally:
        # BUG-FIX: close our own mapping *before* unlinking, else the
        # creator leaks its mmap/fd for the interpreter's lifetime.
        shm.close()
        shm.unlink()
        # NOTE(review): the two eventfds are intentionally NOT closed
        # here — the sender/receiver sides close them via `EventFD` —
        # confirm no path leaves them dangling.
| 
 | ||||
| 
 | ||||
class RingBuffSender(trio.abc.SendStream):
    '''
    IPC Reliable Ring Buffer sender side implementation

    `eventfd(2)` is used for wrap around sync, and also to signal
    writes to the reader.

    '''
    def __init__(
        self,
        token: RBToken,
        start_ptr: int = 0,
    ):
        # accept either an `RBToken` or its wire (dict) form
        token = RBToken.from_msg(token)
        # attach to (never create) the segment named in the token
        self._shm = SharedMemory(
            name=token.shm_name,
            size=token.buf_size,
            create=False
        )
        # signals each write's byte-count to the reader
        self._write_event = EventFD(token.write_eventfd, 'w')
        # reader signals back here once it has wrapped around
        self._wrap_event = EventFD(token.wrap_eventfd, 'r')
        # current write offset into the shm buffer
        self._ptr = start_ptr

    @property
    def key(self) -> str:
        # shm segment name; doubles as the ring's identity
        return self._shm.name

    @property
    def size(self) -> int:
        return self._shm.size

    @property
    def ptr(self) -> int:
        return self._ptr

    @property
    def write_fd(self) -> int:
        return self._write_event.fd

    @property
    def wrap_fd(self) -> int:
        return self._wrap_event.fd

    async def send_all(self, data: bytes | bytearray | memoryview):
        '''
        Copy `data` into the ring, chunking whenever it exceeds the
        remaining buffer space and blocking on the wrap eventfd until
        the reader catches up.

        '''
        # while data is larger than the remaining buf
        target_ptr = self.ptr + len(data)
        while target_ptr > self.size:
            # write all bytes that fit
            remaining = self.size - self.ptr
            self._shm.buf[self.ptr:] = data[:remaining]
            # signal write and wait for reader wrap around
            self._write_event.write(remaining)
            await self._wrap_event.read()

            # wrap around and trim already written bytes
            self._ptr = 0
            data = data[remaining:]
            target_ptr = self._ptr + len(data)

        # remaining data fits on buffer
        self._shm.buf[self.ptr:target_ptr] = data
        self._write_event.write(len(data))
        self._ptr = target_ptr

    async def wait_send_all_might_not_block(self):
        # not meaningful for an shm-backed stream
        raise NotImplementedError

    async def aclose(self):
        self._write_event.close()
        self._wrap_event.close()
        # detach our mapping; does *not* unlink the segment
        self._shm.close()

    async def __aenter__(self):
        # open the eventfd file objects before first use; async-exit
        # (calling `aclose()`) presumably comes from the inherited
        # `trio.abc.AsyncResource` — confirm against trio docs
        self._write_event.open()
        self._wrap_event.open()
        return self
| 
 | ||||
| 
 | ||||
class RingBuffReceiver(trio.abc.ReceiveStream):
    '''
    IPC Reliable Ring Buffer receiver side implementation

    `eventfd(2)` is used for wrap around sync, and also to signal
    writes to the reader.

    '''
    def __init__(
        self,
        token: RBToken,
        start_ptr: int = 0,
        flags: int = 0
    ):
        # accept either an `RBToken` or its wire (dict) form
        token = RBToken.from_msg(token)
        # attach to (never create) the segment named in the token
        self._shm = SharedMemory(
            name=token.shm_name,
            size=token.buf_size,
            create=False
        )
        # sender signals each write's byte-count here
        self._write_event = EventFD(token.write_eventfd, 'w')
        # we signal wrap-around back to the sender here
        self._wrap_event = EventFD(token.wrap_eventfd, 'r')
        # current read offset into the shm buffer
        self._ptr = start_ptr
        # EFD_* flags the write eventfd was opened with
        self._flags = flags

    @property
    def key(self) -> str:
        return self._shm.name

    @property
    def size(self) -> int:
        return self._shm.size

    @property
    def ptr(self) -> int:
        return self._ptr

    @property
    def write_fd(self) -> int:
        return self._write_event.fd

    @property
    def wrap_fd(self) -> int:
        return self._wrap_event.fd

    async def receive_some(
        self,
        max_bytes: int | None = None,
        nb_timeout: float = 0.1
    ) -> memoryview:
        '''
        Return a zero-copy view of the next written segment,
        signaling wrap-around to the sender when the end of the
        buffer is reached.

        '''
        # if non blocking eventfd enabled, do polling
        # until next write, this allows signal handling
        #
        # BUG-FIX: was `self._flags | EFD_NONBLOCK` which is *always*
        # truthy; a bitwise AND is required to test the flag.
        if self._flags & EFD_NONBLOCK:
            delta = None
            while delta is None:
                try:
                    delta = await self._write_event.read()

                except OSError as e:
                    # NOTE: this module's eventfd helpers raise
                    # `OSError` carrying the *symbolic* errno name.
                    if e.errno == 'EAGAIN':
                        # BUG-FIX: previously spun hot on EAGAIN;
                        # honor the (formerly unused) `nb_timeout`
                        # poll period instead.
                        await trio.sleep(nb_timeout)
                        continue

                    raise e

        else:
            delta = await self._write_event.read()

        # fetch next segment and advance ptr
        next_ptr = self._ptr + delta
        segment = self._shm.buf[self._ptr:next_ptr]
        self._ptr = next_ptr

        if self.ptr == self.size:
            # reached the end, signal wrap around
            self._ptr = 0
            self._wrap_event.write(1)

        return segment

    async def aclose(self):
        self._write_event.close()
        self._wrap_event.close()
        # detach our mapping; does *not* unlink the segment
        self._shm.close()

    async def __aenter__(self):
        self._write_event.open()
        self._wrap_event.open()
        return self
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							Some files were not shown because too many files have changed in this diff Show More
		Loading…
	
		Reference in New Issue